| class SimpleGPT { | |
| constructor() { | |
| this.knowledgeBase = []; | |
| this.logicChain = []; | |
| this.markovChains = {}; | |
| this.letterChains = {}; | |
| this.syntacticChains = {}; | |
| this.sentimentKeywords = ['счастлив', 'грустно', 'зло', 'взволнован', 'скука', 'радость', 'гнев', 'удивление', 'страх']; | |
| this.wordAssociations = {}; | |
| this.semanticNetwork = {}; | |
| this.questionWords = ['как', 'что', 'почему', 'когда', 'где', 'кто', 'чей', 'сколько', 'зачем', 'какой']; | |
| this.contextMemory = []; | |
| this.deepMemory = []; | |
| this.attentionWeights = {}; | |
| this.hierarchicalChains = {}; | |
| this.transformerLayers = []; | |
| this.quantumStates = {}; | |
| this.neuroplasticWeights = {}; | |
| this.holographicMemory = {}; | |
| this.fractalPatterns = {}; | |
| this.temporalNets = {}; | |
| this.chaosFields = {}; | |
| this.entropicMatrices = {}; | |
| this.adaptiveFilters = {}; | |
| this.cognitiveMaps = {}; | |
| this.synapticPaths = {}; | |
| this.neuralOscillators = {}; | |
| this.memeticFields = {}; | |
| this.geneticOperators = {}; | |
| this.hyperdimensionalVectors = {}; | |
| this.topologicalManifolds = {}; | |
| this.morphogeneticFields = {}; | |
| this.quantumEntanglement = {}; | |
| this.holographicProjection = {}; | |
| this.fractalEncoding = {}; | |
| this.temporalFolding = {}; | |
| this.chaosSynchronization = {}; | |
| this.entropicBalancing = {}; | |
| this.adaptiveResonance = {}; | |
| this.cognitiveDissonance = {}; | |
| this.synapticPruning = {}; | |
| this.neuralHarmonics = {}; | |
| this.memeticReplication = {}; | |
| this.geneticDrift = {}; | |
| this.hyperdimensionalProjection = {}; | |
| this.topologicalCompression = {}; | |
| this.morphogeneticResonance = {}; | |
| this.conceptCorrelations = {}; | |
| this.narrativeStructures = {}; | |
| this.emotionalGradients = {}; | |
| this.symbolicMaps = {}; | |
| this.linguisticPatterns = {}; | |
| this.causalChains = {}; | |
| this.abstractionLevels = {}; | |
| this.contextualFrames = {}; | |
| this.eigenVectors = {}; | |
| this.stateMachines = {}; | |
| this.bayesianNetworks = {}; | |
| this.fuzzyLogicSets = {}; | |
| this.feedbackLoops = {}; | |
| this.resonanceChambers = {}; | |
| this.informationFlows = {}; | |
| this.phaseSpace = {}; | |
| this.attractorBasins = {}; | |
| this.semanticGradients = {}; | |
| this.conceptualKernels = {}; | |
| this.rhetoricalDevices = {}; | |
| this.stylisticMatrices = {}; | |
| this.dialogueActs = {}; | |
| this.pragmaticContext = {}; | |
| this.buildTransformerLayers(); | |
| } | |
| loadTrainingData(data) { | |
| this.knowledgeBase = data.split('\n').filter(line => line.trim() !== ''); | |
| const buildFunctions = [ | |
| this.buildMarkovChains, this.buildLetterChains, this.buildWordAssociations, this.buildSemanticNetwork, | |
| this.buildSyntacticChains, this.buildHierarchicalChains, this.buildQuantumStates, this.buildNeuroplasticWeights, | |
| this.buildHolographicMemory, this.buildFractalPatterns, this.buildTemporalNets, this.buildChaosFields, | |
| this.buildEntropicMatrices, this.buildAdaptiveFilters, this.buildCognitiveMaps, this.buildSynapticPaths, | |
| this.buildNeuralOscillators, this.buildMemeticFields, this.buildGeneticOperators, this.buildHyperdimensionalVectors, | |
| this.buildTopologicalManifolds, this.buildMorphogeneticFields, this.buildQuantumEntanglement, this.buildHolographicProjection, | |
| this.buildFractalEncoding, this.buildTemporalFolding, this.buildChaosSynchronization, this.buildEntropicBalancing, | |
| this.buildAdaptiveResonance, this.buildCognitiveDissonance, this.buildSynapticPruning, this.buildNeuralHarmonics, | |
| this.buildMemeticReplication, this.buildGeneticDrift, this.buildHyperdimensionalProjection, this.buildTopologicalCompression, | |
| this.buildMorphogeneticResonance, this.buildAnswerTemplates, this.buildDeepMemory, this.buildConceptCorrelations, | |
| this.buildNarrativeStructures, this.buildEmotionalGradients, this.buildSymbolicMaps, this.buildLinguisticPatterns, | |
| this.buildCausalChains, this.buildAbstractionLevels, this.buildContextualFrames, this.buildEigenVectors, | |
| this.buildStateMachines, this.buildBayesianNetworks, this.buildFuzzyLogicSets, this.buildFeedbackLoops, | |
| this.buildResonanceChambers, this.buildInformationFlows, this.buildPhaseSpace, this.buildAttractorBasins, | |
| this.buildSemanticGradients, this.buildConceptualKernels, this.buildRhetoricalDevices, this.buildStylisticMatrices, | |
| this.buildDialogueActs, this.buildPragmaticContext | |
| ]; | |
| buildFunctions.forEach(fn => fn.call(this, this.knowledgeBase)); | |
| } | |
| buildTransformerLayers() { | |
| const transformerFunctions = [ | |
| text => this.applyQuantumSuperposition(text), text => this.applyHolographicReduction(text), text => this.applyFractalCompression(text), | |
| text => this.applyTemporalConvolution(text), text => this.applyChaosEntanglement(text), text => this.applyEntropicFiltering(text), | |
| text => this.applyAdaptiveResonanceTransform(text), text => this.applyCognitiveMapping(text), text => this.applySynapticWeights(text), | |
| text => this.applyNeuralHarmonicsTransform(text), text => this.applyMemeticEvolution(text), text => this.applyGeneticOperations(text), | |
| text => this.applyHyperdimensionalRotation(text), text => this.applyTopologicalFolding(text), text => this.applyMorphogeneticField(text), | |
| text => this.applyConceptCorrelation(text), text => this.applyNarrativeShift(text), text => this.applyEmotionalGradient(text), | |
| text => this.applySymbolicSubstitution(text), text => this.applyLinguisticPattern(text), text => this.applyCausalInference(text), | |
| text => this.applyAbstractionChange(text), text => this.applyContextualFraming(text), text => this.applyEigenVector(text), | |
| text => this.applyStateMachine(text), text => this.applyBayesianUpdate(text), text => this.applyFuzzyLogic(text), | |
| text => this.applyFeedback(text), text => this.applyResonance(text), text => this.applyInformationFlow(text), | |
| text => this.applyPhaseSpaceJump(text), text => this.applyAttractorBasin(text), text => this.applySemanticGradientShift(text), | |
| text => this.applyConceptualKernel(text), text => this.applyRhetoricalDevice(text), text => this.applyStylisticMatrix(text), | |
| text => this.applyDialogueAct(text), text => this.applyPragmaticContext(text) | |
| ]; | |
| this.transformerLayers = []; | |
| for (let i = 0; i < 7000; i++) { | |
| const method = transformerFunctions[Math.floor(Math.random() * transformerFunctions.length)]; | |
| this.transformerLayers.push(text => { | |
| try { | |
| const result = method(text); | |
| return (typeof result === 'string' && result.trim() !== '') ? result : this.generateFallbackResponse(text); | |
| } catch (e) { | |
| return this.generateFallbackResponse(text); | |
| } | |
| }); | |
| } | |
| } | |
| applyQuantumSuperposition(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const states = this.quantumStates[word] || [word, this.generateWordByLetters(word[0] || 'a')]; | |
| return states[Math.floor(Math.random() * states.length)]; | |
| }).join(' '); | |
| } | |
| applyHolographicReduction(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const key = words.slice(0, 2).join(' '); | |
| const memory = this.holographicMemory[key] || [text]; | |
| return memory[Math.floor(Math.random() * memory.length)]; | |
| } | |
| applyFractalCompression(text) { | |
| if (!text) return ''; | |
| const pattern = this.fractalPatterns[text.substring(0, 3)] || text.substring(0, 5); | |
| if (!pattern) return text; | |
| return (pattern + text).substring(0, Math.max(text.length, 5)); | |
| } | |
| applyTemporalConvolution(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const temporalNet = this.temporalNets[words[0]] || {}; | |
| const next = Object.keys(temporalNet); | |
| if (next.length > 0) { | |
| return next[Math.floor(Math.random() * next.length)] + ' ' + words.slice(1).join(' '); | |
| } | |
| return text; | |
| } | |
| applyChaosEntanglement(text) { | |
| const words = text.split(' '); | |
| const entangled = words.map(word => { | |
| const chaos = this.chaosFields[word] || [word]; | |
| return chaos[Math.floor(Math.random() * chaos.length)]; | |
| }); | |
| return entangled.join(' '); | |
| } | |
| applyEntropicFiltering(text) { | |
| const entropyMap = this.calculateEntropy(text); | |
| return text.split('').filter(char => entropyMap[char] > 0.1).join(''); | |
| } | |
| calculateEntropy(text) { | |
| const freq = {}; | |
| if (!text) return {}; | |
| text.split('').forEach(char => { | |
| freq[char] = (freq[char] || 0) + 1; | |
| }); | |
| const total = text.length; | |
| const entropy = {}; | |
| if (total === 0) return {}; | |
| Object.keys(freq).forEach(char => { | |
| const p = freq[char] / total; | |
| entropy[char] = -p * Math.log2(p); | |
| }); | |
| return entropy; | |
| } | |
| applyAdaptiveResonanceTransform(text) { | |
| if (!text) return ''; | |
| const resonance = this.adaptiveResonance[text.substring(0, 5)] || text; | |
| return resonance.split('').reverse().join(''); | |
| } | |
| applyCognitiveMapping(text) { | |
| if (!text) return ''; | |
| const map = this.cognitiveMaps[text.substring(0, 2)] || [text]; | |
| return map[Math.floor(Math.random() * map.length)]; | |
| } | |
| applySynapticWeights(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const weights = this.synapticPaths[word] || { [word]: 1 }; | |
| const choices = Object.keys(weights); | |
| return choices[Math.floor(Math.random() * choices.length)]; | |
| }).join(' '); | |
| } | |
| applyNeuralHarmonicsTransform(text) { | |
| if (!text) return ''; | |
| const harmonic = this.neuralHarmonics[text.length % 5] || [text]; | |
| return harmonic[Math.floor(Math.random() * harmonic.length)]; | |
| } | |
| applyMemeticEvolution(text) { | |
| const memes = this.memeticFields[text] || [text]; | |
| return memes[Math.floor(Math.random() * memes.length)]; | |
| } | |
| applyGeneticOperations(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| if (this.geneticOperators[word] && Math.random() < 0.5) { | |
| return this.geneticOperators[word][Math.floor(Math.random() * this.geneticOperators[word].length)]; | |
| } | |
| return word; | |
| }).join(' '); | |
| } | |
| applyHyperdimensionalRotation(text) { | |
| if (!text) return ''; | |
| const dims = this.hyperdimensionalVectors[text.split(' ')[0]] || [text]; | |
| return dims[Math.floor(Math.random() * dims.length)]; | |
| } | |
| applyTopologicalFolding(text) { | |
| if (!text) return ''; | |
| const key = text.replace(/\s+/g, '').length % 10; | |
| const folded = this.topologicalCompression[key] || [text]; | |
| return folded[Math.floor(Math.random() * folded.length)].split('').reverse().join(''); | |
| } | |
| applyMorphogeneticField(text) { | |
| if (!text) return ''; | |
| const key = text.substring(text.length - 3); | |
| const morph = this.morphogeneticFields[key] || [text]; | |
| return morph[Math.floor(Math.random() * morph.length)]; | |
| } | |
| applyConceptCorrelation(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const concept = words[Math.floor(Math.random() * words.length)]; | |
| const correlated = this.conceptCorrelations[concept] || { [concept]: 1 }; | |
| const choices = Object.keys(correlated); | |
| const newWord = choices[Math.floor(Math.random() * choices.length)]; | |
| words.splice(Math.floor(Math.random() * words.length), 1, newWord); | |
| return words.join(' '); | |
| } | |
| applyNarrativeShift(text) { | |
| if (!text) return ''; | |
| const structure = this.narrativeStructures[text.length % 10] || [text]; | |
| return structure[Math.floor(Math.random() * structure.length)]; | |
| } | |
| applyEmotionalGradient(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const gradient = this.emotionalGradients[word]; | |
| if (gradient && Math.random() < 0.3) { | |
| return gradient; | |
| } | |
| return word; | |
| }).join(' '); | |
| } | |
| applySymbolicSubstitution(text) { | |
| if (!text) return ''; | |
| const key = text[0]; | |
| const symbols = this.symbolicMaps[key] || [text]; | |
| return symbols[Math.floor(Math.random() * symbols.length)]; | |
| } | |
| applyLinguisticPattern(text) { | |
| if (!text) return ''; | |
| const patternKey = text.split(' ').length % 5; | |
| const pattern = this.linguisticPatterns[patternKey] || [text]; | |
| return pattern[Math.floor(Math.random() * pattern.length)]; | |
| } | |
| applyCausalInference(text) { | |
| const words = text.split(' '); | |
| const lastWord = words[words.length - 1]; | |
| const cause = this.causalChains[lastWord] || {}; | |
| const effects = Object.keys(cause); | |
| if (effects.length > 0) { | |
| return text + ' потому что ' + effects[Math.floor(Math.random() * effects.length)]; | |
| } | |
| return text; | |
| } | |
| applyAbstractionChange(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const level = this.abstractionLevels[word]; | |
| if (level && Math.random() < 0.2) { | |
| return level[Math.floor(Math.random() * level.length)]; | |
| } | |
| return word; | |
| }).join(' '); | |
| } | |
| applyContextualFraming(text) { | |
| if (!text) return ''; | |
| const frame = this.contextualFrames[text.substring(0, 4)] || [text]; | |
| return frame[Math.floor(Math.random() * frame.length)]; | |
| } | |
| applyEigenVector(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const vector = this.eigenVectors[words[0]] || [text]; | |
| return vector[Math.floor(Math.random() * vector.length)]; | |
| } | |
| applyStateMachine(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const state = this.stateMachines[words[0]] || { [words.slice(1).join(' ')]: 1 }; | |
| const nextStates = Object.keys(state); | |
| return words[0] + ' ' + nextStates[Math.floor(Math.random() * nextStates.length)]; | |
| } | |
| applyBayesianUpdate(text) { | |
| const words = text.split(' '); | |
| if (words.length < 2) return text; | |
| const prior = words[0]; | |
| const evidence = words[1]; | |
| const posterior = this.bayesianNetworks[prior]?.[evidence]; | |
| if (posterior) { | |
| const choices = Object.keys(posterior); | |
| words.splice(0, 2, choices[Math.floor(Math.random() * choices.length)]); | |
| return words.join(' '); | |
| } | |
| return text; | |
| } | |
| applyFuzzyLogic(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const fuzzySet = this.fuzzyLogicSets[word]; | |
| if (fuzzySet && Math.random() < 0.4) { | |
| return fuzzySet[Math.floor(Math.random() * fuzzySet.length)]; | |
| } | |
| return word; | |
| }).join(' '); | |
| } | |
| applyFeedback(text) { | |
| if (!text) return ''; | |
| const loop = this.feedbackLoops[text.length % 10] || [text]; | |
| return loop[Math.floor(Math.random() * loop.length)]; | |
| } | |
| applyResonance(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return ''; | |
| const chamber = this.resonanceChambers[words[0]] || [text]; | |
| return chamber[Math.floor(Math.random() * chamber.length)]; | |
| } | |
| applyInformationFlow(text) { | |
| const words = text.split(' '); | |
| if (words.length < 2) return text; | |
| const from = words[0]; | |
| const flow = this.informationFlows[from]; | |
| if (flow) { | |
| const to = flow[Math.floor(Math.random() * flow.length)]; | |
| words[1] = to; | |
| return words.join(' '); | |
| } | |
| return text; | |
| } | |
| applyPhaseSpaceJump(text) { | |
| if (!text) return ''; | |
| const key = text.split('').sort().join('').substring(0, 5); | |
| const newState = this.phaseSpace[key] || [text]; | |
| return newState[Math.floor(Math.random() * newState.length)]; | |
| } | |
| applyAttractorBasin(text) { | |
| if (!text) return ''; | |
| const basin = this.attractorBasins[text.substring(0, 2)] || [text]; | |
| return basin[Math.floor(Math.random() * basin.length)]; | |
| } | |
| applySemanticGradientShift(text) { | |
| const words = text.split(' '); | |
| return words.map(word => { | |
| const gradient = this.semanticGradients[word]; | |
| if (gradient && Math.random() < 0.3) { | |
| return gradient[Math.floor(Math.random() * gradient.length)]; | |
| } | |
| return word; | |
| }).join(' '); | |
| } | |
| applyConceptualKernel(text) { | |
| const words = text.split(' '); | |
| if (words.length === 0) return text; | |
| const kernelKey = words[Math.floor(Math.random() * words.length)]; | |
| const kernel = this.conceptualKernels[kernelKey]; | |
| if (kernel) { | |
| return kernel.replace('%s', text); | |
| } | |
| return text; | |
| } | |
| applyRhetoricalDevice(text) { | |
| const devices = Object.keys(this.rhetoricalDevices); | |
| if (devices.length === 0) return text; | |
| const device = devices[Math.floor(Math.random() * devices.length)]; | |
| return this.rhetoricalDevices[device](text); | |
| } | |
| applyStylisticMatrix(text) { | |
| const styles = Object.keys(this.stylisticMatrices); | |
| if (styles.length === 0) return text; | |
| const style = styles[Math.floor(Math.random() * styles.length)]; | |
| const matrix = this.stylisticMatrices[style]; | |
| return text.split(' ').map(word => matrix[word] || word).join(' '); | |
| } | |
| applyDialogueAct(text) { | |
| const acts = Object.keys(this.dialogueActs); | |
| if (acts.length === 0) return text; | |
| const act = acts[Math.floor(Math.random() * acts.length)]; | |
| return this.dialogueActs[act] + ' ' + text; | |
| } | |
| applyPragmaticContext(text) { | |
| const contexts = Object.keys(this.pragmaticContext); | |
| if (contexts.length === 0) return text; | |
| const context = contexts[Math.floor(Math.random() * contexts.length)]; | |
| const modification = this.pragmaticContext[context]; | |
| return text + ' ' + modification; | |
| } | |
| loadLogicData(data) { | |
| this.logicChain = data.split('\n').filter(line => line.trim() !== ''); | |
| } | |
| buildDeepMemory(data) { | |
| this.deepMemory = data.map(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| return { | |
| raw: line, | |
| tokens: words, | |
| vector: this.createSemanticVector(words) | |
| }; | |
| }); | |
| } | |
| createSemanticVector(words) { | |
| const vector = {}; | |
| words.forEach(word => { | |
| const cleanWord = word.toLowerCase().replace(/[^а-яё]/g, ''); | |
| if (!cleanWord) return; | |
| vector[cleanWord] = (vector[cleanWord] || 0) + 1; | |
| const similar = this.getSemanticSimilarWords(cleanWord, 3); | |
| similar.forEach(w => { | |
| vector[w] = (vector[w] || 0) + 0.7; | |
| }); | |
| }); | |
| return vector; | |
| } | |
| semanticSimilarity(vecA, vecB) { | |
| const keys = new Set([...Object.keys(vecA), ...Object.keys(vecB)]); | |
| let dotProduct = 0; | |
| let magA = 0; | |
| let magB = 0; | |
| for (const key of keys) { | |
| const a = vecA[key] || 0; | |
| const b = vecB[key] || 0; | |
| dotProduct += a * b; | |
| magA += a * a; | |
| magB += b * b; | |
| } | |
| magA = Math.sqrt(magA); | |
| magB = Math.sqrt(magB); | |
| return magA && magB ? dotProduct / (magA * magB) : 0; | |
| } | |
| findInDeepMemory(query, threshold = 0.65) { | |
| const queryVector = this.createSemanticVector(query.split(' ')); | |
| const results = []; | |
| this.deepMemory.forEach(memory => { | |
| const similarity = this.semanticSimilarity(queryVector, memory.vector); | |
| if (similarity > threshold) { | |
| results.push({ | |
| text: memory.raw, | |
| score: similarity | |
| }); | |
| } | |
| }); | |
| return results.sort((a, b) => b.score - a.score) | |
| .slice(0, 7) | |
| .map(item => item.text); | |
| } | |
| buildMarkovChains(data) { | |
| const maxOrder = 9; | |
| for (let order = 1; order <= maxOrder; order++) { | |
| this.markovChains[order] = this.buildMarkovChainForOrder(data, order); | |
| } | |
| } | |
| buildMarkovChainForOrder(data, order) { | |
| const chain = {}; | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| for (let i = 0; i <= words.length - order; i++) { | |
| const key = words.slice(i, i + order - 1).join(' '); | |
| const value = words[i + order - 1]; | |
| if (!chain[key]) { | |
| chain[key] = []; | |
| } | |
| chain[key].push(value); | |
| } | |
| }); | |
| return chain; | |
| } | |
| buildLetterChains(data) { | |
| const chain = {}; | |
| data.join(' ').split(/\s+/).forEach(word => { | |
| for (let i = 0; i < word.length - 1; i++) { | |
| const key = word[i]; | |
| const next = word[i + 1]; | |
| if (!chain[key]) chain[key] = []; | |
| chain[key].push(next); | |
| } | |
| }); | |
| this.letterChains = chain; | |
| } | |
| buildWordAssociations(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()).map(w => w.toLowerCase().replace(/[^а-яё]/g, '')); | |
| words.forEach((word, index) => { | |
| if (!this.wordAssociations[word]) { | |
| this.wordAssociations[word] = {}; | |
| } | |
| for (let j = Math.max(0, index - 7); j < Math.min(words.length, index + 8); j++) { | |
| if (index !== j) { | |
| const assocWord = words[j]; | |
| const weight = 1 / (Math.abs(index - j) + 1); | |
| this.wordAssociations[word][assocWord] = (this.wordAssociations[word][assocWord] || 0) + weight; | |
| } | |
| } | |
| }); | |
| }); | |
| } | |
| buildSemanticNetwork(data) { | |
| const contextWindow = 15; | |
| data.forEach(line => { | |
| const sentences = line.split(/[.!?]+/).filter(s => s.trim()); | |
| sentences.forEach(sentence => { | |
| const words = sentence.toLowerCase().split(' ').filter(w => w.trim() && w.match(/[а-яё]/g)); | |
| words.forEach((word, i) => { | |
| if (!this.semanticNetwork[word]) this.semanticNetwork[word] = {}; | |
| for (let j = Math.max(0, i - contextWindow); j < Math.min(words.length, i + contextWindow); j++) { | |
| if (i !== j) { | |
| const contextWord = words[j]; | |
| const weight = 1 / (Math.abs(i - j) + 1); | |
| this.semanticNetwork[word][contextWord] = (this.semanticNetwork[word][contextWord] || 0) + weight; | |
| } | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildSyntacticChains(data) { | |
| data.forEach(line => { | |
| const sentences = line.split(/[.!?]+/).filter(s => s.trim()); | |
| sentences.forEach(sentence => { | |
| const words = sentence.split(' ').filter(w => w.trim()).map(w => w.toLowerCase().replace(/[^а-яё]/g, '')); | |
| words.forEach((word, i) => { | |
| if (!this.syntacticChains[word]) this.syntacticChains[word] = {}; | |
| if (i < words.length - 1) { | |
| const nextWord = words[i + 1]; | |
| this.syntacticChains[word][nextWord] = (this.syntacticChains[word][nextWord] || 0) + 1; | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildHierarchicalChains(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (!this.hierarchicalChains[word]) this.hierarchicalChains[word] = []; | |
| if (i < words.length - 2) { | |
| this.hierarchicalChains[word].push(words[i + 1] + ' ' + words[i + 2]); | |
| } | |
| }); | |
| }); | |
| } | |
| buildQuantumStates(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (!this.quantumStates[word]) this.quantumStates[word] = []; | |
| if (i > 0) this.quantumStates[word].push(words[i - 1]); | |
| if (i < words.length - 1) this.quantumStates[word].push(words[i + 1]); | |
| }); | |
| }); | |
| } | |
| buildNeuroplasticWeights(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (!this.neuroplasticWeights[word]) this.neuroplasticWeights[word] = {}; | |
| words.forEach(other => { | |
| if (word !== other) { | |
| this.neuroplasticWeights[word][other] = (this.neuroplasticWeights[word][other] || 0) + 1; | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildHolographicMemory(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length < 2) return; | |
| const key = words.slice(0, 2).join(' '); | |
| if (!this.holographicMemory[key]) this.holographicMemory[key] = []; | |
| this.holographicMemory[key].push(line); | |
| }); | |
| } | |
| buildFractalPatterns(data) { | |
| data.forEach(line => { | |
| if (line.length > 10) { | |
| const pattern = line.substring(0, 5); | |
| this.fractalPatterns[pattern] = line; | |
| } | |
| }); | |
| } | |
| buildTemporalNets(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (!this.temporalNets[word]) this.temporalNets[word] = {}; | |
| if (i < words.length - 1) { | |
| const next = words[i + 1]; | |
| this.temporalNets[word][next] = (this.temporalNets[word][next] || 0) + 1; | |
| } | |
| }); | |
| }); | |
| } | |
| buildChaosFields(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.chaosFields[word]) this.chaosFields[word] = []; | |
| this.chaosFields[word] = [...new Set([...this.chaosFields[word], ...words])]; | |
| }); | |
| }); | |
| } | |
| buildEntropicMatrices(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.entropicMatrices[word]) this.entropicMatrices[word] = {}; | |
| words.forEach(other => { | |
| if (word !== other) { | |
| this.entropicMatrices[word][other] = (this.entropicMatrices[word][other] || 0) + 1; | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildAdaptiveFilters(data) { | |
| data.forEach(line => { | |
| const key = line.length % 10; | |
| if (!this.adaptiveFilters[key]) this.adaptiveFilters[key] = []; | |
| this.adaptiveFilters[key].push(line); | |
| }); | |
| } | |
| buildCognitiveMaps(data) { | |
| data.forEach(line => { | |
| const key = line.substring(0, 2); | |
| if (!this.cognitiveMaps[key]) this.cognitiveMaps[key] = []; | |
| this.cognitiveMaps[key].push(line); | |
| }); | |
| } | |
| buildSynapticPaths(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.synapticPaths[word]) this.synapticPaths[word] = {}; | |
| words.forEach(otherWord => { | |
| if (word !== otherWord) { | |
| this.synapticPaths[word][otherWord] = (this.synapticPaths[word][otherWord] || 0) + 1; | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildNeuralOscillators(data) { | |
| data.forEach(line => { | |
| const key = line.length % 5; | |
| if (!this.neuralOscillators[key]) this.neuralOscillators[key] = []; | |
| this.neuralOscillators[key].push(line); | |
| }); | |
| } | |
| buildMemeticFields(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.memeticFields[word]) this.memeticFields[word] = []; | |
| this.memeticFields[word] = [...new Set([...this.memeticFields[word], ...words.filter(w => w !== word)])]; | |
| }); | |
| }); | |
| } | |
| buildGeneticOperators(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (!this.geneticOperators[word]) this.geneticOperators[word] = []; | |
| if (i > 0) this.geneticOperators[word].push(words[i - 1]); | |
| if (i < words.length - 1) this.geneticOperators[word].push(words[i + 1]); | |
| }); | |
| }); | |
| } | |
| buildHyperdimensionalVectors(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length === 0) return; | |
| const key = words[0]; | |
| if (!this.hyperdimensionalVectors[key]) this.hyperdimensionalVectors[key] = []; | |
| this.hyperdimensionalVectors[key].push(line); | |
| }); | |
| } | |
| buildTopologicalManifolds(data) { | |
| data.forEach(line => { | |
| const key = line.split(' ').length; | |
| if (!this.topologicalManifolds[key]) this.topologicalManifolds[key] = []; | |
| this.topologicalManifolds[key].push(line); | |
| }); | |
| } | |
| buildMorphogeneticFields(data) { | |
| data.forEach(line => { | |
| if (line.length < 3) return; | |
| const key = line.substring(line.length - 3); | |
| if (!this.morphogeneticFields[key]) this.morphogeneticFields[key] = []; | |
| this.morphogeneticFields[key].push(line); | |
| }); | |
| } | |
| buildQuantumEntanglement(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.quantumEntanglement[word]) this.quantumEntanglement[word] = []; | |
| words.forEach(other => { | |
| if (word !== other && !this.quantumEntanglement[word].includes(other)) { | |
| this.quantumEntanglement[word].push(other); | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildHolographicProjection(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length === 0) return; | |
| const key = words[0] + '_' + words.length; | |
| if (!this.holographicProjection[key]) this.holographicProjection[key] = []; | |
| this.holographicProjection[key].push(line); | |
| }); | |
| } | |
| buildFractalEncoding(data) { | |
| data.forEach(line => { | |
| const key = line.replace(/\s+/g, '').substring(0, 4); | |
| if (!this.fractalEncoding[key]) this.fractalEncoding[key] = []; | |
| this.fractalEncoding[key].push(line); | |
| }); | |
| } | |
| buildTemporalFolding(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.temporalFolding[word]) this.temporalFolding[word] = []; | |
| this.temporalFolding[word].push(words.join(' ')); | |
| }); | |
| }); | |
| } | |
| buildChaosSynchronization(data) { | |
| data.forEach(line => { | |
| const key = line.split('').filter(c => c.match(/[а-яё]/)).length % 10; | |
| if (!this.chaosSynchronization[key]) this.chaosSynchronization[key] = []; | |
| this.chaosSynchronization[key].push(line); | |
| }); | |
| } | |
| buildEntropicBalancing(data) { | |
| data.forEach(line => { | |
| const key = line.length % 8; | |
| if (!this.entropicBalancing[key]) this.entropicBalancing[key] = []; | |
| this.entropicBalancing[key].push(line); | |
| }); | |
| } | |
| buildAdaptiveResonance(data) { | |
| data.forEach(line => { | |
| if (line.length < 5) return; | |
| const key = line.substring(0, 5); | |
| if (!this.adaptiveResonance[key]) this.adaptiveResonance[key] = ''; | |
| this.adaptiveResonance[key] = line; | |
| }); | |
| } | |
| buildCognitiveDissonance(data) { | |
| data.forEach(line => { | |
| const key = line.split(' ').filter(w => w.length > 3).join('_'); | |
| if (!key) return; | |
| if (!this.cognitiveDissonance[key]) this.cognitiveDissonance[key] = []; | |
| this.cognitiveDissonance[key].push(line); | |
| }); | |
| } | |
| buildSynapticPruning(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.synapticPruning[word]) this.synapticPruning[word] = []; | |
| this.synapticPruning[word] = words.filter(w => w !== word); | |
| }); | |
| }); | |
| } | |
| buildNeuralHarmonics(data) { | |
| data.forEach(line => { | |
| const key = line.length % 5; | |
| if (!this.neuralHarmonics[key]) this.neuralHarmonics[key] = []; | |
| this.neuralHarmonics[key].push(line); | |
| }); | |
| } | |
| buildMemeticReplication(data) { | |
| data.forEach(line => { | |
| const key = line.substring(0, 4); | |
| if (!this.memeticReplication[key]) this.memeticReplication[key] = []; | |
| this.memeticReplication[key].push(line); | |
| }); | |
| } | |
| buildGeneticDrift(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length === 0) return; | |
| words.forEach(word => { | |
| if (!this.geneticDrift[word]) this.geneticDrift[word] = []; | |
| this.geneticDrift[word].push(words[Math.floor(Math.random() * words.length)]); | |
| }); | |
| }); | |
| } | |
| buildHyperdimensionalProjection(data) { | |
| data.forEach(line => { | |
| if (!line) return; | |
| const key = line[0]; | |
| if (!this.hyperdimensionalProjection[key]) this.hyperdimensionalProjection[key] = []; | |
| this.hyperdimensionalProjection[key].push(line); | |
| }); | |
| } | |
| buildTopologicalCompression(data) { | |
| data.forEach(line => { | |
| const key = line.replace(/\s+/g, '').length % 10; | |
| if (!this.topologicalCompression[key]) this.topologicalCompression[key] = []; | |
| this.topologicalCompression[key].push(line); | |
| }); | |
| } | |
| buildMorphogeneticResonance(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length < 1) return; | |
| const key = words[0] + (words[words.length - 1] || ''); | |
| if (!this.morphogeneticResonance[key]) this.morphogeneticResonance[key] = []; | |
| this.morphogeneticResonance[key].push(line); | |
| }); | |
| } | |
| buildConceptCorrelations(data) { | |
| data.forEach(line => { | |
| const words = line.toLowerCase().replace(/[^а-яё\s]/g, '').split(' ').filter(w => w.trim()); | |
| words.forEach((word, index) => { | |
| if (!this.conceptCorrelations[word]) this.conceptCorrelations[word] = {}; | |
| for (let i = 0; i < words.length; i++) { | |
| if (i !== index) { | |
| this.conceptCorrelations[word][words[i]] = (this.conceptCorrelations[word][words[i]] || 0) + 1; | |
| } | |
| } | |
| }); | |
| }); | |
| } | |
| buildNarrativeStructures(data) { | |
| data.forEach(line => { | |
| const key = line.length % 10; | |
| if (!this.narrativeStructures[key]) this.narrativeStructures[key] = []; | |
| this.narrativeStructures[key].push(line); | |
| }); | |
| } | |
| buildEmotionalGradients(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| this.sentimentKeywords.forEach(sentiment => { | |
| if (line.includes(sentiment)) { | |
| this.emotionalGradients[word] = sentiment; | |
| } | |
| }); | |
| }); | |
| }); | |
| } | |
| buildSymbolicMaps(data) { | |
| data.forEach(line => { | |
| if (!line) return; | |
| const key = line[0]; | |
| if (!this.symbolicMaps[key]) this.symbolicMaps[key] = []; | |
| this.symbolicMaps[key].push(line); | |
| }); | |
| } | |
| buildLinguisticPatterns(data) { | |
| data.forEach(line => { | |
| const key = line.split(' ').length % 5; | |
| if (!this.linguisticPatterns[key]) this.linguisticPatterns[key] = []; | |
| this.linguisticPatterns[key].push(line); | |
| }); | |
| } | |
| buildCausalChains(data) { | |
| data.forEach(line => { | |
| const parts = line.split('потому что'); | |
| if (parts.length > 1) { | |
| const cause = parts[1].trim().split(' ')[0].toLowerCase().replace(/[^а-яё]/g, ''); | |
| const effect = parts[0].trim().split(' ').pop().toLowerCase().replace(/[^а-яё]/g, ''); | |
| if (cause && effect) { | |
| if (!this.causalChains[effect]) this.causalChains[effect] = {}; | |
| this.causalChains[effect][cause] = (this.causalChains[effect][cause] || 0) + 1; | |
| } | |
| } | |
| }); | |
| } | |
| buildAbstractionLevels(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length > 2) { | |
| const specific = words[0]; | |
| const general = words.slice(1).join(' '); | |
| if (!this.abstractionLevels[specific]) this.abstractionLevels[specific] = []; | |
| this.abstractionLevels[specific].push(general); | |
| } | |
| }); | |
| } | |
| buildContextualFrames(data) { | |
| data.forEach(line => { | |
| if (line.length < 4) return; | |
| const key = line.substring(0, 4); | |
| if (!this.contextualFrames[key]) this.contextualFrames[key] = []; | |
| this.contextualFrames[key].push(line); | |
| }); | |
| } | |
| buildEigenVectors(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length === 0) return; | |
| const key = words[0]; | |
| if (!this.eigenVectors[key]) this.eigenVectors[key] = []; | |
| this.eigenVectors[key].push(line); | |
| }); | |
| } | |
| buildStateMachines(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length < 2) return; | |
| const fromState = words[0]; | |
| const toState = words.slice(1).join(' '); | |
| if (!this.stateMachines[fromState]) this.stateMachines[fromState] = {}; | |
| this.stateMachines[fromState][toState] = (this.stateMachines[fromState][toState] || 0) + 1; | |
| }); | |
| } | |
| buildBayesianNetworks(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length < 3) return; | |
| const cause = words[0]; | |
| const evidence = words[1]; | |
| const effect = words[2]; | |
| if (!this.bayesianNetworks[cause]) this.bayesianNetworks[cause] = {}; | |
| if (!this.bayesianNetworks[cause][evidence]) this.bayesianNetworks[cause][evidence] = {}; | |
| this.bayesianNetworks[cause][evidence][effect] = (this.bayesianNetworks[cause][evidence][effect] || 0) + 1; | |
| }); | |
| } | |
| buildFuzzyLogicSets(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| words.forEach(word => { | |
| if (!this.fuzzyLogicSets[word]) this.fuzzyLogicSets[word] = []; | |
| this.fuzzyLogicSets[word] = [...new Set([...this.fuzzyLogicSets[word], ...words])]; | |
| }); | |
| }); | |
| } | |
| buildFeedbackLoops(data) { | |
| data.forEach(line => { | |
| const key = line.length % 10; | |
| if (!this.feedbackLoops[key]) this.feedbackLoops[key] = []; | |
| this.feedbackLoops[key].push(line); | |
| }); | |
| } | |
| buildResonanceChambers(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length === 0) return; | |
| const key = words[0]; | |
| if (!this.resonanceChambers[key]) this.resonanceChambers[key] = []; | |
| this.resonanceChambers[key].push(line); | |
| }); | |
| } | |
| buildInformationFlows(data) { | |
| data.forEach(line => { | |
| const words = line.split(' ').filter(w => w.trim()); | |
| if (words.length < 2) return; | |
| const from = words[0]; | |
| const to = words[1]; | |
| if (!this.informationFlows[from]) this.informationFlows[from] = []; | |
| this.informationFlows[from].push(to); | |
| }); | |
| } | |
| buildPhaseSpace(data) { | |
| data.forEach(line => { | |
| const key = line.split('').sort().join('').substring(0, 5); | |
| if (!this.phaseSpace[key]) this.phaseSpace[key] = []; | |
| this.phaseSpace[key].push(line); | |
| }); | |
| } | |
| buildAttractorBasins(data) { | |
| data.forEach(line => { | |
| const key = line.substring(0, 2); | |
| if (!this.attractorBasins[key]) this.attractorBasins[key] = []; | |
| this.attractorBasins[key].push(line); | |
| }); | |
| } | |
| buildSemanticGradients(data) { | |
| data.forEach(line => { | |
| const words = line.toLowerCase().replace(/[^а-яё\s]/g, '').split(' ').filter(w => w.trim()); | |
| words.forEach((word, i) => { | |
| if (i < words.length - 1) { | |
| const nextWord = words[i + 1]; | |
| if (!this.semanticGradients[word]) this.semanticGradients[word] = []; | |
| this.semanticGradients[word].push(nextWord); | |
| } | |
| }); | |
| }); | |
| } | |
| buildConceptualKernels(data) { | |
| data.forEach(line => { | |
| if (line.includes('%s')) { | |
| const [key, value] = line.split('%s'); | |
| if (key && value) { | |
| const kernelKey = key.trim().split(' ').pop(); | |
| if (kernelKey) { | |
| this.conceptualKernels[kernelKey] = line; | |
| } | |
| } | |
| } | |
| }); | |
| } | |
| buildRhetoricalDevices(data) { | |
| this.rhetoricalDevices = { | |
| metaphor: (text) => { | |
| const words = text.split(' '); | |
| if (words.length < 2) return text; | |
| const subject = words[0]; | |
| const vehicle = this.getSemanticSimilarWords(words[words.length - 1], 1)[0] || 'мир'; | |
| return `${subject} это ${vehicle}`; | |
| }, | |
| simile: (text) => { | |
| const words = text.split(' '); | |
| if (words.length < 2) return text; | |
| const subject = words[0]; | |
| const vehicle = this.getSemanticSimilarWords(words[words.length - 1], 1)[0] || 'сон'; | |
| return `${subject} как ${vehicle}`; | |
| }, | |
| anaphora: (text) => { | |
| const words = text.split(' '); | |
| if (words.length < 2) return text; | |
| const phrase = words.slice(0, 2).join(' '); | |
| return `${phrase}, ${phrase.toLowerCase()}`; | |
| } | |
| }; | |
| } | |
// Installs fixed register-substitution tables (formal and poetic word swaps).
// `data` is unused; kept for signature symmetry with the other build* methods.
buildStylisticMatrices(data) {
  this.stylisticMatrices = {
    formal: { 'привет': 'здравствуйте', 'пока': 'до свидания' },
    poetic: { 'ветер': 'дыхание небес', 'река': 'зеркало времени' }
  };
}
// Installs fixed opener phrases per dialogue act (greeting/question/
// statement/farewell). `data` is unused; kept for signature symmetry.
buildDialogueActs(data) {
  this.dialogueActs = {
    greeting: 'Привет,',
    question: 'Скажи,',
    statement: 'Я думаю, что',
    farewell: 'До встречи,'
  };
}
// Installs fixed discourse markers per pragmatic register. `data` is unused;
// kept for signature symmetry with the other build* methods.
buildPragmaticContext(data) {
  this.pragmaticContext = {
    formal: 'с уважением',
    informal: 'кстати',
    uncertain: 'возможно'
  };
}
| buildAnswerTemplates(data) { | |
| this.answerTemplates = data.filter(line => line.length > 15 && line.length < 100); | |
| } | |
| getSemanticSimilarWords(word, count = 10) { | |
| const cleanWord = word.toLowerCase().replace(/[^а-яё]/g, ''); | |
| if (!this.semanticNetwork[cleanWord]) return []; | |
| return Object.entries(this.semanticNetwork[cleanWord]) | |
| .sort((a, b) => b[1] - a[1]) | |
| .slice(0, count) | |
| .map(entry => entry[0]); | |
| } | |
| findSimilarWordsByLetters(word, threshold = 0.4) { | |
| const cleanWord = word.toLowerCase().replace(/[^а-яё]/g, ''); | |
| const words = Object.keys(this.wordAssociations); | |
| let similar = words.filter(w => { | |
| const minLen = Math.min(cleanWord.length, w.length); | |
| let distance = 0; | |
| for (let i = 0; i < minLen; i++) { | |
| if (cleanWord[i] !== w[i]) distance++; | |
| } | |
| distance += Math.abs(cleanWord.length - w.length); | |
| return distance / Math.max(cleanWord.length, w.length) <= (1 - threshold); | |
| }); | |
| if (!similar.length) { | |
| similar = words.slice(0, 5); | |
| } | |
| return similar; | |
| } | |
| ensureCoherence(words) { | |
| if (!words || words.length === 0) return [this.generateWordByLetters('а')]; | |
| return words.filter((word, i) => { | |
| if (i === 0) return true; | |
| const prevWord = words[i - 1]?.toLowerCase().replace(/[^а-яё]/g, ''); | |
| const currentWord = word?.toLowerCase().replace(/[^а-яё]/g, ''); | |
| if (!prevWord || !currentWord) return false; | |
| if (this.syntacticChains[prevWord]?.[currentWord] || this.semanticNetwork[prevWord]?.[currentWord]) { | |
| return true; | |
| } | |
| return Math.random() > 0.6; | |
| }); | |
| } | |
| generateWordByLetters(seed = '') { | |
| let result = seed?.[0] || ['а', 'б', 'в', 'г', 'д', 'е', 'ё'][Math.floor(Math.random() * 7)]; | |
| for (let i = 0; i < 4 + Math.floor(Math.random() * 6); i++) { | |
| const lastChar = result[result.length - 1]; | |
| const next = this.letterChains[lastChar] || ['а', 'о', 'и', 'е', 'у', 'н', 'т']; | |
| result += next[Math.floor(Math.random() * next.length)]; | |
| } | |
| return result; | |
| } | |
| generateFallbackResponse(topic = 'что') { | |
| const randomWords = Object.keys(this.wordAssociations); | |
| if (randomWords.length === 0) return this.generateWordByLetters('а'); | |
| const fallback = this.generateMarkovResponse(topic || randomWords[Math.floor(Math.random() * randomWords.length)]); | |
| return fallback || this.generateWordByLetters('а'); | |
| } | |
| analyzeTopic(input) { | |
| const words = input.toLowerCase().split(' ').filter(w => w.trim() && w.match(/[а-яё]/)); | |
| let topicScore = {}; | |
| words.forEach(word => { | |
| const similarWords = this.getSemanticSimilarWords(word, 5); | |
| similarWords.forEach(w => { | |
| const associates = this.getSemanticSimilarWords(w, 5); | |
| associates.forEach(assoc => { | |
| topicScore[assoc] = (topicScore[assoc] || 0) + (this.semanticNetwork[w]?.[assoc] || 1); | |
| }); | |
| }); | |
| }); | |
| const topic = Object.entries(topicScore) | |
| .sort((a, b) => b[1] - a[1]) | |
| .slice(0, 5) | |
| .map(entry => entry[0]) | |
| .join(' '); | |
| return { | |
| isQuestion: input.endsWith('?') || words.some(w => this.questionWords.includes(w)), | |
| questionWord: words.find(w => this.questionWords.includes(w)) || null, | |
| topic: topic || input | |
| }; | |
| } | |
| transformPhrase(phrase) { | |
| const words = phrase.split(' ').filter(w => w.trim()); | |
| const transformedWords = this.ensureCoherence(words.map(word => this.transformWord(word))); | |
| return transformedWords.join(' ') || this.generateWordByLetters('а'); | |
| } | |
| transformWord(word) { | |
| const clean = word.toLowerCase().replace(/[^а-яё]/g, ''); | |
| if (Math.random() < 0.5) { | |
| const associates = this.getSemanticSimilarWords(clean, 5); | |
| if (associates.length > 0) { | |
| return associates[Math.floor(Math.random() * associates.length)]; | |
| } | |
| } | |
| if (this.wordAssociations[clean]) { | |
| const associates = Object.entries(this.wordAssociations[clean]); | |
| if (associates.length > 0) { | |
| const weights = associates.map(entry => entry[1]); | |
| const total = weights.reduce((sum, w) => sum + w, 0); | |
| if (total > 0) { | |
| const rand = Math.random() * total; | |
| let sum = 0; | |
| for (const [assocWord, weight] of associates) { | |
| sum += weight; | |
| if (rand <= sum) return assocWord; | |
| } | |
| } | |
| } | |
| } | |
| return clean || this.generateWordByLetters(clean); | |
| } | |
| composeNewAnswer(topic) { | |
| const templates = this.answerTemplates | |
| .filter(t => t.toLowerCase().includes(topic.toLowerCase().slice(0, 3))) | |
| .slice(0, 5); | |
| if (templates.length > 0) { | |
| const template = templates[Math.floor(Math.random() * templates.length)]; | |
| const words = template.split(' ').filter(w => w.trim()); | |
| const topicWords = topic.split(' ').filter(w => w.trim()); | |
| words.splice(Math.floor(Math.random() * words.length), 0, ...topicWords); | |
| return this.ensureCoherence(words).join(' ') || this.generateFallbackResponse(topic); | |
| } | |
| return this.generateMarkovResponse(topic) || this.generateFallbackResponse(topic); | |
| } | |
| reformulateResponse(text, isQuestion = false) { | |
| let words = text.split(' ').filter(w => w.trim()).map(w => this.transformWord(w)); | |
| words = this.ensureCoherence(words); | |
| if (isQuestion && Math.random() > 0.7) { | |
| const addition = this.generateMarkovResponse(words[0] || 'что', 2) || this.generateWordByLetters('а'); | |
| return words.join(' ') + ' ' + addition; | |
| } | |
| return words.join(' ') || this.generateWordByLetters('а'); | |
| } | |
/**
 * Generates text by walking a word-level Markov chain of the given order.
 * The key is a sliding window of (order - 1) words; each step appends a
 * candidate word and advances the window. Capped at 50 steps. Falls back to
 * single-word syntactic chains when the order-specific chain has no entry.
 * NOTE(review): `this.markovChains[validOrder]` assumes chains are keyed by
 * order — the builder is outside this chunk; confirm.
 * @param {string} input - seed text supplying the initial key
 * @param {number} order - chain order, capped at 9
 * @returns {string} generated (coherence-filtered) text, or a fallback
 */
generateMarkovResponse(input, order = 4) {
  const words = input.split(' ').filter(w => w.trim());
  if (words.length === 0) return this.generateFallbackResponse();
  let response = [];
  const validOrder = Math.min(order, 9);
  let currentKey = words.slice(0, validOrder - 1).join(' ');
  for (let i = 0; i < 50; i++) {
    const chain = this.markovChains[validOrder] || this.syntacticChains;
    // Fall back to the last word of the key when the full key is unknown.
    const nextWords = chain[currentKey] || this.syntacticChains[currentKey.split(' ').pop()] || [];
    if (!nextWords.length) break;
    const nextWord = this.getNextWordByProbability(nextWords);
    response.push(nextWord);
    // Slide the key window forward by one word.
    const keyParts = currentKey.split(' ');
    keyParts.push(nextWord);
    if (keyParts.length >= validOrder) {
      keyParts.shift();
    }
    currentKey = keyParts.join(' ');
  }
  return this.ensureCoherence(response).join(' ') || this.generateFallbackResponse(input);
}
| getNextWordByProbability(words) { | |
| if (!words || words.length === 0) return 'что'; | |
| return words[Math.floor(Math.random() * words.length)]; | |
| } | |
| processMultipleInputs(inputs) { | |
| const responses = inputs.map(input => { | |
| const { isQuestion, topic } = this.analyzeTopic(input); | |
| let response; | |
| const deepMemoryResults = this.findInDeepMemory(topic); | |
| if (deepMemoryResults.length > 0 && Math.random() < 0.7) { | |
| response = deepMemoryResults[Math.floor(Math.random() * deepMemoryResults.length)]; | |
| } else if (isQuestion) { | |
| const knowledgeResults = this.findInKnowledgeBase(topic); | |
| if (knowledgeResults.length > 0) { | |
| response = knowledgeResults[Math.floor(Math.random() * knowledgeResults.length)]; | |
| } else { | |
| response = this.composeNewAnswer(topic); | |
| } | |
| } else { | |
| const markovResponse = this.generateMarkovResponse(input); | |
| const similarFromKB = this.findByPartialMatch(input); | |
| response = similarFromKB.length > 0 && Math.random() < 0.5 ? similarFromKB[0] : markovResponse; | |
| } | |
| this.contextMemory.push({ input, response }); | |
| return this.reformulateResponse(response, isQuestion); | |
| }); | |
| return responses.join('. ') || this.generateFallbackResponse(); | |
| } | |
/**
 * Main entry point: answers a user message. Multi-clause input is delegated
 * to processMultipleInputs(); single clauses are answered probabilistically
 * from deep memory (80%), knowledge base / logic chain (questions), or a
 * Markov walk, then passed through the transformer layers, recorded in the
 * 10-entry context window, and reformulated.
 * @param {string} input - raw user message
 * @returns {string} generated response
 */
generateResponse(input) {
  const inputs = input.split(/[.,!?;]+/).filter(i => i.trim() !== '');
  if (inputs.length > 1) {
    return this.processMultipleInputs(inputs);
  }
  const { isQuestion, topic } = this.analyzeTopic(input);
  let source;
  const deepMemoryResults = this.findInDeepMemory(topic, 0.7);
  if (deepMemoryResults.length > 0 && Math.random() < 0.8) {
    source = deepMemoryResults[Math.floor(Math.random() * deepMemoryResults.length)];
  } else if (isQuestion) {
    const knowledgeResults = this.findInKnowledgeBase(topic);
    const logicResults = this.findInLogicChain(topic);
    // Preference order: knowledge base (60%), logic chain (40%), composed.
    if (knowledgeResults.length > 0 && Math.random() < 0.6) {
      source = knowledgeResults[Math.floor(Math.random() * knowledgeResults.length)];
    } else if (logicResults.length > 0 && Math.random() < 0.4) {
      source = logicResults[Math.floor(Math.random() * logicResults.length)];
    } else {
      source = this.composeNewAnswer(topic);
    }
  } else {
    const knowledgeResults = this.findInKnowledgeBase(input);
    if (knowledgeResults.length > 0 && Math.random() < 0.5) {
      source = knowledgeResults[Math.floor(Math.random() * knowledgeResults.length)];
    } else {
      source = this.generateMarkovResponse(input, 5);
    }
  }
  // Each transformer layer is a text → text function applied in order.
  let processedResponse = source;
  for (const layer of this.transformerLayers) {
    processedResponse = layer(processedResponse);
  }
  this.contextMemory.push({ input, response: processedResponse });
  if (this.contextMemory.length > 10) this.contextMemory.shift();
  return this.reformulateResponse(processedResponse, isQuestion) || this.generateFallbackResponse(topic);
}
| findByPartialMatch(input) { | |
| const partialMatches = []; | |
| const lowerInput = input.toLowerCase(); | |
| this.knowledgeBase.forEach(line => { | |
| if (line.toLowerCase().startsWith(lowerInput.slice(0, 3))) { | |
| partialMatches.push(line); | |
| } | |
| }); | |
| return partialMatches.slice(0, 5); | |
| } | |
| findInKnowledgeBase(query) { | |
| return this.knowledgeBase.filter(line => line.toLowerCase().includes(query.toLowerCase())).slice(0, 5); | |
| } | |
| findInLogicChain(query) { | |
| return this.logicChain.filter(line => line.toLowerCase().includes(query.toLowerCase())).slice(0, 5); | |
| } | |
| } | |
// CommonJS export of the SimpleGPT class for require() consumers.
module.exports = SimpleGPT;