aaron0eidt commited on
Commit
e8099ac
·
1 Parent(s): f4747e4

Disable generation controls for static mode

Browse files
attribution_analysis/attribution_analysis_page.py CHANGED
@@ -546,7 +546,20 @@ def verify_claims(claims, analysis_data):
546
  score_series = analysis_data['scores_df'].abs().mean(axis=1)
547
  score_name = "average score"
548
  else: # peak
549
- score_series = analysis_data['scores_df'].abs().max(axis=1)
 
 
 
 
 
 
 
 
 
 
 
 
 
550
  score_name = "peak score"
551
 
552
  if score_series.empty:
@@ -879,10 +892,9 @@ def run_analysis(prompt, max_tokens, enable_explanations, force_exact_num_tokens
879
  cached_result = cached_data[prompt]
880
 
881
  # Check if influential_docs are missing and update the cache if possible
882
- # FORCE UPDATE FOR DEBUGGING
883
- if True: # "influential_docs" not in cached_result:
884
  try:
885
- print(f"DEBUG: Force updating cache for '{prompt}'...")
886
  lang = st.session_state.get('lang', 'en')
887
  # This call should hit the Streamlit cache and be fast
888
  missing_docs = get_influential_docs(prompt, lang)
@@ -890,12 +902,9 @@ def run_analysis(prompt, max_tokens, enable_explanations, force_exact_num_tokens
890
  if missing_docs:
891
  cached_result["influential_docs"] = missing_docs
892
  # Save updated cache back to file
893
- print(f"DEBUG: Writing to {cache_file}...")
894
  with open(cache_file, "w", encoding="utf-8") as f:
895
  json.dump(cached_data, f, ensure_ascii=False, indent=4)
896
  print("Cache updated successfully.")
897
- else:
898
- print("DEBUG: No docs returned from get_influential_docs")
899
  except Exception as e:
900
  print(f"Could not update cache with influence docs: {e}")
901
 
@@ -1103,7 +1112,8 @@ def show_attribution_analysis():
1103
  min_value=1,
1104
  max_value=50,
1105
  value=5,
1106
- help=tr('max_new_tokens_slider_help')
 
1107
  )
1108
 
1109
  # Checkbox to enable or disable AI explanations.
@@ -1118,7 +1128,8 @@ def show_attribution_analysis():
1118
  tr('generate_and_analyze_button'),
1119
  type="primary",
1120
  on_click=start_new_analysis,
1121
- args=(prompt, max_tokens, enable_explanations)
 
1122
  )
1123
 
1124
  with col2:
 
546
  score_series = analysis_data['scores_df'].abs().mean(axis=1)
547
  score_name = "average score"
548
  else: # peak
549
+ # Check both influence GIVEN (input) and RECEIVED (output)
550
+ # We use .get(token, 0.0) to handle cases where a token is not in that axis
551
+ input_peaks = analysis_data['scores_df'].abs().max(axis=1)
552
+ output_peaks = analysis_data['scores_df'].abs().max(axis=0)
553
+
554
+ combined_scores = {}
555
+ all_tokens = set(input_peaks.index) | set(output_peaks.index)
556
+
557
+ for t in all_tokens:
558
+ s1 = input_peaks.get(t, 0.0)
559
+ s2 = output_peaks.get(t, 0.0)
560
+ combined_scores[t] = max(s1, s2)
561
+
562
+ score_series = pd.Series(combined_scores)
563
  score_name = "peak score"
564
 
565
  if score_series.empty:
 
892
  cached_result = cached_data[prompt]
893
 
894
  # Check if influential_docs are missing and update the cache if possible
895
+ if "influential_docs" not in cached_result:
 
896
  try:
897
+ print(f"Updating cache for '{prompt}' with missing influence docs...")
898
  lang = st.session_state.get('lang', 'en')
899
  # This call should hit the Streamlit cache and be fast
900
  missing_docs = get_influential_docs(prompt, lang)
 
902
  if missing_docs:
903
  cached_result["influential_docs"] = missing_docs
904
  # Save updated cache back to file
 
905
  with open(cache_file, "w", encoding="utf-8") as f:
906
  json.dump(cached_data, f, ensure_ascii=False, indent=4)
907
  print("Cache updated successfully.")
 
 
908
  except Exception as e:
909
  print(f"Could not update cache with influence docs: {e}")
910
 
 
1112
  min_value=1,
1113
  max_value=50,
1114
  value=5,
1115
+ help=tr('max_new_tokens_slider_help'),
1116
+ disabled=True
1117
  )
1118
 
1119
  # Checkbox to enable or disable AI explanations.
 
1128
  tr('generate_and_analyze_button'),
1129
  type="primary",
1130
  on_click=start_new_analysis,
1131
+ args=(prompt, max_tokens, enable_explanations),
1132
+ disabled=True
1133
  )
1134
 
1135
  with col2:
function_vectors/function_vectors_page.py CHANGED
@@ -303,7 +303,7 @@ def display_interactive_analysis():
303
  enable_ai_explanation = st.checkbox(tr('enable_ai_explanation_checkbox'), value=True, help=tr('enable_ai_explanation_help'))
304
 
305
  # Analysis button.
306
- if st.button(tr('analyze_button'), type="primary"):
307
  _trigger_and_rerun_analysis(input_text, True, True, enable_ai_explanation)
308
 
309
 
 
303
  enable_ai_explanation = st.checkbox(tr('enable_ai_explanation_checkbox'), value=True, help=tr('enable_ai_explanation_help'))
304
 
305
  # Analysis button.
306
+ if st.button(tr('analyze_button'), type="primary", disabled=True):
307
  _trigger_and_rerun_analysis(input_text, True, True, enable_ai_explanation)
308
 
309