id int64 1 6.07M | name stringlengths 1 295 | code stringlengths 12 426k | language stringclasses 1
value | source_file stringlengths 5 202 | start_line int64 1 158k | end_line int64 1 158k | repo dict |
|---|---|---|---|---|---|---|---|
2,201 | PollExit | def PollExit(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/PollExit',
wandb_dot_proto_dot_wandb__internal__pb2.PollExitRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.PollExitResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 797 | 811 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,202 | ServerInfo | def ServerInfo(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInfo',
wandb_dot_proto_dot_wandb__internal__pb2.ServerInfoRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ServerInfoResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 814 | 828 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,203 | Shutdown | def Shutdown(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Shutdown',
wandb_dot_proto_dot_wandb__internal__pb2.ShutdownRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ShutdownResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 831 | 845 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,204 | RunStatus | def RunStatus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/RunStatus',
wandb_dot_proto_dot_wandb__internal__pb2.RunStatusRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.RunStatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 848 | 862 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,205 | RunExit | def RunExit(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/RunExit',
wandb_dot_proto_dot_wandb__internal__pb2.RunExitRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.RunExitResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 865 | 879 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,206 | RunPreempting | def RunPreempting(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/RunPreempting',
wandb_dot_proto_dot_wandb__internal__pb2.RunPreemptingRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.RunPreemptingResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 882 | 896 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,207 | Metric | def Metric(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Metric',
wandb_dot_proto_dot_wandb__internal__pb2.MetricRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.MetricResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 899 | 913 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,208 | PartialLog | def PartialLog(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/PartialLog',
wandb_dot_proto_dot_wandb__internal__pb2.PartialHistoryRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.PartialHistoryResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 916 | 930 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,209 | Log | def Log(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Log',
wandb_dot_proto_dot_wandb__internal__pb2.HistoryRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.HistoryResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 933 | 947 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,210 | Summary | def Summary(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Summary',
wandb_dot_proto_dot_wandb__internal__pb2.SummaryRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.SummaryResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 950 | 964 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,211 | Config | def Config(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Config',
wandb_dot_proto_dot_wandb__internal__pb2.ConfigRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ConfigResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 967 | 981 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,212 | Files | def Files(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Files',
wandb_dot_proto_dot_wandb__internal__pb2.FilesRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.FilesResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 984 | 998 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,213 | Output | def Output(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Output',
wandb_dot_proto_dot_wandb__internal__pb2.OutputRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.OutputResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,001 | 1,015 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,214 | OutputRaw | def OutputRaw(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/OutputRaw',
wandb_dot_proto_dot_wandb__internal__pb2.OutputRawRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.OutputRawResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,018 | 1,032 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,215 | Telemetry | def Telemetry(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Telemetry',
wandb_dot_proto_dot_wandb__telemetry__pb2.TelemetryRecord.SerializeToString,
wandb_dot_proto_dot_wandb__telemetry__pb2.TelemetryResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,035 | 1,049 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,216 | Alert | def Alert(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Alert',
wandb_dot_proto_dot_wandb__internal__pb2.AlertRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.AlertResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,052 | 1,066 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,217 | Artifact | def Artifact(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Artifact',
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,069 | 1,083 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,218 | LinkArtifact | def LinkArtifact(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/LinkArtifact',
wandb_dot_proto_dot_wandb__internal__pb2.LinkArtifactRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.LinkArtifactResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,086 | 1,100 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,219 | UseArtifact | def UseArtifact(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/UseArtifact',
wandb_dot_proto_dot_wandb__internal__pb2.UseArtifactRecord.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.UseArtifactResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,103 | 1,117 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,220 | ArtifactSend | def ArtifactSend(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ArtifactSend',
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactSendRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactSendResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,120 | 1,134 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,221 | ArtifactPoll | def ArtifactPoll(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ArtifactPoll',
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactPollRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ArtifactPollResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,137 | 1,151 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,222 | Cancel | def Cancel(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Cancel',
wandb_dot_proto_dot_wandb__internal__pb2.CancelRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.CancelResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,154 | 1,168 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,223 | Keepalive | def Keepalive(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Keepalive',
wandb_dot_proto_dot_wandb__internal__pb2.KeepaliveRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.KeepaliveResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,171 | 1,185 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,224 | CheckVersion | def CheckVersion(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/CheckVersion',
wandb_dot_proto_dot_wandb__internal__pb2.CheckVersionRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.CheckVersionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,188 | 1,202 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,225 | Pause | def Pause(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Pause',
wandb_dot_proto_dot_wandb__internal__pb2.PauseRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.PauseResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,205 | 1,219 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,226 | Resume | def Resume(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Resume',
wandb_dot_proto_dot_wandb__internal__pb2.ResumeRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.ResumeResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,222 | 1,236 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,227 | Status | def Status(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/Status',
wandb_dot_proto_dot_wandb__internal__pb2.StatusRequest.SerializeToString,
wandb_dot_proto_dot_wandb__internal__pb2.StatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,239 | 1,253 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,228 | ServerShutdown | def ServerShutdown(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerShutdown',
wandb_dot_proto_dot_wandb__server__pb2.ServerShutdownRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerShutdownResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,256 | 1,270 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,229 | ServerStatus | def ServerStatus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerStatus',
wandb_dot_proto_dot_wandb__server__pb2.ServerStatusRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerStatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,273 | 1,287 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,230 | ServerInformInit | def ServerInformInit(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformInit',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformInitRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformInitResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,290 | 1,304 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,231 | ServerInformStart | def ServerInformStart(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformStart',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformStartRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformStartResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,307 | 1,321 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,232 | ServerInformFinish | def ServerInformFinish(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformFinish',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformFinishRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformFinishResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,324 | 1,338 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,233 | ServerInformAttach | def ServerInformAttach(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformAttach',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformAttachRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformAttachResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,341 | 1,355 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,234 | ServerInformDetach | def ServerInformDetach(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformDetach',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformDetachRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformDetachResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,358 | 1,372 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,235 | ServerInformTeardown | def ServerInformTeardown(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/wandb_internal.InternalService/ServerInformTeardown',
wandb_dot_proto_dot_wandb__server__pb2.ServerInformTeardownRequest.SerializeToString,
wandb_dot_proto_dot_wandb__server__pb2.ServerInformTeardownResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | python | wandb/proto/v3/wandb_server_pb2_grpc.py | 1,375 | 1,389 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,236 | error | def error(self, message):
raise ArgumentException() | python | wandb/integration/magic.py | 28 | 29 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,237 | _merge_dicts | def _merge_dicts(source, destination):
for key, value in source.items():
if isinstance(value, dict):
node = destination.setdefault(key, {})
_merge_dicts(value, node)
else:
destination[key] = value
return destination | python | wandb/integration/magic.py | 32 | 39 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,238 | _dict_from_keyval | def _dict_from_keyval(k, v, json_parse=True):
d = ret = {}
keys = k.split(".")
for k in keys[:-1]:
d = d.setdefault(k, {})
if json_parse:
try:
v = json.loads(v.strip('"'))
except ValueError:
pass
d[keys[-1]] = v
return ret | python | wandb/integration/magic.py | 42 | 53 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,239 | _magic_get_config | def _magic_get_config(k, default):
d = _magic_config
keys = k.split(".")
for k in keys[:-1]:
d = d.get(k, {})
return d.get(keys[-1], default) | python | wandb/integration/magic.py | 56 | 61 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,240 | _parse_magic | def _parse_magic(val):
# attempt to treat string as a json
not_set = {}
if val is None:
return _magic_defaults, not_set
if val.startswith("{"):
try:
val = json.loads(val)
except ValueError:
wandb.termwarn("Unable to parse magic json", repeat=False)
return _magic_defaults, not_set
conf = _merge_dicts(_magic_defaults, {})
return _merge_dicts(val, conf), val
if os.path.isfile(val):
try:
with open(val) as stream:
val = yaml.safe_load(stream)
except OSError as e:
wandb.termwarn("Unable to read magic config file", repeat=False)
return _magic_defaults, not_set
except yaml.YAMLError as e:
wandb.termwarn("Unable to parse magic yaml file", repeat=False)
return _magic_defaults, not_set
conf = _merge_dicts(_magic_defaults, {})
return _merge_dicts(val, conf), val
# parse as a list of key value pairs
if val.find("=") > 0:
# split on commas but ignore commas inside quotes
# Using this re allows env variable parsing like:
# WANDB_MAGIC=key1='"["cat","dog","pizza"]"',key2=true
items = re.findall(r'(?:[^\s,"]|"(?:\\.|[^"])*")+', val)
conf_set = {}
for kv in items:
kv = kv.split("=")
if len(kv) != 2:
wandb.termwarn("Unable to parse magic key value pair", repeat=False)
continue
d = _dict_from_keyval(*kv)
_merge_dicts(d, conf_set)
conf = _merge_dicts(_magic_defaults, {})
return _merge_dicts(conf_set, conf), conf_set
wandb.termwarn("Unable to parse magic parameter", repeat=False)
return _magic_defaults, not_set | python | wandb/integration/magic.py | 120 | 162 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,241 | set_entity | def set_entity(value, env=None):
if env is None:
env = os.environ | python | wandb/integration/magic.py | 165 | 167 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,242 | _fit_wrapper | def _fit_wrapper(self, fn, generator=None, *args, **kwargs):
trigger.call("on_fit")
keras = sys.modules.get("keras", None)
tfkeras = sys.modules.get("tensorflow.python.keras", None)
epochs = kwargs.pop("epochs", None)
batch_size = kwargs.pop("batch_size", None)
magic_epochs = _magic_get_config("keras.fit.epochs", None)
if magic_epochs is not None:
epochs = magic_epochs
magic_batch_size = _magic_get_config("keras.fit.batch_size", None)
if magic_batch_size is not None:
batch_size = magic_batch_size
callbacks = kwargs.pop("callbacks", [])
tb_enabled = _magic_get_config("keras.fit.callbacks.tensorboard.enable", None)
if tb_enabled:
k = getattr(self, "_keras_or_tfkeras", None)
if k:
tb_duplicate = _magic_get_config(
"keras.fit.callbacks.tensorboard.duplicate", None
)
tb_overwrite = _magic_get_config(
"keras.fit.callbacks.tensorboard.overwrite", None
)
tb_present = any(
[isinstance(cb, k.callbacks.TensorBoard) for cb in callbacks]
)
if tb_present and tb_overwrite:
callbacks = [
cb
for cb in callbacks
if not isinstance(cb, k.callbacks.TensorBoard)
]
if tb_overwrite or tb_duplicate or not tb_present:
tb_callback_kwargs = {"log_dir": wandb.run.dir}
cb_args = (
"write_graph",
"histogram_freq",
"update_freq",
"write_grads",
"write_images",
"batch_size",
)
for cb_arg in cb_args:
v = _magic_get_config(
"keras.fit.callbacks.tensorboard." + cb_arg, None
)
if v is not None:
tb_callback_kwargs[cb_arg] = v
tb_callback = k.callbacks.TensorBoard(**tb_callback_kwargs)
callbacks.append(tb_callback)
wandb_enabled = _magic_get_config("keras.fit.callbacks.wandb.enable", None)
if wandb_enabled:
wandb_duplicate = _magic_get_config("keras.fit.callbacks.wandb.duplicate", None)
wandb_overwrite = _magic_get_config("keras.fit.callbacks.wandb.overwrite", None)
wandb_present = any(
[isinstance(cb, wandb.keras.WandbCallback) for cb in callbacks]
)
if wandb_present and wandb_overwrite:
callbacks = [
cb for cb in callbacks if not isinstance(cb, wandb.keras.WandbCallback)
]
if wandb_overwrite or wandb_duplicate or not wandb_present:
wandb_callback_kwargs = {}
log_gradients = _magic_get_config(
"keras.fit.callbacks.wandb.log_gradients", None
)
if log_gradients and kwargs.get("x") and kwargs.get("y"):
wandb_callback_kwargs["log_gradients"] = log_gradients
cb_args = (
"predictions",
"log_weights",
"data_type",
"save_model",
"save_weights_only",
"monitor",
"mode",
"verbose",
"input_type",
"output_type",
"log_evaluation",
"labels",
)
for cb_arg in cb_args:
v = _magic_get_config("keras.fit.callbacks.wandb." + cb_arg, None)
if v is not None:
wandb_callback_kwargs[cb_arg] = v
wandb_callback = wandb.keras.WandbCallback(**wandb_callback_kwargs)
callbacks.append(wandb_callback)
kwargs["callbacks"] = callbacks
if epochs is not None:
kwargs["epochs"] = epochs
if batch_size is not None:
kwargs["batch_size"] = batch_size
if generator:
return fn(generator, *args, **kwargs)
return fn(*args, **kwargs) | python | wandb/integration/magic.py | 170 | 269 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,243 | _magic_fit | def _magic_fit(
self,
x=None,
y=None,
batch_size=None,
epochs=1,
# FIXME: there is more
# verbose=1,
# callbacks=None,
# validation_split=0.,
# validation_data=None,
# shuffle=True,
# class_weight=None,
# sample_weight=None,
# initial_epoch=0,
# steps_per_epoch=None,
# validation_steps=None,
# validation_freq=1,
# max_queue_size=10,
# workers=1,
# use_multiprocessing=False,
*args,
**kwargs
):
if hasattr(self, "_wandb_internal_model"):
return self._fit(
x=x, y=y, batch_size=batch_size, epochs=epochs, *args, **kwargs
)
return _fit_wrapper(
self, self._fit, x=x, y=y, batch_size=batch_size, epochs=epochs, *args, **kwargs
) | python | wandb/integration/magic.py | 273 | 303 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,244 | _magic_fit_generator | def _magic_fit_generator(
self,
generator,
steps_per_epoch=None,
epochs=1,
# FIXME: there is more
# verbose=1,
# verbose=1,
# callbacks=None,
# validation_data=None,
# validation_steps=None,
# validation_freq=1,
# class_weight=None,
# max_queue_size=10,
# workers=1,
##use_multiprocessing=False,
# shuffle=True,
# initial_epoch=0,
*args,
**kwargs
):
return _fit_wrapper(
self,
self._fit_generator,
generator=generator,
steps_per_epoch=steps_per_epoch,
epochs=epochs,
*args,
**kwargs
) | python | wandb/integration/magic.py | 306 | 335 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,245 | _monkey_tfkeras | def _monkey_tfkeras():
from tensorflow import keras as tfkeras
from wandb.integration.keras import WandbCallback # add keras import hooks first
models = getattr(tfkeras, "models", None)
if not models:
return
models.Model._keras_or_tfkeras = tfkeras
if models.Model.fit == _magic_fit:
return
models.Model._fit = models.Model.fit
models.Model.fit = _magic_fit
models.Model._fit_generator = models.Model.fit_generator
models.Model.fit_generator = _magic_fit_generator | python | wandb/integration/magic.py | 338 | 352 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,246 | _monkey_absl | def _monkey_absl():
from absl import app as absl_app
def _absl_callback():
absl_flags = sys.modules.get("absl.flags")
if not absl_flags:
return
_flags = getattr(absl_flags, "FLAGS", None)
if not _flags:
return
_flags_as_dict = getattr(_flags, "flag_values_dict", None)
if not _flags_as_dict:
return
_flags_module = getattr(_flags, "find_module_defining_flag", None)
if not _flags_module:
return
flags_dict = {}
for f, v in _flags_as_dict().items():
m = _flags_module(f)
if not m or m.startswith("absl."):
continue
flags_dict[f] = v
global _args_absl
_args_absl = flags_dict
call_after_init = getattr(absl_app, "call_after_init", None)
if not call_after_init:
return
call_after_init(_absl_callback) | python | wandb/integration/magic.py | 355 | 383 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,247 | _absl_callback | def _absl_callback():
absl_flags = sys.modules.get("absl.flags")
if not absl_flags:
return
_flags = getattr(absl_flags, "FLAGS", None)
if not _flags:
return
_flags_as_dict = getattr(_flags, "flag_values_dict", None)
if not _flags_as_dict:
return
_flags_module = getattr(_flags, "find_module_defining_flag", None)
if not _flags_module:
return
flags_dict = {}
for f, v in _flags_as_dict().items():
m = _flags_module(f)
if not m or m.startswith("absl."):
continue
flags_dict[f] = v
global _args_absl
_args_absl = flags_dict | python | wandb/integration/magic.py | 358 | 378 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,248 | _process_system_args | def _process_system_args():
global _args_system
# try using argparse
parser = SafeArgumentParser(add_help=False)
for num, arg in enumerate(sys.argv):
try:
next_arg = sys.argv[num + 1]
except IndexError:
next_arg = ""
if arg.startswith(("-", "--")) and not next_arg.startswith(("-", "--")):
try:
parser.add_argument(arg)
except ValueError:
pass
try:
parsed, unknown = parser.parse_known_args()
except ArgumentException:
pass
else:
_args_system = vars(parsed) | python | wandb/integration/magic.py | 386 | 405 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,249 | _monkey_argparse | def _monkey_argparse():
argparse._ArgumentParser = argparse.ArgumentParser
def _install():
argparse.ArgumentParser = MonitoredArgumentParser
def _uninstall():
argparse.ArgumentParser = argparse._ArgumentParser
def monitored(self, args, unknown=None):
global _args_argparse
_args_argparse = copy.deepcopy(vars(args))
class MonitoredArgumentParser(argparse._ArgumentParser):
def __init__(self, *args, **kwargs):
_uninstall()
super().__init__(*args, **kwargs)
_install()
def parse_args(self, *args, **kwargs):
args = super().parse_args(*args, **kwargs)
return args
def parse_known_args(self, *args, **kwargs):
args, unknown = super().parse_known_args(*args, **kwargs)
if self._callback:
self._callback(args, unknown=unknown)
return args, unknown
_install()
argparse.ArgumentParser._callback = monitored | python | wandb/integration/magic.py | 408 | 438 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,250 | _install | def _install():
argparse.ArgumentParser = MonitoredArgumentParser | python | wandb/integration/magic.py | 411 | 412 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,251 | _uninstall | def _uninstall():
argparse.ArgumentParser = argparse._ArgumentParser | python | wandb/integration/magic.py | 414 | 415 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,252 | monitored | def monitored(self, args, unknown=None):
global _args_argparse
_args_argparse = copy.deepcopy(vars(args)) | python | wandb/integration/magic.py | 417 | 419 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,253 | __init__ | def __init__(self, *args, **kwargs):
_uninstall()
super().__init__(*args, **kwargs)
_install() | python | wandb/integration/magic.py | 422 | 425 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,254 | parse_args | def parse_args(self, *args, **kwargs):
args = super().parse_args(*args, **kwargs)
return args | python | wandb/integration/magic.py | 427 | 429 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,255 | parse_known_args | def parse_known_args(self, *args, **kwargs):
args, unknown = super().parse_known_args(*args, **kwargs)
if self._callback:
self._callback(args, unknown=unknown)
return args, unknown | python | wandb/integration/magic.py | 431 | 435 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,256 | _magic_update_config | def _magic_update_config():
# if we already have config set, don't add anymore
if wandb.run and wandb.run.config:
c = wandb.run.config
user_config = dict(c.items())
# ignore keys set by magic integration when checking
# if user added any keys
if set(user_config).difference({"magic"}):
return
if _magic_get_config("args.absl", None) is False:
global _args_absl
_args_absl = None
if _magic_get_config("args.argparse", None) is False:
global _args_argparse
_args_argparse = None
if _magic_get_config("args.sys", None) is False:
global _args_system
_args_system = None
# prefer absl, then argparse values, fallback to parsed system args
args = _args_absl or _args_argparse or _args_system
if args and wandb.run and wandb.run.config:
wandb.run.config.update(args) | python | wandb/integration/magic.py | 441 | 462 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,257 | _magic_init | def _magic_init(**kwargs):
magic_arg = kwargs.get("magic", None)
if magic_arg is not None and magic_arg is not False:
global _magic_init_seen
if _magic_init_seen and magic_arg is not True:
wandb.termwarn(
"wandb.init() magic argument ignored because wandb magic has already been initialized",
repeat=False,
)
_magic_init_seen = True
else:
wandb.termwarn(
"wandb.init() arguments ignored because wandb magic has already been initialized",
repeat=False,
) | python | wandb/integration/magic.py | 465 | 479 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,258 | magic_install | def magic_install(init_args=None):
if wandb.setup().settings._noop:
return
global _run_once
if _run_once:
return
_run_once = True
global _magic_config
global _import_hook
# parse config early, before we have wandb.config overrides
_magic_config, magic_set = _parse_magic(wandb.env.get_magic())
# we are implicitly enabling magic
if _magic_config.get("enable") is None:
_magic_config["enable"] = True
magic_set["enable"] = True
# allow early config to disable magic
if not _magic_config.get("enable"):
return
# process system args
_process_system_args()
# install argparse wrapper
in_jupyter_or_ipython = wandb.wandb_sdk.lib.ipython._get_python_type() != "python"
if not in_jupyter_or_ipython:
_monkey_argparse()
# track init calls
trigger.register("on_init", _magic_init)
# if wandb.init has already been called, this call is ignored
init_args = init_args or {}
init_args["magic"] = True
wandb.init(**init_args)
# parse magic from wandb.config (from flattened to dict)
magic_from_config = {}
MAGIC_KEY = "wandb_magic"
for k in wandb.config.keys():
if not k.startswith(MAGIC_KEY + "."):
continue
d = _dict_from_keyval(k, wandb.config[k], json_parse=False)
_merge_dicts(d, magic_from_config)
magic_from_config = magic_from_config.get(MAGIC_KEY, {})
_merge_dicts(magic_from_config, _magic_config)
# allow late config to disable magic
if not _magic_config.get("enable"):
return
# store magic_set into config
if magic_set:
wandb.config["magic"] = magic_set
wandb.config.persist()
# Monkey patch tf.keras
if get_optional_module("tensorflow"):
if "tensorflow.python.keras" in sys.modules or "keras" in sys.modules:
_monkey_tfkeras()
# Always setup import hooks looking for keras or tf.keras
add_import_hook(fullname="keras", on_import=_monkey_tfkeras)
add_import_hook(fullname="tensorflow.python.keras", on_import=_monkey_tfkeras)
if "absl.app" in sys.modules:
_monkey_absl()
else:
add_import_hook(fullname="absl.app", on_import=_monkey_absl)
# update wandb.config on fit or program finish
trigger.register("on_fit", _magic_update_config)
trigger.register("on_finished", _magic_update_config) | python | wandb/integration/magic.py | 482 | 556 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,259 | __init__ | def __init__(
self,
yolo: YOLO,
run_name: Optional[str] = None,
project: Optional[str] = None,
tags: Optional[List[str]] = None,
resume: Optional[str] = None,
**kwargs: Optional[Any],
) -> None:
"""A utility class to manage wandb run and various callbacks for the ultralytics YOLOv8 framework.
Args:
yolo: A YOLOv8 model that's inherited from `:class:ultralytics.yolo.engine.model.YOLO`
run_name, str: The name of the Weights & Biases run, defaults to an auto generated run_name if `trainer.args.name` is not defined.
project, str: The name of the Weights & Biases project, defaults to `"YOLOv8"` if `trainer.args.project` is not defined.
tags, List[str]: A list of tags to be added to the Weights & Biases run, defaults to `["YOLOv8"]`.
resume, str: Whether to resume a previous run on Weights & Biases, defaults to `None`.
**kwargs: Additional arguments to be passed to `wandb.init()`.
"""
self.yolo = yolo
self.run_name = run_name
self.project = project
self.tags = tags
self.resume = resume
self.kwargs = kwargs | python | wandb/integration/yolov8/yolov8.py | 26 | 50 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,260 | on_pretrain_routine_start | def on_pretrain_routine_start(self, trainer: BaseTrainer) -> None:
"""Starts a new wandb run to track the training process and log to Weights & Biases.
Args:
trainer: A task trainer that's inherited from `:class:ultralytics.yolo.engine.trainer.BaseTrainer`
that contains the model training and optimization routine.
"""
if wandb.run is None:
self.run = wandb.init(
name=self.run_name if self.run_name else trainer.args.name,
project=self.project
if self.project
else trainer.args.project or "YOLOv8",
tags=self.tags if self.tags else ["YOLOv8"],
config=vars(trainer.args),
resume=self.resume if self.resume else None,
**self.kwargs,
)
else:
self.run = wandb.run
self.run.define_metric("epoch", hidden=True)
self.run.define_metric(
"train/*", step_metric="epoch", step_sync=True, summary="min"
)
self.run.define_metric(
"val/*", step_metric="epoch", step_sync=True, summary="min"
)
self.run.define_metric(
"metrics/*", step_metric="epoch", step_sync=True, summary="max"
)
self.run.define_metric(
"lr/*", step_metric="epoch", step_sync=True, summary="last"
)
with telemetry.context(run=wandb.run) as tel:
tel.feature.ultralytics_yolov8 = True | python | wandb/integration/yolov8/yolov8.py | 52 | 89 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,261 | on_pretrain_routine_end | def on_pretrain_routine_end(self, trainer: BaseTrainer) -> None:
self.run.summary.update(
{
"model/parameters": get_num_params(trainer.model),
"model/GFLOPs": round(get_flops(trainer.model), 3),
}
) | python | wandb/integration/yolov8/yolov8.py | 91 | 97 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,262 | on_train_epoch_start | def on_train_epoch_start(self, trainer: BaseTrainer) -> None:
"""On train epoch start we only log epoch number to the Weights & Biases run."""
# We log the epoch number here to commit the previous step,
self.run.log({"epoch": trainer.epoch + 1}) | python | wandb/integration/yolov8/yolov8.py | 99 | 102 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,263 | on_train_epoch_end | def on_train_epoch_end(self, trainer: BaseTrainer) -> None:
"""On train epoch end we log all the metrics to the Weights & Biases run."""
self.run.log(
{
**trainer.metrics,
**trainer.label_loss_items(trainer.tloss, prefix="train"),
**trainer.lr,
},
)
# Currently only the detection and segmentation trainers save images to the save_dir
if not isinstance(trainer, ClassificationTrainer):
self.run.log(
{
"train_batch_images": [
wandb.Image(str(image_path), caption=image_path.stem)
for image_path in trainer.save_dir.glob("train_batch*.jpg")
]
}
) | python | wandb/integration/yolov8/yolov8.py | 104 | 122 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,264 | on_fit_epoch_end | def on_fit_epoch_end(self, trainer: BaseTrainer) -> None:
"""On fit epoch end we log all the best metrics and model detail to Weights & Biases run summary."""
if trainer.epoch == 0:
speeds = [
trainer.validator.speed.get(
key,
)
for key in (1, "inference")
]
speed = speeds[0] if speeds[0] else speeds[1]
if speed:
self.run.summary.update(
{
"model/speed(ms/img)": round(speed, 3),
}
)
if trainer.best_fitness == trainer.fitness:
self.run.summary.update(
{
"best/epoch": trainer.epoch + 1,
**{f"best/{key}": val for key, val in trainer.metrics.items()},
}
) | python | wandb/integration/yolov8/yolov8.py | 124 | 146 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,265 | on_train_end | def on_train_end(self, trainer: BaseTrainer) -> None:
"""On train end we log all the media, including plots, images and best model artifact to Weights & Biases."""
# Currently only the detection and segmentation trainers save images to the save_dir
if not isinstance(trainer, ClassificationTrainer):
self.run.log(
{
"plots": [
wandb.Image(str(image_path), caption=image_path.stem)
for image_path in trainer.save_dir.glob("*.png")
],
"val_images": [
wandb.Image(str(image_path), caption=image_path.stem)
for image_path in trainer.validator.save_dir.glob("val*.jpg")
],
},
)
if trainer.best.exists():
self.run.log_artifact(
str(trainer.best),
type="model",
name=f"{self.run.name}_{trainer.args.task}.pt",
aliases=["best", f"epoch_{trainer.epoch + 1}"],
) | python | wandb/integration/yolov8/yolov8.py | 148 | 171 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,266 | on_model_save | def on_model_save(self, trainer: BaseTrainer) -> None:
"""On model save we log the model as an artifact to Weights & Biases."""
self.run.log_artifact(
str(trainer.last),
type="model",
name=f"{self.run.name}_{trainer.args.task}.pt",
aliases=["last", f"epoch_{trainer.epoch + 1}"],
) | python | wandb/integration/yolov8/yolov8.py | 173 | 180 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,267 | teardown | def teardown(self, _trainer: BaseTrainer) -> None:
"""On teardown, we finish the Weights & Biases run and set it to None."""
self.run.finish()
self.run = None | python | wandb/integration/yolov8/yolov8.py | 182 | 185 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,268 | callbacks | def callbacks(
self,
) -> Dict[str, Callable]:
"""Property contains all the relevant callbacks to add to the YOLO model for the Weights & Biases logging."""
return {
"on_pretrain_routine_start": self.on_pretrain_routine_start,
"on_pretrain_routine_end": self.on_pretrain_routine_end,
"on_train_epoch_start": self.on_train_epoch_start,
"on_train_epoch_end": self.on_train_epoch_end,
"on_fit_epoch_end": self.on_fit_epoch_end,
"on_train_end": self.on_train_end,
"on_model_save": self.on_model_save,
"teardown": self.teardown,
} | python | wandb/integration/yolov8/yolov8.py | 188 | 201 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,269 | add_callbacks | def add_callbacks(
yolo: YOLO,
run_name: Optional[str] = None,
project: Optional[str] = None,
tags: Optional[List[str]] = None,
resume: Optional[str] = None,
**kwargs: Optional[Any],
) -> YOLO:
"""A YOLO model wrapper that tracks metrics, and logs models to Weights & Biases.
Args:
yolo: A YOLOv8 model that's inherited from `:class:ultralytics.yolo.engine.model.YOLO`
run_name, str: The name of the Weights & Biases run, defaults to an auto generated name if `trainer.args.name` is not defined.
project, str: The name of the Weights & Biases project, defaults to `"YOLOv8"` if `trainer.args.project` is not defined.
tags, List[str]: A list of tags to be added to the Weights & Biases run, defaults to `["YOLOv8"]`.
resume, str: Whether to resume a previous run on Weights & Biases, defaults to `None`.
**kwargs: Additional arguments to be passed to `wandb.init()`.
Usage:
```python
from wandb.integration.yolov8 import add_callbacks as add_wandb_callbacks
model = YOLO("yolov8n.pt")
add_wandb_callbacks(model,)
model.train(data="coco128.yaml", epochs=3, imgsz=640,)
```
"""
wandb.termwarn(
"""The wandb callback is currently in beta and is subject to change based on updates to `ultralytics yolov8`.
The callback is tested and supported for ultralytics v8.0.43 and above.
Please report any issues to https://github.com/wandb/wandb/issues with the tag `yolov8`.
""",
repeat=False,
)
if RANK in [-1, 0]:
wandb_logger = WandbCallback(
yolo, run_name=run_name, project=project, tags=tags, resume=resume, **kwargs
)
for event, callback_fn in wandb_logger.callbacks.items():
yolo.add_callback(event, callback_fn)
return yolo
else:
wandb.termerror(
"The RANK of the process to add the callbacks was neither 0 or -1."
"No Weights & Biases callbacks were added to this instance of the YOLO model."
)
return yolo | python | wandb/integration/yolov8/yolov8.py | 204 | 250 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,270 | _check_keras_version | def _check_keras_version():
from keras import __version__ as keras_version
from pkg_resources import parse_version
if parse_version(keras_version) < parse_version("2.4.0"):
wandb.termwarn(
f"Keras version {keras_version} is not fully supported. Required keras >= 2.4.0"
) | python | wandb/integration/keras/keras.py | 22 | 29 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,271 | _can_compute_flops | def _can_compute_flops() -> bool:
"""
FLOPS computation is restricted to TF 2.x as it requires tf.compat.v1
"""
from pkg_resources import parse_version
if parse_version(tf.__version__) >= parse_version("2.0.0"):
return True
return False | python | wandb/integration/keras/keras.py | 32 | 41 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,272 | is_dataset | def is_dataset(data):
dataset_ops = wandb.util.get_module("tensorflow.python.data.ops.dataset_ops")
if dataset_ops and hasattr(dataset_ops, "DatasetV2"):
dataset_types = (dataset_ops.DatasetV2,)
if hasattr(dataset_ops, "DatasetV1"):
dataset_types = dataset_types + (dataset_ops.DatasetV1,)
return isinstance(data, dataset_types)
else:
return False | python | wandb/integration/keras/keras.py | 53 | 61 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,273 | is_generator_like | def is_generator_like(data):
# Checks if data is a generator, Sequence, or Iterator.
types = (tf.keras.utils.Sequence,)
iterator_ops = wandb.util.get_module("tensorflow.python.data.ops.iterator_ops")
if iterator_ops:
types = types + (iterator_ops.Iterator,)
# EagerIterator was in tensorflow < 2
if hasattr(iterator_ops, "EagerIterator"):
types = types + (iterator_ops.EagerIterator,)
elif hasattr(iterator_ops, "IteratorV2"):
types = types + (iterator_ops.IteratorV2,)
return hasattr(data, "next") or hasattr(data, "__next__") or isinstance(data, types) | python | wandb/integration/keras/keras.py | 64 | 76 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,274 | patch_tf_keras | def patch_tf_keras():
from pkg_resources import parse_version
from tensorflow.python.eager import context
if parse_version(tf.__version__) >= parse_version("2.6.0"):
keras_engine = "keras.engine"
try:
from keras.engine import training
from keras.engine import training_arrays_v1 as training_arrays
from keras.engine import training_generator_v1 as training_generator
except (ImportError, AttributeError):
wandb.termerror("Unable to patch Tensorflow/Keras")
logger.exception("exception while trying to patch_tf_keras")
return
else:
keras_engine = "tensorflow.python.keras.engine"
from tensorflow.python.keras.engine import training
try:
from tensorflow.python.keras.engine import (
training_arrays_v1 as training_arrays,
)
from tensorflow.python.keras.engine import (
training_generator_v1 as training_generator,
)
except (ImportError, AttributeError):
try:
from tensorflow.python.keras.engine import (
training_arrays,
training_generator,
)
except (ImportError, AttributeError):
wandb.termerror("Unable to patch Tensorflow/Keras")
logger.exception("exception while trying to patch_tf_keras")
return
# Tensorflow 2.1
training_v2_1 = wandb.util.get_module("tensorflow.python.keras.engine.training_v2")
# Tensorflow 2.2
training_v2_2 = wandb.util.get_module(f"{keras_engine}.training_v1")
if training_v2_1:
old_v2 = training_v2_1.Loop.fit
elif training_v2_2:
old_v2 = training.Model.fit
old_arrays = training_arrays.fit_loop
old_generator = training_generator.fit_generator
def set_wandb_attrs(cbk, val_data):
if isinstance(cbk, WandbCallback):
if is_generator_like(val_data):
cbk.generator = val_data
elif is_dataset(val_data):
if context.executing_eagerly():
cbk.generator = iter(val_data)
else:
wandb.termwarn(
"Found a validation dataset in graph mode, can't patch Keras."
)
elif isinstance(val_data, tuple) and isinstance(val_data[0], tf.Tensor):
# Graph mode dataset generator
def gen():
while True:
yield K.get_session().run(val_data)
cbk.generator = gen()
else:
cbk.validation_data = val_data
def new_arrays(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_inputs = kwargs.get("val_inputs")
val_targets = kwargs.get("val_targets")
# TODO: these could be generators, why index 0?
if val_inputs and val_targets:
for cbk in cbks:
set_wandb_attrs(cbk, (val_inputs[0], val_targets[0]))
return old_arrays(*args, **kwargs)
def new_generator(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_data = kwargs.get("validation_data")
if val_data:
for cbk in cbks:
set_wandb_attrs(cbk, val_data)
return old_generator(*args, **kwargs)
def new_v2(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_data = kwargs.get("validation_data")
if val_data:
for cbk in cbks:
set_wandb_attrs(cbk, val_data)
return old_v2(*args, **kwargs)
training_arrays.orig_fit_loop = old_arrays
training_arrays.fit_loop = new_arrays
training_generator.orig_fit_generator = old_generator
training_generator.fit_generator = new_generator
wandb.patched["keras"].append([f"{keras_engine}.training_arrays", "fit_loop"])
wandb.patched["keras"].append(
[f"{keras_engine}.training_generator", "fit_generator"]
)
if training_v2_1:
training_v2_1.Loop.fit = new_v2
wandb.patched["keras"].append(
["tensorflow.python.keras.engine.training_v2.Loop", "fit"]
)
elif training_v2_2:
training.Model.fit = new_v2
wandb.patched["keras"].append([f"{keras_engine}.training.Model", "fit"]) | python | wandb/integration/keras/keras.py | 79 | 192 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,275 | set_wandb_attrs | def set_wandb_attrs(cbk, val_data):
if isinstance(cbk, WandbCallback):
if is_generator_like(val_data):
cbk.generator = val_data
elif is_dataset(val_data):
if context.executing_eagerly():
cbk.generator = iter(val_data)
else:
wandb.termwarn(
"Found a validation dataset in graph mode, can't patch Keras."
)
elif isinstance(val_data, tuple) and isinstance(val_data[0], tf.Tensor):
# Graph mode dataset generator
def gen():
while True:
yield K.get_session().run(val_data)
cbk.generator = gen()
else:
cbk.validation_data = val_data | python | wandb/integration/keras/keras.py | 129 | 148 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,276 | gen | def gen():
while True:
yield K.get_session().run(val_data) | python | wandb/integration/keras/keras.py | 142 | 144 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,277 | new_arrays | def new_arrays(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_inputs = kwargs.get("val_inputs")
val_targets = kwargs.get("val_targets")
# TODO: these could be generators, why index 0?
if val_inputs and val_targets:
for cbk in cbks:
set_wandb_attrs(cbk, (val_inputs[0], val_targets[0]))
return old_arrays(*args, **kwargs) | python | wandb/integration/keras/keras.py | 150 | 158 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,278 | new_generator | def new_generator(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_data = kwargs.get("validation_data")
if val_data:
for cbk in cbks:
set_wandb_attrs(cbk, val_data)
return old_generator(*args, **kwargs) | python | wandb/integration/keras/keras.py | 160 | 166 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,279 | new_v2 | def new_v2(*args, **kwargs):
cbks = kwargs.get("callbacks", [])
val_data = kwargs.get("validation_data")
if val_data:
for cbk in cbks:
set_wandb_attrs(cbk, val_data)
return old_v2(*args, **kwargs) | python | wandb/integration/keras/keras.py | 168 | 174 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,280 | _array_has_dtype | def _array_has_dtype(array):
return hasattr(array, "dtype") | python | wandb/integration/keras/keras.py | 195 | 196 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,281 | _update_if_numeric | def _update_if_numeric(metrics, key, values):
if not _array_has_dtype(values):
_warn_not_logging(key)
return
if not is_numeric_array(values):
_warn_not_logging_non_numeric(key)
return
metrics[key] = wandb.Histogram(values) | python | wandb/integration/keras/keras.py | 199 | 208 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,282 | is_numeric_array | def is_numeric_array(array):
return np.issubdtype(array.dtype, np.number) | python | wandb/integration/keras/keras.py | 211 | 212 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,283 | _warn_not_logging_non_numeric | def _warn_not_logging_non_numeric(name):
wandb.termwarn(
f"Non-numeric values found in layer: {name}, not logging this layer",
repeat=False,
) | python | wandb/integration/keras/keras.py | 215 | 219 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,284 | _warn_not_logging | def _warn_not_logging(name):
wandb.termwarn(
f"Layer {name} has undetermined datatype not logging this layer",
repeat=False,
) | python | wandb/integration/keras/keras.py | 222 | 226 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def _get_custom_optimizer_parent_class():
    """Pick the optimizer base class matching the installed TensorFlow.

    TF >= 2.9 moved the old Optimizer API under `tf.keras.optimizers.legacy`;
    earlier versions expose it directly.
    """
    from pkg_resources import parse_version

    if parse_version(tf.__version__) < parse_version("2.9.0"):
        return tf.keras.optimizers.Optimizer
    return tf.keras.optimizers.legacy.Optimizer
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def __init__(self):
    # Minimal optimizer used only to capture gradients; the name is fixed.
    super().__init__(name="CustomOptimizer")
    # Wrap the apply methods in tf.function so Keras can trace/compile them.
    self._resource_apply_dense = tf.function(self._resource_apply_dense)
    self._resource_apply_sparse = tf.function(self._resource_apply_sparse)
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def _resource_apply_dense(self, grad, var):
    # Instead of applying an update rule, overwrite the variable with its
    # gradient so the gradient can be read back out of the model's weights.
    var.assign(grad)
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def _resource_apply_sparse(self, grad, var, indices):
    # Sparse updates are intentionally ignored by this capture-only optimizer.
    pass
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def get_config(self):
    # No extra state beyond the base optimizer's configuration.
    return super().get_config()
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def set_model(self, model):
    """Snapshot the model's weights and allocate zeroed gradient buffers."""
    super().set_model(model)
    # Original weights are kept so each batch's pseudo-update can be undone.
    self.og_weights = model.get_weights()
    # One zero-filled accumulator per trainable weight tensor.
    self.grads = [
        np.zeros(tuple(weight.shape)) for weight in model.trainable_weights
    ]
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def on_batch_end(self, batch, logs=None):
    """Accumulate the gradients written into the weights, then reset them."""
    for accumulator, weight in zip(self.grads, self.model.trainable_weights):
        accumulator += weight.numpy()
    # Restore the untouched weights for the next batch.
    self.model.set_weights(self.og_weights)
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def get_grads(self):
    """Return defensive copies of the accumulated gradient buffers."""
    return [accumulated.copy() for accumulated in self.grads]
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def __init__(
    self,
    monitor="val_loss",
    verbose=0,
    mode="auto",
    save_weights_only=False,
    log_weights=False,
    log_gradients=False,
    save_model=True,
    training_data=None,
    validation_data=None,
    labels=None,
    predictions=36,
    generator=None,
    input_type=None,
    output_type=None,
    log_evaluation=False,
    validation_steps=None,
    class_colors=None,
    log_batch_frequency=None,
    log_best_prefix="best_",
    save_graph=True,
    validation_indexes=None,
    validation_row_processor=None,
    prediction_row_processor=None,
    infer_missing_processors=True,
    log_evaluation_frequency=0,
    compute_flops=False,
    **kwargs,
):
    """Configure W&B logging for a Keras training run.

    Fix: `labels` previously defaulted to a shared mutable `[]`; it now
    defaults to `None` and is mapped to a fresh list, which is
    backward-compatible for all callers.

    Args:
        monitor: metric name watched for "best" tracking (stored in the run
            summary under `log_best_prefix` + monitor).
        mode: one of "auto"/"min"/"max"; unknown values fall back to "auto".
        labels: class labels for media logging; defaults to an empty list.
        predictions: number of predictions to log, capped at 100.
        validation_data: legacy argument — generator-like values are routed
            to `generator`, others stored as `self.validation_data`.
        kwargs: may carry the deprecated `data_type` (aliased to input_type).

    Raises:
        wandb.Error: if `wandb.init()` has not been called first.
        ValueError: if gradient logging is requested without training data,
            or with a tuple of the wrong length.
    """
    if wandb.run is None:
        raise wandb.Error("You must call wandb.init() before WandbCallback()")
    with wandb.wandb_lib.telemetry.context(run=wandb.run) as tel:
        tel.feature.keras = True
    self.validation_data = None
    # This is kept around for legacy reasons
    if validation_data is not None:
        if is_generator_like(validation_data):
            generator = validation_data
        else:
            self.validation_data = validation_data
    # None sentinel avoids the mutable-default-argument pitfall.
    self.labels = [] if labels is None else labels
    self.predictions = min(predictions, 100)

    self.monitor = monitor
    self.verbose = verbose
    self.save_weights_only = save_weights_only
    self.save_graph = save_graph

    wandb.save("model-best.h5")
    self.filepath = os.path.join(wandb.run.dir, "model-best.h5")
    self.save_model = save_model
    if save_model:
        deprecate(
            field_name=Deprecated.keras_callback__save_model,
            warning_message=(
                "The save_model argument by default saves the model in the HDF5 format that cannot save "
                "custom objects like subclassed models and custom layers. This behavior will be deprecated "
                "in a future release in favor of the SavedModel format. Meanwhile, the HDF5 model is saved "
                "as W&B files and the SavedModel as W&B Artifacts."
            ),
        )
    self.save_model_as_artifact = True

    self.log_weights = log_weights
    self.log_gradients = log_gradients
    self.training_data = training_data
    self.generator = generator
    self._graph_rendered = False

    data_type = kwargs.get("data_type", None)
    if data_type is not None:
        deprecate(
            field_name=Deprecated.keras_callback__data_type,
            warning_message=(
                "The data_type argument of wandb.keras.WandbCallback is deprecated "
                "and will be removed in a future release. Please use input_type instead.\n"
                "Setting input_type = data_type."
            ),
        )
        input_type = data_type
    self.input_type = input_type
    self.output_type = output_type
    self.log_evaluation = log_evaluation
    self.validation_steps = validation_steps
    self.class_colors = np.array(class_colors) if class_colors is not None else None
    self.log_batch_frequency = log_batch_frequency
    self.log_best_prefix = log_best_prefix
    self.compute_flops = compute_flops

    self._prediction_batch_size = None

    if self.log_gradients:
        if int(tf.__version__.split(".")[0]) < 2:
            raise Exception("Gradient logging requires tensorflow 2.0 or higher.")
        if self.training_data is None:
            raise ValueError(
                "training_data argument is required for gradient logging."
            )
        if isinstance(self.training_data, (list, tuple)):
            if len(self.training_data) != 2:
                raise ValueError("training data must be a tuple of length two")
            self._training_data_x, self._training_data_y = self.training_data
        else:
            self._training_data_x = (
                self.training_data
            )  # generator, tf.data.Dataset etc
            self._training_data_y = None

    # From Keras
    if mode not in ["auto", "min", "max"]:
        print(f"WandbCallback mode {mode} is unknown, fallback to auto mode.")
        mode = "auto"

    if mode == "min":
        self.monitor_op = operator.lt
        self.best = float("inf")
    elif mode == "max":
        self.monitor_op = operator.gt
        self.best = float("-inf")
    else:
        if "acc" in self.monitor or self.monitor.startswith("fmeasure"):
            self.monitor_op = operator.gt
            self.best = float("-inf")
        else:
            self.monitor_op = operator.lt
            self.best = float("inf")
    # Get the previous best metric for resumed runs
    previous_best = wandb.run.summary.get(f"{self.log_best_prefix}{self.monitor}")
    if previous_best is not None:
        self.best = previous_best

    self._validation_data_logger = None
    self._validation_indexes = validation_indexes
    self._validation_row_processor = validation_row_processor
    self._prediction_row_processor = prediction_row_processor
    self._infer_missing_processors = infer_missing_processors
    self._log_evaluation_frequency = log_evaluation_frequency
    self._model_trained_since_last_eval = False
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def _build_grad_accumulator_model(self):
    """Clone the user model into a throwaway model driven by _CustomOptimizer."""
    model_inputs = self.model.inputs
    model_outputs = self.model(model_inputs)
    accumulator = tf.keras.models.Model(model_inputs, model_outputs)
    accumulator.compile(loss=self.model.loss, optimizer=_CustomOptimizer())
    # make sure magic doesn't think this is a user model
    accumulator._wandb_internal_model = True
    self._grad_accumulator_model = accumulator
    self._grad_accumulator_callback = _GradAccumulatorCallback()
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,295 | _implements_train_batch_hooks | def _implements_train_batch_hooks(self):
return self.log_batch_frequency is not None | python | wandb/integration/keras/keras.py | 542 | 543 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,296 | _implements_test_batch_hooks | def _implements_test_batch_hooks(self):
return self.log_batch_frequency is not None | python | wandb/integration/keras/keras.py | 545 | 546 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
2,297 | _implements_predict_batch_hooks | def _implements_predict_batch_hooks(self):
return self.log_batch_frequency is not None | python | wandb/integration/keras/keras.py | 548 | 549 | {
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def set_params(self, params):
    # Stash the params Keras passes to callbacks (presumably epochs/steps
    # metadata — confirm against the Keras Callback contract).
    self.params = params
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def set_model(self, model):
    """Attach `model` and resolve auto input/output types for media logging."""
    self.model = model
    # "auto" input type is only guessable for single-input models.
    if self.input_type == "auto" and len(model.inputs) == 1:
        first_input_shape = model.inputs[0].shape
        self.input_type = wandb.util.guess_data_type(first_input_shape, risky=True)
    single_output = len(model.outputs) == 1
    if self.input_type and self.output_type is None and single_output:
        self.output_type = wandb.util.guess_data_type(model.outputs[0].shape)
    if self.log_gradients:
        self._build_grad_accumulator_model()
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
def _attempt_evaluation_log(self, commit=True):
    """Predict on the validation data and log the results, if enabled.

    No-op unless evaluation logging is enabled and a validation data
    logger exists. Exceptions are downgraded to a warning so logging
    failures never break training.

    Fix: corrected the typo "durring" -> "during" in the warning message.
    """
    if not (self.log_evaluation and self._validation_data_logger):
        return
    try:
        if not self.model:
            wandb.termwarn("WandbCallback unable to read model from trainer")
        else:
            self._validation_data_logger.log_predictions(
                predictions=self._validation_data_logger.make_predictions(
                    self.model.predict
                ),
                commit=commit,
            )
            self._model_trained_since_last_eval = False
    except Exception as e:
        wandb.termwarn("Error during prediction logging for epoch: " + str(e))
"name": "Git-abouvier/wandb",
"url": "https://github.com/Git-abouvier/wandb.git",
"license": "MIT",
"stars": 0,
"forks": 0
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.