Update README.md
Browse files
README.md
CHANGED
|
@@ -19,16 +19,28 @@ _io32: model input is fp32, model will convert the input to fp16, perform ops in
|
|
| 19 |
_io16: model input is fp16, performs ops in fp16 and writes the final result in fp16
|
| 20 |
|
| 21 |
```csharp
|
|
|
|
|
|
|
|
|
|
| 22 |
// csharp example
|
| 23 |
// Create Pipeline
|
| 24 |
-
var pipeline =
|
| 25 |
// Prompt
|
| 26 |
var promptOptions = new PromptOptions
|
| 27 |
{
|
| 28 |
Prompt = "a majestic Royal Bengal Tiger on the mountain top overlooking beautiful Lake Tahoe snowy mountains and deep blue lake, deep blue sky, ultra hd, 8k, photorealistic"
|
| 29 |
};
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
// Run pipeline
|
| 31 |
var result = await pipeline.GenerateImageAsync(promptOptions, schedulerOptions);
|
|
|
|
| 32 |
// Save Image Result
|
| 33 |
await result.SaveAsync("Result.png");
|
| 34 |
```
|
|
|
|
| 19 |
_io16: model input is fp16, performs ops in fp16 and writes the final result in fp16
|
| 20 |
|
| 21 |
```csharp
|
| 22 |
+
## C# Inference Demo
|
| 23 |
+
https://github.com/TensorStack-AI/OnnxStack
|
| 24 |
+
|
| 25 |
// csharp example
|
| 26 |
// Create Pipeline
|
| 27 |
+
var pipeline = FluxPipeline.CreatePipeline("D:\\Models\\Flux.1-dev_amdgpu");
|
| 28 |
// Prompt
|
| 29 |
var promptOptions = new PromptOptions
|
| 30 |
{
|
| 31 |
Prompt = "a majestic Royal Bengal Tiger on the mountain top overlooking beautiful Lake Tahoe snowy mountains and deep blue lake, deep blue sky, ultra hd, 8k, photorealistic"
|
| 32 |
};
|
| 33 |
+
// Scheduler Options
|
| 34 |
+
var schedulerOptions = pipeline.DefaultSchedulerOptions with
|
| 35 |
+
{
|
| 36 |
+
InferenceSteps = 50,
|
| 37 |
+
GuidanceScale = 3.5f,
|
| 38 |
+
SchedulerType = SchedulerType.FlowMatchEulerDiscrete,
|
| 39 |
+
};
|
| 40 |
+
|
| 41 |
// Run pipeline
|
| 42 |
var result = await pipeline.GenerateImageAsync(promptOptions, schedulerOptions);
|
| 43 |
+
|
| 44 |
// Save Image Result
|
| 45 |
await result.SaveAsync("Result.png");
|
| 46 |
```
|