Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion OpenUtau.Core/OpenUtau.Core.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
<PackageReference Include="Microsoft.ML.OnnxRuntime.DirectML" Version="1.23.0" />
</ItemGroup>
<ItemGroup Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))' == 'false'">
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="1.23.0" />
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="1.23.2" />
</ItemGroup>
<ItemGroup>
<Compile Update="Analysis\Crepe\Resources.Designer.cs">
Expand Down
27 changes: 25 additions & 2 deletions OpenUtau.Core/Util/Onnx.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
using System.Linq;
using Microsoft.ML.OnnxRuntime;
using OpenUtau.Core.Util;
using Serilog;

namespace OpenUtau.Core {
public class GpuInfo {
Expand Down Expand Up @@ -83,7 +84,7 @@ public static List<GpuInfo> getGpuInfo() {
return gpuList;
}

private static SessionOptions getOnnxSessionOptions(){
private static SessionOptions getOnnxSessionOptions(bool coremlEnableOnSubgraphs = false) {
SessionOptions options = new SessionOptions();
List<string> runnerOptions = getRunnerOptions();
string runner = Preferences.Default.OnnxRunner;
Expand All @@ -103,7 +104,13 @@ private static SessionOptions getOnnxSessionOptions(){
);
break;
case "CoreML":
options.AppendExecutionProvider_CoreML(CoreMLFlags.COREML_FLAG_ENABLE_ON_SUBGRAPH);
// Note: the MLProgram format has stricter validation and can fail with complex
// DiffSinger models that have topological sorting issues (e.g. variance_predictor
// with diffusion embeddings), so we stick with the default NeuralNetwork format.
options.AppendExecutionProvider("CoreML", new Dictionary<string, string> {
{ "MLComputeUnits", "ALL" },
{ "EnableOnSubgraphs", coremlEnableOnSubgraphs ? "1" : "0" } // subgraph processing is opt-in: it can trip complex control flow, so callers retry without it on failure
});
break;
case "NNAPI":
options.AppendExecutionProvider_Nnapi();
Expand All @@ -116,6 +123,14 @@ public static InferenceSession getInferenceSession(byte[] model, bool force_cpu
// Creates an ONNX InferenceSession from an in-memory model.
// force_cpu: when true, skips all execution providers and builds a plain CPU session.
if (force_cpu) {
return new InferenceSession(model);
} else {
// Try with CoreML subgraphs enabled first, fallback to default if it fails
// NOTE(review): this only catches session-*creation* failures; CoreML can still
// fall back per-node at inference time — confirm that is acceptable here.
if (OS.IsMacOS() && Preferences.Default.OnnxRunner == "CoreML") {
try {
return new InferenceSession(model, getOnnxSessionOptions(coremlEnableOnSubgraphs: true));
} catch (Exception e) {
// Swallow deliberately: we log and retry below with subgraphs disabled.
Log.Warning(e, "Failed to create session with CoreML subgraphs enabled, falling back to default settings");
}
}
// Default path: session options without CoreML subgraph processing.
return new InferenceSession(model, getOnnxSessionOptions());
}
}
Expand All @@ -124,6 +139,14 @@ public static InferenceSession getInferenceSession(string modelPath, bool force_
// Creates an ONNX InferenceSession from a model file on disk.
// Mirrors the byte[] overload; keep the two code paths in sync.
if (force_cpu) {
return new InferenceSession(modelPath);
} else {
// Try with CoreML subgraphs enabled first, fallback to default if it fails
// NOTE(review): duplicated retry logic with the byte[] overload — consider a
// shared helper when editing the real file (not possible from this diff view).
if (OS.IsMacOS() && Preferences.Default.OnnxRunner == "CoreML") {
try {
return new InferenceSession(modelPath, getOnnxSessionOptions(coremlEnableOnSubgraphs: true));
} catch (Exception e) {
// Swallow deliberately: we log and retry below with subgraphs disabled.
Log.Warning(e, "Failed to create session with CoreML subgraphs enabled, falling back to default settings");
}
}
// Default path: session options without CoreML subgraph processing.
return new InferenceSession(modelPath, getOnnxSessionOptions());
}
}
Expand Down
Loading