Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Package.swift
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ let package = Package(
.product(name: "OpenAPIKitCompat", package: "OpenAPIKit"),
.product(name: "Algorithms", package: "swift-algorithms"),
.product(name: "OrderedCollections", package: "swift-collections"),
.product(name: "HeapModule", package: "swift-collections"),
.product(name: "Yams", package: "Yams"),
],
swiftSettings: swiftSettings
Expand Down
86 changes: 85 additions & 1 deletion Sources/_OpenAPIGeneratorCore/Config.swift
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,85 @@ public enum NamingStrategy: String, Sendable, Codable, Equatable, CaseIterable {
case idiomatic
}

/// Configuration that controls how the generated Types output is split into
/// multiple files, grouped by dependency layers.
public struct ShardingConfig: Sendable, Equatable {

    /// Shard counts for type schemas, one entry per dependency layer
    /// (entry 0 is the component/leaf layer, later entries are dependent layers).
    public var typeShardCounts: [Int]

    /// Upper bound on the number of Swift files emitted per type-schema shard.
    public var maxFilesPerShard: Int

    /// Upper bound on the number of Swift files emitted per operation shard.
    public var maxFilesPerShardOps: Int

    /// Shard counts for operations, one entry per layer.
    ///
    /// Must contain exactly `layerCount` entries; enforced by `validate()`.
    public var operationLayerShardCounts: [Int]

    /// Optional module prefix (for example, "MyServiceAPI") used to produce
    /// deterministic file names suitable for build-system integration.
    public var modulePrefix: String?

    /// The total number of dependency layers, derived from `typeShardCounts`.
    public var layerCount: Int { typeShardCounts.count }

    /// Returns the configured number of type shards for the given layer.
    /// - Parameter layerIndex: The layer to look up.
    /// - Precondition: `layerIndex` must be a valid index into
    ///   `typeShardCounts`; an out-of-range value traps.
    public func typeShardCount(forLayer layerIndex: Int) -> Int { typeShardCounts[layerIndex] }

    /// Creates a new sharding configuration.
    /// - Parameters:
    ///   - typeShardCounts: Shard counts for type schemas, one per layer.
    ///   - maxFilesPerShard: Maximum Swift files per type-schema shard.
    ///   - maxFilesPerShardOps: Maximum Swift files per operation shard.
    ///   - operationLayerShardCounts: Shard counts for operations, one per layer.
    ///   - modulePrefix: Optional prefix for deterministic file naming.
    public init(
        typeShardCounts: [Int],
        maxFilesPerShard: Int = 25,
        maxFilesPerShardOps: Int = 16,
        operationLayerShardCounts: [Int],
        modulePrefix: String? = nil
    ) {
        self.typeShardCounts = typeShardCounts
        self.maxFilesPerShard = maxFilesPerShard
        self.maxFilesPerShardOps = maxFilesPerShardOps
        self.operationLayerShardCounts = operationLayerShardCounts
        self.modulePrefix = modulePrefix
    }

    /// Errors thrown by `validate()` when the configuration is inconsistent.
    public enum ValidationError: Error, CustomStringConvertible {
        /// A shard count or file limit was zero or negative.
        case nonPositiveShardCount(field: String, value: Int)
        /// An array's length did not match the expected layer count.
        case shardCountMismatch(field: String, expected: Int, got: Int)

        /// A human-readable explanation of the validation failure.
        public var description: String {
            switch self {
            case let .nonPositiveShardCount(field, value):
                return "\(field) must be > 0, got \(value)"
            case let .shardCountMismatch(field, expected, got):
                return "\(field) count (\(got)) must equal layerCount (\(expected))"
            }
        }
    }

    /// Validates the configuration, throwing on the first inconsistency found.
    ///
    /// Checks, in order: every entry of `typeShardCounts` is positive, the two
    /// per-shard file limits are positive, every entry of
    /// `operationLayerShardCounts` is positive, and finally that
    /// `operationLayerShardCounts` has exactly `layerCount` entries.
    /// - Throws: `ValidationError` describing the first violated constraint.
    public func validate() throws {
        // Local helper: reject zero or negative values for the named field.
        func check(_ value: Int, _ field: String) throws {
            if value <= 0 {
                throw ValidationError.nonPositiveShardCount(field: field, value: value)
            }
        }
        for (layer, shardCount) in typeShardCounts.enumerated() {
            try check(shardCount, "typeShardCounts[\(layer)]")
        }
        try check(maxFilesPerShard, "maxFilesPerShard")
        try check(maxFilesPerShardOps, "maxFilesPerShardOps")
        for (layer, shardCount) in operationLayerShardCounts.enumerated() {
            try check(shardCount, "operationLayerShardCounts[\(layer)]")
        }
        guard operationLayerShardCounts.count == layerCount else {
            throw ValidationError.shardCountMismatch(
                field: "operationLayerShardCounts",
                expected: layerCount,
                got: operationLayerShardCounts.count
            )
        }
    }
}

extension ShardingConfig: Codable {}

/// A structure that contains configuration options for a single execution
/// of the generator pipeline run.
///
Expand Down Expand Up @@ -68,6 +147,9 @@ public struct Config: Sendable {
/// Additional pre-release features to enable.
public var featureFlags: FeatureFlags

/// Optional sharding configuration for splitting Types output into multiple files.
public var sharding: ShardingConfig?

/// Creates a configuration with the specified generator mode and imports.
/// - Parameters:
/// - mode: The mode to use for generation.
Expand All @@ -90,7 +172,8 @@ public struct Config: Sendable {
namingStrategy: NamingStrategy,
nameOverrides: [String: String] = [:],
typeOverrides: TypeOverrides = .init(),
featureFlags: FeatureFlags = []
featureFlags: FeatureFlags = [],
sharding: ShardingConfig? = nil
) {
self.mode = mode
self.access = access
Expand All @@ -101,5 +184,6 @@ public struct Config: Sendable {
self.nameOverrides = nameOverrides
self.typeOverrides = typeOverrides
self.featureFlags = featureFlags
self.sharding = sharding
}
}
18 changes: 18 additions & 0 deletions Sources/_OpenAPIGeneratorCore/GeneratorPipeline.swift
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,24 @@ public func runGenerator(input: InMemoryInputFile, config: Config, diagnostics:
-> InMemoryOutputFile
{ try makeGeneratorPipeline(config: config, diagnostics: diagnostics).run(input) }

/// Runs the generator pipeline and returns one output file per generated
/// named file.
///
/// When sharding is not configured, the translation stage produces a single
/// file and the returned array has a single element.
/// - Parameters:
///   - input: The in-memory OpenAPI document to generate from.
///   - config: The generator configuration, including optional sharding.
///   - diagnostics: The collector to which diagnostics are emitted.
/// - Returns: The rendered output files, one per generated named file.
/// - Throws: Any error produced by the parse or translate stages.
public func runShardedGenerator(
    input: InMemoryInputFile,
    config: Config,
    diagnostics: any DiagnosticCollector
) throws -> [InMemoryOutputFile] {
    let pipeline = makeGeneratorPipeline(config: config, diagnostics: diagnostics)
    let document = try pipeline.parseOpenAPIFileStage.run(input)
    let structuredSwift = try pipeline.translateOpenAPIToStructuredSwiftStage.run(document)
    var outputs: [InMemoryOutputFile] = []
    outputs.reserveCapacity(structuredSwift.files.count)
    for namedFile in structuredSwift.files {
        // NOTE(review): a renderer is obtained per file — confirm
        // `TextBasedRenderer.default` yields fresh (or reset) state each time
        // rather than accumulating previously rendered contents.
        let renderer = TextBasedRenderer.default
        renderer.renderFile(namedFile.contents)
        let rendered = renderer.renderedContents()
        outputs.append(InMemoryOutputFile(baseName: namedFile.name, contents: Data(rendered.utf8)))
    }
    return outputs
}

/// Creates a new pipeline instance.
/// - Parameters:
/// - parser: An OpenAPI document parser.
Expand Down
214 changes: 214 additions & 0 deletions Sources/_OpenAPIGeneratorCore/GraphAlgorithms.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,214 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the SwiftOpenAPIGenerator open source project
//
// Copyright (c) 2023 Apple Inc. and the SwiftOpenAPIGenerator project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of SwiftOpenAPIGenerator project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//
import HeapModule

/// A namespace for deterministic graph algorithms used when sharding the
/// generated output: strongly-connected-component detection, condensation,
/// topological ordering, longest-path layering, and greedy bin-packing.
enum GraphAlgorithms {

    // MARK: - Tarjan SCC (iterative for stack safety on large schemas)

    /// The result of a strongly-connected-components computation.
    struct SCCResult: Sendable {
        /// Maps each node name to the index of its component in `components`.
        var componentIdOf: [String: Int]
        /// The strongly connected components; each component's member list is sorted.
        var components: [[String]]
    }

    /// Computes the strongly connected components of `graph` using Tarjan's
    /// algorithm with an explicit stack (avoids recursion-depth limits on
    /// large inputs).
    ///
    /// Adjacency lists and start nodes are processed in sorted order, so the
    /// output is deterministic for a given graph. Nodes that appear only as
    /// successors (not as keys of `graph`) are discovered while traversing
    /// from their predecessors and are assigned to components as well.
    /// - Parameter graph: Adjacency map from a node to its set of successors.
    /// - Returns: The components and the node-to-component-index mapping.
    static func tarjanSCC(graph: [String: Set<String>]) -> SCCResult {
        // Sort each adjacency set up front so traversal order is deterministic.
        let sortedGraph = graph.mapValues { $0.sorted() }
        var index = 0
        var sccStack: [String] = []
        var indices: [String: Int] = [:]
        var lowlinks: [String: Int] = [:]
        var onStack: Set<String> = []
        var components: [[String]] = []

        // One frame per node on the explicit DFS stack; `neighborIndex` is the
        // position of the next successor still to be visited.
        struct CallFrame {
            let node: String
            let neighbors: [String]
            var neighborIndex: Int
        }

        // Iterative equivalent of Tarjan's recursive `strongconnect`, rooted
        // at `startNode`.
        func strongConnect(startNode: String) {
            var callStack: [CallFrame] = []

            indices[startNode] = index
            lowlinks[startNode] = index
            index += 1
            sccStack.append(startNode)
            onStack.insert(startNode)

            let startNeighbors = sortedGraph[startNode] ?? []
            callStack.append(CallFrame(node: startNode, neighbors: startNeighbors, neighborIndex: 0))

            while !callStack.isEmpty {
                let currentIndex = callStack.count - 1
                let frame = callStack[currentIndex]
                let v = frame.node

                if frame.neighborIndex < frame.neighbors.count {
                    // Still have successors of `v` to explore.
                    let w = frame.neighbors[frame.neighborIndex]
                    callStack[currentIndex].neighborIndex += 1

                    if indices[w] == nil {
                        // `w` is unvisited: push it as a new frame (the
                        // recursive call in the classic formulation).
                        indices[w] = index
                        lowlinks[w] = index
                        index += 1
                        sccStack.append(w)
                        onStack.insert(w)

                        let wNeighbors = sortedGraph[w] ?? []
                        callStack.append(CallFrame(node: w, neighbors: wNeighbors, neighborIndex: 0))
                    } else if onStack.contains(w) {
                        // Back edge into the current DFS stack: `w`'s index
                        // bounds `v`'s lowlink.
                        if let lowV = lowlinks[v], let idxW = indices[w] {
                            lowlinks[v] = min(lowV, idxW)
                        }
                    }
                } else {
                    // All successors of `v` explored: pop the frame.
                    callStack.removeLast()

                    // Propagate `v`'s lowlink to its DFS parent (performed on
                    // return from recursion in the classic formulation).
                    if let parentIndex = callStack.indices.last {
                        let parent = callStack[parentIndex].node
                        if let lowParent = lowlinks[parent], let lowV = lowlinks[v] {
                            lowlinks[parent] = min(lowParent, lowV)
                        }
                    }

                    // `v` is an SCC root: pop its component off `sccStack`.
                    if lowlinks[v] == indices[v] {
                        var component: [String] = []
                        while true {
                            let w = sccStack.removeLast()
                            onStack.remove(w)
                            component.append(w)
                            if w == v { break }
                        }
                        // Sort members so component contents are deterministic.
                        components.append(component.sorted())
                    }
                }
            }
        }

        // Visit keys in sorted order; any node not yet indexed starts a new DFS.
        for v in graph.keys.sorted() where indices[v] == nil {
            strongConnect(startNode: v)
        }

        // Invert `components` into a node -> component-index lookup. Keys are
        // unique because every node lands in exactly one component.
        let componentIdOf = Dictionary(
            uniqueKeysWithValues: components.enumerated().flatMap { compId, members in
                members.map { ($0, compId) }
            }
        )

        return SCCResult(componentIdOf: componentIdOf, components: components)
    }

    // MARK: - Topological Sort

    /// Topologically sorts a DAG given as predecessor lists, using Kahn's
    /// algorithm with a min-heap so that among ready nodes the smallest index
    /// is emitted first (deterministic output).
    ///
    /// - Parameter predecessors: `predecessors[v]` lists the nodes that must
    ///   precede `v`.
    /// - Returns: Node indices in topological order. If the input contains a
    ///   cycle, nodes on the cycle never reach in-degree zero and are omitted,
    ///   so the result has fewer than `predecessors.count` entries.
    static func topologicalSort(predecessors: [[Int]]) -> [Int] {
        let n = predecessors.count
        guard n > 0 else { return [] }

        // Build the successor lists and in-degrees from the predecessor form.
        var successors = Array(repeating: [Int](), count: n)
        var inDegree = Array(repeating: 0, count: n)
        for (v, preds) in predecessors.enumerated() {
            inDegree[v] = preds.count
            for u in preds {
                successors[u].append(v)
            }
        }

        // Seed the heap with all sources (in-degree zero).
        var heap = Heap<Int>()
        for i in 0..<n where inDegree[i] == 0 {
            heap.insert(i)
        }

        var result: [Int] = []
        result.reserveCapacity(n)

        // Repeatedly emit the smallest ready node and release its successors.
        while let u = heap.popMin() {
            result.append(u)
            for v in successors[u] {
                inDegree[v] -= 1
                if inDegree[v] == 0 {
                    heap.insert(v)
                }
            }
        }

        return result
    }

    // MARK: - Condensation DAG

    /// Builds the condensation DAG of `graph` under the given SCC partition.
    ///
    /// For every edge `u -> v` in the original graph whose endpoints fall in
    /// different components, the component of `v` is recorded as a
    /// predecessor of the component of `u` (i.e., a node's successors become
    /// its component's DAG predecessors/dependencies).
    /// - Parameters:
    ///   - graph: The original adjacency map (node to successors).
    ///   - scc: The SCC partition of `graph`.
    /// - Returns: Sorted predecessor lists indexed by component ID.
    static func buildCondensationDAG(
        graph: [String: Set<String>],
        scc: SCCResult
    ) -> [[Int]] {
        var dagPredecessors = Array(repeating: Set<Int>(), count: scc.components.count)

        for (u, neighbors) in graph {
            guard let cu = scc.componentIdOf[u] else { continue }
            for v in neighbors {
                // Skip intra-component edges; the condensation has no self-loops.
                if let cv = scc.componentIdOf[v], cu != cv {
                    dagPredecessors[cu].insert(cv)
                }
            }
        }

        // Sort each predecessor set for deterministic downstream processing.
        return dagPredecessors.map { Array($0).sorted() }
    }

    // MARK: - Longest-Path Layering

    /// Assigns each DAG node a layer equal to the length of the longest
    /// predecessor chain leading to it (sources get layer 0).
    ///
    /// - Parameter dagPredecessors: `dagPredecessors[v]` lists the nodes that
    ///   must be laid out before `v`.
    /// - Returns: `layerOf[v]` for every node; nodes skipped by the
    ///   topological sort (only possible with cyclic input) keep layer 0.
    static func longestPathLayering(dagPredecessors: [[Int]]) -> [Int] {
        let topo = topologicalSort(predecessors: dagPredecessors)
        var layerOf = Array(repeating: 0, count: dagPredecessors.count)

        // Processing in topological order guarantees predecessors are final
        // before each node is assigned.
        for u in topo {
            if let maxPredLayer = dagPredecessors[u].map({ layerOf[$0] }).max() {
                layerOf[u] = maxPredLayer + 1
            } else {
                layerOf[u] = 0
            }
        }

        return layerOf
    }

    // MARK: - LPT Bin-Packing

    /// A group of node names that must stay together in one shard.
    typealias Island = [String]

    /// Distributes islands across `binCount` bins using the
    /// longest-processing-time (LPT) greedy heuristic: heaviest island first,
    /// each placed into the currently lightest bin.
    ///
    /// Equal-weight islands are ordered lexicographically by their contents,
    /// and ties between equally light bins resolve to the lowest index, so
    /// the packing is deterministic.
    /// - Parameters:
    ///   - islands: The islands to distribute.
    ///   - binCount: The number of bins; a non-positive count yields `[]`.
    ///   - weight: Computes the weight of an island.
    /// - Returns: `binCount` lists of islands (some possibly empty).
    static func lptPacking(
        islands: [Island],
        binCount: Int,
        weight: (Island) -> Int
    ) -> [[Island]] {
        guard binCount > 0 else { return [] }

        var bins = Array(repeating: (weight: 0, items: [Island]()), count: binCount)

        // Sort heaviest-first; break weight ties lexicographically so the
        // order (and therefore the packing) is deterministic.
        let weightedIslands = islands.map { ($0, weight($0)) }
        let sortedIslands = weightedIslands.sorted { lhs, rhs in
            if lhs.1 != rhs.1 {
                return lhs.1 > rhs.1
            }
            return lhs.0.lexicographicallyPrecedes(rhs.0)
        }

        for (island, islandWeight) in sortedIslands {
            // Force-unwrap is safe: `binCount > 0` guarantees `bins` is non-empty.
            let binIndex = bins.indices.min(by: { bins[$0].weight < bins[$1].weight })!
            bins[binIndex].weight += islandWeight
            bins[binIndex].items.append(island)
        }

        return bins.map(\.items)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ struct ImportDescription: Equatable, Codable {
/// For example, if there are type imports like `import Foo.Bar`, they would be listed here.
var moduleTypes: [String]?

/// Whether this is an `@_exported` import (re-export).
var exported: Bool = false

/// The name of the private interface for an `@_spi` import.
///
/// For example, if `spi` was "Secret" and the module name was "Foo" then the import
Expand Down Expand Up @@ -1073,8 +1076,17 @@ struct NamedFileDescription: Equatable, Codable {
/// The generator's structured Swift output, as one or more named files.
struct StructuredSwiftRepresentation: Equatable, Codable {

    /// All output files.
    ///
    /// Non-sharded output contains exactly one file; sharded output contains
    /// the root file plus all shard files.
    var files: [NamedFileDescription]

    /// Creates a representation holding a single output file.
    /// - Parameter file: The only output file.
    init(file: NamedFileDescription) {
        self.init(files: [file])
    }

    /// Creates a representation holding the given output files.
    /// - Parameter files: All output files.
    init(files: [NamedFileDescription]) {
        self.files = files
    }
}

// MARK: - Conveniences
Expand Down
Loading