-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.scalafix.conf
More file actions
59 lines (48 loc) · 1.79 KB
/
.scalafix.conf
File metadata and controls
59 lines (48 loc) · 1.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# Scalafix configuration for spark-pipeline-framework
#
# RESPONSIBILITY: Semantic linting and refactoring
# - Unused imports, variables, private members
# - Dangerous syntax patterns
# - Semantic code issues that require type information
#
# DEFERS TO:
# - Scalafmt: Formatting, import ordering (don't use OrganizeImports here)
# - Scalastyle: Naming conventions, method length, complexity
#
# Run: sbt "scalafixAll" or sbt "scalafix RemoveUnused"
# Check: sbt "scalafixAll --check"
rules = [
  # Semantic dead-code removal: unused imports, local variables,
  # and private members. Needs SemanticDB, so it is more accurate
  # than any text-based tool.
  RemoveUnused

  # Flags `val` members of implicit classes that leak as public API
  # (a well-known Scala pitfall).
  LeakingImplicitClassVal
]
# Settings for the RemoveUnused rule
RemoveUnused {
  # Scalafix owns unused-import removal (semantic analysis);
  # scalafmt only handles import ordering.
  imports = true

  # Drop private members that are never referenced.
  privates = true

  # Drop local variables that are never read.
  locals = true

  # Drop unused pattern variables, e.g. `case Foo(unused) =>`.
  patternvars = true

  # Keep unused parameters: removing them would break API/binary
  # compatibility for callers.
  params = false
}
# =============================================================================
# DISABLED RULES - Handled by other tools or too strict for Spark
# =============================================================================
# OrganizeImports - DISABLED: Scalafmt handles import ordering
# We use scalafmt's rewrite.imports.sort = scalastyle instead
# DisableSyntax - DISABLED: Too strict for Spark interop
# Spark APIs often require null, var, and other patterns
# DisableSyntax {
# noNulls = true
# noVars = true
# noThrows = true
# noReturns = true
# }
# NoAutoTupling - DISABLED: Can be noisy with Spark's tuple-heavy APIs
# Consider enabling if you want stricter tuple handling