diff --git a/semgrep_output_v1.atd b/semgrep_output_v1.atd index 5b42065..8e0f10d 100644 --- a/semgrep_output_v1.atd +++ b/semgrep_output_v1.atd @@ -903,6 +903,14 @@ type skipped_rule = { position: position; } +(* Result of get_targets internal RPC, similar to scanned_and_skipped but + more complete *) +type target_discovery_result = { + target_paths: fpath list; + errors: core_error list; + skipped: skipped_target list; +} + (*****************************************************************************) (* Profiling information *) (*****************************************************************************) @@ -2324,6 +2332,7 @@ type function_call = [ | CallUploadSymbolAnalysis of ((* token *) string * (* scan_id *) int * symbol_analysis) | CallDumpRulePartitions of dump_rule_partitions_params | CallTransitiveReachabilityFilter of transitive_finding list + | CallGetTargets of scanning_roots ] (* ----------------------------- *) @@ -2342,6 +2351,7 @@ type function_return = [ | RetUploadSymbolAnalysis of (* success msg *) string | RetDumpRulePartitions of bool | RetTransitiveReachabilityFilter of transitive_finding list + | RetGetTargets of target_discovery_result ] (*****************************************************************************) diff --git a/semgrep_output_v1.jsonschema b/semgrep_output_v1.jsonschema index 69345d5..652bf4b 100644 --- a/semgrep_output_v1.jsonschema +++ b/semgrep_output_v1.jsonschema @@ -679,6 +679,24 @@ "position": { "$ref": "#/definitions/position" } } }, + "target_discovery_result": { + "type": "object", + "required": [ "target_paths", "errors", "skipped" ], + "properties": { + "target_paths": { + "type": "array", + "items": { "$ref": "#/definitions/fpath" } + }, + "errors": { + "type": "array", + "items": { "$ref": "#/definitions/core_error" } + }, + "skipped": { + "type": "array", + "items": { "$ref": "#/definitions/skipped_target" } + } + } + }, "profile": { "type": "object", "required": [ @@ -2125,6 +2143,15 @@ "items": { "$ref": "#/definitions/transitive_finding" } } ] + }, + { + "type": "array", + "minItems": 2, + "items": false, + "prefixItems": [ + { "const": "CallGetTargets" }, + { "$ref": "#/definitions/scanning_roots" } + ] } ] }, @@ -2225,6 +2252,15 @@ "items": { "$ref": "#/definitions/transitive_finding" } } ] + }, + { + "type": "array", + "minItems": 2, + "items": false, + "prefixItems": [ + { "const": "RetGetTargets" }, + { "$ref": "#/definitions/target_discovery_result" } + ] } ] }, diff --git a/semgrep_output_v1.proto b/semgrep_output_v1.proto index a7a35e4..ddaaacb 100644 --- a/semgrep_output_v1.proto +++ b/semgrep_output_v1.proto @@ -1,6 +1,6 @@ // Generated by jsonschema2protobuf. DO NOT EDIT! 
// Source file: semgrep_output_v1.jsonschema -// Source file sha256 digest: 6aa5e057abf0dffbd6e58504418e22f21b4f8d5927c51eaf2d68146716997d1b +// Source file sha256 digest: d070db442c726e1cbfcbb08d98078d8fa2b0df2ab090753482f83dbd4926b14c syntax = "proto3"; @@ -206,6 +206,12 @@ message SkippedRule { Position position = 206964729; } +message TargetDiscoveryResult { + repeated string target_paths = 516466271; + repeated CoreError errors = 179828283; + repeated SkippedTarget skipped = 480732762; +} + message Profile { repeated string rules = 109321335; float rules_parse_time = 367689164; diff --git a/semgrep_output_v1.py b/semgrep_output_v1.py index 0cc64bf..b24c58f 100644 --- a/semgrep_output_v1.py +++ b/semgrep_output_v1.py @@ -4081,1142 +4081,1229 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class Tag: - """Original type: tag""" - - value: str - - @classmethod - def from_json(cls, x: Any) -> 'Tag': - return cls(_atd_read_string(x)) +class AlwaysSkipped: + """Original type: skip_reason = [ ... | Always_skipped | ... ]""" - def to_json(self) -> Any: - return _atd_write_string(self.value) + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'AlwaysSkipped' - @classmethod - def from_json_string(cls, x: str) -> 'Tag': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'always_skipped' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class Symbol: - """Original type: symbol = { ... }""" - - fqn: List[str] - - @classmethod - def from_json(cls, x: Any) -> 'Symbol': - if isinstance(x, dict): - return cls( - fqn=_atd_read_list(_atd_read_string)(x['fqn']) if 'fqn' in x else _atd_missing_json_field('Symbol', 'fqn'), - ) - else: - _atd_bad_json('Symbol', x) +class SemgrepignorePatternsMatch: + """Original type: skip_reason = [ ... | Semgrepignore_patterns_match | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['fqn'] = _atd_write_list(_atd_write_string)(self.fqn) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'SemgrepignorePatternsMatch' - @classmethod - def from_json_string(cls, x: str) -> 'Symbol': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'semgrepignore_patterns_match' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class SymbolUsage: - """Original type: symbol_usage = { ... }""" - - symbol: Symbol - locs: List[Location] - - @classmethod - def from_json(cls, x: Any) -> 'SymbolUsage': - if isinstance(x, dict): - return cls( - symbol=Symbol.from_json(x['symbol']) if 'symbol' in x else _atd_missing_json_field('SymbolUsage', 'symbol'), - locs=_atd_read_list(Location.from_json)(x['locs']) if 'locs' in x else _atd_missing_json_field('SymbolUsage', 'locs'), - ) - else: - _atd_bad_json('SymbolUsage', x) +class CliIncludeFlagsDoNotMatch: + """Original type: skip_reason = [ ... | Cli_include_flags_do_not_match | ... 
]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['symbol'] = (lambda x: x.to_json())(self.symbol) - res['locs'] = _atd_write_list((lambda x: x.to_json()))(self.locs) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'CliIncludeFlagsDoNotMatch' - @classmethod - def from_json_string(cls, x: str) -> 'SymbolUsage': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'cli_include_flags_do_not_match' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class SymbolAnalysis: - """Original type: symbol_analysis""" - - value: List[SymbolUsage] - - @classmethod - def from_json(cls, x: Any) -> 'SymbolAnalysis': - return cls(_atd_read_list(SymbolUsage.from_json)(x)) +class CliExcludeFlagsMatch: + """Original type: skip_reason = [ ... | Cli_exclude_flags_match | ... ]""" - def to_json(self) -> Any: - return _atd_write_list((lambda x: x.to_json()))(self.value) + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'CliExcludeFlagsMatch' - @classmethod - def from_json_string(cls, x: str) -> 'SymbolAnalysis': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'cli_exclude_flags_match' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class LockfileParsing: - """Original type: resolution_method = [ ... | LockfileParsing | ... ]""" +@dataclass +class ExceededSizeLimit: + """Original type: skip_reason = [ ... | Exceeded_size_limit | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'LockfileParsing' + return 'ExceededSizeLimit' @staticmethod def to_json() -> Any: - return 'LockfileParsing' + return 'exceeded_size_limit' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class DynamicResolution: - """Original type: resolution_method = [ ... | DynamicResolution | ... ]""" +@dataclass +class AnalysisFailedParserOrInternalError: + """Original type: skip_reason = [ ... | Analysis_failed_parser_or_internal_error | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'DynamicResolution' + return 'AnalysisFailedParserOrInternalError' @staticmethod def to_json() -> Any: - return 'DynamicResolution' + return 'analysis_failed_parser_or_internal_error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class ResolutionMethod: - """Original type: resolution_method = [ ... ]""" - - value: Union[LockfileParsing, DynamicResolution] +@dataclass +class ExcludedByConfig: + """Original type: skip_reason = [ ... | Excluded_by_config | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'ResolutionMethod': - if isinstance(x, str): - if x == 'LockfileParsing': - return cls(LockfileParsing()) - if x == 'DynamicResolution': - return cls(DynamicResolution()) - _atd_bad_json('ResolutionMethod', x) - _atd_bad_json('ResolutionMethod', x) - - def to_json(self) -> Any: - return self.value.to_json() + return 'ExcludedByConfig' - @classmethod - def from_json_string(cls, x: str) -> 'ResolutionMethod': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'excluded_by_config' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class RequirementsIn: - """Original type: manifest_kind = [ ... | RequirementsIn | ... ]""" +@dataclass +class WrongLanguage: + """Original type: skip_reason = [ ... | Wrong_language | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'RequirementsIn' + return 'WrongLanguage' @staticmethod def to_json() -> Any: - return 'RequirementsIn' + return 'wrong_language' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class PackageJson: - """Original type: manifest_kind = [ ... | PackageJson | ... ]""" +@dataclass +class TooBig: + """Original type: skip_reason = [ ... | Too_big | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PackageJson' + return 'TooBig' @staticmethod def to_json() -> Any: - return 'PackageJson' + return 'too_big' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class Gemfile: - """Original type: manifest_kind = [ ... | Gemfile | ... ]""" +@dataclass +class Minified: + """Original type: skip_reason = [ ... | Minified | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Gemfile' + return 'Minified' @staticmethod def to_json() -> Any: - return 'Gemfile' + return 'minified' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class GoMod_: - """Original type: manifest_kind = [ ... | GoMod | ... ]""" +@dataclass +class Binary: + """Original type: skip_reason = [ ... | Binary | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GoMod_' + return 'Binary' @staticmethod def to_json() -> Any: - return 'GoMod' + return 'binary' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class CargoToml: - """Original type: manifest_kind = [ ... | CargoToml | ... ]""" +@dataclass +class IrrelevantRule: + """Original type: skip_reason = [ ... | Irrelevant_rule | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'CargoToml' + return 'IrrelevantRule' @staticmethod def to_json() -> Any: - return 'CargoToml' + return 'irrelevant_rule' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class PomXml: - """Original type: manifest_kind = [ ... | PomXml | ... ]""" +@dataclass +class TooManyMatches: + """Original type: skip_reason = [ ... | Too_many_matches | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PomXml' + return 'TooManyMatches' @staticmethod def to_json() -> Any: - return 'PomXml' + return 'too_many_matches' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class BuildGradle: - """Original type: manifest_kind = [ ... | BuildGradle | ... ]""" +@dataclass +class GitignorePatternsMatch: + """Original type: skip_reason = [ ... | Gitignore_patterns_match | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'BuildGradle' + return 'GitignorePatternsMatch' @staticmethod def to_json() -> Any: - return 'BuildGradle' + return 'Gitignore_patterns_match' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class SettingsGradle: - """Original type: manifest_kind = [ ... | SettingsGradle | ... ]""" +@dataclass +class Dotfile: + """Original type: skip_reason = [ ... | Dotfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'SettingsGradle' + return 'Dotfile' @staticmethod def to_json() -> Any: - return 'SettingsGradle' + return 'Dotfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ComposerJson: - """Original type: manifest_kind = [ ... | ComposerJson | ... ]""" +@dataclass +class NonexistentFile: + """Original type: skip_reason = [ ... | Nonexistent_file | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ComposerJson' + return 'NonexistentFile' @staticmethod def to_json() -> Any: - return 'ComposerJson' + return 'Nonexistent_file' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class NugetManifestJson: - """Original type: manifest_kind = [ ... | NugetManifestJson | ... ]""" +@dataclass +class InsufficientPermissions: + """Original type: skip_reason = [ ... | Insufficient_permissions | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'NugetManifestJson' + return 'InsufficientPermissions' @staticmethod def to_json() -> Any: - return 'NugetManifestJson' + return 'insufficient_permissions' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class PubspecYaml: - """Original type: manifest_kind = [ ... | PubspecYaml | ... ]""" +@dataclass +class SkipReason: + """Original type: skip_reason = [ ... 
]""" + + value: Union[AlwaysSkipped, SemgrepignorePatternsMatch, CliIncludeFlagsDoNotMatch, CliExcludeFlagsMatch, ExceededSizeLimit, AnalysisFailedParserOrInternalError, ExcludedByConfig, WrongLanguage, TooBig, Minified, Binary, IrrelevantRule, TooManyMatches, GitignorePatternsMatch, Dotfile, NonexistentFile, InsufficientPermissions] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PubspecYaml' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'PubspecYaml' + @classmethod + def from_json(cls, x: Any) -> 'SkipReason': + if isinstance(x, str): + if x == 'always_skipped': + return cls(AlwaysSkipped()) + if x == 'semgrepignore_patterns_match': + return cls(SemgrepignorePatternsMatch()) + if x == 'cli_include_flags_do_not_match': + return cls(CliIncludeFlagsDoNotMatch()) + if x == 'cli_exclude_flags_match': + return cls(CliExcludeFlagsMatch()) + if x == 'exceeded_size_limit': + return cls(ExceededSizeLimit()) + if x == 'analysis_failed_parser_or_internal_error': + return cls(AnalysisFailedParserOrInternalError()) + if x == 'excluded_by_config': + return cls(ExcludedByConfig()) + if x == 'wrong_language': + return cls(WrongLanguage()) + if x == 'too_big': + return cls(TooBig()) + if x == 'minified': + return cls(Minified()) + if x == 'binary': + return cls(Binary()) + if x == 'irrelevant_rule': + return cls(IrrelevantRule()) + if x == 'too_many_matches': + return cls(TooManyMatches()) + if x == 'Gitignore_patterns_match': + return cls(GitignorePatternsMatch()) + if x == 'Dotfile': + return cls(Dotfile()) + if x == 'Nonexistent_file': + return cls(NonexistentFile()) + if x == 'insufficient_permissions': + return cls(InsufficientPermissions()) + _atd_bad_json('SkipReason', x) + _atd_bad_json('SkipReason', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'SkipReason': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class PackageSwift: - """Original type: manifest_kind = [ ... | PackageSwift | ... ]""" +@dataclass +class SkippedTarget: + """Original type: skipped_target = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PackageSwift' + path: Fpath + reason: SkipReason + details: Optional[str] = None + rule_id: Optional[RuleId] = None - @staticmethod - def to_json() -> Any: - return 'PackageSwift' + @classmethod + def from_json(cls, x: Any) -> 'SkippedTarget': + if isinstance(x, dict): + return cls( + path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('SkippedTarget', 'path'), + reason=SkipReason.from_json(x['reason']) if 'reason' in x else _atd_missing_json_field('SkippedTarget', 'reason'), + details=_atd_read_string(x['details']) if 'details' in x else None, + rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else None, + ) + else: + _atd_bad_json('SkippedTarget', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['path'] = (lambda x: x.to_json())(self.path) + res['reason'] = (lambda x: x.to_json())(self.reason) + if self.details is not None: + res['details'] = _atd_write_string(self.details) + if self.rule_id is not None: + res['rule_id'] = (lambda x: x.to_json())(self.rule_id) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'SkippedTarget': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class Podfile: - """Original type: manifest_kind = [ ... | Podfile | ... ]""" +class ResolutionCmdFailed: + """Original type: resolution_cmd_failed = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Podfile' + command: str + message: str - @staticmethod - def to_json() -> Any: - return 'Podfile' + @classmethod + def from_json(cls, x: Any) -> 'ResolutionCmdFailed': + if isinstance(x, dict): + return cls( + command=_atd_read_string(x['command']) if 'command' in x else _atd_missing_json_field('ResolutionCmdFailed', 'command'), + message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('ResolutionCmdFailed', 'message'), + ) + else: + _atd_bad_json('ResolutionCmdFailed', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['command'] = _atd_write_string(self.command) + res['message'] = _atd_write_string(self.message) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ResolutionCmdFailed': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class MixExs: - """Original type: manifest_kind = [ ... | MixExs | ... ]""" +class UnsupportedManifest: + """Original type: resolution_error = [ ... | UnsupportedManifest | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'MixExs' + return 'UnsupportedManifest' @staticmethod def to_json() -> Any: - return 'MixExs' + return 'UnsupportedManifest' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class Pipfile: - """Original type: manifest_kind = [ ... | Pipfile | ... ]""" +class MissingRequirement: + """Original type: resolution_error = [ ... | MissingRequirement of ... | ... 
]""" + + value: str @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Pipfile' + return 'MissingRequirement' - @staticmethod - def to_json() -> Any: - return 'Pipfile' + def to_json(self) -> Any: + return ['MissingRequirement', _atd_write_string(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class PyprojectToml: - """Original type: manifest_kind = [ ... | PyprojectToml | ... ]""" +class ResolutionCmdFailed_: + """Original type: resolution_error = [ ... | ResolutionCmdFailed of ... | ... ]""" + + value: ResolutionCmdFailed @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PyprojectToml' + return 'ResolutionCmdFailed_' - @staticmethod - def to_json() -> Any: - return 'PyprojectToml' + def to_json(self) -> Any: + return ['ResolutionCmdFailed', (lambda x: x.to_json())(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class ConanFileTxt: - """Original type: manifest_kind = [ ... | ConanFileTxt | ... ]""" +class ParseDependenciesFailed: + """Original type: resolution_error = [ ... | ParseDependenciesFailed of ... | ... ]""" + + value: str @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ConanFileTxt' + return 'ParseDependenciesFailed' - @staticmethod - def to_json() -> Any: - return 'ConanFileTxt' + def to_json(self) -> Any: + return ['ParseDependenciesFailed', _atd_write_string(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class ConanFilePy: - """Original type: manifest_kind = [ ... | ConanFilePy | ... ]""" +class ResolutionError: + """Original type: resolution_error = [ ... ]""" + + value: Union[UnsupportedManifest, MissingRequirement, ResolutionCmdFailed_, ParseDependenciesFailed] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ConanFilePy' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'ConanFilePy' + @classmethod + def from_json(cls, x: Any) -> 'ResolutionError': + if isinstance(x, str): + if x == 'UnsupportedManifest': + return cls(UnsupportedManifest()) + _atd_bad_json('ResolutionError', x) + if isinstance(x, List) and len(x) == 2: + cons = x[0] + if cons == 'MissingRequirement': + return cls(MissingRequirement(_atd_read_string(x[1]))) + if cons == 'ResolutionCmdFailed': + return cls(ResolutionCmdFailed_(ResolutionCmdFailed.from_json(x[1]))) + if cons == 'ParseDependenciesFailed': + return cls(ParseDependenciesFailed(_atd_read_string(x[1]))) + _atd_bad_json('ResolutionError', x) + _atd_bad_json('ResolutionError', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ResolutionError': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass(frozen=True) -class Csproj: - """Original type: manifest_kind = [ ... | Csproj | ... ]""" +class IncompatibleRule: + """Original type: incompatible_rule = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Csproj' + rule_id: RuleId + this_version: Version + min_version: Optional[Version] = None + max_version: Optional[Version] = None - @staticmethod - def to_json() -> Any: - return 'Csproj' + @classmethod + def from_json(cls, x: Any) -> 'IncompatibleRule': + if isinstance(x, dict): + return cls( + rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else _atd_missing_json_field('IncompatibleRule', 'rule_id'), + this_version=Version.from_json(x['this_version']) if 'this_version' in x else _atd_missing_json_field('IncompatibleRule', 'this_version'), + min_version=Version.from_json(x['min_version']) if 'min_version' in x else None, + max_version=Version.from_json(x['max_version']) if 'max_version' in x else None, + ) + else: + _atd_bad_json('IncompatibleRule', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['rule_id'] = (lambda x: x.to_json())(self.rule_id) + res['this_version'] = (lambda x: x.to_json())(self.this_version) + if self.min_version is not None: + res['min_version'] = (lambda x: x.to_json())(self.min_version) + if self.max_version is not None: + res['max_version'] = (lambda x: x.to_json())(self.max_version) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'IncompatibleRule': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ManifestKind: - """Original type: manifest_kind = [ ... ]""" - - value: Union[RequirementsIn, PackageJson, Gemfile, GoMod_, CargoToml, PomXml, BuildGradle, SettingsGradle, ComposerJson, NugetManifestJson, PubspecYaml, PackageSwift, Podfile, MixExs, Pipfile, PyprojectToml, ConanFileTxt, ConanFilePy, Csproj] +@dataclass(frozen=True, order=True) +class LexicalError: + """Original type: error_type = [ ... | LexicalError | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'ManifestKind': - if isinstance(x, str): - if x == 'RequirementsIn': - return cls(RequirementsIn()) - if x == 'PackageJson': - return cls(PackageJson()) - if x == 'Gemfile': - return cls(Gemfile()) - if x == 'GoMod': - return cls(GoMod_()) - if x == 'CargoToml': - return cls(CargoToml()) - if x == 'PomXml': - return cls(PomXml()) - if x == 'BuildGradle': - return cls(BuildGradle()) - if x == 'SettingsGradle': - return cls(SettingsGradle()) - if x == 'ComposerJson': - return cls(ComposerJson()) - if x == 'NugetManifestJson': - return cls(NugetManifestJson()) - if x == 'PubspecYaml': - return cls(PubspecYaml()) - if x == 'PackageSwift': - return cls(PackageSwift()) - if x == 'Podfile': - return cls(Podfile()) - if x == 'MixExs': - return cls(MixExs()) - if x == 'Pipfile': - return cls(Pipfile()) - if x == 'PyprojectToml': - return cls(PyprojectToml()) - if x == 'ConanFileTxt': - return cls(ConanFileTxt()) - if x == 'ConanFilePy': - return cls(ConanFilePy()) - if x == 'Csproj': - return cls(Csproj()) - _atd_bad_json('ManifestKind', x) - _atd_bad_json('ManifestKind', x) - - def to_json(self) -> Any: - return self.value.to_json() + return 'LexicalError' - @classmethod - def from_json_string(cls, x: str) -> 'ManifestKind': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'Lexical error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class Lockfile_: - """Original type: dependency_source_file_kind = [ ... | Lockfile of ... | ... ]""" - - value: LockfileKind +@dataclass(frozen=True, order=True) +class ParseError: + """Original type: error_type = [ ... | ParseError | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Lockfile_' + return 'ParseError' - def to_json(self) -> Any: - return ['Lockfile', (lambda x: x.to_json())(self.value)] + @staticmethod + def to_json() -> Any: + return 'Syntax error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class Manifest_: - """Original type: dependency_source_file_kind = [ ... | Manifest of ... | ... ]""" - - value: ManifestKind +@dataclass(frozen=True, order=True) +class OtherParseError: + """Original type: error_type = [ ... | OtherParseError | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Manifest_' + return 'OtherParseError' - def to_json(self) -> Any: - return ['Manifest', (lambda x: x.to_json())(self.value)] + @staticmethod + def to_json() -> Any: + return 'Other syntax error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class DependencySourceFileKind: - """Original type: dependency_source_file_kind = [ ... ]""" - - value: Union[Lockfile_, Manifest_] +@dataclass(frozen=True, order=True) +class AstBuilderError: + """Original type: error_type = [ ... | AstBuilderError | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'DependencySourceFileKind': - if isinstance(x, List) and len(x) == 2: - cons = x[0] - if cons == 'Lockfile': - return cls(Lockfile_(LockfileKind.from_json(x[1]))) - if cons == 'Manifest': - return cls(Manifest_(ManifestKind.from_json(x[1]))) - _atd_bad_json('DependencySourceFileKind', x) - _atd_bad_json('DependencySourceFileKind', x) - - def to_json(self) -> Any: - return self.value.to_json() + return 'AstBuilderError' - @classmethod - def from_json_string(cls, x: str) -> 'DependencySourceFileKind': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'AST builder error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class DependencySourceFile: - """Original type: dependency_source_file = { ... }""" - - kind: DependencySourceFileKind - path: Fpath - - @classmethod - def from_json(cls, x: Any) -> 'DependencySourceFile': - if isinstance(x, dict): - return cls( - kind=DependencySourceFileKind.from_json(x['kind']) if 'kind' in x else _atd_missing_json_field('DependencySourceFile', 'kind'), - path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('DependencySourceFile', 'path'), - ) - else: - _atd_bad_json('DependencySourceFile', x) +@dataclass(frozen=True, order=True) +class RuleParseError: + """Original type: error_type = [ ... | RuleParseError | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['kind'] = (lambda x: x.to_json())(self.kind) - res['path'] = (lambda x: x.to_json())(self.path) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'RuleParseError' - @classmethod - def from_json_string(cls, x: str) -> 'DependencySourceFile': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'Rule parse error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class DependencyResolutionStats: - """Original type: dependency_resolution_stats = { ... }""" - - resolution_method: ResolutionMethod - dependency_count: int - ecosystem: Ecosystem - - @classmethod - def from_json(cls, x: Any) -> 'DependencyResolutionStats': - if isinstance(x, dict): - return cls( - resolution_method=ResolutionMethod.from_json(x['resolution_method']) if 'resolution_method' in x else _atd_missing_json_field('DependencyResolutionStats', 'resolution_method'), - dependency_count=_atd_read_int(x['dependency_count']) if 'dependency_count' in x else _atd_missing_json_field('DependencyResolutionStats', 'dependency_count'), - ecosystem=Ecosystem.from_json(x['ecosystem']) if 'ecosystem' in x else _atd_missing_json_field('DependencyResolutionStats', 'ecosystem'), - ) - else: - _atd_bad_json('DependencyResolutionStats', x) +@dataclass(frozen=True, order=True) +class SemgrepWarning: + """Original type: error_type = [ ... | SemgrepWarning | ... 
]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['resolution_method'] = (lambda x: x.to_json())(self.resolution_method) - res['dependency_count'] = _atd_write_int(self.dependency_count) - res['ecosystem'] = (lambda x: x.to_json())(self.ecosystem) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'SemgrepWarning' - @classmethod - def from_json_string(cls, x: str) -> 'DependencyResolutionStats': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'SemgrepWarning' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class SubprojectStats: - """Original type: subproject_stats = { ... }""" - - subproject_id: str - dependency_sources: List[DependencySourceFile] - resolved_stats: Optional[DependencyResolutionStats] = None - - @classmethod - def from_json(cls, x: Any) -> 'SubprojectStats': - if isinstance(x, dict): - return cls( - subproject_id=_atd_read_string(x['subproject_id']) if 'subproject_id' in x else _atd_missing_json_field('SubprojectStats', 'subproject_id'), - dependency_sources=_atd_read_list(DependencySourceFile.from_json)(x['dependency_sources']) if 'dependency_sources' in x else _atd_missing_json_field('SubprojectStats', 'dependency_sources'), - resolved_stats=DependencyResolutionStats.from_json(x['resolved_stats']) if 'resolved_stats' in x else None, - ) - else: - _atd_bad_json('SubprojectStats', x) +@dataclass(frozen=True, order=True) +class SemgrepError: + """Original type: error_type = [ ... | SemgrepError | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['subproject_id'] = _atd_write_string(self.subproject_id) - res['dependency_sources'] = _atd_write_list((lambda x: x.to_json()))(self.dependency_sources) - if self.resolved_stats is not None: - res['resolved_stats'] = (lambda x: x.to_json())(self.resolved_stats) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'SemgrepError' - @classmethod - def from_json_string(cls, x: str) -> 'SubprojectStats': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'SemgrepError' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class SupplyChainStats: - """Original type: supply_chain_stats = { ... }""" +@dataclass(frozen=True, order=True) +class InvalidRuleSchemaError: + """Original type: error_type = [ ... | InvalidRuleSchemaError | ... 
]""" - subprojects_stats: List[SubprojectStats] + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'InvalidRuleSchemaError' - @classmethod - def from_json(cls, x: Any) -> 'SupplyChainStats': - if isinstance(x, dict): - return cls( - subprojects_stats=_atd_read_list(SubprojectStats.from_json)(x['subprojects_stats']) if 'subprojects_stats' in x else _atd_missing_json_field('SupplyChainStats', 'subprojects_stats'), - ) - else: - _atd_bad_json('SupplyChainStats', x) + @staticmethod + def to_json() -> Any: + return 'InvalidRuleSchemaError' - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['subprojects_stats'] = _atd_write_list((lambda x: x.to_json()))(self.subprojects_stats) - return res + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) - @classmethod - def from_json_string(cls, x: str) -> 'SupplyChainStats': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - -@dataclass -class AlwaysSkipped: - """Original type: skip_reason = [ ... | Always_skipped | ... ]""" +@dataclass(frozen=True, order=True) +class UnknownLanguageError: + """Original type: error_type = [ ... | UnknownLanguageError | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'AlwaysSkipped' + return 'UnknownLanguageError' @staticmethod def to_json() -> Any: - return 'always_skipped' + return 'UnknownLanguageError' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class SemgrepignorePatternsMatch: - """Original type: skip_reason = [ ... | Semgrepignore_patterns_match | ... ]""" +@dataclass(frozen=True, order=True) +class InvalidYaml: + """Original type: error_type = [ ... | InvalidYaml | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'SemgrepignorePatternsMatch' + return 'InvalidYaml' @staticmethod def to_json() -> Any: - return 'semgrepignore_patterns_match' + return 'Invalid YAML' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class CliIncludeFlagsDoNotMatch: - """Original type: skip_reason = [ ... | Cli_include_flags_do_not_match | ... ]""" +@dataclass(frozen=True, order=True) +class MatchingError: + """Original type: error_type = [ ... | MatchingError | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'CliIncludeFlagsDoNotMatch' + return 'MatchingError' @staticmethod def to_json() -> Any: - return 'cli_include_flags_do_not_match' + return 'Internal matching error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class CliExcludeFlagsMatch: - """Original type: skip_reason = [ ... | Cli_exclude_flags_match | ... ]""" +@dataclass(frozen=True, order=True) +class SemgrepMatchFound: + """Original type: error_type = [ ... | SemgrepMatchFound | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'CliExcludeFlagsMatch' + return 'SemgrepMatchFound' @staticmethod def to_json() -> Any: - return 'cli_exclude_flags_match' + return 'Semgrep match found' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ExceededSizeLimit: - """Original type: skip_reason = [ ... | Exceeded_size_limit | ... 
]""" +@dataclass(frozen=True, order=True) +class TooManyMatches_: + """Original type: error_type = [ ... | TooManyMatches | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ExceededSizeLimit' + return 'TooManyMatches_' @staticmethod def to_json() -> Any: - return 'exceeded_size_limit' + return 'Too many matches' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class AnalysisFailedParserOrInternalError: - """Original type: skip_reason = [ ... | Analysis_failed_parser_or_internal_error | ... ]""" +@dataclass(frozen=True, order=True) +class FatalError: + """Original type: error_type = [ ... | FatalError | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'AnalysisFailedParserOrInternalError' + return 'FatalError' @staticmethod def to_json() -> Any: - return 'analysis_failed_parser_or_internal_error' + return 'Fatal error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ExcludedByConfig: - """Original type: skip_reason = [ ... | Excluded_by_config | ... ]""" +@dataclass(frozen=True, order=True) +class Timeout: + """Original type: error_type = [ ... | Timeout | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ExcludedByConfig' + return 'Timeout' @staticmethod def to_json() -> Any: - return 'excluded_by_config' + return 'Timeout' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class WrongLanguage: - """Original type: skip_reason = [ ... | Wrong_language | ... ]""" +@dataclass(frozen=True, order=True) +class OutOfMemory: + """Original type: error_type = [ ... | OutOfMemory | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'WrongLanguage' + return 'OutOfMemory' @staticmethod def to_json() -> Any: - return 'wrong_language' + return 'Out of memory' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class TooBig: - """Original type: skip_reason = [ ... | Too_big | ... ]""" +@dataclass(frozen=True, order=True) +class StackOverflow: + """Original type: error_type = [ ... | StackOverflow | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'TooBig' + return 'StackOverflow' @staticmethod def to_json() -> Any: - return 'too_big' + return 'Stack overflow' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Minified: - """Original type: skip_reason = [ ... | Minified | ... ]""" +@dataclass(frozen=True, order=True) +class TimeoutDuringInterfile: + """Original type: error_type = [ ... | TimeoutDuringInterfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Minified' + return 'TimeoutDuringInterfile' @staticmethod def to_json() -> Any: - return 'minified' + return 'Timeout during interfile analysis' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Binary: - """Original type: skip_reason = [ ... | Binary | ... ]""" +@dataclass(frozen=True, order=True) +class OutOfMemoryDuringInterfile: + """Original type: error_type = [ ... | OutOfMemoryDuringInterfile | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Binary' + return 'OutOfMemoryDuringInterfile' @staticmethod def to_json() -> Any: - return 'binary' + return 'OOM during interfile analysis' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class IrrelevantRule: - """Original type: skip_reason = [ ... | Irrelevant_rule | ... ]""" +@dataclass(frozen=True, order=True) +class MissingPlugin: + """Original type: error_type = [ ... | MissingPlugin | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'IrrelevantRule' + return 'MissingPlugin' @staticmethod def to_json() -> Any: - return 'irrelevant_rule' + return 'Missing plugin' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class TooManyMatches: - """Original type: skip_reason = [ ... | Too_many_matches | ... ]""" +@dataclass(frozen=True, order=True) +class PatternParseError: + """Original type: error_type = [ ... | PatternParseError of ... | ... ]""" + + value: List[str] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'TooManyMatches' + return 'PatternParseError' - @staticmethod - def to_json() -> Any: - return 'too_many_matches' + def to_json(self) -> Any: + return ['PatternParseError', _atd_write_list(_atd_write_string)(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GitignorePatternsMatch: - """Original type: skip_reason = [ ... | Gitignore_patterns_match | ... ]""" +@dataclass(frozen=True, order=True) +class PartialParsing: + """Original type: error_type = [ ... | PartialParsing of ... | ... ]""" + + value: List[Location] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GitignorePatternsMatch' + return 'PartialParsing' - @staticmethod - def to_json() -> Any: - return 'Gitignore_patterns_match' + def to_json(self) -> Any: + return ['PartialParsing', _atd_write_list((lambda x: x.to_json()))(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Dotfile: - """Original type: skip_reason = [ ... | Dotfile | ... ]""" +@dataclass(frozen=True, order=True) +class IncompatibleRule_: + """Original type: error_type = [ ... | IncompatibleRule of ... | ... ]""" + + value: IncompatibleRule @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Dotfile' + return 'IncompatibleRule_' - @staticmethod - def to_json() -> Any: - return 'Dotfile' + def to_json(self) -> Any: + return ['IncompatibleRule', (lambda x: x.to_json())(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class NonexistentFile: - """Original type: skip_reason = [ ... | Nonexistent_file | ... ]""" +@dataclass(frozen=True, order=True) +class PatternParseError0: + """Original type: error_type = [ ... | PatternParseError0 | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'NonexistentFile' + return 'PatternParseError0' @staticmethod def to_json() -> Any: - return 'Nonexistent_file' + return 'Pattern parse error' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class InsufficientPermissions: - """Original type: skip_reason = [ ... | Insufficient_permissions | ... 
]""" +@dataclass(frozen=True, order=True) +class IncompatibleRule0: + """Original type: error_type = [ ... | IncompatibleRule0 | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'InsufficientPermissions' + return 'IncompatibleRule0' @staticmethod def to_json() -> Any: - return 'insufficient_permissions' + return 'Incompatible rule' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class SkipReason: - """Original type: skip_reason = [ ... ]""" +@dataclass(frozen=True, order=True) +class DependencyResolutionError: + """Original type: error_type = [ ... | DependencyResolutionError of ... | ... ]""" - value: Union[AlwaysSkipped, SemgrepignorePatternsMatch, CliIncludeFlagsDoNotMatch, CliExcludeFlagsMatch, ExceededSizeLimit, AnalysisFailedParserOrInternalError, ExcludedByConfig, WrongLanguage, TooBig, Minified, Binary, IrrelevantRule, TooManyMatches, GitignorePatternsMatch, Dotfile, NonexistentFile, InsufficientPermissions] + value: ResolutionError @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'SkipReason': - if isinstance(x, str): - if x == 'always_skipped': - return cls(AlwaysSkipped()) - if x == 'semgrepignore_patterns_match': - return cls(SemgrepignorePatternsMatch()) - if x == 'cli_include_flags_do_not_match': - return cls(CliIncludeFlagsDoNotMatch()) - if x == 'cli_exclude_flags_match': - return cls(CliExcludeFlagsMatch()) - if x == 'exceeded_size_limit': - return cls(ExceededSizeLimit()) - if x == 'analysis_failed_parser_or_internal_error': - return cls(AnalysisFailedParserOrInternalError()) - if x == 'excluded_by_config': - return cls(ExcludedByConfig()) - if x == 'wrong_language': - return cls(WrongLanguage()) - if x == 'too_big': - return cls(TooBig()) - if x == 'minified': - return cls(Minified()) - if x == 'binary': - return cls(Binary()) - if x == 'irrelevant_rule': - return cls(IrrelevantRule()) - if x == 'too_many_matches': - return cls(TooManyMatches()) - if x == 'Gitignore_patterns_match': - return cls(GitignorePatternsMatch()) - if x == 'Dotfile': - return cls(Dotfile()) - if x == 'Nonexistent_file': - return cls(NonexistentFile()) - if x == 'insufficient_permissions': - return cls(InsufficientPermissions()) - _atd_bad_json('SkipReason', x) - _atd_bad_json('SkipReason', x) + return 'DependencyResolutionError' def to_json(self) -> Any: - return self.value.to_json() - - @classmethod - def from_json_string(cls, x: str) -> 'SkipReason': - return cls.from_json(json.loads(x)) + return ['DependencyResolutionError', (lambda x: x.to_json())(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class SkippedTarget: - """Original type: skipped_target = { ... }""" +@dataclass(frozen=True, order=True) +class ErrorType: + """Original type: error_type = [ ... 
]""" - path: Fpath - reason: SkipReason + value: Union[LexicalError, ParseError, OtherParseError, AstBuilderError, RuleParseError, SemgrepWarning, SemgrepError, InvalidRuleSchemaError, UnknownLanguageError, InvalidYaml, MatchingError, SemgrepMatchFound, TooManyMatches_, FatalError, Timeout, OutOfMemory, StackOverflow, TimeoutDuringInterfile, OutOfMemoryDuringInterfile, MissingPlugin, PatternParseError, PartialParsing, IncompatibleRule_, PatternParseError0, IncompatibleRule0, DependencyResolutionError] + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return self.value.kind + + @classmethod + def from_json(cls, x: Any) -> 'ErrorType': + if isinstance(x, str): + if x == 'Lexical error': + return cls(LexicalError()) + if x == 'Syntax error': + return cls(ParseError()) + if x == 'Other syntax error': + return cls(OtherParseError()) + if x == 'AST builder error': + return cls(AstBuilderError()) + if x == 'Rule parse error': + return cls(RuleParseError()) + if x == 'SemgrepWarning': + return cls(SemgrepWarning()) + if x == 'SemgrepError': + return cls(SemgrepError()) + if x == 'InvalidRuleSchemaError': + return cls(InvalidRuleSchemaError()) + if x == 'UnknownLanguageError': + return cls(UnknownLanguageError()) + if x == 'Invalid YAML': + return cls(InvalidYaml()) + if x == 'Internal matching error': + return cls(MatchingError()) + if x == 'Semgrep match found': + return cls(SemgrepMatchFound()) + if x == 'Too many matches': + return cls(TooManyMatches_()) + if x == 'Fatal error': + return cls(FatalError()) + if x == 'Timeout': + return cls(Timeout()) + if x == 'Out of memory': + return cls(OutOfMemory()) + if x == 'Stack overflow': + return cls(StackOverflow()) + if x == 'Timeout during interfile analysis': + return cls(TimeoutDuringInterfile()) + if x == 'OOM during interfile analysis': + return cls(OutOfMemoryDuringInterfile()) + if x == 'Missing plugin': + return cls(MissingPlugin()) + if x == 'Pattern parse error': + return cls(PatternParseError0()) + if x == 'Incompatible rule': + return cls(IncompatibleRule0()) + _atd_bad_json('ErrorType', x) + if isinstance(x, List) and len(x) == 2: + cons = x[0] + if cons == 'PatternParseError': + return cls(PatternParseError(_atd_read_list(_atd_read_string)(x[1]))) + if cons == 'PartialParsing': + return cls(PartialParsing(_atd_read_list(Location.from_json)(x[1]))) + if cons == 'IncompatibleRule': + return cls(IncompatibleRule_(IncompatibleRule.from_json(x[1]))) + if cons == 'DependencyResolutionError': + return cls(DependencyResolutionError(ResolutionError.from_json(x[1]))) + _atd_bad_json('ErrorType', x) + _atd_bad_json('ErrorType', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ErrorType': + return cls.from_json(json.loads(x)) + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class Error_: + """Original type: error_severity = [ ... | Error | ... ]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Error_' + + @staticmethod + def to_json() -> Any: + return 'error' + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class Warning_: + """Original type: error_severity = [ ... | Warning | ... 
]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Warning_' + + @staticmethod + def to_json() -> Any: + return 'warn' + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class Info_: + """Original type: error_severity = [ ... | Info | ... ]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Info_' + + @staticmethod + def to_json() -> Any: + return 'info' + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class ErrorSeverity: + """Original type: error_severity = [ ... ]""" + + value: Union[Error_, Warning_, Info_] + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return self.value.kind + + @classmethod + def from_json(cls, x: Any) -> 'ErrorSeverity': + if isinstance(x, str): + if x == 'error': + return cls(Error_()) + if x == 'warn': + return cls(Warning_()) + if x == 'info': + return cls(Info_()) + _atd_bad_json('ErrorSeverity', x) + _atd_bad_json('ErrorSeverity', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ErrorSeverity': + return cls.from_json(json.loads(x)) + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class CoreError: + """Original type: core_error = { ... }""" + + error_type: ErrorType + severity: ErrorSeverity + message: str details: Optional[str] = None + location: Optional[Location] = None rule_id: Optional[RuleId] = None @classmethod - def from_json(cls, x: Any) -> 'SkippedTarget': + def from_json(cls, x: Any) -> 'CoreError': if isinstance(x, dict): return cls( - path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('SkippedTarget', 'path'), - reason=SkipReason.from_json(x['reason']) if 'reason' in x else _atd_missing_json_field('SkippedTarget', 'reason'), + error_type=ErrorType.from_json(x['error_type']) if 'error_type' in x else _atd_missing_json_field('CoreError', 'error_type'), + severity=ErrorSeverity.from_json(x['severity']) if 'severity' in x else _atd_missing_json_field('CoreError', 'severity'), + message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('CoreError', 'message'), details=_atd_read_string(x['details']) if 'details' in x else None, + location=Location.from_json(x['location']) if 'location' in x else None, rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else None, ) else: - _atd_bad_json('SkippedTarget', x) + _atd_bad_json('CoreError', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['path'] = (lambda x: x.to_json())(self.path) - res['reason'] = (lambda x: x.to_json())(self.reason) + res['error_type'] = (lambda x: x.to_json())(self.error_type) + res['severity'] = (lambda x: x.to_json())(self.severity) + res['message'] = _atd_write_string(self.message) if self.details is not None: res['details'] = _atd_write_string(self.details) + if self.location is not None: + res['location'] = (lambda x: x.to_json())(self.location) if self.rule_id is not None: res['rule_id'] = (lambda x: x.to_json())(self.rule_id) return res @classmethod - def from_json_string(cls, x: str) -> 'SkippedTarget': + def from_json_string(cls, x: str) -> 'CoreError': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -5224,33 +5311,33 @@ def to_json_string(self, 
**kw: Any) -> str: @dataclass -class SkippedRule: - """Original type: skipped_rule = { ... }""" +class TargetDiscoveryResult: + """Original type: target_discovery_result = { ... }""" - rule_id: RuleId - details: str - position: Position + target_paths: List[Fpath] + errors: List[CoreError] + skipped: List[SkippedTarget] @classmethod - def from_json(cls, x: Any) -> 'SkippedRule': + def from_json(cls, x: Any) -> 'TargetDiscoveryResult': if isinstance(x, dict): return cls( - rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else _atd_missing_json_field('SkippedRule', 'rule_id'), - details=_atd_read_string(x['details']) if 'details' in x else _atd_missing_json_field('SkippedRule', 'details'), - position=Position.from_json(x['position']) if 'position' in x else _atd_missing_json_field('SkippedRule', 'position'), + target_paths=_atd_read_list(Fpath.from_json)(x['target_paths']) if 'target_paths' in x else _atd_missing_json_field('TargetDiscoveryResult', 'target_paths'), + errors=_atd_read_list(CoreError.from_json)(x['errors']) if 'errors' in x else _atd_missing_json_field('TargetDiscoveryResult', 'errors'), + skipped=_atd_read_list(SkippedTarget.from_json)(x['skipped']) if 'skipped' in x else _atd_missing_json_field('TargetDiscoveryResult', 'skipped'), ) else: - _atd_bad_json('SkippedRule', x) + _atd_bad_json('TargetDiscoveryResult', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['rule_id'] = (lambda x: x.to_json())(self.rule_id) - res['details'] = _atd_write_string(self.details) - res['position'] = (lambda x: x.to_json())(self.position) + res['target_paths'] = _atd_write_list((lambda x: x.to_json()))(self.target_paths) + res['errors'] = _atd_write_list((lambda x: x.to_json()))(self.errors) + res['skipped'] = _atd_write_list((lambda x: x.to_json()))(self.skipped) return res @classmethod - def from_json_string(cls, x: str) -> 'SkippedRule': + def from_json_string(cls, x: str) -> 'TargetDiscoveryResult': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -5258,31 +5345,20 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class ScannedAndSkipped: - """Original type: scanned_and_skipped = { ... }""" +class Tag: + """Original type: tag""" - scanned: List[Fpath] - skipped: Optional[List[SkippedTarget]] = None + value: str @classmethod - def from_json(cls, x: Any) -> 'ScannedAndSkipped': - if isinstance(x, dict): - return cls( - scanned=_atd_read_list(Fpath.from_json)(x['scanned']) if 'scanned' in x else _atd_missing_json_field('ScannedAndSkipped', 'scanned'), - skipped=_atd_read_list(SkippedTarget.from_json)(x['skipped']) if 'skipped' in x else None, - ) - else: - _atd_bad_json('ScannedAndSkipped', x) + def from_json(cls, x: Any) -> 'Tag': + return cls(_atd_read_string(x)) def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['scanned'] = _atd_write_list((lambda x: x.to_json()))(self.scanned) - if self.skipped is not None: - res['skipped'] = _atd_write_list((lambda x: x.to_json()))(self.skipped) - return res + return _atd_write_string(self.value) @classmethod - def from_json_string(cls, x: str) -> 'ScannedAndSkipped': + def from_json_string(cls, x: str) -> 'Tag': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -5290,37 +5366,27 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class ScanInfo: - """Original type: scan_info = { ... }""" +class Symbol: + """Original type: symbol = { ... 
}""" - enabled_products: List[Product] - deployment_id: int - deployment_name: str - id: Optional[int] = None + fqn: List[str] @classmethod - def from_json(cls, x: Any) -> 'ScanInfo': + def from_json(cls, x: Any) -> 'Symbol': if isinstance(x, dict): return cls( - enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else _atd_missing_json_field('ScanInfo', 'enabled_products'), - deployment_id=_atd_read_int(x['deployment_id']) if 'deployment_id' in x else _atd_missing_json_field('ScanInfo', 'deployment_id'), - deployment_name=_atd_read_string(x['deployment_name']) if 'deployment_name' in x else _atd_missing_json_field('ScanInfo', 'deployment_name'), - id=_atd_read_int(x['id']) if 'id' in x else None, + fqn=_atd_read_list(_atd_read_string)(x['fqn']) if 'fqn' in x else _atd_missing_json_field('Symbol', 'fqn'), ) else: - _atd_bad_json('ScanInfo', x) + _atd_bad_json('Symbol', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) - res['deployment_id'] = _atd_write_int(self.deployment_id) - res['deployment_name'] = _atd_write_string(self.deployment_name) - if self.id is not None: - res['id'] = _atd_write_int(self.id) + res['fqn'] = _atd_write_list(_atd_write_string)(self.fqn) return res @classmethod - def from_json_string(cls, x: str) -> 'ScanInfo': + def from_json_string(cls, x: str) -> 'Symbol': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -5328,33 +5394,30 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class ScanConfiguration: - """Original type: scan_configuration = { ... }""" +class SymbolUsage: + """Original type: symbol_usage = { ... }""" - rules: RawJson - triage_ignored_syntactic_ids: List[str] = field(default_factory=lambda: []) - triage_ignored_match_based_ids: List[str] = field(default_factory=lambda: []) + symbol: Symbol + locs: List[Location] @classmethod - def from_json(cls, x: Any) -> 'ScanConfiguration': + def from_json(cls, x: Any) -> 'SymbolUsage': if isinstance(x, dict): return cls( - rules=RawJson.from_json(x['rules']) if 'rules' in x else _atd_missing_json_field('ScanConfiguration', 'rules'), - triage_ignored_syntactic_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_syntactic_ids']) if 'triage_ignored_syntactic_ids' in x else [], - triage_ignored_match_based_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_match_based_ids']) if 'triage_ignored_match_based_ids' in x else [], + symbol=Symbol.from_json(x['symbol']) if 'symbol' in x else _atd_missing_json_field('SymbolUsage', 'symbol'), + locs=_atd_read_list(Location.from_json)(x['locs']) if 'locs' in x else _atd_missing_json_field('SymbolUsage', 'locs'), ) else: - _atd_bad_json('ScanConfiguration', x) + _atd_bad_json('SymbolUsage', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['rules'] = (lambda x: x.to_json())(self.rules) - res['triage_ignored_syntactic_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_syntactic_ids) - res['triage_ignored_match_based_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_match_based_ids) + res['symbol'] = (lambda x: x.to_json())(self.symbol) + res['locs'] = _atd_write_list((lambda x: x.to_json()))(self.locs) return res @classmethod - def from_json_string(cls, x: str) -> 'ScanConfiguration': + def from_json_string(cls, x: str) -> 'SymbolUsage': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -5362,1399 +5425,1336 @@ def 
to_json_string(self, **kw: Any) -> str: @dataclass -class Glob: - """Original type: glob""" +class SymbolAnalysis: + """Original type: symbol_analysis""" - value: str + value: List[SymbolUsage] @classmethod - def from_json(cls, x: Any) -> 'Glob': - return cls(_atd_read_string(x)) + def from_json(cls, x: Any) -> 'SymbolAnalysis': + return cls(_atd_read_list(SymbolUsage.from_json)(x)) def to_json(self) -> Any: - return _atd_write_string(self.value) + return _atd_write_list((lambda x: x.to_json()))(self.value) @classmethod - def from_json_string(cls, x: str) -> 'Glob': + def from_json_string(cls, x: str) -> 'SymbolAnalysis': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ProductIgnoredFiles: - """Original type: product_ignored_files""" - - value: Dict[Product, List[Glob]] - - @classmethod - def from_json(cls, x: Any) -> 'ProductIgnoredFiles': - return cls(_atd_read_assoc_array_into_dict(Product.from_json, _atd_read_list(Glob.from_json))(x)) +@dataclass(frozen=True, order=True) +class LockfileParsing: + """Original type: resolution_method = [ ... | LockfileParsing | ... ]""" - def to_json(self) -> Any: - return _atd_write_assoc_dict_to_array((lambda x: x.to_json()), _atd_write_list((lambda x: x.to_json())))(self.value) + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'LockfileParsing' - @classmethod - def from_json_string(cls, x: str) -> 'ProductIgnoredFiles': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'LockfileParsing' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class HistoricalConfiguration: - """Original type: historical_configuration = { ... }""" - - enabled: bool - lookback_days: Optional[int] = None - - @classmethod - def from_json(cls, x: Any) -> 'HistoricalConfiguration': - if isinstance(x, dict): - return cls( - enabled=_atd_read_bool(x['enabled']) if 'enabled' in x else _atd_missing_json_field('HistoricalConfiguration', 'enabled'), - lookback_days=_atd_read_int(x['lookback_days']) if 'lookback_days' in x else None, - ) - else: - _atd_bad_json('HistoricalConfiguration', x) +@dataclass(frozen=True, order=True) +class DynamicResolution: + """Original type: resolution_method = [ ... | DynamicResolution | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['enabled'] = _atd_write_bool(self.enabled) - if self.lookback_days is not None: - res['lookback_days'] = _atd_write_int(self.lookback_days) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'DynamicResolution' - @classmethod - def from_json_string(cls, x: str) -> 'HistoricalConfiguration': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'DynamicResolution' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class EngineConfiguration: - """Original type: engine_configuration = { ... }""" +@dataclass(frozen=True, order=True) +class ResolutionMethod: + """Original type: resolution_method = [ ... 
]""" - autofix: bool = field(default_factory=lambda: False) - deepsemgrep: bool = field(default_factory=lambda: False) - dependency_query: bool = field(default_factory=lambda: False) - path_to_transitivity: bool = field(default_factory=lambda: False) - scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) - symbol_analysis: bool = field(default_factory=lambda: False) - ignored_files: List[str] = field(default_factory=lambda: []) - product_ignored_files: Optional[ProductIgnoredFiles] = None - generic_slow_rollout: bool = field(default_factory=lambda: False) - historical_config: Optional[HistoricalConfiguration] = None - always_suppress_errors: bool = field(default_factory=lambda: False) + value: Union[LockfileParsing, DynamicResolution] + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return self.value.kind @classmethod - def from_json(cls, x: Any) -> 'EngineConfiguration': - if isinstance(x, dict): - return cls( - autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, - deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, - dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, - path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, - scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, - symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, - ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else [], - product_ignored_files=ProductIgnoredFiles.from_json(x['product_ignored_files']) if 'product_ignored_files' in x else None, - generic_slow_rollout=_atd_read_bool(x['generic_slow_rollout']) if 'generic_slow_rollout' in x else False, - historical_config=HistoricalConfiguration.from_json(x['historical_config']) if 'historical_config' in x else None, - always_suppress_errors=_atd_read_bool(x['always_suppress_errors']) if 'always_suppress_errors' in x else False, - ) - else: - _atd_bad_json('EngineConfiguration', x) + def from_json(cls, x: Any) -> 'ResolutionMethod': + if isinstance(x, str): + if x == 'LockfileParsing': + return cls(LockfileParsing()) + if x == 'DynamicResolution': + return cls(DynamicResolution()) + _atd_bad_json('ResolutionMethod', x) + _atd_bad_json('ResolutionMethod', x) def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['autofix'] = _atd_write_bool(self.autofix) - res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) - res['dependency_query'] = _atd_write_bool(self.dependency_query) - res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) - res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) - res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) - res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) - if self.product_ignored_files is not None: - res['product_ignored_files'] = (lambda x: x.to_json())(self.product_ignored_files) - res['generic_slow_rollout'] = _atd_write_bool(self.generic_slow_rollout) - if self.historical_config is not None: - res['historical_config'] = (lambda x: x.to_json())(self.historical_config) - res['always_suppress_errors'] = _atd_write_bool(self.always_suppress_errors) - return res + return self.value.to_json() @classmethod - def from_json_string(cls, x: str) -> 'EngineConfiguration': + def from_json_string(cls, x: str) -> 
'ResolutionMethod': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ScanResponse: - """Original type: scan_response = { ... }""" - - info: ScanInfo - config: ScanConfiguration - engine_params: EngineConfiguration - - @classmethod - def from_json(cls, x: Any) -> 'ScanResponse': - if isinstance(x, dict): - return cls( - info=ScanInfo.from_json(x['info']) if 'info' in x else _atd_missing_json_field('ScanResponse', 'info'), - config=ScanConfiguration.from_json(x['config']) if 'config' in x else _atd_missing_json_field('ScanResponse', 'config'), - engine_params=EngineConfiguration.from_json(x['engine_params']) if 'engine_params' in x else _atd_missing_json_field('ScanResponse', 'engine_params'), - ) - else: - _atd_bad_json('ScanResponse', x) +@dataclass(frozen=True) +class RequirementsIn: + """Original type: manifest_kind = [ ... | RequirementsIn | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['info'] = (lambda x: x.to_json())(self.info) - res['config'] = (lambda x: x.to_json())(self.config) - res['engine_params'] = (lambda x: x.to_json())(self.engine_params) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'RequirementsIn' - @classmethod - def from_json_string(cls, x: str) -> 'ScanResponse': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'RequirementsIn' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ScanMetadata: - """Original type: scan_metadata = { ... }""" - - cli_version: Version - unique_id: Uuid - requested_products: List[Product] - dry_run: bool = field(default_factory=lambda: False) - sms_scan_id: Optional[str] = None - - @classmethod - def from_json(cls, x: Any) -> 'ScanMetadata': - if isinstance(x, dict): - return cls( - cli_version=Version.from_json(x['cli_version']) if 'cli_version' in x else _atd_missing_json_field('ScanMetadata', 'cli_version'), - unique_id=Uuid.from_json(x['unique_id']) if 'unique_id' in x else _atd_missing_json_field('ScanMetadata', 'unique_id'), - requested_products=_atd_read_list(Product.from_json)(x['requested_products']) if 'requested_products' in x else _atd_missing_json_field('ScanMetadata', 'requested_products'), - dry_run=_atd_read_bool(x['dry_run']) if 'dry_run' in x else False, - sms_scan_id=_atd_read_string(x['sms_scan_id']) if 'sms_scan_id' in x else None, - ) - else: - _atd_bad_json('ScanMetadata', x) +@dataclass(frozen=True) +class PackageJson: + """Original type: manifest_kind = [ ... | PackageJson | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['cli_version'] = (lambda x: x.to_json())(self.cli_version) - res['unique_id'] = (lambda x: x.to_json())(self.unique_id) - res['requested_products'] = _atd_write_list((lambda x: x.to_json()))(self.requested_products) - res['dry_run'] = _atd_write_bool(self.dry_run) - if self.sms_scan_id is not None: - res['sms_scan_id'] = _atd_write_string(self.sms_scan_id) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'PackageJson' - @classmethod - def from_json_string(cls, x: str) -> 'ScanMetadata': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'PackageJson' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ProjectMetadata: - """Original type: project_metadata = { ... 
}""" - - scan_environment: str - repository: str - repo_url: Optional[Uri] - branch: Optional[str] - commit: Optional[Sha1] - commit_title: Optional[str] - commit_author_email: Optional[str] - commit_author_name: Optional[str] - commit_author_username: Optional[str] - commit_author_image_url: Optional[Uri] - ci_job_url: Optional[Uri] - on: str - pull_request_author_username: Optional[str] - pull_request_author_image_url: Optional[Uri] - pull_request_id: Optional[str] - pull_request_title: Optional[str] - is_full_scan: bool - repo_id: Optional[str] = None - org_id: Optional[str] = None - repo_display_name: Optional[str] = None - commit_timestamp: Optional[Datetime] = None - base_sha: Optional[Sha1] = None - start_sha: Optional[Sha1] = None - is_sca_scan: Optional[bool] = None - is_code_scan: Optional[bool] = None - is_secrets_scan: Optional[bool] = None - - @classmethod - def from_json(cls, x: Any) -> 'ProjectMetadata': - if isinstance(x, dict): - return cls( - scan_environment=_atd_read_string(x['scan_environment']) if 'scan_environment' in x else _atd_missing_json_field('ProjectMetadata', 'scan_environment'), - repository=_atd_read_string(x['repository']) if 'repository' in x else _atd_missing_json_field('ProjectMetadata', 'repository'), - repo_url=_atd_read_nullable(Uri.from_json)(x['repo_url']) if 'repo_url' in x else _atd_missing_json_field('ProjectMetadata', 'repo_url'), - branch=_atd_read_nullable(_atd_read_string)(x['branch']) if 'branch' in x else _atd_missing_json_field('ProjectMetadata', 'branch'), - commit=_atd_read_nullable(Sha1.from_json)(x['commit']) if 'commit' in x else _atd_missing_json_field('ProjectMetadata', 'commit'), - commit_title=_atd_read_nullable(_atd_read_string)(x['commit_title']) if 'commit_title' in x else _atd_missing_json_field('ProjectMetadata', 'commit_title'), - commit_author_email=_atd_read_nullable(_atd_read_string)(x['commit_author_email']) if 'commit_author_email' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_email'), - commit_author_name=_atd_read_nullable(_atd_read_string)(x['commit_author_name']) if 'commit_author_name' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_name'), - commit_author_username=_atd_read_nullable(_atd_read_string)(x['commit_author_username']) if 'commit_author_username' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_username'), - commit_author_image_url=_atd_read_nullable(Uri.from_json)(x['commit_author_image_url']) if 'commit_author_image_url' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_image_url'), - ci_job_url=_atd_read_nullable(Uri.from_json)(x['ci_job_url']) if 'ci_job_url' in x else _atd_missing_json_field('ProjectMetadata', 'ci_job_url'), - on=_atd_read_string(x['on']) if 'on' in x else _atd_missing_json_field('ProjectMetadata', 'on'), - pull_request_author_username=_atd_read_nullable(_atd_read_string)(x['pull_request_author_username']) if 'pull_request_author_username' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_author_username'), - pull_request_author_image_url=_atd_read_nullable(Uri.from_json)(x['pull_request_author_image_url']) if 'pull_request_author_image_url' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_author_image_url'), - pull_request_id=_atd_read_nullable(_atd_read_string)(x['pull_request_id']) if 'pull_request_id' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_id'), - pull_request_title=_atd_read_nullable(_atd_read_string)(x['pull_request_title']) if 
'pull_request_title' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_title'), - is_full_scan=_atd_read_bool(x['is_full_scan']) if 'is_full_scan' in x else _atd_missing_json_field('ProjectMetadata', 'is_full_scan'), - repo_id=_atd_read_string(x['repo_id']) if 'repo_id' in x else None, - org_id=_atd_read_string(x['org_id']) if 'org_id' in x else None, - repo_display_name=_atd_read_string(x['repo_display_name']) if 'repo_display_name' in x else None, - commit_timestamp=Datetime.from_json(x['commit_timestamp']) if 'commit_timestamp' in x else None, - base_sha=Sha1.from_json(x['base_sha']) if 'base_sha' in x else None, - start_sha=Sha1.from_json(x['start_sha']) if 'start_sha' in x else None, - is_sca_scan=_atd_read_bool(x['is_sca_scan']) if 'is_sca_scan' in x else None, - is_code_scan=_atd_read_bool(x['is_code_scan']) if 'is_code_scan' in x else None, - is_secrets_scan=_atd_read_bool(x['is_secrets_scan']) if 'is_secrets_scan' in x else None, - ) - else: - _atd_bad_json('ProjectMetadata', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['scan_environment'] = _atd_write_string(self.scan_environment) - res['repository'] = _atd_write_string(self.repository) - res['repo_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.repo_url) - res['branch'] = _atd_write_nullable(_atd_write_string)(self.branch) - res['commit'] = _atd_write_nullable((lambda x: x.to_json()))(self.commit) - res['commit_title'] = _atd_write_nullable(_atd_write_string)(self.commit_title) - res['commit_author_email'] = _atd_write_nullable(_atd_write_string)(self.commit_author_email) - res['commit_author_name'] = _atd_write_nullable(_atd_write_string)(self.commit_author_name) - res['commit_author_username'] = _atd_write_nullable(_atd_write_string)(self.commit_author_username) - res['commit_author_image_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.commit_author_image_url) - res['ci_job_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.ci_job_url) - res['on'] = _atd_write_string(self.on) - res['pull_request_author_username'] = _atd_write_nullable(_atd_write_string)(self.pull_request_author_username) - res['pull_request_author_image_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.pull_request_author_image_url) - res['pull_request_id'] = _atd_write_nullable(_atd_write_string)(self.pull_request_id) - res['pull_request_title'] = _atd_write_nullable(_atd_write_string)(self.pull_request_title) - res['is_full_scan'] = _atd_write_bool(self.is_full_scan) - if self.repo_id is not None: - res['repo_id'] = _atd_write_string(self.repo_id) - if self.org_id is not None: - res['org_id'] = _atd_write_string(self.org_id) - if self.repo_display_name is not None: - res['repo_display_name'] = _atd_write_string(self.repo_display_name) - if self.commit_timestamp is not None: - res['commit_timestamp'] = (lambda x: x.to_json())(self.commit_timestamp) - if self.base_sha is not None: - res['base_sha'] = (lambda x: x.to_json())(self.base_sha) - if self.start_sha is not None: - res['start_sha'] = (lambda x: x.to_json())(self.start_sha) - if self.is_sca_scan is not None: - res['is_sca_scan'] = _atd_write_bool(self.is_sca_scan) - if self.is_code_scan is not None: - res['is_code_scan'] = _atd_write_bool(self.is_code_scan) - if self.is_secrets_scan is not None: - res['is_secrets_scan'] = _atd_write_bool(self.is_secrets_scan) - return res - - @classmethod - def from_json_string(cls, x: str) -> 'ProjectMetadata': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - 
return json.dumps(self.to_json(), **kw) - - -@dataclass -class CiConfigFromRepo: - """Original type: ci_config_from_repo = { ... }""" - - version: Version = field(default_factory=lambda: Version('v1')) - tags: Optional[List[Tag]] = None - - @classmethod - def from_json(cls, x: Any) -> 'CiConfigFromRepo': - if isinstance(x, dict): - return cls( - version=Version.from_json(x['version']) if 'version' in x else Version('v1'), - tags=_atd_read_list(Tag.from_json)(x['tags']) if 'tags' in x else None, - ) - else: - _atd_bad_json('CiConfigFromRepo', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['version'] = (lambda x: x.to_json())(self.version) - if self.tags is not None: - res['tags'] = _atd_write_list((lambda x: x.to_json()))(self.tags) - return res - - @classmethod - def from_json_string(cls, x: str) -> 'CiConfigFromRepo': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass -class ScanRequest: - """Original type: scan_request = { ... }""" - - project_metadata: ProjectMetadata - scan_metadata: ScanMetadata - project_config: Optional[CiConfigFromRepo] = None - - @classmethod - def from_json(cls, x: Any) -> 'ScanRequest': - if isinstance(x, dict): - return cls( - project_metadata=ProjectMetadata.from_json(x['project_metadata']) if 'project_metadata' in x else _atd_missing_json_field('ScanRequest', 'project_metadata'), - scan_metadata=ScanMetadata.from_json(x['scan_metadata']) if 'scan_metadata' in x else _atd_missing_json_field('ScanRequest', 'scan_metadata'), - project_config=CiConfigFromRepo.from_json(x['project_config']) if 'project_config' in x else None, - ) - else: - _atd_bad_json('ScanRequest', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['project_metadata'] = (lambda x: x.to_json())(self.project_metadata) - res['scan_metadata'] = (lambda x: x.to_json())(self.scan_metadata) - if self.project_config is not None: - res['project_config'] = (lambda x: x.to_json())(self.project_config) - return res - - @classmethod - def from_json_string(cls, x: str) -> 'ScanRequest': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass -class CiEnv: - """Original type: ci_env""" - - value: Dict[str, str] - - @classmethod - def from_json(cls, x: Any) -> 'CiEnv': - return cls(_atd_read_assoc_object_into_dict(_atd_read_string)(x)) - - def to_json(self) -> Any: - return _atd_write_assoc_dict_to_object(_atd_write_string)(self.value) - - @classmethod - def from_json_string(cls, x: str) -> 'CiEnv': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass -class CiConfig: - """Original type: ci_config = { ... 
}""" - - env: CiEnv - enabled_products: List[Product] - ignored_files: List[str] - autofix: bool = field(default_factory=lambda: False) - deepsemgrep: bool = field(default_factory=lambda: False) - dependency_query: bool = field(default_factory=lambda: False) - path_to_transitivity: bool = field(default_factory=lambda: False) - scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) - symbol_analysis: bool = field(default_factory=lambda: False) - - @classmethod - def from_json(cls, x: Any) -> 'CiConfig': - if isinstance(x, dict): - return cls( - env=CiEnv.from_json(x['env']) if 'env' in x else _atd_missing_json_field('CiConfig', 'env'), - enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else _atd_missing_json_field('CiConfig', 'enabled_products'), - ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else _atd_missing_json_field('CiConfig', 'ignored_files'), - autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, - deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, - dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, - path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, - scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, - symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, - ) - else: - _atd_bad_json('CiConfig', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['env'] = (lambda x: x.to_json())(self.env) - res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) - res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) - res['autofix'] = _atd_write_bool(self.autofix) - res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) - res['dependency_query'] = _atd_write_bool(self.dependency_query) - res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) - res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) - res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) - return res - - @classmethod - def from_json_string(cls, x: str) -> 'CiConfig': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass -class Message: - """Original type: action = [ ... | Message of ... | ... ]""" - - value: str +@dataclass(frozen=True) +class Gemfile: + """Original type: manifest_kind = [ ... | Gemfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Message' + return 'Gemfile' - def to_json(self) -> Any: - return ['Message', _atd_write_string(self.value)] + @staticmethod + def to_json() -> Any: + return 'Gemfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Delay: - """Original type: action = [ ... | Delay of ... | ... ]""" - - value: float +@dataclass(frozen=True) +class GoMod_: + """Original type: manifest_kind = [ ... | GoMod | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Delay' + return 'GoMod_' - def to_json(self) -> Any: - return ['Delay', _atd_write_float(self.value)] + @staticmethod + def to_json() -> Any: + return 'GoMod' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Exit: - """Original type: action = [ ... | Exit of ... | ... ]""" - - value: int +@dataclass(frozen=True) +class CargoToml: + """Original type: manifest_kind = [ ... | CargoToml | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Exit' + return 'CargoToml' - def to_json(self) -> Any: - return ['Exit', _atd_write_int(self.value)] + @staticmethod + def to_json() -> Any: + return 'CargoToml' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Action: - """Original type: action = [ ... ]""" - - value: Union[Message, Delay, Exit] +@dataclass(frozen=True) +class PomXml: + """Original type: manifest_kind = [ ... | PomXml | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'Action': - if isinstance(x, List) and len(x) == 2: - cons = x[0] - if cons == 'Message': - return cls(Message(_atd_read_string(x[1]))) - if cons == 'Delay': - return cls(Delay(_atd_read_float(x[1]))) - if cons == 'Exit': - return cls(Exit(_atd_read_int(x[1]))) - _atd_bad_json('Action', x) - _atd_bad_json('Action', x) - - def to_json(self) -> Any: - return self.value.to_json() - - @classmethod - def from_json_string(cls, x: str) -> 'Action': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass -class CiConfigFromCloud: - """Original type: ci_config_from_cloud = { ... 
}""" - - repo_config: CiConfig - org_config: Optional[CiConfig] = None - dirs_config: Optional[List[Tuple[Fpath, CiConfig]]] = None - actions: List[Action] = field(default_factory=lambda: []) - - @classmethod - def from_json(cls, x: Any) -> 'CiConfigFromCloud': - if isinstance(x, dict): - return cls( - repo_config=CiConfig.from_json(x['repo_config']) if 'repo_config' in x else _atd_missing_json_field('CiConfigFromCloud', 'repo_config'), - org_config=CiConfig.from_json(x['org_config']) if 'org_config' in x else None, - dirs_config=_atd_read_list((lambda x: (Fpath.from_json(x[0]), CiConfig.from_json(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x)))(x['dirs_config']) if 'dirs_config' in x else None, - actions=_atd_read_list(Action.from_json)(x['actions']) if 'actions' in x else [], - ) - else: - _atd_bad_json('CiConfigFromCloud', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['repo_config'] = (lambda x: x.to_json())(self.repo_config) - if self.org_config is not None: - res['org_config'] = (lambda x: x.to_json())(self.org_config) - if self.dirs_config is not None: - res['dirs_config'] = _atd_write_list((lambda x: [(lambda x: x.to_json())(x[0]), (lambda x: x.to_json())(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x)))(self.dirs_config) - res['actions'] = _atd_write_list((lambda x: x.to_json()))(self.actions) - return res + return 'PomXml' - @classmethod - def from_json_string(cls, x: str) -> 'CiConfigFromCloud': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'PomXml' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class ScanConfig: - """Original type: scan_config = { ... }""" - - deployment_id: int - deployment_name: str - policy_names: List[str] - rule_config: str - autofix: bool = field(default_factory=lambda: False) - deepsemgrep: bool = field(default_factory=lambda: False) - dependency_query: bool = field(default_factory=lambda: False) - path_to_transitivity: bool = field(default_factory=lambda: False) - scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) - symbol_analysis: bool = field(default_factory=lambda: False) - triage_ignored_syntactic_ids: List[str] = field(default_factory=lambda: []) - triage_ignored_match_based_ids: List[str] = field(default_factory=lambda: []) - ignored_files: List[str] = field(default_factory=lambda: []) - enabled_products: Optional[List[Product]] = None - actions: List[Action] = field(default_factory=lambda: []) - ci_config_from_cloud: Optional[CiConfigFromCloud] = None +@dataclass(frozen=True) +class BuildGradle: + """Original type: manifest_kind = [ ... | BuildGradle | ... 
]""" - @classmethod - def from_json(cls, x: Any) -> 'ScanConfig': - if isinstance(x, dict): - return cls( - deployment_id=_atd_read_int(x['deployment_id']) if 'deployment_id' in x else _atd_missing_json_field('ScanConfig', 'deployment_id'), - deployment_name=_atd_read_string(x['deployment_name']) if 'deployment_name' in x else _atd_missing_json_field('ScanConfig', 'deployment_name'), - policy_names=_atd_read_list(_atd_read_string)(x['policy_names']) if 'policy_names' in x else _atd_missing_json_field('ScanConfig', 'policy_names'), - rule_config=_atd_read_string(x['rule_config']) if 'rule_config' in x else _atd_missing_json_field('ScanConfig', 'rule_config'), - autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, - deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, - dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, - path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, - scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, - symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, - triage_ignored_syntactic_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_syntactic_ids']) if 'triage_ignored_syntactic_ids' in x else [], - triage_ignored_match_based_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_match_based_ids']) if 'triage_ignored_match_based_ids' in x else [], - ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else [], - enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else None, - actions=_atd_read_list(Action.from_json)(x['actions']) if 'actions' in x else [], - ci_config_from_cloud=CiConfigFromCloud.from_json(x['ci_config_from_cloud']) if 'ci_config_from_cloud' in x else None, - ) - else: - _atd_bad_json('ScanConfig', x) + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'BuildGradle' - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['deployment_id'] = _atd_write_int(self.deployment_id) - res['deployment_name'] = _atd_write_string(self.deployment_name) - res['policy_names'] = _atd_write_list(_atd_write_string)(self.policy_names) - res['rule_config'] = _atd_write_string(self.rule_config) - res['autofix'] = _atd_write_bool(self.autofix) - res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) - res['dependency_query'] = _atd_write_bool(self.dependency_query) - res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) - res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) - res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) - res['triage_ignored_syntactic_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_syntactic_ids) - res['triage_ignored_match_based_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_match_based_ids) - res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) - if self.enabled_products is not None: - res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) - res['actions'] = _atd_write_list((lambda x: x.to_json()))(self.actions) - if self.ci_config_from_cloud is not None: - res['ci_config_from_cloud'] = (lambda x: x.to_json())(self.ci_config_from_cloud) - return res + @staticmethod + def to_json() -> Any: + return 'BuildGradle' - 
@classmethod - def from_json_string(cls, x: str) -> 'ScanConfig': - return cls.from_json(json.loads(x)) + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class SettingsGradle: + """Original type: manifest_kind = [ ... | SettingsGradle | ... ]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'SettingsGradle' + + @staticmethod + def to_json() -> Any: + return 'SettingsGradle' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GemfileLock_: - """Original type: sca_parser_name = [ ... | Gemfile_lock | ... ]""" +@dataclass(frozen=True) +class ComposerJson: + """Original type: manifest_kind = [ ... | ComposerJson | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GemfileLock_' + return 'ComposerJson' @staticmethod def to_json() -> Any: - return 'gemfile_lock' + return 'ComposerJson' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GoMod2: - """Original type: sca_parser_name = [ ... | Go_mod | ... ]""" +@dataclass(frozen=True) +class NugetManifestJson: + """Original type: manifest_kind = [ ... | NugetManifestJson | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GoMod2' + return 'NugetManifestJson' @staticmethod def to_json() -> Any: - return 'go_mod' + return 'NugetManifestJson' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GoSum: - """Original type: sca_parser_name = [ ... | Go_sum | ... ]""" +@dataclass(frozen=True) +class PubspecYaml: + """Original type: manifest_kind = [ ... | PubspecYaml | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GoSum' + return 'PubspecYaml' @staticmethod def to_json() -> Any: - return 'go_sum' + return 'PubspecYaml' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GradleLockfile_: - """Original type: sca_parser_name = [ ... | Gradle_lockfile | ... ]""" +@dataclass(frozen=True) +class PackageSwift: + """Original type: manifest_kind = [ ... | PackageSwift | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GradleLockfile_' + return 'PackageSwift' @staticmethod def to_json() -> Any: - return 'gradle_lockfile' + return 'PackageSwift' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class GradleBuild: - """Original type: sca_parser_name = [ ... | Gradle_build | ... ]""" +@dataclass(frozen=True) +class Podfile: + """Original type: manifest_kind = [ ... | Podfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'GradleBuild' + return 'Podfile' @staticmethod def to_json() -> Any: - return 'gradle_build' + return 'Podfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Jsondoc: - """Original type: sca_parser_name = [ ... | Jsondoc | ... ]""" +@dataclass(frozen=True) +class MixExs: + """Original type: manifest_kind = [ ... | MixExs | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Jsondoc' + return 'MixExs' @staticmethod def to_json() -> Any: - return 'jsondoc' + return 'MixExs' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Pipfile_: - """Original type: sca_parser_name = [ ... | Pipfile | ... ]""" +@dataclass(frozen=True) +class Pipfile: + """Original type: manifest_kind = [ ... | Pipfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Pipfile_' + return 'Pipfile' @staticmethod def to_json() -> Any: - return 'pipfile' + return 'Pipfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class PnpmLock_: - """Original type: sca_parser_name = [ ... | Pnpm_lock | ... ]""" +@dataclass(frozen=True) +class PyprojectToml: + """Original type: manifest_kind = [ ... | PyprojectToml | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PnpmLock_' + return 'PyprojectToml' @staticmethod def to_json() -> Any: - return 'pnpm_lock' + return 'PyprojectToml' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class PoetryLock_: - """Original type: sca_parser_name = [ ... | Poetry_lock | ... ]""" +@dataclass(frozen=True) +class ConanFileTxt: + """Original type: manifest_kind = [ ... | ConanFileTxt | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PoetryLock_' + return 'ConanFileTxt' @staticmethod def to_json() -> Any: - return 'poetry_lock' + return 'ConanFileTxt' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class PyprojectToml_: - """Original type: sca_parser_name = [ ... | Pyproject_toml | ... ]""" +@dataclass(frozen=True) +class ConanFilePy: + """Original type: manifest_kind = [ ... | ConanFilePy | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PyprojectToml_' + return 'ConanFilePy' @staticmethod def to_json() -> Any: - return 'pyproject_toml' + return 'ConanFilePy' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Requirements: - """Original type: sca_parser_name = [ ... | Requirements | ... ]""" +@dataclass(frozen=True) +class Csproj: + """Original type: manifest_kind = [ ... | Csproj | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Requirements' + return 'Csproj' @staticmethod def to_json() -> Any: - return 'requirements' + return 'Csproj' + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class ManifestKind: + """Original type: manifest_kind = [ ... 
]""" + + value: Union[RequirementsIn, PackageJson, Gemfile, GoMod_, CargoToml, PomXml, BuildGradle, SettingsGradle, ComposerJson, NugetManifestJson, PubspecYaml, PackageSwift, Podfile, MixExs, Pipfile, PyprojectToml, ConanFileTxt, ConanFilePy, Csproj] + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return self.value.kind + + @classmethod + def from_json(cls, x: Any) -> 'ManifestKind': + if isinstance(x, str): + if x == 'RequirementsIn': + return cls(RequirementsIn()) + if x == 'PackageJson': + return cls(PackageJson()) + if x == 'Gemfile': + return cls(Gemfile()) + if x == 'GoMod': + return cls(GoMod_()) + if x == 'CargoToml': + return cls(CargoToml()) + if x == 'PomXml': + return cls(PomXml()) + if x == 'BuildGradle': + return cls(BuildGradle()) + if x == 'SettingsGradle': + return cls(SettingsGradle()) + if x == 'ComposerJson': + return cls(ComposerJson()) + if x == 'NugetManifestJson': + return cls(NugetManifestJson()) + if x == 'PubspecYaml': + return cls(PubspecYaml()) + if x == 'PackageSwift': + return cls(PackageSwift()) + if x == 'Podfile': + return cls(Podfile()) + if x == 'MixExs': + return cls(MixExs()) + if x == 'Pipfile': + return cls(Pipfile()) + if x == 'PyprojectToml': + return cls(PyprojectToml()) + if x == 'ConanFileTxt': + return cls(ConanFileTxt()) + if x == 'ConanFilePy': + return cls(ConanFilePy()) + if x == 'Csproj': + return cls(Csproj()) + _atd_bad_json('ManifestKind', x) + _atd_bad_json('ManifestKind', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ManifestKind': + return cls.from_json(json.loads(x)) + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class Lockfile_: + """Original type: dependency_source_file_kind = [ ... | Lockfile of ... | ... ]""" + + value: LockfileKind + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Lockfile_' + + def to_json(self) -> Any: + return ['Lockfile', (lambda x: x.to_json())(self.value)] + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass(frozen=True) +class Manifest_: + """Original type: dependency_source_file_kind = [ ... | Manifest of ... | ... ]""" + + value: ManifestKind + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Manifest_' + + def to_json(self) -> Any: + return ['Manifest', (lambda x: x.to_json())(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass -class Yarn1: - """Original type: sca_parser_name = [ ... | Yarn_1 | ... ]""" +@dataclass(frozen=True) +class DependencySourceFileKind: + """Original type: dependency_source_file_kind = [ ... 
]""" + + value: Union[Lockfile_, Manifest_] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Yarn1' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'yarn_1' + @classmethod + def from_json(cls, x: Any) -> 'DependencySourceFileKind': + if isinstance(x, List) and len(x) == 2: + cons = x[0] + if cons == 'Lockfile': + return cls(Lockfile_(LockfileKind.from_json(x[1]))) + if cons == 'Manifest': + return cls(Manifest_(ManifestKind.from_json(x[1]))) + _atd_bad_json('DependencySourceFileKind', x) + _atd_bad_json('DependencySourceFileKind', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'DependencySourceFileKind': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class Yarn2: - """Original type: sca_parser_name = [ ... | Yarn_2 | ... ]""" +class DependencySourceFile: + """Original type: dependency_source_file = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Yarn2' + kind: DependencySourceFileKind + path: Fpath - @staticmethod - def to_json() -> Any: - return 'yarn_2' + @classmethod + def from_json(cls, x: Any) -> 'DependencySourceFile': + if isinstance(x, dict): + return cls( + kind=DependencySourceFileKind.from_json(x['kind']) if 'kind' in x else _atd_missing_json_field('DependencySourceFile', 'kind'), + path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('DependencySourceFile', 'path'), + ) + else: + _atd_bad_json('DependencySourceFile', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['kind'] = (lambda x: x.to_json())(self.kind) + res['path'] = (lambda x: x.to_json())(self.path) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'DependencySourceFile': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class Pomtree: - """Original type: sca_parser_name = [ ... | Pomtree | ... ]""" +class DependencyResolutionStats: + """Original type: dependency_resolution_stats = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Pomtree' + resolution_method: ResolutionMethod + dependency_count: int + ecosystem: Ecosystem - @staticmethod - def to_json() -> Any: - return 'pomtree' + @classmethod + def from_json(cls, x: Any) -> 'DependencyResolutionStats': + if isinstance(x, dict): + return cls( + resolution_method=ResolutionMethod.from_json(x['resolution_method']) if 'resolution_method' in x else _atd_missing_json_field('DependencyResolutionStats', 'resolution_method'), + dependency_count=_atd_read_int(x['dependency_count']) if 'dependency_count' in x else _atd_missing_json_field('DependencyResolutionStats', 'dependency_count'), + ecosystem=Ecosystem.from_json(x['ecosystem']) if 'ecosystem' in x else _atd_missing_json_field('DependencyResolutionStats', 'ecosystem'), + ) + else: + _atd_bad_json('DependencyResolutionStats', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['resolution_method'] = (lambda x: x.to_json())(self.resolution_method) + res['dependency_count'] = _atd_write_int(self.dependency_count) + res['ecosystem'] = (lambda x: x.to_json())(self.ecosystem) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'DependencyResolutionStats': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class CargoParser: - """Original type: sca_parser_name = [ ... | Cargo_parser | ... ]""" +class SubprojectStats: + """Original type: subproject_stats = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'CargoParser' + subproject_id: str + dependency_sources: List[DependencySourceFile] + resolved_stats: Optional[DependencyResolutionStats] = None - @staticmethod - def to_json() -> Any: - return 'cargo' + @classmethod + def from_json(cls, x: Any) -> 'SubprojectStats': + if isinstance(x, dict): + return cls( + subproject_id=_atd_read_string(x['subproject_id']) if 'subproject_id' in x else _atd_missing_json_field('SubprojectStats', 'subproject_id'), + dependency_sources=_atd_read_list(DependencySourceFile.from_json)(x['dependency_sources']) if 'dependency_sources' in x else _atd_missing_json_field('SubprojectStats', 'dependency_sources'), + resolved_stats=DependencyResolutionStats.from_json(x['resolved_stats']) if 'resolved_stats' in x else None, + ) + else: + _atd_bad_json('SubprojectStats', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['subproject_id'] = _atd_write_string(self.subproject_id) + res['dependency_sources'] = _atd_write_list((lambda x: x.to_json()))(self.dependency_sources) + if self.resolved_stats is not None: + res['resolved_stats'] = (lambda x: x.to_json())(self.resolved_stats) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'SubprojectStats': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class ComposerLock_: - """Original type: sca_parser_name = [ ... | Composer_lock | ... ]""" +class SupplyChainStats: + """Original type: supply_chain_stats = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'ComposerLock_' + subprojects_stats: List[SubprojectStats] - @staticmethod - def to_json() -> Any: - return 'composer_lock' + @classmethod + def from_json(cls, x: Any) -> 'SupplyChainStats': + if isinstance(x, dict): + return cls( + subprojects_stats=_atd_read_list(SubprojectStats.from_json)(x['subprojects_stats']) if 'subprojects_stats' in x else _atd_missing_json_field('SupplyChainStats', 'subprojects_stats'), + ) + else: + _atd_bad_json('SupplyChainStats', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['subprojects_stats'] = _atd_write_list((lambda x: x.to_json()))(self.subprojects_stats) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'SupplyChainStats': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class PubspecLock_: - """Original type: sca_parser_name = [ ... | Pubspec_lock | ... ]""" +class SkippedRule: + """Original type: skipped_rule = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PubspecLock_' + rule_id: RuleId + details: str + position: Position - @staticmethod - def to_json() -> Any: - return 'pubspec_lock' + @classmethod + def from_json(cls, x: Any) -> 'SkippedRule': + if isinstance(x, dict): + return cls( + rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else _atd_missing_json_field('SkippedRule', 'rule_id'), + details=_atd_read_string(x['details']) if 'details' in x else _atd_missing_json_field('SkippedRule', 'details'), + position=Position.from_json(x['position']) if 'position' in x else _atd_missing_json_field('SkippedRule', 'position'), + ) + else: + _atd_bad_json('SkippedRule', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['rule_id'] = (lambda x: x.to_json())(self.rule_id) + res['details'] = _atd_write_string(self.details) + res['position'] = (lambda x: x.to_json())(self.position) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'SkippedRule': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class PackageSwift_: - """Original type: sca_parser_name = [ ... | Package_swift | ... ]""" +class ScannedAndSkipped: + """Original type: scanned_and_skipped = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PackageSwift_' + scanned: List[Fpath] + skipped: Optional[List[SkippedTarget]] = None - @staticmethod - def to_json() -> Any: - return 'package_swift' + @classmethod + def from_json(cls, x: Any) -> 'ScannedAndSkipped': + if isinstance(x, dict): + return cls( + scanned=_atd_read_list(Fpath.from_json)(x['scanned']) if 'scanned' in x else _atd_missing_json_field('ScannedAndSkipped', 'scanned'), + skipped=_atd_read_list(SkippedTarget.from_json)(x['skipped']) if 'skipped' in x else None, + ) + else: + _atd_bad_json('ScannedAndSkipped', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['scanned'] = _atd_write_list((lambda x: x.to_json()))(self.scanned) + if self.skipped is not None: + res['skipped'] = _atd_write_list((lambda x: x.to_json()))(self.skipped) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ScannedAndSkipped': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class PodfileLock_: - """Original type: sca_parser_name = [ ... | Podfile_lock | ... ]""" +class ScanInfo: + """Original type: scan_info = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PodfileLock_' + enabled_products: List[Product] + deployment_id: int + deployment_name: str + id: Optional[int] = None + + @classmethod + def from_json(cls, x: Any) -> 'ScanInfo': + if isinstance(x, dict): + return cls( + enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else _atd_missing_json_field('ScanInfo', 'enabled_products'), + deployment_id=_atd_read_int(x['deployment_id']) if 'deployment_id' in x else _atd_missing_json_field('ScanInfo', 'deployment_id'), + deployment_name=_atd_read_string(x['deployment_name']) if 'deployment_name' in x else _atd_missing_json_field('ScanInfo', 'deployment_name'), + id=_atd_read_int(x['id']) if 'id' in x else None, + ) + else: + _atd_bad_json('ScanInfo', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) + res['deployment_id'] = _atd_write_int(self.deployment_id) + res['deployment_name'] = _atd_write_string(self.deployment_name) + if self.id is not None: + res['id'] = _atd_write_int(self.id) + return res - @staticmethod - def to_json() -> Any: - return 'podfile_lock' + @classmethod + def from_json_string(cls, x: str) -> 'ScanInfo': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class PackageResolved: - """Original type: sca_parser_name = [ ... | Package_resolved | ... ]""" +class ScanConfiguration: + """Original type: scan_configuration = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PackageResolved' + rules: RawJson + triage_ignored_syntactic_ids: List[str] = field(default_factory=lambda: []) + triage_ignored_match_based_ids: List[str] = field(default_factory=lambda: []) - @staticmethod - def to_json() -> Any: - return 'package_resolved' + @classmethod + def from_json(cls, x: Any) -> 'ScanConfiguration': + if isinstance(x, dict): + return cls( + rules=RawJson.from_json(x['rules']) if 'rules' in x else _atd_missing_json_field('ScanConfiguration', 'rules'), + triage_ignored_syntactic_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_syntactic_ids']) if 'triage_ignored_syntactic_ids' in x else [], + triage_ignored_match_based_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_match_based_ids']) if 'triage_ignored_match_based_ids' in x else [], + ) + else: + _atd_bad_json('ScanConfiguration', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['rules'] = (lambda x: x.to_json())(self.rules) + res['triage_ignored_syntactic_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_syntactic_ids) + res['triage_ignored_match_based_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_match_based_ids) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ScanConfiguration': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class MixLock_: - """Original type: sca_parser_name = [ ... | Mix_lock | ... ]""" +class Glob: + """Original type: glob""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'MixLock_' + value: str - @staticmethod - def to_json() -> Any: - return 'mix_lock' + @classmethod + def from_json(cls, x: Any) -> 'Glob': + return cls(_atd_read_string(x)) + + def to_json(self) -> Any: + return _atd_write_string(self.value) + + @classmethod + def from_json_string(cls, x: str) -> 'Glob': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class ScaParserName: - """Original type: sca_parser_name = [ ... 
]""" - - value: Union[GemfileLock_, GoMod2, GoSum, GradleLockfile_, GradleBuild, Jsondoc, Pipfile_, PnpmLock_, PoetryLock_, PyprojectToml_, Requirements, Yarn1, Yarn2, Pomtree, CargoParser, ComposerLock_, PubspecLock_, PackageSwift_, PodfileLock_, PackageResolved, MixLock_] +class ProductIgnoredFiles: + """Original type: product_ignored_files""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return self.value.kind + value: Dict[Product, List[Glob]] @classmethod - def from_json(cls, x: Any) -> 'ScaParserName': - if isinstance(x, str): - if x == 'gemfile_lock': - return cls(GemfileLock_()) - if x == 'go_mod': - return cls(GoMod2()) - if x == 'go_sum': - return cls(GoSum()) - if x == 'gradle_lockfile': - return cls(GradleLockfile_()) - if x == 'gradle_build': - return cls(GradleBuild()) - if x == 'jsondoc': - return cls(Jsondoc()) - if x == 'pipfile': - return cls(Pipfile_()) - if x == 'pnpm_lock': - return cls(PnpmLock_()) - if x == 'poetry_lock': - return cls(PoetryLock_()) - if x == 'pyproject_toml': - return cls(PyprojectToml_()) - if x == 'requirements': - return cls(Requirements()) - if x == 'yarn_1': - return cls(Yarn1()) - if x == 'yarn_2': - return cls(Yarn2()) - if x == 'pomtree': - return cls(Pomtree()) - if x == 'cargo': - return cls(CargoParser()) - if x == 'composer_lock': - return cls(ComposerLock_()) - if x == 'pubspec_lock': - return cls(PubspecLock_()) - if x == 'package_swift': - return cls(PackageSwift_()) - if x == 'podfile_lock': - return cls(PodfileLock_()) - if x == 'package_resolved': - return cls(PackageResolved()) - if x == 'mix_lock': - return cls(MixLock_()) - _atd_bad_json('ScaParserName', x) - _atd_bad_json('ScaParserName', x) + def from_json(cls, x: Any) -> 'ProductIgnoredFiles': + return cls(_atd_read_assoc_array_into_dict(Product.from_json, _atd_read_list(Glob.from_json))(x)) def to_json(self) -> Any: - return self.value.to_json() + return _atd_write_assoc_dict_to_array((lambda x: x.to_json()), _atd_write_list((lambda x: x.to_json())))(self.value) @classmethod - def from_json_string(cls, x: str) -> 'ScaParserName': + def from_json_string(cls, x: str) -> 'ProductIgnoredFiles': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class SarifFormat: - """Original type: sarif_format = { ... }""" +@dataclass +class HistoricalConfiguration: + """Original type: historical_configuration = { ... 
}""" - rules: Fpath - is_pro: bool - show_dataflow_traces: bool + enabled: bool + lookback_days: Optional[int] = None @classmethod - def from_json(cls, x: Any) -> 'SarifFormat': + def from_json(cls, x: Any) -> 'HistoricalConfiguration': if isinstance(x, dict): return cls( - rules=Fpath.from_json(x['rules']) if 'rules' in x else _atd_missing_json_field('SarifFormat', 'rules'), - is_pro=_atd_read_bool(x['is_pro']) if 'is_pro' in x else _atd_missing_json_field('SarifFormat', 'is_pro'), - show_dataflow_traces=_atd_read_bool(x['show_dataflow_traces']) if 'show_dataflow_traces' in x else _atd_missing_json_field('SarifFormat', 'show_dataflow_traces'), + enabled=_atd_read_bool(x['enabled']) if 'enabled' in x else _atd_missing_json_field('HistoricalConfiguration', 'enabled'), + lookback_days=_atd_read_int(x['lookback_days']) if 'lookback_days' in x else None, ) else: - _atd_bad_json('SarifFormat', x) + _atd_bad_json('HistoricalConfiguration', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['rules'] = (lambda x: x.to_json())(self.rules) - res['is_pro'] = _atd_write_bool(self.is_pro) - res['show_dataflow_traces'] = _atd_write_bool(self.show_dataflow_traces) + res['enabled'] = _atd_write_bool(self.enabled) + if self.lookback_days is not None: + res['lookback_days'] = _atd_write_int(self.lookback_days) return res @classmethod - def from_json_string(cls, x: str) -> 'SarifFormat': + def from_json_string(cls, x: str) -> 'HistoricalConfiguration': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class OSS_: - """Original type: engine_kind = [ ... | OSS | ... ]""" +@dataclass +class EngineConfiguration: + """Original type: engine_configuration = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'OSS_' + autofix: bool = field(default_factory=lambda: False) + deepsemgrep: bool = field(default_factory=lambda: False) + dependency_query: bool = field(default_factory=lambda: False) + path_to_transitivity: bool = field(default_factory=lambda: False) + scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) + symbol_analysis: bool = field(default_factory=lambda: False) + ignored_files: List[str] = field(default_factory=lambda: []) + product_ignored_files: Optional[ProductIgnoredFiles] = None + generic_slow_rollout: bool = field(default_factory=lambda: False) + historical_config: Optional[HistoricalConfiguration] = None + always_suppress_errors: bool = field(default_factory=lambda: False) - @staticmethod - def to_json() -> Any: - return 'OSS' + @classmethod + def from_json(cls, x: Any) -> 'EngineConfiguration': + if isinstance(x, dict): + return cls( + autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, + deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, + dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, + path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, + scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, + symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, + ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else [], + product_ignored_files=ProductIgnoredFiles.from_json(x['product_ignored_files']) if 'product_ignored_files' in x else None, 
+ generic_slow_rollout=_atd_read_bool(x['generic_slow_rollout']) if 'generic_slow_rollout' in x else False, + historical_config=HistoricalConfiguration.from_json(x['historical_config']) if 'historical_config' in x else None, + always_suppress_errors=_atd_read_bool(x['always_suppress_errors']) if 'always_suppress_errors' in x else False, + ) + else: + _atd_bad_json('EngineConfiguration', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['autofix'] = _atd_write_bool(self.autofix) + res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) + res['dependency_query'] = _atd_write_bool(self.dependency_query) + res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) + res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) + res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) + res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) + if self.product_ignored_files is not None: + res['product_ignored_files'] = (lambda x: x.to_json())(self.product_ignored_files) + res['generic_slow_rollout'] = _atd_write_bool(self.generic_slow_rollout) + if self.historical_config is not None: + res['historical_config'] = (lambda x: x.to_json())(self.historical_config) + res['always_suppress_errors'] = _atd_write_bool(self.always_suppress_errors) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'EngineConfiguration': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class PRO_: - """Original type: engine_kind = [ ... | PRO | ... ]""" +@dataclass +class ScanResponse: + """Original type: scan_response = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PRO_' + info: ScanInfo + config: ScanConfiguration + engine_params: EngineConfiguration - @staticmethod - def to_json() -> Any: - return 'PRO' + @classmethod + def from_json(cls, x: Any) -> 'ScanResponse': + if isinstance(x, dict): + return cls( + info=ScanInfo.from_json(x['info']) if 'info' in x else _atd_missing_json_field('ScanResponse', 'info'), + config=ScanConfiguration.from_json(x['config']) if 'config' in x else _atd_missing_json_field('ScanResponse', 'config'), + engine_params=EngineConfiguration.from_json(x['engine_params']) if 'engine_params' in x else _atd_missing_json_field('ScanResponse', 'engine_params'), + ) + else: + _atd_bad_json('ScanResponse', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['info'] = (lambda x: x.to_json())(self.info) + res['config'] = (lambda x: x.to_json())(self.config) + res['engine_params'] = (lambda x: x.to_json())(self.engine_params) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ScanResponse': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class EngineKind: - """Original type: engine_kind = [ ... ]""" - - value: Union[OSS_, PRO_] +@dataclass +class ScanMetadata: + """Original type: scan_metadata = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return self.value.kind + cli_version: Version + unique_id: Uuid + requested_products: List[Product] + dry_run: bool = field(default_factory=lambda: False) + sms_scan_id: Optional[str] = None @classmethod - def from_json(cls, x: Any) -> 'EngineKind': - if isinstance(x, str): - if x == 'OSS': - return cls(OSS_()) - if x == 'PRO': - return cls(PRO_()) - _atd_bad_json('EngineKind', x) - _atd_bad_json('EngineKind', x) + def from_json(cls, x: Any) -> 'ScanMetadata': + if isinstance(x, dict): + return cls( + cli_version=Version.from_json(x['cli_version']) if 'cli_version' in x else _atd_missing_json_field('ScanMetadata', 'cli_version'), + unique_id=Uuid.from_json(x['unique_id']) if 'unique_id' in x else _atd_missing_json_field('ScanMetadata', 'unique_id'), + requested_products=_atd_read_list(Product.from_json)(x['requested_products']) if 'requested_products' in x else _atd_missing_json_field('ScanMetadata', 'requested_products'), + dry_run=_atd_read_bool(x['dry_run']) if 'dry_run' in x else False, + sms_scan_id=_atd_read_string(x['sms_scan_id']) if 'sms_scan_id' in x else None, + ) + else: + _atd_bad_json('ScanMetadata', x) def to_json(self) -> Any: - return self.value.to_json() + res: Dict[str, Any] = {} + res['cli_version'] = (lambda x: x.to_json())(self.cli_version) + res['unique_id'] = (lambda x: x.to_json())(self.unique_id) + res['requested_products'] = _atd_write_list((lambda x: x.to_json()))(self.requested_products) + res['dry_run'] = _atd_write_bool(self.dry_run) + if self.sms_scan_id is not None: + res['sms_scan_id'] = _atd_write_string(self.sms_scan_id) + return res @classmethod - def from_json_string(cls, x: str) -> 'EngineKind': + def from_json_string(cls, x: str) -> 'ScanMetadata': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class RuleIdAndEngineKind: - """Original type: rule_id_and_engine_kind""" +@dataclass +class ProjectMetadata: + """Original type: project_metadata = { ... 
}""" - value: Tuple[RuleId, EngineKind] + scan_environment: str + repository: str + repo_url: Optional[Uri] + branch: Optional[str] + commit: Optional[Sha1] + commit_title: Optional[str] + commit_author_email: Optional[str] + commit_author_name: Optional[str] + commit_author_username: Optional[str] + commit_author_image_url: Optional[Uri] + ci_job_url: Optional[Uri] + on: str + pull_request_author_username: Optional[str] + pull_request_author_image_url: Optional[Uri] + pull_request_id: Optional[str] + pull_request_title: Optional[str] + is_full_scan: bool + repo_id: Optional[str] = None + org_id: Optional[str] = None + repo_display_name: Optional[str] = None + commit_timestamp: Optional[Datetime] = None + base_sha: Optional[Sha1] = None + start_sha: Optional[Sha1] = None + is_sca_scan: Optional[bool] = None + is_code_scan: Optional[bool] = None + is_secrets_scan: Optional[bool] = None @classmethod - def from_json(cls, x: Any) -> 'RuleIdAndEngineKind': - return cls((lambda x: (RuleId.from_json(x[0]), EngineKind.from_json(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x))(x)) + def from_json(cls, x: Any) -> 'ProjectMetadata': + if isinstance(x, dict): + return cls( + scan_environment=_atd_read_string(x['scan_environment']) if 'scan_environment' in x else _atd_missing_json_field('ProjectMetadata', 'scan_environment'), + repository=_atd_read_string(x['repository']) if 'repository' in x else _atd_missing_json_field('ProjectMetadata', 'repository'), + repo_url=_atd_read_nullable(Uri.from_json)(x['repo_url']) if 'repo_url' in x else _atd_missing_json_field('ProjectMetadata', 'repo_url'), + branch=_atd_read_nullable(_atd_read_string)(x['branch']) if 'branch' in x else _atd_missing_json_field('ProjectMetadata', 'branch'), + commit=_atd_read_nullable(Sha1.from_json)(x['commit']) if 'commit' in x else _atd_missing_json_field('ProjectMetadata', 'commit'), + commit_title=_atd_read_nullable(_atd_read_string)(x['commit_title']) if 'commit_title' in x else _atd_missing_json_field('ProjectMetadata', 'commit_title'), + commit_author_email=_atd_read_nullable(_atd_read_string)(x['commit_author_email']) if 'commit_author_email' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_email'), + commit_author_name=_atd_read_nullable(_atd_read_string)(x['commit_author_name']) if 'commit_author_name' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_name'), + commit_author_username=_atd_read_nullable(_atd_read_string)(x['commit_author_username']) if 'commit_author_username' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_username'), + commit_author_image_url=_atd_read_nullable(Uri.from_json)(x['commit_author_image_url']) if 'commit_author_image_url' in x else _atd_missing_json_field('ProjectMetadata', 'commit_author_image_url'), + ci_job_url=_atd_read_nullable(Uri.from_json)(x['ci_job_url']) if 'ci_job_url' in x else _atd_missing_json_field('ProjectMetadata', 'ci_job_url'), + on=_atd_read_string(x['on']) if 'on' in x else _atd_missing_json_field('ProjectMetadata', 'on'), + pull_request_author_username=_atd_read_nullable(_atd_read_string)(x['pull_request_author_username']) if 'pull_request_author_username' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_author_username'), + pull_request_author_image_url=_atd_read_nullable(Uri.from_json)(x['pull_request_author_image_url']) if 'pull_request_author_image_url' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_author_image_url'), + 
pull_request_id=_atd_read_nullable(_atd_read_string)(x['pull_request_id']) if 'pull_request_id' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_id'), + pull_request_title=_atd_read_nullable(_atd_read_string)(x['pull_request_title']) if 'pull_request_title' in x else _atd_missing_json_field('ProjectMetadata', 'pull_request_title'), + is_full_scan=_atd_read_bool(x['is_full_scan']) if 'is_full_scan' in x else _atd_missing_json_field('ProjectMetadata', 'is_full_scan'), + repo_id=_atd_read_string(x['repo_id']) if 'repo_id' in x else None, + org_id=_atd_read_string(x['org_id']) if 'org_id' in x else None, + repo_display_name=_atd_read_string(x['repo_display_name']) if 'repo_display_name' in x else None, + commit_timestamp=Datetime.from_json(x['commit_timestamp']) if 'commit_timestamp' in x else None, + base_sha=Sha1.from_json(x['base_sha']) if 'base_sha' in x else None, + start_sha=Sha1.from_json(x['start_sha']) if 'start_sha' in x else None, + is_sca_scan=_atd_read_bool(x['is_sca_scan']) if 'is_sca_scan' in x else None, + is_code_scan=_atd_read_bool(x['is_code_scan']) if 'is_code_scan' in x else None, + is_secrets_scan=_atd_read_bool(x['is_secrets_scan']) if 'is_secrets_scan' in x else None, + ) + else: + _atd_bad_json('ProjectMetadata', x) def to_json(self) -> Any: - return (lambda x: [(lambda x: x.to_json())(x[0]), (lambda x: x.to_json())(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x))(self.value) + res: Dict[str, Any] = {} + res['scan_environment'] = _atd_write_string(self.scan_environment) + res['repository'] = _atd_write_string(self.repository) + res['repo_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.repo_url) + res['branch'] = _atd_write_nullable(_atd_write_string)(self.branch) + res['commit'] = _atd_write_nullable((lambda x: x.to_json()))(self.commit) + res['commit_title'] = _atd_write_nullable(_atd_write_string)(self.commit_title) + res['commit_author_email'] = _atd_write_nullable(_atd_write_string)(self.commit_author_email) + res['commit_author_name'] = _atd_write_nullable(_atd_write_string)(self.commit_author_name) + res['commit_author_username'] = _atd_write_nullable(_atd_write_string)(self.commit_author_username) + res['commit_author_image_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.commit_author_image_url) + res['ci_job_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.ci_job_url) + res['on'] = _atd_write_string(self.on) + res['pull_request_author_username'] = _atd_write_nullable(_atd_write_string)(self.pull_request_author_username) + res['pull_request_author_image_url'] = _atd_write_nullable((lambda x: x.to_json()))(self.pull_request_author_image_url) + res['pull_request_id'] = _atd_write_nullable(_atd_write_string)(self.pull_request_id) + res['pull_request_title'] = _atd_write_nullable(_atd_write_string)(self.pull_request_title) + res['is_full_scan'] = _atd_write_bool(self.is_full_scan) + if self.repo_id is not None: + res['repo_id'] = _atd_write_string(self.repo_id) + if self.org_id is not None: + res['org_id'] = _atd_write_string(self.org_id) + if self.repo_display_name is not None: + res['repo_display_name'] = _atd_write_string(self.repo_display_name) + if self.commit_timestamp is not None: + res['commit_timestamp'] = (lambda x: x.to_json())(self.commit_timestamp) + if self.base_sha is not None: + res['base_sha'] = (lambda x: x.to_json())(self.base_sha) + if self.start_sha is not None: + res['start_sha'] = (lambda x: x.to_json())(self.start_sha) + if self.is_sca_scan is not None: 
+ res['is_sca_scan'] = _atd_write_bool(self.is_sca_scan) + if self.is_code_scan is not None: + res['is_code_scan'] = _atd_write_bool(self.is_code_scan) + if self.is_secrets_scan is not None: + res['is_secrets_scan'] = _atd_write_bool(self.is_secrets_scan) + return res @classmethod - def from_json_string(cls, x: str) -> 'RuleIdAndEngineKind': + def from_json_string(cls, x: str) -> 'ProjectMetadata': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ResolutionCmdFailed: - """Original type: resolution_cmd_failed = { ... }""" +@dataclass +class CiConfigFromRepo: + """Original type: ci_config_from_repo = { ... }""" - command: str - message: str + version: Version = field(default_factory=lambda: Version('v1')) + tags: Optional[List[Tag]] = None @classmethod - def from_json(cls, x: Any) -> 'ResolutionCmdFailed': + def from_json(cls, x: Any) -> 'CiConfigFromRepo': if isinstance(x, dict): return cls( - command=_atd_read_string(x['command']) if 'command' in x else _atd_missing_json_field('ResolutionCmdFailed', 'command'), - message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('ResolutionCmdFailed', 'message'), + version=Version.from_json(x['version']) if 'version' in x else Version('v1'), + tags=_atd_read_list(Tag.from_json)(x['tags']) if 'tags' in x else None, ) else: - _atd_bad_json('ResolutionCmdFailed', x) + _atd_bad_json('CiConfigFromRepo', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['command'] = _atd_write_string(self.command) - res['message'] = _atd_write_string(self.message) + res['version'] = (lambda x: x.to_json())(self.version) + if self.tags is not None: + res['tags'] = _atd_write_list((lambda x: x.to_json()))(self.tags) return res @classmethod - def from_json_string(cls, x: str) -> 'ResolutionCmdFailed': + def from_json_string(cls, x: str) -> 'CiConfigFromRepo': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class UnsupportedManifest: - """Original type: resolution_error = [ ... | UnsupportedManifest | ... ]""" - - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'UnsupportedManifest' - - @staticmethod - def to_json() -> Any: - return 'UnsupportedManifest' - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass(frozen=True) -class MissingRequirement: - """Original type: resolution_error = [ ... | MissingRequirement of ... | ... ]""" +@dataclass +class ScanRequest: + """Original type: scan_request = { ... 
}""" - value: str + project_metadata: ProjectMetadata + scan_metadata: ScanMetadata + project_config: Optional[CiConfigFromRepo] = None - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'MissingRequirement' + @classmethod + def from_json(cls, x: Any) -> 'ScanRequest': + if isinstance(x, dict): + return cls( + project_metadata=ProjectMetadata.from_json(x['project_metadata']) if 'project_metadata' in x else _atd_missing_json_field('ScanRequest', 'project_metadata'), + scan_metadata=ScanMetadata.from_json(x['scan_metadata']) if 'scan_metadata' in x else _atd_missing_json_field('ScanRequest', 'scan_metadata'), + project_config=CiConfigFromRepo.from_json(x['project_config']) if 'project_config' in x else None, + ) + else: + _atd_bad_json('ScanRequest', x) def to_json(self) -> Any: - return ['MissingRequirement', _atd_write_string(self.value)] + res: Dict[str, Any] = {} + res['project_metadata'] = (lambda x: x.to_json())(self.project_metadata) + res['scan_metadata'] = (lambda x: x.to_json())(self.scan_metadata) + if self.project_config is not None: + res['project_config'] = (lambda x: x.to_json())(self.project_config) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ScanRequest': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ResolutionCmdFailed_: - """Original type: resolution_error = [ ... | ResolutionCmdFailed of ... | ... ]""" +@dataclass +class CiEnv: + """Original type: ci_env""" - value: ResolutionCmdFailed + value: Dict[str, str] - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'ResolutionCmdFailed_' + @classmethod + def from_json(cls, x: Any) -> 'CiEnv': + return cls(_atd_read_assoc_object_into_dict(_atd_read_string)(x)) def to_json(self) -> Any: - return ['ResolutionCmdFailed', (lambda x: x.to_json())(self.value)] + return _atd_write_assoc_dict_to_object(_atd_write_string)(self.value) + + @classmethod + def from_json_string(cls, x: str) -> 'CiEnv': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ParseDependenciesFailed: - """Original type: resolution_error = [ ... | ParseDependenciesFailed of ... | ... ]""" +@dataclass +class CiConfig: + """Original type: ci_config = { ... 
}""" - value: str + env: CiEnv + enabled_products: List[Product] + ignored_files: List[str] + autofix: bool = field(default_factory=lambda: False) + deepsemgrep: bool = field(default_factory=lambda: False) + dependency_query: bool = field(default_factory=lambda: False) + path_to_transitivity: bool = field(default_factory=lambda: False) + scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) + symbol_analysis: bool = field(default_factory=lambda: False) - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'ParseDependenciesFailed' + @classmethod + def from_json(cls, x: Any) -> 'CiConfig': + if isinstance(x, dict): + return cls( + env=CiEnv.from_json(x['env']) if 'env' in x else _atd_missing_json_field('CiConfig', 'env'), + enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else _atd_missing_json_field('CiConfig', 'enabled_products'), + ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else _atd_missing_json_field('CiConfig', 'ignored_files'), + autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, + deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, + dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, + path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, + scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, + symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, + ) + else: + _atd_bad_json('CiConfig', x) def to_json(self) -> Any: - return ['ParseDependenciesFailed', _atd_write_string(self.value)] + res: Dict[str, Any] = {} + res['env'] = (lambda x: x.to_json())(self.env) + res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) + res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) + res['autofix'] = _atd_write_bool(self.autofix) + res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) + res['dependency_query'] = _atd_write_bool(self.dependency_query) + res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) + res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) + res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'CiConfig': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class ResolutionError: - """Original type: resolution_error = [ ... ]""" +@dataclass +class Message: + """Original type: action = [ ... | Message of ... | ... 
]""" - value: Union[UnsupportedManifest, MissingRequirement, ResolutionCmdFailed_, ParseDependenciesFailed] + value: str @property def kind(self) -> str: """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'ResolutionError': - if isinstance(x, str): - if x == 'UnsupportedManifest': - return cls(UnsupportedManifest()) - _atd_bad_json('ResolutionError', x) - if isinstance(x, List) and len(x) == 2: - cons = x[0] - if cons == 'MissingRequirement': - return cls(MissingRequirement(_atd_read_string(x[1]))) - if cons == 'ResolutionCmdFailed': - return cls(ResolutionCmdFailed_(ResolutionCmdFailed.from_json(x[1]))) - if cons == 'ParseDependenciesFailed': - return cls(ParseDependenciesFailed(_atd_read_string(x[1]))) - _atd_bad_json('ResolutionError', x) - _atd_bad_json('ResolutionError', x) + return 'Message' def to_json(self) -> Any: - return self.value.to_json() - - @classmethod - def from_json_string(cls, x: str) -> 'ResolutionError': - return cls.from_json(json.loads(x)) + return ['Message', _atd_write_string(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class ResolutionOk: - """Original type: resolution_result = [ ... | ResolutionOk of ... | ... ]""" +class Delay: + """Original type: action = [ ... | Delay of ... | ... ]""" - value: Tuple[List[FoundDependency], List[ResolutionError]] + value: float @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ResolutionOk' + return 'Delay' def to_json(self) -> Any: - return ['ResolutionOk', (lambda x: [_atd_write_list((lambda x: x.to_json()))(x[0]), _atd_write_list((lambda x: x.to_json()))(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x))(self.value)] + return ['Delay', _atd_write_float(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class ResolutionError_: - """Original type: resolution_result = [ ... | ResolutionError of ... | ... ]""" +class Exit: + """Original type: action = [ ... | Exit of ... | ... ]""" - value: List[ResolutionError] + value: int @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ResolutionError_' + return 'Exit' def to_json(self) -> Any: - return ['ResolutionError', _atd_write_list((lambda x: x.to_json()))(self.value)] + return ['Exit', _atd_write_int(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class ResolutionResult: - """Original type: resolution_result = [ ... ]""" +class Action: + """Original type: action = [ ... 
]""" - value: Union[ResolutionOk, ResolutionError_] + value: Union[Message, Delay, Exit] @property def kind(self) -> str: @@ -6762,21 +6762,23 @@ def kind(self) -> str: return self.value.kind @classmethod - def from_json(cls, x: Any) -> 'ResolutionResult': + def from_json(cls, x: Any) -> 'Action': if isinstance(x, List) and len(x) == 2: cons = x[0] - if cons == 'ResolutionOk': - return cls(ResolutionOk((lambda x: (_atd_read_list(FoundDependency.from_json)(x[0]), _atd_read_list(ResolutionError.from_json)(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x))(x[1]))) - if cons == 'ResolutionError': - return cls(ResolutionError_(_atd_read_list(ResolutionError.from_json)(x[1]))) - _atd_bad_json('ResolutionResult', x) - _atd_bad_json('ResolutionResult', x) + if cons == 'Message': + return cls(Message(_atd_read_string(x[1]))) + if cons == 'Delay': + return cls(Delay(_atd_read_float(x[1]))) + if cons == 'Exit': + return cls(Exit(_atd_read_int(x[1]))) + _atd_bad_json('Action', x) + _atd_bad_json('Action', x) def to_json(self) -> Any: return self.value.to_json() @classmethod - def from_json_string(cls, x: str) -> 'ResolutionResult': + def from_json_string(cls, x: str) -> 'Action': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -6784,43 +6786,38 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class Profile: - """Original type: profile = { ... }""" +class CiConfigFromCloud: + """Original type: ci_config_from_cloud = { ... }""" - rules: List[RuleId] - rules_parse_time: float - profiling_times: Dict[str, float] - targets: List[TargetTimes] - total_bytes: int - max_memory_bytes: Optional[int] = None + repo_config: CiConfig + org_config: Optional[CiConfig] = None + dirs_config: Optional[List[Tuple[Fpath, CiConfig]]] = None + actions: List[Action] = field(default_factory=lambda: []) @classmethod - def from_json(cls, x: Any) -> 'Profile': + def from_json(cls, x: Any) -> 'CiConfigFromCloud': if isinstance(x, dict): return cls( - rules=_atd_read_list(RuleId.from_json)(x['rules']) if 'rules' in x else _atd_missing_json_field('Profile', 'rules'), - rules_parse_time=_atd_read_float(x['rules_parse_time']) if 'rules_parse_time' in x else _atd_missing_json_field('Profile', 'rules_parse_time'), - profiling_times=_atd_read_assoc_object_into_dict(_atd_read_float)(x['profiling_times']) if 'profiling_times' in x else _atd_missing_json_field('Profile', 'profiling_times'), - targets=_atd_read_list(TargetTimes.from_json)(x['targets']) if 'targets' in x else _atd_missing_json_field('Profile', 'targets'), - total_bytes=_atd_read_int(x['total_bytes']) if 'total_bytes' in x else _atd_missing_json_field('Profile', 'total_bytes'), - max_memory_bytes=_atd_read_int(x['max_memory_bytes']) if 'max_memory_bytes' in x else None, + repo_config=CiConfig.from_json(x['repo_config']) if 'repo_config' in x else _atd_missing_json_field('CiConfigFromCloud', 'repo_config'), + org_config=CiConfig.from_json(x['org_config']) if 'org_config' in x else None, + dirs_config=_atd_read_list((lambda x: (Fpath.from_json(x[0]), CiConfig.from_json(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x)))(x['dirs_config']) if 'dirs_config' in x else None, + actions=_atd_read_list(Action.from_json)(x['actions']) if 'actions' in x else [], ) else: - _atd_bad_json('Profile', x) + _atd_bad_json('CiConfigFromCloud', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['rules'] = _atd_write_list((lambda x: x.to_json()))(self.rules) - 
res['rules_parse_time'] = _atd_write_float(self.rules_parse_time) - res['profiling_times'] = _atd_write_assoc_dict_to_object(_atd_write_float)(self.profiling_times) - res['targets'] = _atd_write_list((lambda x: x.to_json()))(self.targets) - res['total_bytes'] = _atd_write_int(self.total_bytes) - if self.max_memory_bytes is not None: - res['max_memory_bytes'] = _atd_write_int(self.max_memory_bytes) + res['repo_config'] = (lambda x: x.to_json())(self.repo_config) + if self.org_config is not None: + res['org_config'] = (lambda x: x.to_json())(self.org_config) + if self.dirs_config is not None: + res['dirs_config'] = _atd_write_list((lambda x: [(lambda x: x.to_json())(x[0]), (lambda x: x.to_json())(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x)))(self.dirs_config) + res['actions'] = _atd_write_list((lambda x: x.to_json()))(self.actions) return res @classmethod - def from_json_string(cls, x: str) -> 'Profile': + def from_json_string(cls, x: str) -> 'CiConfigFromCloud': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -6828,737 +6825,905 @@ def to_json_string(self, **kw: Any) -> str: @dataclass -class ParsingStats: - """Original type: parsing_stats = { ... }""" +class ScanConfig: + """Original type: scan_config = { ... }""" - targets_parsed: int - num_targets: int - bytes_parsed: int - num_bytes: int + deployment_id: int + deployment_name: str + policy_names: List[str] + rule_config: str + autofix: bool = field(default_factory=lambda: False) + deepsemgrep: bool = field(default_factory=lambda: False) + dependency_query: bool = field(default_factory=lambda: False) + path_to_transitivity: bool = field(default_factory=lambda: False) + scan_all_deps_in_diff_scan: bool = field(default_factory=lambda: False) + symbol_analysis: bool = field(default_factory=lambda: False) + triage_ignored_syntactic_ids: List[str] = field(default_factory=lambda: []) + triage_ignored_match_based_ids: List[str] = field(default_factory=lambda: []) + ignored_files: List[str] = field(default_factory=lambda: []) + enabled_products: Optional[List[Product]] = None + actions: List[Action] = field(default_factory=lambda: []) + ci_config_from_cloud: Optional[CiConfigFromCloud] = None @classmethod - def from_json(cls, x: Any) -> 'ParsingStats': + def from_json(cls, x: Any) -> 'ScanConfig': if isinstance(x, dict): return cls( - targets_parsed=_atd_read_int(x['targets_parsed']) if 'targets_parsed' in x else _atd_missing_json_field('ParsingStats', 'targets_parsed'), - num_targets=_atd_read_int(x['num_targets']) if 'num_targets' in x else _atd_missing_json_field('ParsingStats', 'num_targets'), - bytes_parsed=_atd_read_int(x['bytes_parsed']) if 'bytes_parsed' in x else _atd_missing_json_field('ParsingStats', 'bytes_parsed'), - num_bytes=_atd_read_int(x['num_bytes']) if 'num_bytes' in x else _atd_missing_json_field('ParsingStats', 'num_bytes'), + deployment_id=_atd_read_int(x['deployment_id']) if 'deployment_id' in x else _atd_missing_json_field('ScanConfig', 'deployment_id'), + deployment_name=_atd_read_string(x['deployment_name']) if 'deployment_name' in x else _atd_missing_json_field('ScanConfig', 'deployment_name'), + policy_names=_atd_read_list(_atd_read_string)(x['policy_names']) if 'policy_names' in x else _atd_missing_json_field('ScanConfig', 'policy_names'), + rule_config=_atd_read_string(x['rule_config']) if 'rule_config' in x else _atd_missing_json_field('ScanConfig', 'rule_config'), + autofix=_atd_read_bool(x['autofix']) if 'autofix' in x else False, 
+ deepsemgrep=_atd_read_bool(x['deepsemgrep']) if 'deepsemgrep' in x else False, + dependency_query=_atd_read_bool(x['dependency_query']) if 'dependency_query' in x else False, + path_to_transitivity=_atd_read_bool(x['path_to_transitivity']) if 'path_to_transitivity' in x else False, + scan_all_deps_in_diff_scan=_atd_read_bool(x['scan_all_deps_in_diff_scan']) if 'scan_all_deps_in_diff_scan' in x else False, + symbol_analysis=_atd_read_bool(x['symbol_analysis']) if 'symbol_analysis' in x else False, + triage_ignored_syntactic_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_syntactic_ids']) if 'triage_ignored_syntactic_ids' in x else [], + triage_ignored_match_based_ids=_atd_read_list(_atd_read_string)(x['triage_ignored_match_based_ids']) if 'triage_ignored_match_based_ids' in x else [], + ignored_files=_atd_read_list(_atd_read_string)(x['ignored_files']) if 'ignored_files' in x else [], + enabled_products=_atd_read_list(Product.from_json)(x['enabled_products']) if 'enabled_products' in x else None, + actions=_atd_read_list(Action.from_json)(x['actions']) if 'actions' in x else [], + ci_config_from_cloud=CiConfigFromCloud.from_json(x['ci_config_from_cloud']) if 'ci_config_from_cloud' in x else None, ) else: - _atd_bad_json('ParsingStats', x) + _atd_bad_json('ScanConfig', x) def to_json(self) -> Any: res: Dict[str, Any] = {} - res['targets_parsed'] = _atd_write_int(self.targets_parsed) - res['num_targets'] = _atd_write_int(self.num_targets) - res['bytes_parsed'] = _atd_write_int(self.bytes_parsed) - res['num_bytes'] = _atd_write_int(self.num_bytes) + res['deployment_id'] = _atd_write_int(self.deployment_id) + res['deployment_name'] = _atd_write_string(self.deployment_name) + res['policy_names'] = _atd_write_list(_atd_write_string)(self.policy_names) + res['rule_config'] = _atd_write_string(self.rule_config) + res['autofix'] = _atd_write_bool(self.autofix) + res['deepsemgrep'] = _atd_write_bool(self.deepsemgrep) + res['dependency_query'] = _atd_write_bool(self.dependency_query) + res['path_to_transitivity'] = _atd_write_bool(self.path_to_transitivity) + res['scan_all_deps_in_diff_scan'] = _atd_write_bool(self.scan_all_deps_in_diff_scan) + res['symbol_analysis'] = _atd_write_bool(self.symbol_analysis) + res['triage_ignored_syntactic_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_syntactic_ids) + res['triage_ignored_match_based_ids'] = _atd_write_list(_atd_write_string)(self.triage_ignored_match_based_ids) + res['ignored_files'] = _atd_write_list(_atd_write_string)(self.ignored_files) + if self.enabled_products is not None: + res['enabled_products'] = _atd_write_list((lambda x: x.to_json()))(self.enabled_products) + res['actions'] = _atd_write_list((lambda x: x.to_json()))(self.actions) + if self.ci_config_from_cloud is not None: + res['ci_config_from_cloud'] = (lambda x: x.to_json())(self.ci_config_from_cloud) return res - @classmethod - def from_json_string(cls, x: str) -> 'ParsingStats': - return cls.from_json(json.loads(x)) + @classmethod + def from_json_string(cls, x: str) -> 'ScanConfig': + return cls.from_json(json.loads(x)) + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass +class GemfileLock_: + """Original type: sca_parser_name = [ ... | Gemfile_lock | ... 
]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'GemfileLock_' + + @staticmethod + def to_json() -> Any: + return 'gemfile_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class IncompatibleRule: - """Original type: incompatible_rule = { ... }""" - - rule_id: RuleId - this_version: Version - min_version: Optional[Version] = None - max_version: Optional[Version] = None - - @classmethod - def from_json(cls, x: Any) -> 'IncompatibleRule': - if isinstance(x, dict): - return cls( - rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else _atd_missing_json_field('IncompatibleRule', 'rule_id'), - this_version=Version.from_json(x['this_version']) if 'this_version' in x else _atd_missing_json_field('IncompatibleRule', 'this_version'), - min_version=Version.from_json(x['min_version']) if 'min_version' in x else None, - max_version=Version.from_json(x['max_version']) if 'max_version' in x else None, - ) - else: - _atd_bad_json('IncompatibleRule', x) +@dataclass +class GoMod2: + """Original type: sca_parser_name = [ ... | Go_mod | ... ]""" - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['rule_id'] = (lambda x: x.to_json())(self.rule_id) - res['this_version'] = (lambda x: x.to_json())(self.this_version) - if self.min_version is not None: - res['min_version'] = (lambda x: x.to_json())(self.min_version) - if self.max_version is not None: - res['max_version'] = (lambda x: x.to_json())(self.max_version) - return res + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'GoMod2' - @classmethod - def from_json_string(cls, x: str) -> 'IncompatibleRule': - return cls.from_json(json.loads(x)) + @staticmethod + def to_json() -> Any: + return 'go_mod' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class FindingHashes: - """Original type: finding_hashes = { ... }""" +class GoSum: + """Original type: sca_parser_name = [ ... | Go_sum | ... 
]""" - start_line_hash: str - end_line_hash: str - code_hash: str - pattern_hash: str + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'GoSum' - @classmethod - def from_json(cls, x: Any) -> 'FindingHashes': - if isinstance(x, dict): - return cls( - start_line_hash=_atd_read_string(x['start_line_hash']) if 'start_line_hash' in x else _atd_missing_json_field('FindingHashes', 'start_line_hash'), - end_line_hash=_atd_read_string(x['end_line_hash']) if 'end_line_hash' in x else _atd_missing_json_field('FindingHashes', 'end_line_hash'), - code_hash=_atd_read_string(x['code_hash']) if 'code_hash' in x else _atd_missing_json_field('FindingHashes', 'code_hash'), - pattern_hash=_atd_read_string(x['pattern_hash']) if 'pattern_hash' in x else _atd_missing_json_field('FindingHashes', 'pattern_hash'), - ) - else: - _atd_bad_json('FindingHashes', x) + @staticmethod + def to_json() -> Any: + return 'go_sum' - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['start_line_hash'] = _atd_write_string(self.start_line_hash) - res['end_line_hash'] = _atd_write_string(self.end_line_hash) - res['code_hash'] = _atd_write_string(self.code_hash) - res['pattern_hash'] = _atd_write_string(self.pattern_hash) - return res + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) - @classmethod - def from_json_string(cls, x: str) -> 'FindingHashes': - return cls.from_json(json.loads(x)) + +@dataclass +class GradleLockfile_: + """Original type: sca_parser_name = [ ... | Gradle_lockfile | ... ]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'GradleLockfile_' + + @staticmethod + def to_json() -> Any: + return 'gradle_lockfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) @dataclass -class Finding: - """Original type: finding = { ... }""" +class GradleBuild: + """Original type: sca_parser_name = [ ... | Gradle_build | ... 
]""" - check_id: RuleId - path: Fpath - line: int - column: int - end_line: int - end_column: int - message: str - severity: Any - index: int - commit_date: str - syntactic_id: str - metadata: RawJson - is_blocking: bool - match_based_id: Optional[str] = None - hashes: Optional[FindingHashes] = None - fixed_lines: Optional[List[str]] = None - sca_info: Optional[ScaMatch] = None - dataflow_trace: Optional[MatchDataflowTrace] = None - validation_state: Optional[ValidationState] = None - historical_info: Optional[HistoricalInfo] = None - engine_kind: Optional[EngineOfFinding] = None + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'GradleBuild' - @classmethod - def from_json(cls, x: Any) -> 'Finding': - if isinstance(x, dict): - return cls( - check_id=RuleId.from_json(x['check_id']) if 'check_id' in x else _atd_missing_json_field('Finding', 'check_id'), - path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('Finding', 'path'), - line=_atd_read_int(x['line']) if 'line' in x else _atd_missing_json_field('Finding', 'line'), - column=_atd_read_int(x['column']) if 'column' in x else _atd_missing_json_field('Finding', 'column'), - end_line=_atd_read_int(x['end_line']) if 'end_line' in x else _atd_missing_json_field('Finding', 'end_line'), - end_column=_atd_read_int(x['end_column']) if 'end_column' in x else _atd_missing_json_field('Finding', 'end_column'), - message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('Finding', 'message'), - severity=(lambda x: x)(x['severity']) if 'severity' in x else _atd_missing_json_field('Finding', 'severity'), - index=_atd_read_int(x['index']) if 'index' in x else _atd_missing_json_field('Finding', 'index'), - commit_date=_atd_read_string(x['commit_date']) if 'commit_date' in x else _atd_missing_json_field('Finding', 'commit_date'), - syntactic_id=_atd_read_string(x['syntactic_id']) if 'syntactic_id' in x else _atd_missing_json_field('Finding', 'syntactic_id'), - metadata=RawJson.from_json(x['metadata']) if 'metadata' in x else _atd_missing_json_field('Finding', 'metadata'), - is_blocking=_atd_read_bool(x['is_blocking']) if 'is_blocking' in x else _atd_missing_json_field('Finding', 'is_blocking'), - match_based_id=_atd_read_string(x['match_based_id']) if 'match_based_id' in x else None, - hashes=FindingHashes.from_json(x['hashes']) if 'hashes' in x else None, - fixed_lines=_atd_read_list(_atd_read_string)(x['fixed_lines']) if 'fixed_lines' in x else None, - sca_info=ScaMatch.from_json(x['sca_info']) if 'sca_info' in x else None, - dataflow_trace=MatchDataflowTrace.from_json(x['dataflow_trace']) if 'dataflow_trace' in x else None, - validation_state=ValidationState.from_json(x['validation_state']) if 'validation_state' in x else None, - historical_info=HistoricalInfo.from_json(x['historical_info']) if 'historical_info' in x else None, - engine_kind=EngineOfFinding.from_json(x['engine_kind']) if 'engine_kind' in x else None, - ) - else: - _atd_bad_json('Finding', x) + @staticmethod + def to_json() -> Any: + return 'gradle_build' - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['check_id'] = (lambda x: x.to_json())(self.check_id) - res['path'] = (lambda x: x.to_json())(self.path) - res['line'] = _atd_write_int(self.line) - res['column'] = _atd_write_int(self.column) - res['end_line'] = _atd_write_int(self.end_line) - res['end_column'] = _atd_write_int(self.end_column) - res['message'] = _atd_write_string(self.message) - res['severity'] = (lambda x: 
x)(self.severity) - res['index'] = _atd_write_int(self.index) - res['commit_date'] = _atd_write_string(self.commit_date) - res['syntactic_id'] = _atd_write_string(self.syntactic_id) - res['metadata'] = (lambda x: x.to_json())(self.metadata) - res['is_blocking'] = _atd_write_bool(self.is_blocking) - if self.match_based_id is not None: - res['match_based_id'] = _atd_write_string(self.match_based_id) - if self.hashes is not None: - res['hashes'] = (lambda x: x.to_json())(self.hashes) - if self.fixed_lines is not None: - res['fixed_lines'] = _atd_write_list(_atd_write_string)(self.fixed_lines) - if self.sca_info is not None: - res['sca_info'] = (lambda x: x.to_json())(self.sca_info) - if self.dataflow_trace is not None: - res['dataflow_trace'] = (lambda x: x.to_json())(self.dataflow_trace) - if self.validation_state is not None: - res['validation_state'] = (lambda x: x.to_json())(self.validation_state) - if self.historical_info is not None: - res['historical_info'] = (lambda x: x.to_json())(self.historical_info) - if self.engine_kind is not None: - res['engine_kind'] = (lambda x: x.to_json())(self.engine_kind) - return res + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) - @classmethod - def from_json_string(cls, x: str) -> 'Finding': - return cls.from_json(json.loads(x)) + +@dataclass +class Jsondoc: + """Original type: sca_parser_name = [ ... | Jsondoc | ... ]""" + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'Jsondoc' + + @staticmethod + def to_json() -> Any: + return 'jsondoc' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class LexicalError: - """Original type: error_type = [ ... | LexicalError | ... ]""" +@dataclass +class Pipfile_: + """Original type: sca_parser_name = [ ... | Pipfile | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'LexicalError' + return 'Pipfile_' @staticmethod def to_json() -> Any: - return 'Lexical error' + return 'pipfile' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class ParseError: - """Original type: error_type = [ ... | ParseError | ... ]""" +@dataclass +class PnpmLock_: + """Original type: sca_parser_name = [ ... | Pnpm_lock | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'ParseError' + return 'PnpmLock_' @staticmethod def to_json() -> Any: - return 'Syntax error' + return 'pnpm_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class OtherParseError: - """Original type: error_type = [ ... | OtherParseError | ... ]""" +@dataclass +class PoetryLock_: + """Original type: sca_parser_name = [ ... | Poetry_lock | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'OtherParseError' + return 'PoetryLock_' @staticmethod def to_json() -> Any: - return 'Other syntax error' + return 'poetry_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class AstBuilderError: - """Original type: error_type = [ ... | AstBuilderError | ... ]""" +@dataclass +class PyprojectToml_: + """Original type: sca_parser_name = [ ... | Pyproject_toml | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'AstBuilderError' + return 'PyprojectToml_' @staticmethod def to_json() -> Any: - return 'AST builder error' + return 'pyproject_toml' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class RuleParseError: - """Original type: error_type = [ ... | RuleParseError | ... ]""" - +@dataclass +class Requirements: + """Original type: sca_parser_name = [ ... | Requirements | ... ]""" + @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'RuleParseError' + return 'Requirements' @staticmethod def to_json() -> Any: - return 'Rule parse error' + return 'requirements' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class SemgrepWarning: - """Original type: error_type = [ ... | SemgrepWarning | ... ]""" +@dataclass +class Yarn1: + """Original type: sca_parser_name = [ ... | Yarn_1 | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'SemgrepWarning' + return 'Yarn1' @staticmethod def to_json() -> Any: - return 'SemgrepWarning' + return 'yarn_1' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class SemgrepError: - """Original type: error_type = [ ... | SemgrepError | ... ]""" +@dataclass +class Yarn2: + """Original type: sca_parser_name = [ ... | Yarn_2 | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'SemgrepError' + return 'Yarn2' @staticmethod def to_json() -> Any: - return 'SemgrepError' + return 'yarn_2' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class InvalidRuleSchemaError: - """Original type: error_type = [ ... | InvalidRuleSchemaError | ... ]""" +@dataclass +class Pomtree: + """Original type: sca_parser_name = [ ... | Pomtree | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'InvalidRuleSchemaError' + return 'Pomtree' @staticmethod def to_json() -> Any: - return 'InvalidRuleSchemaError' + return 'pomtree' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class UnknownLanguageError: - """Original type: error_type = [ ... | UnknownLanguageError | ... ]""" +@dataclass +class CargoParser: + """Original type: sca_parser_name = [ ... | Cargo_parser | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'UnknownLanguageError' + return 'CargoParser' @staticmethod def to_json() -> Any: - return 'UnknownLanguageError' + return 'cargo' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class InvalidYaml: - """Original type: error_type = [ ... | InvalidYaml | ... ]""" +@dataclass +class ComposerLock_: + """Original type: sca_parser_name = [ ... | Composer_lock | ... 
]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'InvalidYaml' + return 'ComposerLock_' @staticmethod def to_json() -> Any: - return 'Invalid YAML' + return 'composer_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class MatchingError: - """Original type: error_type = [ ... | MatchingError | ... ]""" +@dataclass +class PubspecLock_: + """Original type: sca_parser_name = [ ... | Pubspec_lock | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'MatchingError' + return 'PubspecLock_' @staticmethod def to_json() -> Any: - return 'Internal matching error' + return 'pubspec_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class SemgrepMatchFound: - """Original type: error_type = [ ... | SemgrepMatchFound | ... ]""" +@dataclass +class PackageSwift_: + """Original type: sca_parser_name = [ ... | Package_swift | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'SemgrepMatchFound' + return 'PackageSwift_' @staticmethod def to_json() -> Any: - return 'Semgrep match found' + return 'package_swift' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class TooManyMatches_: - """Original type: error_type = [ ... | TooManyMatches | ... ]""" +@dataclass +class PodfileLock_: + """Original type: sca_parser_name = [ ... | Podfile_lock | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'TooManyMatches_' + return 'PodfileLock_' @staticmethod def to_json() -> Any: - return 'Too many matches' + return 'podfile_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class FatalError: - """Original type: error_type = [ ... | FatalError | ... ]""" +@dataclass +class PackageResolved: + """Original type: sca_parser_name = [ ... | Package_resolved | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'FatalError' + return 'PackageResolved' @staticmethod def to_json() -> Any: - return 'Fatal error' + return 'package_resolved' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class Timeout: - """Original type: error_type = [ ... | Timeout | ... ]""" +@dataclass +class MixLock_: + """Original type: sca_parser_name = [ ... | Mix_lock | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'Timeout' + return 'MixLock_' @staticmethod def to_json() -> Any: - return 'Timeout' + return 'mix_lock' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class OutOfMemory: - """Original type: error_type = [ ... | OutOfMemory | ... ]""" +@dataclass +class ScaParserName: + """Original type: sca_parser_name = [ ... 
]""" + + value: Union[GemfileLock_, GoMod2, GoSum, GradleLockfile_, GradleBuild, Jsondoc, Pipfile_, PnpmLock_, PoetryLock_, PyprojectToml_, Requirements, Yarn1, Yarn2, Pomtree, CargoParser, ComposerLock_, PubspecLock_, PackageSwift_, PodfileLock_, PackageResolved, MixLock_] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'OutOfMemory' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'Out of memory' + @classmethod + def from_json(cls, x: Any) -> 'ScaParserName': + if isinstance(x, str): + if x == 'gemfile_lock': + return cls(GemfileLock_()) + if x == 'go_mod': + return cls(GoMod2()) + if x == 'go_sum': + return cls(GoSum()) + if x == 'gradle_lockfile': + return cls(GradleLockfile_()) + if x == 'gradle_build': + return cls(GradleBuild()) + if x == 'jsondoc': + return cls(Jsondoc()) + if x == 'pipfile': + return cls(Pipfile_()) + if x == 'pnpm_lock': + return cls(PnpmLock_()) + if x == 'poetry_lock': + return cls(PoetryLock_()) + if x == 'pyproject_toml': + return cls(PyprojectToml_()) + if x == 'requirements': + return cls(Requirements()) + if x == 'yarn_1': + return cls(Yarn1()) + if x == 'yarn_2': + return cls(Yarn2()) + if x == 'pomtree': + return cls(Pomtree()) + if x == 'cargo': + return cls(CargoParser()) + if x == 'composer_lock': + return cls(ComposerLock_()) + if x == 'pubspec_lock': + return cls(PubspecLock_()) + if x == 'package_swift': + return cls(PackageSwift_()) + if x == 'podfile_lock': + return cls(PodfileLock_()) + if x == 'package_resolved': + return cls(PackageResolved()) + if x == 'mix_lock': + return cls(MixLock_()) + _atd_bad_json('ScaParserName', x) + _atd_bad_json('ScaParserName', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ScaParserName': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class StackOverflow: - """Original type: error_type = [ ... | StackOverflow | ... ]""" +@dataclass(frozen=True) +class SarifFormat: + """Original type: sarif_format = { ... }""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'StackOverflow' + rules: Fpath + is_pro: bool + show_dataflow_traces: bool - @staticmethod - def to_json() -> Any: - return 'Stack overflow' + @classmethod + def from_json(cls, x: Any) -> 'SarifFormat': + if isinstance(x, dict): + return cls( + rules=Fpath.from_json(x['rules']) if 'rules' in x else _atd_missing_json_field('SarifFormat', 'rules'), + is_pro=_atd_read_bool(x['is_pro']) if 'is_pro' in x else _atd_missing_json_field('SarifFormat', 'is_pro'), + show_dataflow_traces=_atd_read_bool(x['show_dataflow_traces']) if 'show_dataflow_traces' in x else _atd_missing_json_field('SarifFormat', 'show_dataflow_traces'), + ) + else: + _atd_bad_json('SarifFormat', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['rules'] = (lambda x: x.to_json())(self.rules) + res['is_pro'] = _atd_write_bool(self.is_pro) + res['show_dataflow_traces'] = _atd_write_bool(self.show_dataflow_traces) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'SarifFormat': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class TimeoutDuringInterfile: - """Original type: error_type = [ ... | TimeoutDuringInterfile | ... 
]""" +@dataclass(frozen=True) +class OSS_: + """Original type: engine_kind = [ ... | OSS | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'TimeoutDuringInterfile' + return 'OSS_' @staticmethod def to_json() -> Any: - return 'Timeout during interfile analysis' + return 'OSS' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class OutOfMemoryDuringInterfile: - """Original type: error_type = [ ... | OutOfMemoryDuringInterfile | ... ]""" +@dataclass(frozen=True) +class PRO_: + """Original type: engine_kind = [ ... | PRO | ... ]""" @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'OutOfMemoryDuringInterfile' + return 'PRO_' @staticmethod def to_json() -> Any: - return 'OOM during interfile analysis' + return 'PRO' def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class MissingPlugin: - """Original type: error_type = [ ... | MissingPlugin | ... ]""" +@dataclass(frozen=True) +class EngineKind: + """Original type: engine_kind = [ ... ]""" + + value: Union[OSS_, PRO_] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'MissingPlugin' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'Missing plugin' + @classmethod + def from_json(cls, x: Any) -> 'EngineKind': + if isinstance(x, str): + if x == 'OSS': + return cls(OSS_()) + if x == 'PRO': + return cls(PRO_()) + _atd_bad_json('EngineKind', x) + _atd_bad_json('EngineKind', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'EngineKind': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class PatternParseError: - """Original type: error_type = [ ... | PatternParseError of ... | ... ]""" +@dataclass(frozen=True) +class RuleIdAndEngineKind: + """Original type: rule_id_and_engine_kind""" - value: List[str] + value: Tuple[RuleId, EngineKind] - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'PatternParseError' + @classmethod + def from_json(cls, x: Any) -> 'RuleIdAndEngineKind': + return cls((lambda x: (RuleId.from_json(x[0]), EngineKind.from_json(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x))(x)) def to_json(self) -> Any: - return ['PatternParseError', _atd_write_list(_atd_write_string)(self.value)] + return (lambda x: [(lambda x: x.to_json())(x[0]), (lambda x: x.to_json())(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x))(self.value) + + @classmethod + def from_json_string(cls, x: str) -> 'RuleIdAndEngineKind': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class PartialParsing: - """Original type: error_type = [ ... | PartialParsing of ... | ... ]""" +@dataclass +class ResolutionOk: + """Original type: resolution_result = [ ... | ResolutionOk of ... | ... 
]""" - value: List[Location] + value: Tuple[List[FoundDependency], List[ResolutionError]] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PartialParsing' + return 'ResolutionOk' def to_json(self) -> Any: - return ['PartialParsing', _atd_write_list((lambda x: x.to_json()))(self.value)] + return ['ResolutionOk', (lambda x: [_atd_write_list((lambda x: x.to_json()))(x[0]), _atd_write_list((lambda x: x.to_json()))(x[1])] if isinstance(x, tuple) and len(x) == 2 else _atd_bad_python('tuple of length 2', x))(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class IncompatibleRule_: - """Original type: error_type = [ ... | IncompatibleRule of ... | ... ]""" +@dataclass +class ResolutionError_: + """Original type: resolution_result = [ ... | ResolutionError of ... | ... ]""" - value: IncompatibleRule + value: List[ResolutionError] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'IncompatibleRule_' + return 'ResolutionError_' def to_json(self) -> Any: - return ['IncompatibleRule', (lambda x: x.to_json())(self.value)] + return ['ResolutionError', _atd_write_list((lambda x: x.to_json()))(self.value)] def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class PatternParseError0: - """Original type: error_type = [ ... | PatternParseError0 | ... ]""" +@dataclass +class ResolutionResult: + """Original type: resolution_result = [ ... ]""" + + value: Union[ResolutionOk, ResolutionError_] @property def kind(self) -> str: """Name of the class representing this variant.""" - return 'PatternParseError0' + return self.value.kind - @staticmethod - def to_json() -> Any: - return 'Pattern parse error' + @classmethod + def from_json(cls, x: Any) -> 'ResolutionResult': + if isinstance(x, List) and len(x) == 2: + cons = x[0] + if cons == 'ResolutionOk': + return cls(ResolutionOk((lambda x: (_atd_read_list(FoundDependency.from_json)(x[0]), _atd_read_list(ResolutionError.from_json)(x[1])) if isinstance(x, list) and len(x) == 2 else _atd_bad_json('array of length 2', x))(x[1]))) + if cons == 'ResolutionError': + return cls(ResolutionError_(_atd_read_list(ResolutionError.from_json)(x[1]))) + _atd_bad_json('ResolutionResult', x) + _atd_bad_json('ResolutionResult', x) + + def to_json(self) -> Any: + return self.value.to_json() + + @classmethod + def from_json_string(cls, x: str) -> 'ResolutionResult': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class IncompatibleRule0: - """Original type: error_type = [ ... | IncompatibleRule0 | ... ]""" +@dataclass +class Profile: + """Original type: profile = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'IncompatibleRule0' + rules: List[RuleId] + rules_parse_time: float + profiling_times: Dict[str, float] + targets: List[TargetTimes] + total_bytes: int + max_memory_bytes: Optional[int] = None - @staticmethod - def to_json() -> Any: - return 'Incompatible rule' + @classmethod + def from_json(cls, x: Any) -> 'Profile': + if isinstance(x, dict): + return cls( + rules=_atd_read_list(RuleId.from_json)(x['rules']) if 'rules' in x else _atd_missing_json_field('Profile', 'rules'), + rules_parse_time=_atd_read_float(x['rules_parse_time']) if 'rules_parse_time' in x else _atd_missing_json_field('Profile', 'rules_parse_time'), + profiling_times=_atd_read_assoc_object_into_dict(_atd_read_float)(x['profiling_times']) if 'profiling_times' in x else _atd_missing_json_field('Profile', 'profiling_times'), + targets=_atd_read_list(TargetTimes.from_json)(x['targets']) if 'targets' in x else _atd_missing_json_field('Profile', 'targets'), + total_bytes=_atd_read_int(x['total_bytes']) if 'total_bytes' in x else _atd_missing_json_field('Profile', 'total_bytes'), + max_memory_bytes=_atd_read_int(x['max_memory_bytes']) if 'max_memory_bytes' in x else None, + ) + else: + _atd_bad_json('Profile', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['rules'] = _atd_write_list((lambda x: x.to_json()))(self.rules) + res['rules_parse_time'] = _atd_write_float(self.rules_parse_time) + res['profiling_times'] = _atd_write_assoc_dict_to_object(_atd_write_float)(self.profiling_times) + res['targets'] = _atd_write_list((lambda x: x.to_json()))(self.targets) + res['total_bytes'] = _atd_write_int(self.total_bytes) + if self.max_memory_bytes is not None: + res['max_memory_bytes'] = _atd_write_int(self.max_memory_bytes) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'Profile': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class DependencyResolutionError: - """Original type: error_type = [ ... | DependencyResolutionError of ... | ... ]""" +@dataclass +class ParsingStats: + """Original type: parsing_stats = { ... 
}""" - value: ResolutionError + targets_parsed: int + num_targets: int + bytes_parsed: int + num_bytes: int - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'DependencyResolutionError' + @classmethod + def from_json(cls, x: Any) -> 'ParsingStats': + if isinstance(x, dict): + return cls( + targets_parsed=_atd_read_int(x['targets_parsed']) if 'targets_parsed' in x else _atd_missing_json_field('ParsingStats', 'targets_parsed'), + num_targets=_atd_read_int(x['num_targets']) if 'num_targets' in x else _atd_missing_json_field('ParsingStats', 'num_targets'), + bytes_parsed=_atd_read_int(x['bytes_parsed']) if 'bytes_parsed' in x else _atd_missing_json_field('ParsingStats', 'bytes_parsed'), + num_bytes=_atd_read_int(x['num_bytes']) if 'num_bytes' in x else _atd_missing_json_field('ParsingStats', 'num_bytes'), + ) + else: + _atd_bad_json('ParsingStats', x) def to_json(self) -> Any: - return ['DependencyResolutionError', (lambda x: x.to_json())(self.value)] + res: Dict[str, Any] = {} + res['targets_parsed'] = _atd_write_int(self.targets_parsed) + res['num_targets'] = _atd_write_int(self.num_targets) + res['bytes_parsed'] = _atd_write_int(self.bytes_parsed) + res['num_bytes'] = _atd_write_int(self.num_bytes) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'ParsingStats': + return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True, order=True) -class ErrorType: - """Original type: error_type = [ ... ]""" - - value: Union[LexicalError, ParseError, OtherParseError, AstBuilderError, RuleParseError, SemgrepWarning, SemgrepError, InvalidRuleSchemaError, UnknownLanguageError, InvalidYaml, MatchingError, SemgrepMatchFound, TooManyMatches_, FatalError, Timeout, OutOfMemory, StackOverflow, TimeoutDuringInterfile, OutOfMemoryDuringInterfile, MissingPlugin, PatternParseError, PartialParsing, IncompatibleRule_, PatternParseError0, IncompatibleRule0, DependencyResolutionError] +@dataclass +class FindingHashes: + """Original type: finding_hashes = { ... 
}""" - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return self.value.kind + start_line_hash: str + end_line_hash: str + code_hash: str + pattern_hash: str @classmethod - def from_json(cls, x: Any) -> 'ErrorType': - if isinstance(x, str): - if x == 'Lexical error': - return cls(LexicalError()) - if x == 'Syntax error': - return cls(ParseError()) - if x == 'Other syntax error': - return cls(OtherParseError()) - if x == 'AST builder error': - return cls(AstBuilderError()) - if x == 'Rule parse error': - return cls(RuleParseError()) - if x == 'SemgrepWarning': - return cls(SemgrepWarning()) - if x == 'SemgrepError': - return cls(SemgrepError()) - if x == 'InvalidRuleSchemaError': - return cls(InvalidRuleSchemaError()) - if x == 'UnknownLanguageError': - return cls(UnknownLanguageError()) - if x == 'Invalid YAML': - return cls(InvalidYaml()) - if x == 'Internal matching error': - return cls(MatchingError()) - if x == 'Semgrep match found': - return cls(SemgrepMatchFound()) - if x == 'Too many matches': - return cls(TooManyMatches_()) - if x == 'Fatal error': - return cls(FatalError()) - if x == 'Timeout': - return cls(Timeout()) - if x == 'Out of memory': - return cls(OutOfMemory()) - if x == 'Stack overflow': - return cls(StackOverflow()) - if x == 'Timeout during interfile analysis': - return cls(TimeoutDuringInterfile()) - if x == 'OOM during interfile analysis': - return cls(OutOfMemoryDuringInterfile()) - if x == 'Missing plugin': - return cls(MissingPlugin()) - if x == 'Pattern parse error': - return cls(PatternParseError0()) - if x == 'Incompatible rule': - return cls(IncompatibleRule0()) - _atd_bad_json('ErrorType', x) - if isinstance(x, List) and len(x) == 2: - cons = x[0] - if cons == 'PatternParseError': - return cls(PatternParseError(_atd_read_list(_atd_read_string)(x[1]))) - if cons == 'PartialParsing': - return cls(PartialParsing(_atd_read_list(Location.from_json)(x[1]))) - if cons == 'IncompatibleRule': - return cls(IncompatibleRule_(IncompatibleRule.from_json(x[1]))) - if cons == 'DependencyResolutionError': - return cls(DependencyResolutionError(ResolutionError.from_json(x[1]))) - _atd_bad_json('ErrorType', x) - _atd_bad_json('ErrorType', x) + def from_json(cls, x: Any) -> 'FindingHashes': + if isinstance(x, dict): + return cls( + start_line_hash=_atd_read_string(x['start_line_hash']) if 'start_line_hash' in x else _atd_missing_json_field('FindingHashes', 'start_line_hash'), + end_line_hash=_atd_read_string(x['end_line_hash']) if 'end_line_hash' in x else _atd_missing_json_field('FindingHashes', 'end_line_hash'), + code_hash=_atd_read_string(x['code_hash']) if 'code_hash' in x else _atd_missing_json_field('FindingHashes', 'code_hash'), + pattern_hash=_atd_read_string(x['pattern_hash']) if 'pattern_hash' in x else _atd_missing_json_field('FindingHashes', 'pattern_hash'), + ) + else: + _atd_bad_json('FindingHashes', x) + + def to_json(self) -> Any: + res: Dict[str, Any] = {} + res['start_line_hash'] = _atd_write_string(self.start_line_hash) + res['end_line_hash'] = _atd_write_string(self.end_line_hash) + res['code_hash'] = _atd_write_string(self.code_hash) + res['pattern_hash'] = _atd_write_string(self.pattern_hash) + return res + + @classmethod + def from_json_string(cls, x: str) -> 'FindingHashes': + return cls.from_json(json.loads(x)) + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + +@dataclass +class Finding: + """Original type: finding = { ... 
}""" + + check_id: RuleId + path: Fpath + line: int + column: int + end_line: int + end_column: int + message: str + severity: Any + index: int + commit_date: str + syntactic_id: str + metadata: RawJson + is_blocking: bool + match_based_id: Optional[str] = None + hashes: Optional[FindingHashes] = None + fixed_lines: Optional[List[str]] = None + sca_info: Optional[ScaMatch] = None + dataflow_trace: Optional[MatchDataflowTrace] = None + validation_state: Optional[ValidationState] = None + historical_info: Optional[HistoricalInfo] = None + engine_kind: Optional[EngineOfFinding] = None + + @classmethod + def from_json(cls, x: Any) -> 'Finding': + if isinstance(x, dict): + return cls( + check_id=RuleId.from_json(x['check_id']) if 'check_id' in x else _atd_missing_json_field('Finding', 'check_id'), + path=Fpath.from_json(x['path']) if 'path' in x else _atd_missing_json_field('Finding', 'path'), + line=_atd_read_int(x['line']) if 'line' in x else _atd_missing_json_field('Finding', 'line'), + column=_atd_read_int(x['column']) if 'column' in x else _atd_missing_json_field('Finding', 'column'), + end_line=_atd_read_int(x['end_line']) if 'end_line' in x else _atd_missing_json_field('Finding', 'end_line'), + end_column=_atd_read_int(x['end_column']) if 'end_column' in x else _atd_missing_json_field('Finding', 'end_column'), + message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('Finding', 'message'), + severity=(lambda x: x)(x['severity']) if 'severity' in x else _atd_missing_json_field('Finding', 'severity'), + index=_atd_read_int(x['index']) if 'index' in x else _atd_missing_json_field('Finding', 'index'), + commit_date=_atd_read_string(x['commit_date']) if 'commit_date' in x else _atd_missing_json_field('Finding', 'commit_date'), + syntactic_id=_atd_read_string(x['syntactic_id']) if 'syntactic_id' in x else _atd_missing_json_field('Finding', 'syntactic_id'), + metadata=RawJson.from_json(x['metadata']) if 'metadata' in x else _atd_missing_json_field('Finding', 'metadata'), + is_blocking=_atd_read_bool(x['is_blocking']) if 'is_blocking' in x else _atd_missing_json_field('Finding', 'is_blocking'), + match_based_id=_atd_read_string(x['match_based_id']) if 'match_based_id' in x else None, + hashes=FindingHashes.from_json(x['hashes']) if 'hashes' in x else None, + fixed_lines=_atd_read_list(_atd_read_string)(x['fixed_lines']) if 'fixed_lines' in x else None, + sca_info=ScaMatch.from_json(x['sca_info']) if 'sca_info' in x else None, + dataflow_trace=MatchDataflowTrace.from_json(x['dataflow_trace']) if 'dataflow_trace' in x else None, + validation_state=ValidationState.from_json(x['validation_state']) if 'validation_state' in x else None, + historical_info=HistoricalInfo.from_json(x['historical_info']) if 'historical_info' in x else None, + engine_kind=EngineOfFinding.from_json(x['engine_kind']) if 'engine_kind' in x else None, + ) + else: + _atd_bad_json('Finding', x) def to_json(self) -> Any: - return self.value.to_json() + res: Dict[str, Any] = {} + res['check_id'] = (lambda x: x.to_json())(self.check_id) + res['path'] = (lambda x: x.to_json())(self.path) + res['line'] = _atd_write_int(self.line) + res['column'] = _atd_write_int(self.column) + res['end_line'] = _atd_write_int(self.end_line) + res['end_column'] = _atd_write_int(self.end_column) + res['message'] = _atd_write_string(self.message) + res['severity'] = (lambda x: x)(self.severity) + res['index'] = _atd_write_int(self.index) + res['commit_date'] = _atd_write_string(self.commit_date) + res['syntactic_id'] = 
_atd_write_string(self.syntactic_id) + res['metadata'] = (lambda x: x.to_json())(self.metadata) + res['is_blocking'] = _atd_write_bool(self.is_blocking) + if self.match_based_id is not None: + res['match_based_id'] = _atd_write_string(self.match_based_id) + if self.hashes is not None: + res['hashes'] = (lambda x: x.to_json())(self.hashes) + if self.fixed_lines is not None: + res['fixed_lines'] = _atd_write_list(_atd_write_string)(self.fixed_lines) + if self.sca_info is not None: + res['sca_info'] = (lambda x: x.to_json())(self.sca_info) + if self.dataflow_trace is not None: + res['dataflow_trace'] = (lambda x: x.to_json())(self.dataflow_trace) + if self.validation_state is not None: + res['validation_state'] = (lambda x: x.to_json())(self.validation_state) + if self.historical_info is not None: + res['historical_info'] = (lambda x: x.to_json())(self.historical_info) + if self.engine_kind is not None: + res['engine_kind'] = (lambda x: x.to_json())(self.engine_kind) + return res @classmethod - def from_json_string(cls, x: str) -> 'ErrorType': + def from_json_string(cls, x: str) -> 'Finding': return cls.from_json(json.loads(x)) def to_json_string(self, **kw: Any) -> str: @@ -7623,91 +7788,6 @@ def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class Error_: - """Original type: error_severity = [ ... | Error | ... ]""" - - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Error_' - - @staticmethod - def to_json() -> Any: - return 'error' - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass(frozen=True) -class Warning_: - """Original type: error_severity = [ ... | Warning | ... ]""" - - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Warning_' - - @staticmethod - def to_json() -> Any: - return 'warn' - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass(frozen=True) -class Info_: - """Original type: error_severity = [ ... | Info | ... ]""" - - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return 'Info_' - - @staticmethod - def to_json() -> Any: - return 'info' - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - -@dataclass(frozen=True) -class ErrorSeverity: - """Original type: error_severity = [ ... ]""" - - value: Union[Error_, Warning_, Info_] - - @property - def kind(self) -> str: - """Name of the class representing this variant.""" - return self.value.kind - - @classmethod - def from_json(cls, x: Any) -> 'ErrorSeverity': - if isinstance(x, str): - if x == 'error': - return cls(Error_()) - if x == 'warn': - return cls(Warning_()) - if x == 'info': - return cls(Info_()) - _atd_bad_json('ErrorSeverity', x) - _atd_bad_json('ErrorSeverity', x) - - def to_json(self) -> Any: - return self.value.to_json() - - @classmethod - def from_json_string(cls, x: str) -> 'ErrorSeverity': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - @dataclass class DependencyParserError: """Original type: dependency_parser_error = { ... }""" @@ -8783,11 +8863,29 @@ def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) +@dataclass(frozen=True) +class RetGetTargets: + """Original type: function_return = [ ... | RetGetTargets of ... | ... 
]""" + + value: TargetDiscoveryResult + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'RetGetTargets' + + def to_json(self) -> Any: + return ['RetGetTargets', (lambda x: x.to_json())(self.value)] + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + @dataclass(frozen=True) class FunctionReturn: """Original type: function_return = [ ... ]""" - value: Union[RetError, RetApplyFixes, RetContributions, RetFormatter, RetSarifFormat, RetValidate, RetResolveDependencies, RetUploadSymbolAnalysis, RetDumpRulePartitions, RetTransitiveReachabilityFilter] + value: Union[RetError, RetApplyFixes, RetContributions, RetFormatter, RetSarifFormat, RetValidate, RetResolveDependencies, RetUploadSymbolAnalysis, RetDumpRulePartitions, RetTransitiveReachabilityFilter, RetGetTargets] @property def kind(self) -> str: @@ -8818,6 +8916,8 @@ def from_json(cls, x: Any) -> 'FunctionReturn': return cls(RetDumpRulePartitions(_atd_read_bool(x[1]))) if cons == 'RetTransitiveReachabilityFilter': return cls(RetTransitiveReachabilityFilter(_atd_read_list(TransitiveFinding.from_json)(x[1]))) + if cons == 'RetGetTargets': + return cls(RetGetTargets(TargetDiscoveryResult.from_json(x[1]))) _atd_bad_json('FunctionReturn', x) _atd_bad_json('FunctionReturn', x) @@ -9310,11 +9410,29 @@ def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) +@dataclass(frozen=True) +class CallGetTargets: + """Original type: function_call = [ ... | CallGetTargets of ... | ... ]""" + + value: ScanningRoots + + @property + def kind(self) -> str: + """Name of the class representing this variant.""" + return 'CallGetTargets' + + def to_json(self) -> Any: + return ['CallGetTargets', (lambda x: x.to_json())(self.value)] + + def to_json_string(self, **kw: Any) -> str: + return json.dumps(self.to_json(), **kw) + + @dataclass(frozen=True) class FunctionCall: """Original type: function_call = [ ... ]""" - value: Union[CallContributions, CallApplyFixes, CallFormatter, CallSarifFormat, CallValidate, CallResolveDependencies, CallUploadSymbolAnalysis, CallDumpRulePartitions, CallTransitiveReachabilityFilter] + value: Union[CallContributions, CallApplyFixes, CallFormatter, CallSarifFormat, CallValidate, CallResolveDependencies, CallUploadSymbolAnalysis, CallDumpRulePartitions, CallTransitiveReachabilityFilter, CallGetTargets] @property def kind(self) -> str: @@ -9345,6 +9463,8 @@ def from_json(cls, x: Any) -> 'FunctionCall': return cls(CallDumpRulePartitions(DumpRulePartitionsParams.from_json(x[1]))) if cons == 'CallTransitiveReachabilityFilter': return cls(CallTransitiveReachabilityFilter(_atd_read_list(TransitiveFinding.from_json)(x[1]))) + if cons == 'CallGetTargets': + return cls(CallGetTargets(ScanningRoots.from_json(x[1]))) _atd_bad_json('FunctionCall', x) _atd_bad_json('FunctionCall', x) @@ -9582,52 +9702,6 @@ def to_json_string(self, **kw: Any) -> str: return json.dumps(self.to_json(), **kw) -@dataclass(frozen=True) -class CoreError: - """Original type: core_error = { ... 
}""" - - error_type: ErrorType - severity: ErrorSeverity - message: str - details: Optional[str] = None - location: Optional[Location] = None - rule_id: Optional[RuleId] = None - - @classmethod - def from_json(cls, x: Any) -> 'CoreError': - if isinstance(x, dict): - return cls( - error_type=ErrorType.from_json(x['error_type']) if 'error_type' in x else _atd_missing_json_field('CoreError', 'error_type'), - severity=ErrorSeverity.from_json(x['severity']) if 'severity' in x else _atd_missing_json_field('CoreError', 'severity'), - message=_atd_read_string(x['message']) if 'message' in x else _atd_missing_json_field('CoreError', 'message'), - details=_atd_read_string(x['details']) if 'details' in x else None, - location=Location.from_json(x['location']) if 'location' in x else None, - rule_id=RuleId.from_json(x['rule_id']) if 'rule_id' in x else None, - ) - else: - _atd_bad_json('CoreError', x) - - def to_json(self) -> Any: - res: Dict[str, Any] = {} - res['error_type'] = (lambda x: x.to_json())(self.error_type) - res['severity'] = (lambda x: x.to_json())(self.severity) - res['message'] = _atd_write_string(self.message) - if self.details is not None: - res['details'] = _atd_write_string(self.details) - if self.location is not None: - res['location'] = (lambda x: x.to_json())(self.location) - if self.rule_id is not None: - res['rule_id'] = (lambda x: x.to_json())(self.rule_id) - return res - - @classmethod - def from_json_string(cls, x: str) -> 'CoreError': - return cls.from_json(json.loads(x)) - - def to_json_string(self, **kw: Any) -> str: - return json.dumps(self.to_json(), **kw) - - @dataclass class CoreOutput: """Original type: core_output = { ... }""" diff --git a/semgrep_output_v1.ts b/semgrep_output_v1.ts index 720b257..f8a4bd7 100644 --- a/semgrep_output_v1.ts +++ b/semgrep_output_v1.ts @@ -355,6 +355,12 @@ export type SkippedRule = { position: Position; } +export type TargetDiscoveryResult = { + target_paths: Fpath[]; + errors: CoreError[]; + skipped: SkippedTarget[]; +} + export type Profile = { rules: RuleId[]; rules_parse_time: number; @@ -1052,6 +1058,7 @@ export type FunctionCall = | { kind: 'CallUploadSymbolAnalysis'; value: [string, number /*int*/, SymbolAnalysis] } | { kind: 'CallDumpRulePartitions'; value: DumpRulePartitionsParams } | { kind: 'CallTransitiveReachabilityFilter'; value: TransitiveFinding[] } +| { kind: 'CallGetTargets'; value: ScanningRoots } export type FunctionReturn = | { kind: 'RetError'; value: string } @@ -1064,6 +1071,7 @@ export type FunctionReturn = | { kind: 'RetUploadSymbolAnalysis'; value: string } | { kind: 'RetDumpRulePartitions'; value: boolean } | { kind: 'RetTransitiveReachabilityFilter'; value: TransitiveFinding[] } +| { kind: 'RetGetTargets'; value: TargetDiscoveryResult } export type PartialScanResult = | { kind: 'PartialScanOk'; value: [CiScanResults, CiScanComplete] } @@ -2279,6 +2287,22 @@ export function readSkippedRule(x: any, context: any = x): SkippedRule { }; } +export function writeTargetDiscoveryResult(x: TargetDiscoveryResult, context: any = x): any { + return { + 'target_paths': _atd_write_required_field('TargetDiscoveryResult', 'target_paths', _atd_write_array(writeFpath), x.target_paths, x), + 'errors': _atd_write_required_field('TargetDiscoveryResult', 'errors', _atd_write_array(writeCoreError), x.errors, x), + 'skipped': _atd_write_required_field('TargetDiscoveryResult', 'skipped', _atd_write_array(writeSkippedTarget), x.skipped, x), + }; +} + +export function readTargetDiscoveryResult(x: any, context: any = x): 
TargetDiscoveryResult { + return { + target_paths: _atd_read_required_field('TargetDiscoveryResult', 'target_paths', _atd_read_array(readFpath), x['target_paths'], x), + errors: _atd_read_required_field('TargetDiscoveryResult', 'errors', _atd_read_array(readCoreError), x['errors'], x), + skipped: _atd_read_required_field('TargetDiscoveryResult', 'skipped', _atd_read_array(readSkippedTarget), x['skipped'], x), + }; +} + export function writeProfile(x: Profile, context: any = x): any { return { 'rules': _atd_write_required_field('Profile', 'rules', _atd_write_array(writeRuleId), x.rules, x), @@ -4371,6 +4395,8 @@ export function writeFunctionCall(x: FunctionCall, context: any = x): any { return ['CallDumpRulePartitions', writeDumpRulePartitionsParams(x.value, x)] case 'CallTransitiveReachabilityFilter': return ['CallTransitiveReachabilityFilter', _atd_write_array(writeTransitiveFinding)(x.value, x)] + case 'CallGetTargets': + return ['CallGetTargets', writeScanningRoots(x.value, x)] } } @@ -4403,6 +4429,8 @@ export function readFunctionCall(x: any, context: any = x): FunctionCall { return { kind: 'CallDumpRulePartitions', value: readDumpRulePartitionsParams(x[1], x) } case 'CallTransitiveReachabilityFilter': return { kind: 'CallTransitiveReachabilityFilter', value: _atd_read_array(readTransitiveFinding)(x[1], x) } + case 'CallGetTargets': + return { kind: 'CallGetTargets', value: readScanningRoots(x[1], x) } default: _atd_bad_json('FunctionCall', x, context) throw new Error('impossible') @@ -4432,6 +4460,8 @@ export function writeFunctionReturn(x: FunctionReturn, context: any = x): any { return ['RetDumpRulePartitions', _atd_write_bool(x.value, x)] case 'RetTransitiveReachabilityFilter': return ['RetTransitiveReachabilityFilter', _atd_write_array(writeTransitiveFinding)(x.value, x)] + case 'RetGetTargets': + return ['RetGetTargets', writeTargetDiscoveryResult(x.value, x)] } } @@ -4458,6 +4488,8 @@ export function readFunctionReturn(x: any, context: any = x): FunctionReturn { return { kind: 'RetDumpRulePartitions', value: _atd_read_bool(x[1], x) } case 'RetTransitiveReachabilityFilter': return { kind: 'RetTransitiveReachabilityFilter', value: _atd_read_array(readTransitiveFinding)(x[1], x) } + case 'RetGetTargets': + return { kind: 'RetGetTargets', value: readTargetDiscoveryResult(x[1], x) } default: _atd_bad_json('FunctionReturn', x, context) throw new Error('impossible') diff --git a/semgrep_output_v1_j.ml b/semgrep_output_v1_j.ml index 618f10f..54e63d1 100644 --- a/semgrep_output_v1_j.ml +++ b/semgrep_output_v1_j.ml @@ -345,6 +345,89 @@ type target_times = Semgrep_output_v1_t.target_times = { run_time: float } +type skip_reason = Semgrep_output_v1_t.skip_reason = + Always_skipped | Semgrepignore_patterns_match + | Cli_include_flags_do_not_match | Cli_exclude_flags_match + | Exceeded_size_limit | Analysis_failed_parser_or_internal_error + | Excluded_by_config | Wrong_language | Too_big | Minified | Binary + | Irrelevant_rule | Too_many_matches | Gitignore_patterns_match | Dotfile + | Nonexistent_file | Insufficient_permissions + + [@@deriving show] + +type skipped_target = Semgrep_output_v1_t.skipped_target = { + path: fpath; + reason: skip_reason; + details: string option; + rule_id: rule_id option +} + [@@deriving show] + +type resolution_cmd_failed = Semgrep_output_v1_t.resolution_cmd_failed = { + command: string; + message: string +} + [@@deriving show] + +type resolution_error = Semgrep_output_v1_t.resolution_error + [@@deriving show] + +type incompatible_rule = 
Semgrep_output_v1_t.incompatible_rule = { + rule_id: rule_id; + this_version: version; + min_version: version option; + max_version: version option +} + [@@deriving show] + +type error_type = Semgrep_output_v1_t.error_type = + LexicalError + | ParseError + | OtherParseError + | AstBuilderError + | RuleParseError + | SemgrepWarning + | SemgrepError + | InvalidRuleSchemaError + | UnknownLanguageError + | InvalidYaml + | MatchingError + | SemgrepMatchFound + | TooManyMatches + | FatalError + | Timeout + | OutOfMemory + | StackOverflow + | TimeoutDuringInterfile + | OutOfMemoryDuringInterfile + | MissingPlugin + | PatternParseError of string list + | PartialParsing of location list + | IncompatibleRule of incompatible_rule + | PatternParseError0 + | IncompatibleRule0 + | DependencyResolutionError of resolution_error + + [@@deriving show] + +type error_severity = Semgrep_output_v1_t.error_severity + [@@deriving show, eq] + +type core_error = Semgrep_output_v1_t.core_error = { + error_type: error_type; + severity: error_severity; + message: string; + details: string option; + location: location option; + rule_id: rule_id option +} + +type target_discovery_result = Semgrep_output_v1_t.target_discovery_result = { + target_paths: fpath list; + errors: core_error list; + skipped: skipped_target list +} + type tag = Semgrep_output_v1_t.tag type symbol = Semgrep_output_v1_t.symbol = { fqn: string list } @@ -389,24 +472,6 @@ type supply_chain_stats = Semgrep_output_v1_t.supply_chain_stats = { subprojects_stats: subproject_stats list } -type skip_reason = Semgrep_output_v1_t.skip_reason = - Always_skipped | Semgrepignore_patterns_match - | Cli_include_flags_do_not_match | Cli_exclude_flags_match - | Exceeded_size_limit | Analysis_failed_parser_or_internal_error - | Excluded_by_config | Wrong_language | Too_big | Minified | Binary - | Irrelevant_rule | Too_many_matches | Gitignore_patterns_match | Dotfile - | Nonexistent_file | Insufficient_permissions - - [@@deriving show] - -type skipped_target = Semgrep_output_v1_t.skipped_target = { - path: fpath; - reason: skip_reason; - details: string option; - rule_id: rule_id option -} - [@@deriving show] - type skipped_rule = Semgrep_output_v1_t.skipped_rule = { rule_id: rule_id; details: string; @@ -563,15 +628,6 @@ type engine_kind = Semgrep_output_v1_t.engine_kind [@@deriving show] type rule_id_and_engine_kind = Semgrep_output_v1_t.rule_id_and_engine_kind -type resolution_cmd_failed = Semgrep_output_v1_t.resolution_cmd_failed = { - command: string; - message: string -} - [@@deriving show] - -type resolution_error = Semgrep_output_v1_t.resolution_error - [@@deriving show] - type resolution_result = Semgrep_output_v1_t.resolution_result type profile = Semgrep_output_v1_t.profile = { @@ -590,14 +646,6 @@ type parsing_stats = Semgrep_output_v1_t.parsing_stats = { num_bytes: int } -type incompatible_rule = Semgrep_output_v1_t.incompatible_rule = { - rule_id: rule_id; - this_version: version; - min_version: version option; - max_version: version option -} - [@@deriving show] - type finding_hashes = Semgrep_output_v1_t.finding_hashes = { start_line_hash: string; end_line_hash: string; @@ -629,36 +677,6 @@ type finding = Semgrep_output_v1_t.finding = { engine_kind: engine_of_finding option } -type error_type = Semgrep_output_v1_t.error_type = - LexicalError - | ParseError - | OtherParseError - | AstBuilderError - | RuleParseError - | SemgrepWarning - | SemgrepError - | InvalidRuleSchemaError - | UnknownLanguageError - | InvalidYaml - | MatchingError - | 
SemgrepMatchFound - | TooManyMatches - | FatalError - | Timeout - | OutOfMemory - | StackOverflow - | TimeoutDuringInterfile - | OutOfMemoryDuringInterfile - | MissingPlugin - | PatternParseError of string list - | PartialParsing of location list - | IncompatibleRule of incompatible_rule - | PatternParseError0 - | IncompatibleRule0 - | DependencyResolutionError of resolution_error - - [@@deriving show] - type error_span = Semgrep_output_v1_t.error_span = { file: fpath; start: position; @@ -671,9 +689,6 @@ type error_span = Semgrep_output_v1_t.error_span = { context_end: position option option } -type error_severity = Semgrep_output_v1_t.error_severity - [@@deriving show, eq] - type dependency_parser_error = Semgrep_output_v1_t.dependency_parser_error = { path: fpath; parser: sca_parser_name; @@ -898,15 +913,6 @@ type core_output_extra = Semgrep_output_v1_t.core_output_extra = { symbol_analysis: symbol_analysis option } -type core_error = Semgrep_output_v1_t.core_error = { - error_type: error_type; - severity: error_severity; - message: string; - details: string option; - location: location option; - rule_id: rule_id option -} - type core_output = Semgrep_output_v1_t.core_output = { version: version; results: core_match list; @@ -13018,42 +13024,270 @@ let read_target_times = ( ) let target_times_of_string s = read_target_times (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_tag = ( - Yojson.Safe.write_string +let write_skip_reason : _ -> skip_reason -> _ = ( + fun ob (x : skip_reason) -> + match x with + | Always_skipped -> Buffer.add_string ob "\"always_skipped\"" + | Semgrepignore_patterns_match -> Buffer.add_string ob "\"semgrepignore_patterns_match\"" + | Cli_include_flags_do_not_match -> Buffer.add_string ob "\"cli_include_flags_do_not_match\"" + | Cli_exclude_flags_match -> Buffer.add_string ob "\"cli_exclude_flags_match\"" + | Exceeded_size_limit -> Buffer.add_string ob "\"exceeded_size_limit\"" + | Analysis_failed_parser_or_internal_error -> Buffer.add_string ob "\"analysis_failed_parser_or_internal_error\"" + | Excluded_by_config -> Buffer.add_string ob "\"excluded_by_config\"" + | Wrong_language -> Buffer.add_string ob "\"wrong_language\"" + | Too_big -> Buffer.add_string ob "\"too_big\"" + | Minified -> Buffer.add_string ob "\"minified\"" + | Binary -> Buffer.add_string ob "\"binary\"" + | Irrelevant_rule -> Buffer.add_string ob "\"irrelevant_rule\"" + | Too_many_matches -> Buffer.add_string ob "\"too_many_matches\"" + | Gitignore_patterns_match -> Buffer.add_string ob "\"Gitignore_patterns_match\"" + | Dotfile -> Buffer.add_string ob "\"Dotfile\"" + | Nonexistent_file -> Buffer.add_string ob "\"Nonexistent_file\"" + | Insufficient_permissions -> Buffer.add_string ob "\"insufficient_permissions\"" ) -let string_of_tag ?(len = 1024) x = +let string_of_skip_reason ?(len = 1024) x = let ob = Buffer.create len in - write_tag ob x; + write_skip_reason ob x; Buffer.contents ob -let read_tag = ( - Atdgen_runtime.Oj_run.read_string +let read_skip_reason = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "always_skipped" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Always_skipped : skip_reason) + | "semgrepignore_patterns_match" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Semgrepignore_patterns_match : skip_reason) + | "cli_include_flags_do_not_match" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + 
(Cli_include_flags_do_not_match : skip_reason) + | "cli_exclude_flags_match" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Cli_exclude_flags_match : skip_reason) + | "exceeded_size_limit" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Exceeded_size_limit : skip_reason) + | "analysis_failed_parser_or_internal_error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Analysis_failed_parser_or_internal_error : skip_reason) + | "excluded_by_config" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Excluded_by_config : skip_reason) + | "wrong_language" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Wrong_language : skip_reason) + | "too_big" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Too_big : skip_reason) + | "minified" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Minified : skip_reason) + | "binary" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Binary : skip_reason) + | "irrelevant_rule" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Irrelevant_rule : skip_reason) + | "too_many_matches" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Too_many_matches : skip_reason) + | "Gitignore_patterns_match" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Gitignore_patterns_match : skip_reason) + | "Dotfile" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Dotfile : skip_reason) + | "Nonexistent_file" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Nonexistent_file : skip_reason) + | "insufficient_permissions" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Insufficient_permissions : skip_reason) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "always_skipped" -> + (Always_skipped : skip_reason) + | "semgrepignore_patterns_match" -> + (Semgrepignore_patterns_match : skip_reason) + | "cli_include_flags_do_not_match" -> + (Cli_include_flags_do_not_match : skip_reason) + | "cli_exclude_flags_match" -> + (Cli_exclude_flags_match : skip_reason) + | "exceeded_size_limit" -> + (Exceeded_size_limit : skip_reason) + | "analysis_failed_parser_or_internal_error" -> + (Analysis_failed_parser_or_internal_error : skip_reason) + | "excluded_by_config" -> + (Excluded_by_config : skip_reason) + | "wrong_language" -> + (Wrong_language : skip_reason) + | "too_big" -> + (Too_big : skip_reason) + | "minified" -> + (Minified : skip_reason) + | "binary" -> + (Binary : skip_reason) + | "irrelevant_rule" -> + (Irrelevant_rule : skip_reason) + | "too_many_matches" -> + (Too_many_matches : skip_reason) + | "Gitignore_patterns_match" -> + (Gitignore_patterns_match : skip_reason) + | "Dotfile" -> + (Dotfile : skip_reason) + | "Nonexistent_file" -> + (Nonexistent_file : skip_reason) + | "insufficient_permissions" -> + (Insufficient_permissions : skip_reason) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) ) -let tag_of_string s = - read_tag (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_symbol : _ -> symbol -> _ = ( - fun ob (x : symbol) -> +let skip_reason_of_string s = + read_skip_reason (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__rule_id_option = ( + Atdgen_runtime.Oj_run.write_std_option ( + write_rule_id + ) +) +let 
string_of__rule_id_option ?(len = 1024) x = + let ob = Buffer.create len in + write__rule_id_option ob x; + Buffer.contents ob +let read__rule_id_option = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "None" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (None : _ option) + | "Some" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_rule_id + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "None" -> + (None : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | "Some" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_rule_id + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let _rule_id_option_of_string s = + read__rule_id_option (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_skipped_target : _ -> skipped_target -> _ = ( + fun ob (x : skipped_target) -> Buffer.add_char ob '{'; let is_first = ref true in if !is_first then is_first := false else Buffer.add_char ob ','; - Buffer.add_string ob "\"fqn\":"; + Buffer.add_string ob "\"path\":"; ( - write__string_list + write_fpath ) - ob x.fqn; + ob x.path; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"reason\":"; + ( + write_skip_reason + ) + ob x.reason; + (match x.details with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"details\":"; + ( + Yojson.Safe.write_string + ) + ob x; + ); + (match x.rule_id with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"rule_id\":"; + ( + write_rule_id + ) + ob x; + ); Buffer.add_char ob '}'; ) -let string_of_symbol ?(len = 1024) x = +let string_of_skipped_target ?(len = 1024) x = let ob = Buffer.create len in - write_symbol ob x; + write_skipped_target ob x; Buffer.contents ob -let read_symbol = ( +let read_skipped_target = ( fun p lb -> Yojson.Safe.read_space p lb; Yojson.Safe.read_lcurl p lb; - let field_fqn = ref (None) in + let field_path = ref (None) in + let field_reason = ref (None) in + let field_details = ref (None) in + let field_rule_id = ref (None) in try Yojson.Safe.read_space p lb; Yojson.Safe.read_object_end lb; @@ -13062,25 +13296,89 @@ let read_symbol = ( fun s pos len -> if pos < 0 || len < 0 || pos + len > String.length s then invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - if len = 3 && String.unsafe_get s pos = 'f' && String.unsafe_get s (pos+1) = 'q' && String.unsafe_get s (pos+2) = 'n' then ( - 0 - ) - else ( - -1 - ) + match len with + | 4 -> ( + if String.unsafe_get s pos = 'p' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'h' then ( + 0 + ) + else ( + -1 + ) + ) + | 6 -> ( + if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'a' && String.unsafe_get s (pos+3) = 's' 
&& String.unsafe_get s (pos+4) = 'o' && String.unsafe_get s (pos+5) = 'n' then ( + 1 + ) + else ( + -1 + ) + ) + | 7 -> ( + match String.unsafe_get s pos with + | 'd' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( + 2 + ) + else ( + -1 + ) + ) + | 'r' -> ( + if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 3 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | _ -> ( + -1 + ) in let i = Yojson.Safe.map_ident p f lb in Atdgen_runtime.Oj_run.read_until_field_value p lb; ( match i with | 0 -> - field_fqn := ( + field_path := ( Some ( ( - read__string_list + read_fpath + ) p lb + ) + ); + | 1 -> + field_reason := ( + Some ( + ( + read_skip_reason ) p lb ) ); + | 2 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_details := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + ) + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + ) | _ -> ( Yojson.Safe.skip_json p lb ) @@ -13093,25 +13391,1474 @@ let read_symbol = ( fun s pos len -> if pos < 0 || len < 0 || pos + len > String.length s then invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - if len = 3 && String.unsafe_get s pos = 'f' && String.unsafe_get s (pos+1) = 'q' && String.unsafe_get s (pos+2) = 'n' then ( - 0 - ) - else ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_fqn := ( + match len with + | 4 -> ( + if String.unsafe_get s pos = 'p' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'h' then ( + 0 + ) + else ( + -1 + ) + ) + | 6 -> ( + if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'a' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'o' && String.unsafe_get s (pos+5) = 'n' then ( + 1 + ) + else ( + -1 + ) + ) + | 7 -> ( + match String.unsafe_get s pos with + | 'd' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( + 2 + ) + else ( + -1 + ) + ) + | 'r' -> ( + if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 3 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_path := ( + Some ( + ( + read_fpath + ) p lb + ) + ); + | 1 -> + field_reason := ( + Some ( + ( + read_skip_reason + ) p lb + ) + ); + | 2 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_details := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + ) + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + 
field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + ) + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + done; + assert false; + with Yojson.End_of_object -> ( + ( + { + path = (match !field_path with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "path"); + reason = (match !field_reason with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "reason"); + details = !field_details; + rule_id = !field_rule_id; + } + : skipped_target) + ) +) +let skipped_target_of_string s = + read_skipped_target (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_resolution_cmd_failed : _ -> resolution_cmd_failed -> _ = ( + fun ob (x : resolution_cmd_failed) -> + Buffer.add_char ob '{'; + let is_first = ref true in + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"command\":"; + ( + Yojson.Safe.write_string + ) + ob x.command; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"message\":"; + ( + Yojson.Safe.write_string + ) + ob x.message; + Buffer.add_char ob '}'; +) +let string_of_resolution_cmd_failed ?(len = 1024) x = + let ob = Buffer.create len in + write_resolution_cmd_failed ob x; + Buffer.contents ob +let read_resolution_cmd_failed = ( + fun p lb -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_lcurl p lb; + let field_command = ref (None) in + let field_message = ref (None) in + try + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_end lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + if len = 7 then ( + match String.unsafe_get s pos with + | 'c' -> ( + if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'm' && String.unsafe_get s (pos+3) = 'm' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'n' && String.unsafe_get s (pos+6) = 'd' then ( + 0 + ) + else ( + -1 + ) + ) + | 'm' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + else ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_command := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + | 1 -> + field_message := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + while true do + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_sep p lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + if len = 7 then ( + match String.unsafe_get s pos with + | 'c' -> ( + if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'm' && String.unsafe_get s (pos+3) = 'm' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'n' && String.unsafe_get s (pos+6) = 'd' then ( + 0 + ) + else ( + -1 + ) + ) + | 'm' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' 
&& String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + else ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_command := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + | 1 -> + field_message := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + done; + assert false; + with Yojson.End_of_object -> ( + ( + { + command = (match !field_command with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "command"); + message = (match !field_message with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "message"); + } + : resolution_cmd_failed) + ) +) +let resolution_cmd_failed_of_string s = + read_resolution_cmd_failed (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_resolution_error = ( + fun ob x -> + match x with + | `UnsupportedManifest -> Buffer.add_string ob "\"UnsupportedManifest\"" + | `MissingRequirement x -> + Buffer.add_string ob "[\"MissingRequirement\","; + ( + Yojson.Safe.write_string + ) ob x; + Buffer.add_char ob ']' + | `ResolutionCmdFailed x -> + Buffer.add_string ob "[\"ResolutionCmdFailed\","; + ( + write_resolution_cmd_failed + ) ob x; + Buffer.add_char ob ']' + | `ParseDependenciesFailed x -> + Buffer.add_string ob "[\"ParseDependenciesFailed\","; + ( + Yojson.Safe.write_string + ) ob x; + Buffer.add_char ob ']' +) +let string_of_resolution_error ?(len = 1024) x = + let ob = Buffer.create len in + write_resolution_error ob x; + Buffer.contents ob +let read_resolution_error = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "UnsupportedManifest" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `UnsupportedManifest + | "MissingRequirement" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + Atdgen_runtime.Oj_run.read_string + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `MissingRequirement x + | "ResolutionCmdFailed" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_resolution_cmd_failed + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `ResolutionCmdFailed x + | "ParseDependenciesFailed" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + Atdgen_runtime.Oj_run.read_string + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `ParseDependenciesFailed x + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "UnsupportedManifest" -> + `UnsupportedManifest + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | "MissingRequirement" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + Atdgen_runtime.Oj_run.read_string + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + `MissingRequirement x + | "ResolutionCmdFailed" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_resolution_cmd_failed + ) p lb + in + Yojson.Safe.read_space p lb; + 
Yojson.Safe.read_rbr p lb; + `ResolutionCmdFailed x + | "ParseDependenciesFailed" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + Atdgen_runtime.Oj_run.read_string + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + `ParseDependenciesFailed x + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let resolution_error_of_string s = + read_resolution_error (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__version_option = ( + Atdgen_runtime.Oj_run.write_std_option ( + write_version + ) +) +let string_of__version_option ?(len = 1024) x = + let ob = Buffer.create len in + write__version_option ob x; + Buffer.contents ob +let read__version_option = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "None" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (None : _ option) + | "Some" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_version + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "None" -> + (None : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | "Some" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_version + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let _version_option_of_string s = + read__version_option (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_incompatible_rule : _ -> incompatible_rule -> _ = ( + fun ob (x : incompatible_rule) -> + Buffer.add_char ob '{'; + let is_first = ref true in + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"rule_id\":"; + ( + write_rule_id + ) + ob x.rule_id; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"this_version\":"; + ( + write_version + ) + ob x.this_version; + (match x.min_version with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"min_version\":"; + ( + write_version + ) + ob x; + ); + (match x.max_version with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"max_version\":"; + ( + write_version + ) + ob x; + ); + Buffer.add_char ob '}'; +) +let string_of_incompatible_rule ?(len = 1024) x = + let ob = Buffer.create len in + write_incompatible_rule ob x; + Buffer.contents ob +let read_incompatible_rule = ( + fun p lb -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_lcurl p lb; + let field_rule_id = ref (None) in + let field_this_version = ref (None) in + let field_min_version = ref (None) in + let field_max_version = ref (None) in + try + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_end lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + 
match len with + | 7 -> ( + if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 0 + ) + else ( + -1 + ) + ) + | 11 -> ( + if String.unsafe_get s pos = 'm' then ( + match String.unsafe_get s (pos+1) with + | 'a' -> ( + if String.unsafe_get s (pos+2) = 'x' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( + 3 + ) + else ( + -1 + ) + ) + | 'i' -> ( + if String.unsafe_get s (pos+2) = 'n' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( + 2 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + else ( + -1 + ) + ) + | 12 -> ( + if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'h' && String.unsafe_get s (pos+2) = 'i' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'v' && String.unsafe_get s (pos+6) = 'e' && String.unsafe_get s (pos+7) = 'r' && String.unsafe_get s (pos+8) = 's' && String.unsafe_get s (pos+9) = 'i' && String.unsafe_get s (pos+10) = 'o' && String.unsafe_get s (pos+11) = 'n' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + | 1 -> + field_this_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + | 2 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_min_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + ) + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_max_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + ) + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + while true do + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_sep p lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + match len with + | 7 -> ( + if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 0 + ) + else ( + -1 + ) + ) + | 11 -> ( + if String.unsafe_get s pos = 'm' then ( + match String.unsafe_get s (pos+1) with + | 'a' -> ( + if String.unsafe_get s (pos+2) = 'x' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( + 3 + ) + else ( + -1 + ) + ) + | 'i' -> ( + if String.unsafe_get s (pos+2) = 
'n' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( + 2 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + else ( + -1 + ) + ) + | 12 -> ( + if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'h' && String.unsafe_get s (pos+2) = 'i' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'v' && String.unsafe_get s (pos+6) = 'e' && String.unsafe_get s (pos+7) = 'r' && String.unsafe_get s (pos+8) = 's' && String.unsafe_get s (pos+9) = 'i' && String.unsafe_get s (pos+10) = 'o' && String.unsafe_get s (pos+11) = 'n' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + | 1 -> + field_this_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + | 2 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_min_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + ) + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_max_version := ( + Some ( + ( + read_version + ) p lb + ) + ); + ) + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + done; + assert false; + with Yojson.End_of_object -> ( + ( + { + rule_id = (match !field_rule_id with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "rule_id"); + this_version = (match !field_this_version with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "this_version"); + min_version = !field_min_version; + max_version = !field_max_version; + } + : incompatible_rule) + ) +) +let incompatible_rule_of_string s = + read_incompatible_rule (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__location_list = ( + Atdgen_runtime.Oj_run.write_list ( + write_location + ) +) +let string_of__location_list ?(len = 1024) x = + let ob = Buffer.create len in + write__location_list ob x; + Buffer.contents ob +let read__location_list = ( + Atdgen_runtime.Oj_run.read_list ( + read_location + ) +) +let _location_list_of_string s = + read__location_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_error_type : _ -> error_type -> _ = ( + fun ob (x : error_type) -> + match x with + | LexicalError -> Buffer.add_string ob "\"Lexical error\"" + | ParseError -> Buffer.add_string ob "\"Syntax error\"" + | OtherParseError -> Buffer.add_string ob "\"Other syntax error\"" + | AstBuilderError -> Buffer.add_string ob "\"AST builder error\"" + | RuleParseError -> Buffer.add_string ob "\"Rule parse error\"" + | SemgrepWarning -> Buffer.add_string ob "\"SemgrepWarning\"" + | SemgrepError -> Buffer.add_string ob "\"SemgrepError\"" + | InvalidRuleSchemaError -> Buffer.add_string ob "\"InvalidRuleSchemaError\"" + | UnknownLanguageError -> Buffer.add_string ob "\"UnknownLanguageError\"" + | InvalidYaml -> Buffer.add_string ob "\"Invalid YAML\"" + | MatchingError -> Buffer.add_string ob "\"Internal matching error\"" + | SemgrepMatchFound -> Buffer.add_string ob "\"Semgrep match found\"" + | TooManyMatches -> Buffer.add_string ob "\"Too many matches\"" + | FatalError -> Buffer.add_string ob "\"Fatal error\"" + | Timeout -> Buffer.add_string ob "\"Timeout\"" + | OutOfMemory -> Buffer.add_string ob 
"\"Out of memory\"" + | StackOverflow -> Buffer.add_string ob "\"Stack overflow\"" + | TimeoutDuringInterfile -> Buffer.add_string ob "\"Timeout during interfile analysis\"" + | OutOfMemoryDuringInterfile -> Buffer.add_string ob "\"OOM during interfile analysis\"" + | MissingPlugin -> Buffer.add_string ob "\"Missing plugin\"" + | PatternParseError x -> + Buffer.add_string ob "[\"PatternParseError\","; + ( + write__string_list + ) ob x; + Buffer.add_char ob ']' + | PartialParsing x -> + Buffer.add_string ob "[\"PartialParsing\","; + ( + write__location_list + ) ob x; + Buffer.add_char ob ']' + | IncompatibleRule x -> + Buffer.add_string ob "[\"IncompatibleRule\","; + ( + write_incompatible_rule + ) ob x; + Buffer.add_char ob ']' + | PatternParseError0 -> Buffer.add_string ob "\"Pattern parse error\"" + | IncompatibleRule0 -> Buffer.add_string ob "\"Incompatible rule\"" + | DependencyResolutionError x -> + Buffer.add_string ob "[\"DependencyResolutionError\","; + ( + write_resolution_error + ) ob x; + Buffer.add_char ob ']' +) +let string_of_error_type ?(len = 1024) x = + let ob = Buffer.create len in + write_error_type ob x; + Buffer.contents ob +let read_error_type = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "Lexical error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (LexicalError : error_type) + | "Syntax error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (ParseError : error_type) + | "Other syntax error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (OtherParseError : error_type) + | "AST builder error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (AstBuilderError : error_type) + | "Rule parse error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (RuleParseError : error_type) + | "SemgrepWarning" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (SemgrepWarning : error_type) + | "SemgrepError" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (SemgrepError : error_type) + | "InvalidRuleSchemaError" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (InvalidRuleSchemaError : error_type) + | "UnknownLanguageError" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (UnknownLanguageError : error_type) + | "Invalid YAML" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (InvalidYaml : error_type) + | "Internal matching error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (MatchingError : error_type) + | "Semgrep match found" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (SemgrepMatchFound : error_type) + | "Too many matches" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (TooManyMatches : error_type) + | "Fatal error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (FatalError : error_type) + | "Timeout" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Timeout : error_type) + | "Out of memory" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (OutOfMemory : error_type) + | "Stack overflow" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (StackOverflow : error_type) + | "Timeout during interfile analysis" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (TimeoutDuringInterfile : error_type) + | "OOM during interfile analysis" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + 
(OutOfMemoryDuringInterfile : error_type) + | "Missing plugin" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (MissingPlugin : error_type) + | "PatternParseError" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read__string_list + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (PatternParseError x : error_type) + | "PartialParsing" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read__location_list + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (PartialParsing x : error_type) + | "IncompatibleRule" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_incompatible_rule + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (IncompatibleRule x : error_type) + | "Pattern parse error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (PatternParseError0 : error_type) + | "Incompatible rule" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (IncompatibleRule0 : error_type) + | "DependencyResolutionError" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_resolution_error + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (DependencyResolutionError x : error_type) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "Lexical error" -> + (LexicalError : error_type) + | "Syntax error" -> + (ParseError : error_type) + | "Other syntax error" -> + (OtherParseError : error_type) + | "AST builder error" -> + (AstBuilderError : error_type) + | "Rule parse error" -> + (RuleParseError : error_type) + | "SemgrepWarning" -> + (SemgrepWarning : error_type) + | "SemgrepError" -> + (SemgrepError : error_type) + | "InvalidRuleSchemaError" -> + (InvalidRuleSchemaError : error_type) + | "UnknownLanguageError" -> + (UnknownLanguageError : error_type) + | "Invalid YAML" -> + (InvalidYaml : error_type) + | "Internal matching error" -> + (MatchingError : error_type) + | "Semgrep match found" -> + (SemgrepMatchFound : error_type) + | "Too many matches" -> + (TooManyMatches : error_type) + | "Fatal error" -> + (FatalError : error_type) + | "Timeout" -> + (Timeout : error_type) + | "Out of memory" -> + (OutOfMemory : error_type) + | "Stack overflow" -> + (StackOverflow : error_type) + | "Timeout during interfile analysis" -> + (TimeoutDuringInterfile : error_type) + | "OOM during interfile analysis" -> + (OutOfMemoryDuringInterfile : error_type) + | "Missing plugin" -> + (MissingPlugin : error_type) + | "Pattern parse error" -> + (PatternParseError0 : error_type) + | "Incompatible rule" -> + (IncompatibleRule0 : error_type) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | "PatternParseError" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read__string_list + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (PatternParseError x : error_type) + | "PartialParsing" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read__location_list + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (PartialParsing x : error_type) + | "IncompatibleRule" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + 
read_incompatible_rule + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (IncompatibleRule x : error_type) + | "DependencyResolutionError" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_resolution_error + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (DependencyResolutionError x : error_type) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let error_type_of_string s = + read_error_type (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_error_severity = ( + fun ob x -> + match x with + | `Error -> Buffer.add_string ob "\"error\"" + | `Warning -> Buffer.add_string ob "\"warn\"" + | `Info -> Buffer.add_string ob "\"info\"" +) +let string_of_error_severity ?(len = 1024) x = + let ob = Buffer.create len in + write_error_severity ob x; + Buffer.contents ob +let read_error_severity = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "error" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `Error + | "warn" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `Warning + | "info" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `Info + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "error" -> + `Error + | "warn" -> + `Warning + | "info" -> + `Info + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let error_severity_of_string s = + read_error_severity (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__location_option = ( + Atdgen_runtime.Oj_run.write_std_option ( + write_location + ) +) +let string_of__location_option ?(len = 1024) x = + let ob = Buffer.create len in + write__location_option ob x; + Buffer.contents ob +let read__location_option = ( + fun p lb -> + Yojson.Safe.read_space p lb; + match Yojson.Safe.start_any_variant p lb with + | `Edgy_bracket -> ( + match Yojson.Safe.read_ident p lb with + | "None" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (None : _ option) + | "Some" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_location + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Double_quote -> ( + match Yojson.Safe.finish_string p lb with + | "None" -> + (None : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) + | `Square_bracket -> ( + match Atdgen_runtime.Oj_run.read_string p lb with + | "Some" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_location + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + (Some x : _ option) + | x -> + Atdgen_runtime.Oj_run.invalid_variant_tag p x + ) +) +let _location_option_of_string s = + read__location_option (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_core_error : _ -> core_error -> _ = ( + fun ob (x : core_error) -> + Buffer.add_char ob '{'; + let is_first = ref true in + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"error_type\":"; + ( + write_error_type + ) + ob 
x.error_type; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"severity\":"; + ( + write_error_severity + ) + ob x.severity; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"message\":"; + ( + Yojson.Safe.write_string + ) + ob x.message; + (match x.details with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"details\":"; + ( + Yojson.Safe.write_string + ) + ob x; + ); + (match x.location with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"location\":"; + ( + write_location + ) + ob x; + ); + (match x.rule_id with None -> () | Some x -> + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"rule_id\":"; + ( + write_rule_id + ) + ob x; + ); + Buffer.add_char ob '}'; +) +let string_of_core_error ?(len = 1024) x = + let ob = Buffer.create len in + write_core_error ob x; + Buffer.contents ob +let read_core_error = ( + fun p lb -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_lcurl p lb; + let field_error_type = ref (None) in + let field_severity = ref (None) in + let field_message = ref (None) in + let field_details = ref (None) in + let field_location = ref (None) in + let field_rule_id = ref (None) in + try + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_end lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + match len with + | 7 -> ( + match String.unsafe_get s pos with + | 'd' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( + 3 + ) + else ( + -1 + ) + ) + | 'm' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( + 2 + ) + else ( + -1 + ) + ) + | 'r' -> ( + if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 5 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | 8 -> ( + match String.unsafe_get s pos with + | 'l' -> ( + if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'c' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 't' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'o' && String.unsafe_get s (pos+7) = 'n' then ( + 4 + ) + else ( + -1 + ) + ) + | 's' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'v' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | 10 -> ( + if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && 
String.unsafe_get s (pos+5) = '_' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' && String.unsafe_get s (pos+8) = 'p' && String.unsafe_get s (pos+9) = 'e' then ( + 0 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_error_type := ( + Some ( + ( + read_error_type + ) p lb + ) + ); + | 1 -> + field_severity := ( + Some ( + ( + read_error_severity + ) p lb + ) + ); + | 2 -> + field_message := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_details := ( Some ( ( - read__string_list + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + ) + | 4 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_location := ( + Some ( + ( + read_location + ) p lb + ) + ); + ) + | 5 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + ) + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + while true do + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_sep p lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + match len with + | 7 -> ( + match String.unsafe_get s pos with + | 'd' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( + 3 + ) + else ( + -1 + ) + ) + | 'm' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( + 2 + ) + else ( + -1 + ) + ) + | 'r' -> ( + if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( + 5 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | 8 -> ( + match String.unsafe_get s pos with + | 'l' -> ( + if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'c' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 't' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'o' && String.unsafe_get s (pos+7) = 'n' then ( + 4 + ) + else ( + -1 + ) + ) + | 's' -> ( + if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'v' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' then ( + 1 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + ) + | 10 -> ( + if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = '_' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' && String.unsafe_get s (pos+8) = 'p' && String.unsafe_get s (pos+9) = 'e' then ( + 0 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f 
lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_error_type := ( + Some ( + ( + read_error_type + ) p lb + ) + ); + | 1 -> + field_severity := ( + Some ( + ( + read_error_severity + ) p lb + ) + ); + | 2 -> + field_message := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string ) p lb ) ); + | 3 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_details := ( + Some ( + ( + Atdgen_runtime.Oj_run.read_string + ) p lb + ) + ); + ) + | 4 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_location := ( + Some ( + ( + read_location + ) p lb + ) + ); + ) + | 5 -> + if not (Yojson.Safe.read_null_if_possible p lb) then ( + field_rule_id := ( + Some ( + ( + read_rule_id + ) p lb + ) + ); + ) | _ -> ( Yojson.Safe.skip_json p lb ) @@ -13121,29 +14868,356 @@ let read_symbol = ( with Yojson.End_of_object -> ( ( { - fqn = (match !field_fqn with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "fqn"); + error_type = (match !field_error_type with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "error_type"); + severity = (match !field_severity with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "severity"); + message = (match !field_message with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "message"); + details = !field_details; + location = !field_location; + rule_id = !field_rule_id; } - : symbol) + : core_error) ) ) -let symbol_of_string s = - read_symbol (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write__location_list = ( +let core_error_of_string s = + read_core_error (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__skipped_target_list = ( Atdgen_runtime.Oj_run.write_list ( - write_location + write_skipped_target ) ) -let string_of__location_list ?(len = 1024) x = +let string_of__skipped_target_list ?(len = 1024) x = let ob = Buffer.create len in - write__location_list ob x; + write__skipped_target_list ob x; Buffer.contents ob -let read__location_list = ( +let read__skipped_target_list = ( Atdgen_runtime.Oj_run.read_list ( - read_location + read_skipped_target ) ) -let _location_list_of_string s = - read__location_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let _skipped_target_list_of_string s = + read__skipped_target_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write__core_error_list = ( + Atdgen_runtime.Oj_run.write_list ( + write_core_error + ) +) +let string_of__core_error_list ?(len = 1024) x = + let ob = Buffer.create len in + write__core_error_list ob x; + Buffer.contents ob +let read__core_error_list = ( + Atdgen_runtime.Oj_run.read_list ( + read_core_error + ) +) +let _core_error_list_of_string s = + read__core_error_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_target_discovery_result : _ -> target_discovery_result -> _ = ( + fun ob (x : target_discovery_result) -> + Buffer.add_char ob '{'; + let is_first = ref true in + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"target_paths\":"; + ( + write__fpath_list + ) + ob x.target_paths; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"errors\":"; + ( + write__core_error_list + ) + ob x.errors; + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"skipped\":"; + ( + write__skipped_target_list + ) + ob x.skipped; + Buffer.add_char ob '}'; +) +let string_of_target_discovery_result ?(len = 1024) x = + let 
ob = Buffer.create len in + write_target_discovery_result ob x; + Buffer.contents ob +let read_target_discovery_result = ( + fun p lb -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_lcurl p lb; + let field_target_paths = ref (None) in + let field_errors = ref (None) in + let field_skipped = ref (None) in + try + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_end lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + match len with + | 6 -> ( + if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 's' then ( + 1 + ) + else ( + -1 + ) + ) + | 7 -> ( + if String.unsafe_get s pos = 's' && String.unsafe_get s (pos+1) = 'k' && String.unsafe_get s (pos+2) = 'i' && String.unsafe_get s (pos+3) = 'p' && String.unsafe_get s (pos+4) = 'p' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'd' then ( + 2 + ) + else ( + -1 + ) + ) + | 12 -> ( + if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'g' && String.unsafe_get s (pos+4) = 'e' && String.unsafe_get s (pos+5) = 't' && String.unsafe_get s (pos+6) = '_' && String.unsafe_get s (pos+7) = 'p' && String.unsafe_get s (pos+8) = 'a' && String.unsafe_get s (pos+9) = 't' && String.unsafe_get s (pos+10) = 'h' && String.unsafe_get s (pos+11) = 's' then ( + 0 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_target_paths := ( + Some ( + ( + read__fpath_list + ) p lb + ) + ); + | 1 -> + field_errors := ( + Some ( + ( + read__core_error_list + ) p lb + ) + ); + | 2 -> + field_skipped := ( + Some ( + ( + read__skipped_target_list + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + while true do + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_sep p lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + match len with + | 6 -> ( + if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 's' then ( + 1 + ) + else ( + -1 + ) + ) + | 7 -> ( + if String.unsafe_get s pos = 's' && String.unsafe_get s (pos+1) = 'k' && String.unsafe_get s (pos+2) = 'i' && String.unsafe_get s (pos+3) = 'p' && String.unsafe_get s (pos+4) = 'p' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'd' then ( + 2 + ) + else ( + -1 + ) + ) + | 12 -> ( + if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'g' && String.unsafe_get s (pos+4) = 'e' && String.unsafe_get s (pos+5) = 't' && String.unsafe_get s (pos+6) = '_' && String.unsafe_get s (pos+7) = 'p' && String.unsafe_get s (pos+8) = 'a' && String.unsafe_get s (pos+9) = 't' && String.unsafe_get s (pos+10) = 'h' && 
String.unsafe_get s (pos+11) = 's' then ( + 0 + ) + else ( + -1 + ) + ) + | _ -> ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_target_paths := ( + Some ( + ( + read__fpath_list + ) p lb + ) + ); + | 1 -> + field_errors := ( + Some ( + ( + read__core_error_list + ) p lb + ) + ); + | 2 -> + field_skipped := ( + Some ( + ( + read__skipped_target_list + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + done; + assert false; + with Yojson.End_of_object -> ( + ( + { + target_paths = (match !field_target_paths with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "target_paths"); + errors = (match !field_errors with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "errors"); + skipped = (match !field_skipped with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "skipped"); + } + : target_discovery_result) + ) +) +let target_discovery_result_of_string s = + read_target_discovery_result (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_tag = ( + Yojson.Safe.write_string +) +let string_of_tag ?(len = 1024) x = + let ob = Buffer.create len in + write_tag ob x; + Buffer.contents ob +let read_tag = ( + Atdgen_runtime.Oj_run.read_string +) +let tag_of_string s = + read_tag (Yojson.Safe.init_lexer ()) (Lexing.from_string s) +let write_symbol : _ -> symbol -> _ = ( + fun ob (x : symbol) -> + Buffer.add_char ob '{'; + let is_first = ref true in + if !is_first then + is_first := false + else + Buffer.add_char ob ','; + Buffer.add_string ob "\"fqn\":"; + ( + write__string_list + ) + ob x.fqn; + Buffer.add_char ob '}'; +) +let string_of_symbol ?(len = 1024) x = + let ob = Buffer.create len in + write_symbol ob x; + Buffer.contents ob +let read_symbol = ( + fun p lb -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_lcurl p lb; + let field_fqn = ref (None) in + try + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_end lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + if len = 3 && String.unsafe_get s pos = 'f' && String.unsafe_get s (pos+1) = 'q' && String.unsafe_get s (pos+2) = 'n' then ( + 0 + ) + else ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_fqn := ( + Some ( + ( + read__string_list + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + while true do + Yojson.Safe.read_space p lb; + Yojson.Safe.read_object_sep p lb; + Yojson.Safe.read_space p lb; + let f = + fun s pos len -> + if pos < 0 || len < 0 || pos + len > String.length s then + invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); + if len = 3 && String.unsafe_get s pos = 'f' && String.unsafe_get s (pos+1) = 'q' && String.unsafe_get s (pos+2) = 'n' then ( + 0 + ) + else ( + -1 + ) + in + let i = Yojson.Safe.map_ident p f lb in + Atdgen_runtime.Oj_run.read_until_field_value p lb; + ( + match i with + | 0 -> + field_fqn := ( + Some ( + ( + read__string_list + ) p lb + ) + ); + | _ -> ( + Yojson.Safe.skip_json p lb + ) + ); + done; + assert false; + with Yojson.End_of_object -> ( + ( + { + fqn = (match !field_fqn with Some x -> x | None -> 
Atdgen_runtime.Oj_run.missing_field p "fqn"); + } + : symbol) + ) +) +let symbol_of_string s = + read_symbol (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write_symbol_usage : _ -> symbol_usage -> _ = ( fun ob (x : symbol_usage) -> Buffer.add_char ob '{'; @@ -14362,475 +16436,6 @@ let read_supply_chain_stats = ( ) let supply_chain_stats_of_string s = read_supply_chain_stats (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_skip_reason : _ -> skip_reason -> _ = ( - fun ob (x : skip_reason) -> - match x with - | Always_skipped -> Buffer.add_string ob "\"always_skipped\"" - | Semgrepignore_patterns_match -> Buffer.add_string ob "\"semgrepignore_patterns_match\"" - | Cli_include_flags_do_not_match -> Buffer.add_string ob "\"cli_include_flags_do_not_match\"" - | Cli_exclude_flags_match -> Buffer.add_string ob "\"cli_exclude_flags_match\"" - | Exceeded_size_limit -> Buffer.add_string ob "\"exceeded_size_limit\"" - | Analysis_failed_parser_or_internal_error -> Buffer.add_string ob "\"analysis_failed_parser_or_internal_error\"" - | Excluded_by_config -> Buffer.add_string ob "\"excluded_by_config\"" - | Wrong_language -> Buffer.add_string ob "\"wrong_language\"" - | Too_big -> Buffer.add_string ob "\"too_big\"" - | Minified -> Buffer.add_string ob "\"minified\"" - | Binary -> Buffer.add_string ob "\"binary\"" - | Irrelevant_rule -> Buffer.add_string ob "\"irrelevant_rule\"" - | Too_many_matches -> Buffer.add_string ob "\"too_many_matches\"" - | Gitignore_patterns_match -> Buffer.add_string ob "\"Gitignore_patterns_match\"" - | Dotfile -> Buffer.add_string ob "\"Dotfile\"" - | Nonexistent_file -> Buffer.add_string ob "\"Nonexistent_file\"" - | Insufficient_permissions -> Buffer.add_string ob "\"insufficient_permissions\"" -) -let string_of_skip_reason ?(len = 1024) x = - let ob = Buffer.create len in - write_skip_reason ob x; - Buffer.contents ob -let read_skip_reason = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "always_skipped" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Always_skipped : skip_reason) - | "semgrepignore_patterns_match" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Semgrepignore_patterns_match : skip_reason) - | "cli_include_flags_do_not_match" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Cli_include_flags_do_not_match : skip_reason) - | "cli_exclude_flags_match" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Cli_exclude_flags_match : skip_reason) - | "exceeded_size_limit" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Exceeded_size_limit : skip_reason) - | "analysis_failed_parser_or_internal_error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Analysis_failed_parser_or_internal_error : skip_reason) - | "excluded_by_config" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Excluded_by_config : skip_reason) - | "wrong_language" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Wrong_language : skip_reason) - | "too_big" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Too_big : skip_reason) - | "minified" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Minified : skip_reason) - | "binary" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Binary : skip_reason) - | "irrelevant_rule" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - 
(Irrelevant_rule : skip_reason) - | "too_many_matches" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Too_many_matches : skip_reason) - | "Gitignore_patterns_match" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Gitignore_patterns_match : skip_reason) - | "Dotfile" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Dotfile : skip_reason) - | "Nonexistent_file" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Nonexistent_file : skip_reason) - | "insufficient_permissions" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Insufficient_permissions : skip_reason) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "always_skipped" -> - (Always_skipped : skip_reason) - | "semgrepignore_patterns_match" -> - (Semgrepignore_patterns_match : skip_reason) - | "cli_include_flags_do_not_match" -> - (Cli_include_flags_do_not_match : skip_reason) - | "cli_exclude_flags_match" -> - (Cli_exclude_flags_match : skip_reason) - | "exceeded_size_limit" -> - (Exceeded_size_limit : skip_reason) - | "analysis_failed_parser_or_internal_error" -> - (Analysis_failed_parser_or_internal_error : skip_reason) - | "excluded_by_config" -> - (Excluded_by_config : skip_reason) - | "wrong_language" -> - (Wrong_language : skip_reason) - | "too_big" -> - (Too_big : skip_reason) - | "minified" -> - (Minified : skip_reason) - | "binary" -> - (Binary : skip_reason) - | "irrelevant_rule" -> - (Irrelevant_rule : skip_reason) - | "too_many_matches" -> - (Too_many_matches : skip_reason) - | "Gitignore_patterns_match" -> - (Gitignore_patterns_match : skip_reason) - | "Dotfile" -> - (Dotfile : skip_reason) - | "Nonexistent_file" -> - (Nonexistent_file : skip_reason) - | "insufficient_permissions" -> - (Insufficient_permissions : skip_reason) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let skip_reason_of_string s = - read_skip_reason (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write__rule_id_option = ( - Atdgen_runtime.Oj_run.write_std_option ( - write_rule_id - ) -) -let string_of__rule_id_option ?(len = 1024) x = - let ob = Buffer.create len in - write__rule_id_option ob x; - Buffer.contents ob -let read__rule_id_option = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "None" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (None : _ option) - | "Some" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read_rule_id - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "None" -> - (None : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | "Some" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_rule_id - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let _rule_id_option_of_string s = - read__rule_id_option 
(Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_skipped_target : _ -> skipped_target -> _ = ( - fun ob (x : skipped_target) -> - Buffer.add_char ob '{'; - let is_first = ref true in - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"path\":"; - ( - write_fpath - ) - ob x.path; - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"reason\":"; - ( - write_skip_reason - ) - ob x.reason; - (match x.details with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"details\":"; - ( - Yojson.Safe.write_string - ) - ob x; - ); - (match x.rule_id with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"rule_id\":"; - ( - write_rule_id - ) - ob x; - ); - Buffer.add_char ob '}'; -) -let string_of_skipped_target ?(len = 1024) x = - let ob = Buffer.create len in - write_skipped_target ob x; - Buffer.contents ob -let read_skipped_target = ( - fun p lb -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_lcurl p lb; - let field_path = ref (None) in - let field_reason = ref (None) in - let field_details = ref (None) in - let field_rule_id = ref (None) in - try - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_end lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 4 -> ( - if String.unsafe_get s pos = 'p' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'h' then ( - 0 - ) - else ( - -1 - ) - ) - | 6 -> ( - if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'a' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'o' && String.unsafe_get s (pos+5) = 'n' then ( - 1 - ) - else ( - -1 - ) - ) - | 7 -> ( - match String.unsafe_get s pos with - | 'd' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( - 2 - ) - else ( - -1 - ) - ) - | 'r' -> ( - if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 3 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_path := ( - Some ( - ( - read_fpath - ) p lb - ) - ); - | 1 -> - field_reason := ( - Some ( - ( - read_skip_reason - ) p lb - ) - ); - | 2 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_details := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - ) - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - while true do - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_sep p lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > 
String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 4 -> ( - if String.unsafe_get s pos = 'p' && String.unsafe_get s (pos+1) = 'a' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'h' then ( - 0 - ) - else ( - -1 - ) - ) - | 6 -> ( - if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'a' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'o' && String.unsafe_get s (pos+5) = 'n' then ( - 1 - ) - else ( - -1 - ) - ) - | 7 -> ( - match String.unsafe_get s pos with - | 'd' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( - 2 - ) - else ( - -1 - ) - ) - | 'r' -> ( - if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 3 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_path := ( - Some ( - ( - read_fpath - ) p lb - ) - ); - | 1 -> - field_reason := ( - Some ( - ( - read_skip_reason - ) p lb - ) - ); - | 2 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_details := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - ) - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - done; - assert false; - with Yojson.End_of_object -> ( - ( - { - path = (match !field_path with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "path"); - reason = (match !field_reason with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "reason"); - details = !field_details; - rule_id = !field_rule_id; - } - : skipped_target) - ) -) -let skipped_target_of_string s = - read_skipped_target (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write_skipped_rule : _ -> skipped_rule -> _ = ( fun ob (x : skipped_rule) -> Buffer.add_char ob '{'; @@ -15039,22 +16644,6 @@ let read_skipped_rule = ( ) let skipped_rule_of_string s = read_skipped_rule (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write__skipped_target_list = ( - Atdgen_runtime.Oj_run.write_list ( - write_skipped_target - ) -) -let string_of__skipped_target_list ?(len = 1024) x = - let ob = Buffer.create len in - write__skipped_target_list ob x; - Buffer.contents ob -let read__skipped_target_list = ( - Atdgen_runtime.Oj_run.read_list ( - read_skipped_target - ) -) -let _skipped_target_list_of_string s = - read__skipped_target_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write__skipped_target_list_option = ( Atdgen_runtime.Oj_run.write_std_option ( write__skipped_target_list @@ -21630,284 +23219,6 @@ let read_rule_id_and_engine_kind = ( ) let rule_id_and_engine_kind_of_string s = read_rule_id_and_engine_kind (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_resolution_cmd_failed : _ -> resolution_cmd_failed -> _ = ( - fun ob (x : resolution_cmd_failed) -> - Buffer.add_char ob '{'; - let is_first = ref true in 
- if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"command\":"; - ( - Yojson.Safe.write_string - ) - ob x.command; - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"message\":"; - ( - Yojson.Safe.write_string - ) - ob x.message; - Buffer.add_char ob '}'; -) -let string_of_resolution_cmd_failed ?(len = 1024) x = - let ob = Buffer.create len in - write_resolution_cmd_failed ob x; - Buffer.contents ob -let read_resolution_cmd_failed = ( - fun p lb -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_lcurl p lb; - let field_command = ref (None) in - let field_message = ref (None) in - try - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_end lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - if len = 7 then ( - match String.unsafe_get s pos with - | 'c' -> ( - if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'm' && String.unsafe_get s (pos+3) = 'm' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'n' && String.unsafe_get s (pos+6) = 'd' then ( - 0 - ) - else ( - -1 - ) - ) - | 'm' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - else ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_command := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | 1 -> - field_message := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - while true do - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_sep p lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - if len = 7 then ( - match String.unsafe_get s pos with - | 'c' -> ( - if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'm' && String.unsafe_get s (pos+3) = 'm' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'n' && String.unsafe_get s (pos+6) = 'd' then ( - 0 - ) - else ( - -1 - ) - ) - | 'm' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - else ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_command := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | 1 -> - field_message := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - done; - assert false; - with Yojson.End_of_object -> ( - ( - { - command = (match !field_command with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p 
"command"); - message = (match !field_message with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "message"); - } - : resolution_cmd_failed) - ) -) -let resolution_cmd_failed_of_string s = - read_resolution_cmd_failed (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_resolution_error = ( - fun ob x -> - match x with - | `UnsupportedManifest -> Buffer.add_string ob "\"UnsupportedManifest\"" - | `MissingRequirement x -> - Buffer.add_string ob "[\"MissingRequirement\","; - ( - Yojson.Safe.write_string - ) ob x; - Buffer.add_char ob ']' - | `ResolutionCmdFailed x -> - Buffer.add_string ob "[\"ResolutionCmdFailed\","; - ( - write_resolution_cmd_failed - ) ob x; - Buffer.add_char ob ']' - | `ParseDependenciesFailed x -> - Buffer.add_string ob "[\"ParseDependenciesFailed\","; - ( - Yojson.Safe.write_string - ) ob x; - Buffer.add_char ob ']' -) -let string_of_resolution_error ?(len = 1024) x = - let ob = Buffer.create len in - write_resolution_error ob x; - Buffer.contents ob -let read_resolution_error = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "UnsupportedManifest" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `UnsupportedManifest - | "MissingRequirement" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - Atdgen_runtime.Oj_run.read_string - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `MissingRequirement x - | "ResolutionCmdFailed" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read_resolution_cmd_failed - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `ResolutionCmdFailed x - | "ParseDependenciesFailed" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - Atdgen_runtime.Oj_run.read_string - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `ParseDependenciesFailed x - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "UnsupportedManifest" -> - `UnsupportedManifest - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | "MissingRequirement" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - Atdgen_runtime.Oj_run.read_string - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - `MissingRequirement x - | "ResolutionCmdFailed" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_resolution_cmd_failed - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - `ResolutionCmdFailed x - | "ParseDependenciesFailed" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - Atdgen_runtime.Oj_run.read_string - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - `ParseDependenciesFailed x - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let resolution_error_of_string s = - read_resolution_error (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write__resolution_error_list = ( Atdgen_runtime.Oj_run.write_list ( write_resolution_error @@ -22751,336 +24062,6 @@ let read_parsing_stats = ( ) let parsing_stats_of_string s = read_parsing_stats (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let 
write__version_option = ( - Atdgen_runtime.Oj_run.write_std_option ( - write_version - ) -) -let string_of__version_option ?(len = 1024) x = - let ob = Buffer.create len in - write__version_option ob x; - Buffer.contents ob -let read__version_option = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "None" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (None : _ option) - | "Some" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read_version - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "None" -> - (None : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | "Some" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_version - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let _version_option_of_string s = - read__version_option (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_incompatible_rule : _ -> incompatible_rule -> _ = ( - fun ob (x : incompatible_rule) -> - Buffer.add_char ob '{'; - let is_first = ref true in - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"rule_id\":"; - ( - write_rule_id - ) - ob x.rule_id; - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"this_version\":"; - ( - write_version - ) - ob x.this_version; - (match x.min_version with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"min_version\":"; - ( - write_version - ) - ob x; - ); - (match x.max_version with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"max_version\":"; - ( - write_version - ) - ob x; - ); - Buffer.add_char ob '}'; -) -let string_of_incompatible_rule ?(len = 1024) x = - let ob = Buffer.create len in - write_incompatible_rule ob x; - Buffer.contents ob -let read_incompatible_rule = ( - fun p lb -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_lcurl p lb; - let field_rule_id = ref (None) in - let field_this_version = ref (None) in - let field_min_version = ref (None) in - let field_max_version = ref (None) in - try - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_end lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 7 -> ( - if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 0 - ) - else ( - -1 - ) - ) - | 11 -> ( - if String.unsafe_get s pos = 'm' then ( - match String.unsafe_get s (pos+1) with - | 'a' -> ( - if String.unsafe_get s (pos+2) = 'x' && String.unsafe_get s (pos+3) = '_' && 
String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( - 3 - ) - else ( - -1 - ) - ) - | 'i' -> ( - if String.unsafe_get s (pos+2) = 'n' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( - 2 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - else ( - -1 - ) - ) - | 12 -> ( - if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'h' && String.unsafe_get s (pos+2) = 'i' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'v' && String.unsafe_get s (pos+6) = 'e' && String.unsafe_get s (pos+7) = 'r' && String.unsafe_get s (pos+8) = 's' && String.unsafe_get s (pos+9) = 'i' && String.unsafe_get s (pos+10) = 'o' && String.unsafe_get s (pos+11) = 'n' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - | 1 -> - field_this_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - | 2 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_min_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - ) - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_max_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - while true do - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_sep p lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 7 -> ( - if String.unsafe_get s pos = 'r' && String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 0 - ) - else ( - -1 - ) - ) - | 11 -> ( - if String.unsafe_get s pos = 'm' then ( - match String.unsafe_get s (pos+1) with - | 'a' -> ( - if String.unsafe_get s (pos+2) = 'x' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( - 3 - ) - else ( - -1 - ) - ) - | 'i' -> ( - if String.unsafe_get s (pos+2) = 'n' && String.unsafe_get s (pos+3) = '_' && String.unsafe_get s (pos+4) = 'v' && String.unsafe_get s (pos+5) = 'e' && String.unsafe_get s (pos+6) = 'r' && String.unsafe_get s (pos+7) = 's' && String.unsafe_get s (pos+8) = 'i' && String.unsafe_get s (pos+9) = 'o' && String.unsafe_get s (pos+10) = 'n' then ( - 2 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - else ( - -1 - ) - ) - | 12 -> ( - if String.unsafe_get s pos = 't' && String.unsafe_get s (pos+1) = 'h' && String.unsafe_get s (pos+2) = 'i' 
&& String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'v' && String.unsafe_get s (pos+6) = 'e' && String.unsafe_get s (pos+7) = 'r' && String.unsafe_get s (pos+8) = 's' && String.unsafe_get s (pos+9) = 'i' && String.unsafe_get s (pos+10) = 'o' && String.unsafe_get s (pos+11) = 'n' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - | 1 -> - field_this_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - | 2 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_min_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - ) - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_max_version := ( - Some ( - ( - read_version - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - done; - assert false; - with Yojson.End_of_object -> ( - ( - { - rule_id = (match !field_rule_id with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "rule_id"); - this_version = (match !field_this_version with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "this_version"); - min_version = !field_min_version; - max_version = !field_max_version; - } - : incompatible_rule) - ) -) -let incompatible_rule_of_string s = - read_incompatible_rule (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write_finding_hashes : _ -> finding_hashes -> _ = ( fun ob (x : finding_hashes) -> Buffer.add_char ob '{'; @@ -24524,294 +25505,6 @@ let read_finding = ( ) let finding_of_string s = read_finding (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_error_type : _ -> error_type -> _ = ( - fun ob (x : error_type) -> - match x with - | LexicalError -> Buffer.add_string ob "\"Lexical error\"" - | ParseError -> Buffer.add_string ob "\"Syntax error\"" - | OtherParseError -> Buffer.add_string ob "\"Other syntax error\"" - | AstBuilderError -> Buffer.add_string ob "\"AST builder error\"" - | RuleParseError -> Buffer.add_string ob "\"Rule parse error\"" - | SemgrepWarning -> Buffer.add_string ob "\"SemgrepWarning\"" - | SemgrepError -> Buffer.add_string ob "\"SemgrepError\"" - | InvalidRuleSchemaError -> Buffer.add_string ob "\"InvalidRuleSchemaError\"" - | UnknownLanguageError -> Buffer.add_string ob "\"UnknownLanguageError\"" - | InvalidYaml -> Buffer.add_string ob "\"Invalid YAML\"" - | MatchingError -> Buffer.add_string ob "\"Internal matching error\"" - | SemgrepMatchFound -> Buffer.add_string ob "\"Semgrep match found\"" - | TooManyMatches -> Buffer.add_string ob "\"Too many matches\"" - | FatalError -> Buffer.add_string ob "\"Fatal error\"" - | Timeout -> Buffer.add_string ob "\"Timeout\"" - | OutOfMemory -> Buffer.add_string ob "\"Out of memory\"" - | StackOverflow -> Buffer.add_string ob "\"Stack overflow\"" - | TimeoutDuringInterfile -> Buffer.add_string ob "\"Timeout during interfile analysis\"" - | OutOfMemoryDuringInterfile -> Buffer.add_string ob "\"OOM during interfile analysis\"" - | MissingPlugin -> Buffer.add_string ob "\"Missing plugin\"" - | PatternParseError x -> - Buffer.add_string ob "[\"PatternParseError\","; - ( - write__string_list - ) ob x; - Buffer.add_char ob ']' - | PartialParsing x -> - Buffer.add_string ob "[\"PartialParsing\","; - ( - write__location_list - ) ob x; - Buffer.add_char ob ']' - | IncompatibleRule x -> - Buffer.add_string ob "[\"IncompatibleRule\","; 
- ( - write_incompatible_rule - ) ob x; - Buffer.add_char ob ']' - | PatternParseError0 -> Buffer.add_string ob "\"Pattern parse error\"" - | IncompatibleRule0 -> Buffer.add_string ob "\"Incompatible rule\"" - | DependencyResolutionError x -> - Buffer.add_string ob "[\"DependencyResolutionError\","; - ( - write_resolution_error - ) ob x; - Buffer.add_char ob ']' -) -let string_of_error_type ?(len = 1024) x = - let ob = Buffer.create len in - write_error_type ob x; - Buffer.contents ob -let read_error_type = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "Lexical error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (LexicalError : error_type) - | "Syntax error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (ParseError : error_type) - | "Other syntax error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (OtherParseError : error_type) - | "AST builder error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (AstBuilderError : error_type) - | "Rule parse error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (RuleParseError : error_type) - | "SemgrepWarning" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (SemgrepWarning : error_type) - | "SemgrepError" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (SemgrepError : error_type) - | "InvalidRuleSchemaError" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (InvalidRuleSchemaError : error_type) - | "UnknownLanguageError" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (UnknownLanguageError : error_type) - | "Invalid YAML" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (InvalidYaml : error_type) - | "Internal matching error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (MatchingError : error_type) - | "Semgrep match found" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (SemgrepMatchFound : error_type) - | "Too many matches" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (TooManyMatches : error_type) - | "Fatal error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (FatalError : error_type) - | "Timeout" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Timeout : error_type) - | "Out of memory" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (OutOfMemory : error_type) - | "Stack overflow" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (StackOverflow : error_type) - | "Timeout during interfile analysis" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (TimeoutDuringInterfile : error_type) - | "OOM during interfile analysis" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (OutOfMemoryDuringInterfile : error_type) - | "Missing plugin" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (MissingPlugin : error_type) - | "PatternParseError" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read__string_list - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (PatternParseError x : error_type) - | "PartialParsing" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read__location_list - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (PartialParsing x : error_type) - | "IncompatibleRule" -> - Atdgen_runtime.Oj_run.read_until_field_value p 
lb; - let x = ( - read_incompatible_rule - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (IncompatibleRule x : error_type) - | "Pattern parse error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (PatternParseError0 : error_type) - | "Incompatible rule" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (IncompatibleRule0 : error_type) - | "DependencyResolutionError" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read_resolution_error - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (DependencyResolutionError x : error_type) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "Lexical error" -> - (LexicalError : error_type) - | "Syntax error" -> - (ParseError : error_type) - | "Other syntax error" -> - (OtherParseError : error_type) - | "AST builder error" -> - (AstBuilderError : error_type) - | "Rule parse error" -> - (RuleParseError : error_type) - | "SemgrepWarning" -> - (SemgrepWarning : error_type) - | "SemgrepError" -> - (SemgrepError : error_type) - | "InvalidRuleSchemaError" -> - (InvalidRuleSchemaError : error_type) - | "UnknownLanguageError" -> - (UnknownLanguageError : error_type) - | "Invalid YAML" -> - (InvalidYaml : error_type) - | "Internal matching error" -> - (MatchingError : error_type) - | "Semgrep match found" -> - (SemgrepMatchFound : error_type) - | "Too many matches" -> - (TooManyMatches : error_type) - | "Fatal error" -> - (FatalError : error_type) - | "Timeout" -> - (Timeout : error_type) - | "Out of memory" -> - (OutOfMemory : error_type) - | "Stack overflow" -> - (StackOverflow : error_type) - | "Timeout during interfile analysis" -> - (TimeoutDuringInterfile : error_type) - | "OOM during interfile analysis" -> - (OutOfMemoryDuringInterfile : error_type) - | "Missing plugin" -> - (MissingPlugin : error_type) - | "Pattern parse error" -> - (PatternParseError0 : error_type) - | "Incompatible rule" -> - (IncompatibleRule0 : error_type) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | "PatternParseError" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read__string_list - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (PatternParseError x : error_type) - | "PartialParsing" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read__location_list - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (PartialParsing x : error_type) - | "IncompatibleRule" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_incompatible_rule - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (IncompatibleRule x : error_type) - | "DependencyResolutionError" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_resolution_error - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (DependencyResolutionError x : error_type) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let error_type_of_string s = - read_error_type (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write__string_list_nullable = ( Atdgen_runtime.Oj_run.write_nullable ( write__string_list @@ 
-25488,57 +26181,6 @@ let read_error_span = ( ) let error_span_of_string s = read_error_span (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_error_severity = ( - fun ob x -> - match x with - | `Error -> Buffer.add_string ob "\"error\"" - | `Warning -> Buffer.add_string ob "\"warn\"" - | `Info -> Buffer.add_string ob "\"info\"" -) -let string_of_error_severity ?(len = 1024) x = - let ob = Buffer.create len in - write_error_severity ob x; - Buffer.contents ob -let read_error_severity = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "error" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `Error - | "warn" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `Warning - | "info" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - `Info - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "error" -> - `Error - | "warn" -> - `Warning - | "info" -> - `Info - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let error_severity_of_string s = - read_error_severity (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write_dependency_parser_error : _ -> dependency_parser_error -> _ = ( fun ob (x : dependency_parser_error) -> Buffer.add_char ob '{'; @@ -30081,6 +30723,12 @@ let write_function_return = ( write__transitive_finding_list ) ob x; Buffer.add_char ob ']' + | `RetGetTargets x -> + Buffer.add_string ob "[\"RetGetTargets\","; + ( + write_target_discovery_result + ) ob x; + Buffer.add_char ob ']' ) let string_of_function_return ?(len = 1024) x = let ob = Buffer.create len in @@ -30182,6 +30830,15 @@ let read_function_return = ( Yojson.Safe.read_space p lb; Yojson.Safe.read_gt p lb; `RetTransitiveReachabilityFilter x + | "RetGetTargets" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_target_discovery_result + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `RetGetTargets x | x -> Atdgen_runtime.Oj_run.invalid_variant_tag p x ) @@ -30302,6 +30959,17 @@ let read_function_return = ( Yojson.Safe.read_space p lb; Yojson.Safe.read_rbr p lb; `RetTransitiveReachabilityFilter x + | "RetGetTargets" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_target_discovery_result + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + `RetGetTargets x | x -> Atdgen_runtime.Oj_run.invalid_variant_tag p x ) @@ -33269,6 +33937,12 @@ let write_function_call = ( write__transitive_finding_list ) ob x; Buffer.add_char ob ']' + | `CallGetTargets x -> + Buffer.add_string ob "[\"CallGetTargets\","; + ( + write_scanning_roots + ) ob x; + Buffer.add_char ob ']' ) let string_of_function_call ?(len = 1024) x = let ob = Buffer.create len in @@ -33512,6 +34186,15 @@ let read_function_call = ( Yojson.Safe.read_space p lb; Yojson.Safe.read_gt p lb; `CallTransitiveReachabilityFilter x + | "CallGetTargets" -> + Atdgen_runtime.Oj_run.read_until_field_value p lb; + let x = ( + read_scanning_roots + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_gt p lb; + `CallGetTargets x | x -> Atdgen_runtime.Oj_run.invalid_variant_tag p x ) @@ -33768,6 +34451,17 @@ let read_function_call = ( 
Yojson.Safe.read_space p lb; Yojson.Safe.read_rbr p lb; `CallTransitiveReachabilityFilter x + | "CallGetTargets" -> + Yojson.Safe.read_space p lb; + Yojson.Safe.read_comma p lb; + Yojson.Safe.read_space p lb; + let x = ( + read_scanning_roots + ) p lb + in + Yojson.Safe.read_space p lb; + Yojson.Safe.read_rbr p lb; + `CallGetTargets x | x -> Atdgen_runtime.Oj_run.invalid_variant_tag p x ) @@ -35287,446 +35981,6 @@ let read_core_output_extra = ( ) let core_output_extra_of_string s = read_core_output_extra (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write__location_option = ( - Atdgen_runtime.Oj_run.write_std_option ( - write_location - ) -) -let string_of__location_option ?(len = 1024) x = - let ob = Buffer.create len in - write__location_option ob x; - Buffer.contents ob -let read__location_option = ( - fun p lb -> - Yojson.Safe.read_space p lb; - match Yojson.Safe.start_any_variant p lb with - | `Edgy_bracket -> ( - match Yojson.Safe.read_ident p lb with - | "None" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (None : _ option) - | "Some" -> - Atdgen_runtime.Oj_run.read_until_field_value p lb; - let x = ( - read_location - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_gt p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Double_quote -> ( - match Yojson.Safe.finish_string p lb with - | "None" -> - (None : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) - | `Square_bracket -> ( - match Atdgen_runtime.Oj_run.read_string p lb with - | "Some" -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_comma p lb; - Yojson.Safe.read_space p lb; - let x = ( - read_location - ) p lb - in - Yojson.Safe.read_space p lb; - Yojson.Safe.read_rbr p lb; - (Some x : _ option) - | x -> - Atdgen_runtime.Oj_run.invalid_variant_tag p x - ) -) -let _location_option_of_string s = - read__location_option (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write_core_error : _ -> core_error -> _ = ( - fun ob (x : core_error) -> - Buffer.add_char ob '{'; - let is_first = ref true in - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"error_type\":"; - ( - write_error_type - ) - ob x.error_type; - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"severity\":"; - ( - write_error_severity - ) - ob x.severity; - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"message\":"; - ( - Yojson.Safe.write_string - ) - ob x.message; - (match x.details with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"details\":"; - ( - Yojson.Safe.write_string - ) - ob x; - ); - (match x.location with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"location\":"; - ( - write_location - ) - ob x; - ); - (match x.rule_id with None -> () | Some x -> - if !is_first then - is_first := false - else - Buffer.add_char ob ','; - Buffer.add_string ob "\"rule_id\":"; - ( - write_rule_id - ) - ob x; - ); - Buffer.add_char ob '}'; -) -let string_of_core_error ?(len = 1024) x = - let ob = Buffer.create len in - write_core_error ob x; - Buffer.contents ob -let read_core_error = ( - fun p lb -> - Yojson.Safe.read_space p lb; - Yojson.Safe.read_lcurl p lb; - let field_error_type = ref (None) in - let field_severity = ref (None) in - let field_message = ref (None) in 
- let field_details = ref (None) in - let field_location = ref (None) in - let field_rule_id = ref (None) in - try - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_end lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 7 -> ( - match String.unsafe_get s pos with - | 'd' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( - 3 - ) - else ( - -1 - ) - ) - | 'm' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( - 2 - ) - else ( - -1 - ) - ) - | 'r' -> ( - if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 5 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | 8 -> ( - match String.unsafe_get s pos with - | 'l' -> ( - if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'c' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 't' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'o' && String.unsafe_get s (pos+7) = 'n' then ( - 4 - ) - else ( - -1 - ) - ) - | 's' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'v' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | 10 -> ( - if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = '_' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' && String.unsafe_get s (pos+8) = 'p' && String.unsafe_get s (pos+9) = 'e' then ( - 0 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_error_type := ( - Some ( - ( - read_error_type - ) p lb - ) - ); - | 1 -> - field_severity := ( - Some ( - ( - read_error_severity - ) p lb - ) - ); - | 2 -> - field_message := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_details := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - ) - | 4 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_location := ( - Some ( - ( - read_location - ) p lb - ) - ); - ) - | 5 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - while true do - Yojson.Safe.read_space p lb; - Yojson.Safe.read_object_sep p lb; - Yojson.Safe.read_space p lb; - let f = - fun s pos len -> - if pos < 0 || len < 0 || pos + len > String.length s 
then - invalid_arg (Printf.sprintf "out-of-bounds substring position or length: string = %S, requested position = %i, requested length = %i" s pos len); - match len with - | 7 -> ( - match String.unsafe_get s pos with - | 'd' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 't' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 'i' && String.unsafe_get s (pos+5) = 'l' && String.unsafe_get s (pos+6) = 's' then ( - 3 - ) - else ( - -1 - ) - ) - | 'm' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 's' && String.unsafe_get s (pos+3) = 's' && String.unsafe_get s (pos+4) = 'a' && String.unsafe_get s (pos+5) = 'g' && String.unsafe_get s (pos+6) = 'e' then ( - 2 - ) - else ( - -1 - ) - ) - | 'r' -> ( - if String.unsafe_get s (pos+1) = 'u' && String.unsafe_get s (pos+2) = 'l' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = '_' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'd' then ( - 5 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | 8 -> ( - match String.unsafe_get s pos with - | 'l' -> ( - if String.unsafe_get s (pos+1) = 'o' && String.unsafe_get s (pos+2) = 'c' && String.unsafe_get s (pos+3) = 'a' && String.unsafe_get s (pos+4) = 't' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 'o' && String.unsafe_get s (pos+7) = 'n' then ( - 4 - ) - else ( - -1 - ) - ) - | 's' -> ( - if String.unsafe_get s (pos+1) = 'e' && String.unsafe_get s (pos+2) = 'v' && String.unsafe_get s (pos+3) = 'e' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = 'i' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' then ( - 1 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - ) - | 10 -> ( - if String.unsafe_get s pos = 'e' && String.unsafe_get s (pos+1) = 'r' && String.unsafe_get s (pos+2) = 'r' && String.unsafe_get s (pos+3) = 'o' && String.unsafe_get s (pos+4) = 'r' && String.unsafe_get s (pos+5) = '_' && String.unsafe_get s (pos+6) = 't' && String.unsafe_get s (pos+7) = 'y' && String.unsafe_get s (pos+8) = 'p' && String.unsafe_get s (pos+9) = 'e' then ( - 0 - ) - else ( - -1 - ) - ) - | _ -> ( - -1 - ) - in - let i = Yojson.Safe.map_ident p f lb in - Atdgen_runtime.Oj_run.read_until_field_value p lb; - ( - match i with - | 0 -> - field_error_type := ( - Some ( - ( - read_error_type - ) p lb - ) - ); - | 1 -> - field_severity := ( - Some ( - ( - read_error_severity - ) p lb - ) - ); - | 2 -> - field_message := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - | 3 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_details := ( - Some ( - ( - Atdgen_runtime.Oj_run.read_string - ) p lb - ) - ); - ) - | 4 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_location := ( - Some ( - ( - read_location - ) p lb - ) - ); - ) - | 5 -> - if not (Yojson.Safe.read_null_if_possible p lb) then ( - field_rule_id := ( - Some ( - ( - read_rule_id - ) p lb - ) - ); - ) - | _ -> ( - Yojson.Safe.skip_json p lb - ) - ); - done; - assert false; - with Yojson.End_of_object -> ( - ( - { - error_type = (match !field_error_type with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "error_type"); - severity = (match !field_severity with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "severity"); - message = (match !field_message with Some x -> x | None -> Atdgen_runtime.Oj_run.missing_field p "message"); - details = !field_details; - location = !field_location; - rule_id = 
!field_rule_id; - } - : core_error) - ) -) -let core_error_of_string s = - read_core_error (Yojson.Safe.init_lexer ()) (Lexing.from_string s) -let write__core_error_list = ( - Atdgen_runtime.Oj_run.write_list ( - write_core_error - ) -) -let string_of__core_error_list ?(len = 1024) x = - let ob = Buffer.create len in - write__core_error_list ob x; - Buffer.contents ob -let read__core_error_list = ( - Atdgen_runtime.Oj_run.read_list ( - read_core_error - ) -) -let _core_error_list_of_string s = - read__core_error_list (Yojson.Safe.init_lexer ()) (Lexing.from_string s) let write_core_output : _ -> core_output -> _ = ( fun ob (x : core_output) -> Buffer.add_char ob '{'; diff --git a/semgrep_output_v1_j.mli b/semgrep_output_v1_j.mli index 7991a41..891140d 100644 --- a/semgrep_output_v1_j.mli +++ b/semgrep_output_v1_j.mli @@ -345,6 +345,89 @@ type target_times = Semgrep_output_v1_t.target_times = { run_time: float } +type skip_reason = Semgrep_output_v1_t.skip_reason = + Always_skipped | Semgrepignore_patterns_match + | Cli_include_flags_do_not_match | Cli_exclude_flags_match + | Exceeded_size_limit | Analysis_failed_parser_or_internal_error + | Excluded_by_config | Wrong_language | Too_big | Minified | Binary + | Irrelevant_rule | Too_many_matches | Gitignore_patterns_match | Dotfile + | Nonexistent_file | Insufficient_permissions + + [@@deriving show] + +type skipped_target = Semgrep_output_v1_t.skipped_target = { + path: fpath; + reason: skip_reason; + details: string option; + rule_id: rule_id option +} + [@@deriving show] + +type resolution_cmd_failed = Semgrep_output_v1_t.resolution_cmd_failed = { + command: string; + message: string +} + [@@deriving show] + +type resolution_error = Semgrep_output_v1_t.resolution_error + [@@deriving show] + +type incompatible_rule = Semgrep_output_v1_t.incompatible_rule = { + rule_id: rule_id; + this_version: version; + min_version: version option; + max_version: version option +} + [@@deriving show] + +type error_type = Semgrep_output_v1_t.error_type = + LexicalError + | ParseError + | OtherParseError + | AstBuilderError + | RuleParseError + | SemgrepWarning + | SemgrepError + | InvalidRuleSchemaError + | UnknownLanguageError + | InvalidYaml + | MatchingError + | SemgrepMatchFound + | TooManyMatches + | FatalError + | Timeout + | OutOfMemory + | StackOverflow + | TimeoutDuringInterfile + | OutOfMemoryDuringInterfile + | MissingPlugin + | PatternParseError of string list + | PartialParsing of location list + | IncompatibleRule of incompatible_rule + | PatternParseError0 + | IncompatibleRule0 + | DependencyResolutionError of resolution_error + + [@@deriving show] + +type error_severity = Semgrep_output_v1_t.error_severity + [@@deriving show, eq] + +type core_error = Semgrep_output_v1_t.core_error = { + error_type: error_type; + severity: error_severity; + message: string; + details: string option; + location: location option; + rule_id: rule_id option +} + +type target_discovery_result = Semgrep_output_v1_t.target_discovery_result = { + target_paths: fpath list; + errors: core_error list; + skipped: skipped_target list +} + type tag = Semgrep_output_v1_t.tag type symbol = Semgrep_output_v1_t.symbol = { fqn: string list } @@ -389,24 +472,6 @@ type supply_chain_stats = Semgrep_output_v1_t.supply_chain_stats = { subprojects_stats: subproject_stats list } -type skip_reason = Semgrep_output_v1_t.skip_reason = - Always_skipped | Semgrepignore_patterns_match - | Cli_include_flags_do_not_match | Cli_exclude_flags_match - | Exceeded_size_limit | 
Analysis_failed_parser_or_internal_error - | Excluded_by_config | Wrong_language | Too_big | Minified | Binary - | Irrelevant_rule | Too_many_matches | Gitignore_patterns_match | Dotfile - | Nonexistent_file | Insufficient_permissions - - [@@deriving show] - -type skipped_target = Semgrep_output_v1_t.skipped_target = { - path: fpath; - reason: skip_reason; - details: string option; - rule_id: rule_id option -} - [@@deriving show] - type skipped_rule = Semgrep_output_v1_t.skipped_rule = { rule_id: rule_id; details: string; @@ -563,15 +628,6 @@ type engine_kind = Semgrep_output_v1_t.engine_kind [@@deriving show] type rule_id_and_engine_kind = Semgrep_output_v1_t.rule_id_and_engine_kind -type resolution_cmd_failed = Semgrep_output_v1_t.resolution_cmd_failed = { - command: string; - message: string -} - [@@deriving show] - -type resolution_error = Semgrep_output_v1_t.resolution_error - [@@deriving show] - type resolution_result = Semgrep_output_v1_t.resolution_result type profile = Semgrep_output_v1_t.profile = { @@ -590,14 +646,6 @@ type parsing_stats = Semgrep_output_v1_t.parsing_stats = { num_bytes: int } -type incompatible_rule = Semgrep_output_v1_t.incompatible_rule = { - rule_id: rule_id; - this_version: version; - min_version: version option; - max_version: version option -} - [@@deriving show] - type finding_hashes = Semgrep_output_v1_t.finding_hashes = { start_line_hash: string; end_line_hash: string; @@ -629,36 +677,6 @@ type finding = Semgrep_output_v1_t.finding = { engine_kind: engine_of_finding option } -type error_type = Semgrep_output_v1_t.error_type = - LexicalError - | ParseError - | OtherParseError - | AstBuilderError - | RuleParseError - | SemgrepWarning - | SemgrepError - | InvalidRuleSchemaError - | UnknownLanguageError - | InvalidYaml - | MatchingError - | SemgrepMatchFound - | TooManyMatches - | FatalError - | Timeout - | OutOfMemory - | StackOverflow - | TimeoutDuringInterfile - | OutOfMemoryDuringInterfile - | MissingPlugin - | PatternParseError of string list - | PartialParsing of location list - | IncompatibleRule of incompatible_rule - | PatternParseError0 - | IncompatibleRule0 - | DependencyResolutionError of resolution_error - - [@@deriving show] - type error_span = Semgrep_output_v1_t.error_span = { file: fpath; start: position; @@ -671,9 +689,6 @@ type error_span = Semgrep_output_v1_t.error_span = { context_end: position option option } -type error_severity = Semgrep_output_v1_t.error_severity - [@@deriving show, eq] - type dependency_parser_error = Semgrep_output_v1_t.dependency_parser_error = { path: fpath; parser: sca_parser_name; @@ -898,15 +913,6 @@ type core_output_extra = Semgrep_output_v1_t.core_output_extra = { symbol_analysis: symbol_analysis option } -type core_error = Semgrep_output_v1_t.core_error = { - error_type: error_type; - severity: error_severity; - message: string; - details: string option; - location: location option; - rule_id: rule_id option -} - type core_output = Semgrep_output_v1_t.core_output = { version: version; results: core_match list; @@ -2273,6 +2279,186 @@ val target_times_of_string : string -> target_times (** Deserialize JSON data of type {!type:target_times}. *) +val write_skip_reason : + Buffer.t -> skip_reason -> unit + (** Output a JSON value of type {!type:skip_reason}. *) + +val string_of_skip_reason : + ?len:int -> skip_reason -> string + (** Serialize a value of type {!type:skip_reason} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. 
*) + +val read_skip_reason : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> skip_reason + (** Input JSON data of type {!type:skip_reason}. *) + +val skip_reason_of_string : + string -> skip_reason + (** Deserialize JSON data of type {!type:skip_reason}. *) + +val write_skipped_target : + Buffer.t -> skipped_target -> unit + (** Output a JSON value of type {!type:skipped_target}. *) + +val string_of_skipped_target : + ?len:int -> skipped_target -> string + (** Serialize a value of type {!type:skipped_target} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_skipped_target : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> skipped_target + (** Input JSON data of type {!type:skipped_target}. *) + +val skipped_target_of_string : + string -> skipped_target + (** Deserialize JSON data of type {!type:skipped_target}. *) + +val write_resolution_cmd_failed : + Buffer.t -> resolution_cmd_failed -> unit + (** Output a JSON value of type {!type:resolution_cmd_failed}. *) + +val string_of_resolution_cmd_failed : + ?len:int -> resolution_cmd_failed -> string + (** Serialize a value of type {!type:resolution_cmd_failed} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_resolution_cmd_failed : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> resolution_cmd_failed + (** Input JSON data of type {!type:resolution_cmd_failed}. *) + +val resolution_cmd_failed_of_string : + string -> resolution_cmd_failed + (** Deserialize JSON data of type {!type:resolution_cmd_failed}. *) + +val write_resolution_error : + Buffer.t -> resolution_error -> unit + (** Output a JSON value of type {!type:resolution_error}. *) + +val string_of_resolution_error : + ?len:int -> resolution_error -> string + (** Serialize a value of type {!type:resolution_error} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_resolution_error : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> resolution_error + (** Input JSON data of type {!type:resolution_error}. *) + +val resolution_error_of_string : + string -> resolution_error + (** Deserialize JSON data of type {!type:resolution_error}. *) + +val write_incompatible_rule : + Buffer.t -> incompatible_rule -> unit + (** Output a JSON value of type {!type:incompatible_rule}. *) + +val string_of_incompatible_rule : + ?len:int -> incompatible_rule -> string + (** Serialize a value of type {!type:incompatible_rule} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_incompatible_rule : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> incompatible_rule + (** Input JSON data of type {!type:incompatible_rule}. *) + +val incompatible_rule_of_string : + string -> incompatible_rule + (** Deserialize JSON data of type {!type:incompatible_rule}. *) + +val write_error_type : + Buffer.t -> error_type -> unit + (** Output a JSON value of type {!type:error_type}. *) + +val string_of_error_type : + ?len:int -> error_type -> string + (** Serialize a value of type {!type:error_type} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_error_type : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> error_type + (** Input JSON data of type {!type:error_type}. 
*) + +val error_type_of_string : + string -> error_type + (** Deserialize JSON data of type {!type:error_type}. *) + +val write_error_severity : + Buffer.t -> error_severity -> unit + (** Output a JSON value of type {!type:error_severity}. *) + +val string_of_error_severity : + ?len:int -> error_severity -> string + (** Serialize a value of type {!type:error_severity} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_error_severity : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> error_severity + (** Input JSON data of type {!type:error_severity}. *) + +val error_severity_of_string : + string -> error_severity + (** Deserialize JSON data of type {!type:error_severity}. *) + +val write_core_error : + Buffer.t -> core_error -> unit + (** Output a JSON value of type {!type:core_error}. *) + +val string_of_core_error : + ?len:int -> core_error -> string + (** Serialize a value of type {!type:core_error} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_core_error : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> core_error + (** Input JSON data of type {!type:core_error}. *) + +val core_error_of_string : + string -> core_error + (** Deserialize JSON data of type {!type:core_error}. *) + +val write_target_discovery_result : + Buffer.t -> target_discovery_result -> unit + (** Output a JSON value of type {!type:target_discovery_result}. *) + +val string_of_target_discovery_result : + ?len:int -> target_discovery_result -> string + (** Serialize a value of type {!type:target_discovery_result} + into a JSON string. + @param len specifies the initial length + of the buffer used internally. + Default: 1024. *) + +val read_target_discovery_result : + Yojson.Safe.lexer_state -> Lexing.lexbuf -> target_discovery_result + (** Input JSON data of type {!type:target_discovery_result}. *) + +val target_discovery_result_of_string : + string -> target_discovery_result + (** Deserialize JSON data of type {!type:target_discovery_result}. *) + val write_tag : Buffer.t -> tag -> unit (** Output a JSON value of type {!type:tag}. *) @@ -2493,46 +2679,6 @@ val supply_chain_stats_of_string : string -> supply_chain_stats (** Deserialize JSON data of type {!type:supply_chain_stats}. *) -val write_skip_reason : - Buffer.t -> skip_reason -> unit - (** Output a JSON value of type {!type:skip_reason}. *) - -val string_of_skip_reason : - ?len:int -> skip_reason -> string - (** Serialize a value of type {!type:skip_reason} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_skip_reason : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> skip_reason - (** Input JSON data of type {!type:skip_reason}. *) - -val skip_reason_of_string : - string -> skip_reason - (** Deserialize JSON data of type {!type:skip_reason}. *) - -val write_skipped_target : - Buffer.t -> skipped_target -> unit - (** Output a JSON value of type {!type:skipped_target}. *) - -val string_of_skipped_target : - ?len:int -> skipped_target -> string - (** Serialize a value of type {!type:skipped_target} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_skipped_target : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> skipped_target - (** Input JSON data of type {!type:skipped_target}. 
*) - -val skipped_target_of_string : - string -> skipped_target - (** Deserialize JSON data of type {!type:skipped_target}. *) - val write_skipped_rule : Buffer.t -> skipped_rule -> unit (** Output a JSON value of type {!type:skipped_rule}. *) @@ -2973,46 +3119,6 @@ val rule_id_and_engine_kind_of_string : string -> rule_id_and_engine_kind (** Deserialize JSON data of type {!type:rule_id_and_engine_kind}. *) -val write_resolution_cmd_failed : - Buffer.t -> resolution_cmd_failed -> unit - (** Output a JSON value of type {!type:resolution_cmd_failed}. *) - -val string_of_resolution_cmd_failed : - ?len:int -> resolution_cmd_failed -> string - (** Serialize a value of type {!type:resolution_cmd_failed} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_resolution_cmd_failed : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> resolution_cmd_failed - (** Input JSON data of type {!type:resolution_cmd_failed}. *) - -val resolution_cmd_failed_of_string : - string -> resolution_cmd_failed - (** Deserialize JSON data of type {!type:resolution_cmd_failed}. *) - -val write_resolution_error : - Buffer.t -> resolution_error -> unit - (** Output a JSON value of type {!type:resolution_error}. *) - -val string_of_resolution_error : - ?len:int -> resolution_error -> string - (** Serialize a value of type {!type:resolution_error} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_resolution_error : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> resolution_error - (** Input JSON data of type {!type:resolution_error}. *) - -val resolution_error_of_string : - string -> resolution_error - (** Deserialize JSON data of type {!type:resolution_error}. *) - val write_resolution_result : Buffer.t -> resolution_result -> unit (** Output a JSON value of type {!type:resolution_result}. *) @@ -3073,26 +3179,6 @@ val parsing_stats_of_string : string -> parsing_stats (** Deserialize JSON data of type {!type:parsing_stats}. *) -val write_incompatible_rule : - Buffer.t -> incompatible_rule -> unit - (** Output a JSON value of type {!type:incompatible_rule}. *) - -val string_of_incompatible_rule : - ?len:int -> incompatible_rule -> string - (** Serialize a value of type {!type:incompatible_rule} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_incompatible_rule : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> incompatible_rule - (** Input JSON data of type {!type:incompatible_rule}. *) - -val incompatible_rule_of_string : - string -> incompatible_rule - (** Deserialize JSON data of type {!type:incompatible_rule}. *) - val write_finding_hashes : Buffer.t -> finding_hashes -> unit (** Output a JSON value of type {!type:finding_hashes}. *) @@ -3133,26 +3219,6 @@ val finding_of_string : string -> finding (** Deserialize JSON data of type {!type:finding}. *) -val write_error_type : - Buffer.t -> error_type -> unit - (** Output a JSON value of type {!type:error_type}. *) - -val string_of_error_type : - ?len:int -> error_type -> string - (** Serialize a value of type {!type:error_type} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_error_type : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> error_type - (** Input JSON data of type {!type:error_type}. 
*) - -val error_type_of_string : - string -> error_type - (** Deserialize JSON data of type {!type:error_type}. *) - val write_error_span : Buffer.t -> error_span -> unit (** Output a JSON value of type {!type:error_span}. *) @@ -3173,26 +3239,6 @@ val error_span_of_string : string -> error_span (** Deserialize JSON data of type {!type:error_span}. *) -val write_error_severity : - Buffer.t -> error_severity -> unit - (** Output a JSON value of type {!type:error_severity}. *) - -val string_of_error_severity : - ?len:int -> error_severity -> string - (** Serialize a value of type {!type:error_severity} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_error_severity : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> error_severity - (** Input JSON data of type {!type:error_severity}. *) - -val error_severity_of_string : - string -> error_severity - (** Deserialize JSON data of type {!type:error_severity}. *) - val write_dependency_parser_error : Buffer.t -> dependency_parser_error -> unit (** Output a JSON value of type {!type:dependency_parser_error}. *) @@ -3833,26 +3879,6 @@ val core_output_extra_of_string : string -> core_output_extra (** Deserialize JSON data of type {!type:core_output_extra}. *) -val write_core_error : - Buffer.t -> core_error -> unit - (** Output a JSON value of type {!type:core_error}. *) - -val string_of_core_error : - ?len:int -> core_error -> string - (** Serialize a value of type {!type:core_error} - into a JSON string. - @param len specifies the initial length - of the buffer used internally. - Default: 1024. *) - -val read_core_error : - Yojson.Safe.lexer_state -> Lexing.lexbuf -> core_error - (** Input JSON data of type {!type:core_error}. *) - -val core_error_of_string : - string -> core_error - (** Deserialize JSON data of type {!type:core_error}. *) - val write_core_output : Buffer.t -> core_output -> unit (** Output a JSON value of type {!type:core_output}. *)
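
The interface changes above expose (de)serializers for the new `target_discovery_result` record and, in `semgrep_output_v1_j.ml`, add handling for the new `CallGetTargets`/`RetGetTargets` variants of `function_call`/`function_return`. A minimal, illustrative OCaml sketch of how a caller might exercise these generated functions follows; it is not part of the generated code, the `_demo` function and the sample JSON payload are hypothetical, and it only assumes the signatures declared in `semgrep_output_v1_j.mli` plus `string_of_function_return` being exported like the other named types:

    (* Hypothetical usage sketch; not part of the generated code. *)
    let _demo () =
      (* fpath values appear as plain JSON strings in this schema, so
         round-tripping through JSON avoids constructing them directly. *)
      let json =
        {|{"target_paths":["src/app.py"],"errors":[],"skipped":[]}|}
      in
      (* Deserialize into the new record using the generated reader. *)
      let (res : Semgrep_output_v1_t.target_discovery_result) =
        Semgrep_output_v1_j.target_discovery_result_of_string json
      in
      (* Serialize it back, and also wrap it in the new RPC return variant
         added to function_return. *)
      print_endline
        (Semgrep_output_v1_j.string_of_target_discovery_result res);
      print_endline
        (Semgrep_output_v1_j.string_of_function_return (`RetGetTargets res))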