diff --git a/pyk/src/pyk/__main__.py b/pyk/src/pyk/__main__.py index 3b297979af1..2358523002c 100644 --- a/pyk/src/pyk/__main__.py +++ b/pyk/src/pyk/__main__.py @@ -3,15 +3,28 @@ import json import logging import sys -from collections.abc import Iterable +from argparse import FileType from contextlib import contextmanager from pathlib import Path from typing import TYPE_CHECKING from graphviz import Digraph -from .cli.pyk import PrintInput, create_argument_parser, generate_options, parse_toml_args -from .cli.utils import LOG_FORMAT, loglevel +from pyk.cli.utils import dir_path + +from .cli.args import ( + DefinitionOptionsGroup, + DisplayOptionsGroup, + KDefinitionOptionsGroup, + KompileOptionsGroup, + LoggingOptionsGroup, + OutputFileOptionsGroup, + SpecOptionsGroup, + WarningOptionsGroup, +) +from .cli.cli import CLI, Command, Option +from .cli.pyk import PrintInput +from .cli.utils import file_path from .coverage import get_rule_by_id, strip_coverage_logger from .cterm import CTerm from .cterm.symbolic import cterm_symbolic @@ -31,6 +44,7 @@ from .kore.parser import KoreParser from .kore.rpc import ExecuteResult, StopReason from .kore.syntax import Pattern, kore_term +from .ktool._ktool import TypeInferenceMode from .ktool.kompile import Kompile, KompileBackend from .ktool.kprint import KPrint from .ktool.kprove import KProve, ProveRpc @@ -42,23 +56,8 @@ from .utils import check_dir_path, check_file_path, ensure_dir_path, exit_with_process_error if TYPE_CHECKING: - from collections.abc import Iterator - from typing import Any, Final - - from .cli.pyk import ( - CoverageOptions, - GraphImportsOptions, - JsonToKoreOptions, - KompileCommandOptions, - KoreToJsonOptions, - ParseOuterOptions, - PrintOptions, - ProveLegacyOptions, - ProveOptions, - RPCKastOptions, - RPCPrintOptions, - RunOptions, - ) + from collections.abc import Iterable, Iterator + from typing import IO, Any, Final _LOGGER: Final = logging.getLogger(__name__) @@ -70,361 +69,656 @@ def main() -> None: 
# This change makes it so that in most cases, by default, pyk doesn't run out of stack space. sys.setrecursionlimit(10**7) - cli_parser = create_argument_parser() - args = cli_parser.parse_args() - toml_args = parse_toml_args(args) - - stripped_args = toml_args | { - key: val for (key, val) in vars(args).items() if val is not None and not (isinstance(val, Iterable) and not val) - } - - options = generate_options(stripped_args) - - logging.basicConfig(level=loglevel(args), format=LOG_FORMAT) - - executor_name = 'exec_' + args.command.lower().replace('-', '_') - if executor_name not in globals(): - raise AssertionError(f'Unimplemented command: {args.command}') - - execute = globals()[executor_name] - execute(options) - - -def exec_print(options: PrintOptions) -> None: - kompiled_dir: Path = options.definition_dir - printer = KPrint(kompiled_dir) - if options.input == PrintInput.KORE_JSON: - _LOGGER.info(f'Reading Kore JSON from file: {options.term.name}') - kore = Pattern.from_json(options.term.read()) - term = printer.kore_to_kast(kore) - else: - _LOGGER.info(f'Reading Kast JSON from file: {options.term.name}') - term = KInner.from_json(options.term.read()) - if is_top(term): - options.output_file.write(printer.pretty_print(term)) - _LOGGER.info(f'Wrote file: {options.output_file.name}') - else: - if options.minimize: - if options.omit_labels is not None and options.keep_cells is not None: - raise ValueError('You cannot use both --omit-labels and --keep-cells.') - - abstract_labels = options.omit_labels.split(',') if options.omit_labels is not None else [] - keep_cells = options.keep_cells.split(',') if options.keep_cells is not None else [] - minimized_disjuncts = [] - - for disjunct in flatten_label('#Or', term): - try: - minimized = minimize_term(disjunct, abstract_labels=abstract_labels, keep_cells=keep_cells) - config, constraint = split_config_and_constraints(minimized) - except ValueError as err: - raise ValueError('The minimized term does not contain a 
config cell.') from err
+    cli = CLI(
+        [
+            PrintCommand(),
+            RPCPrintCommand(),
+            RPCKastCommand(),
+            ProveLegacyCommand(),
+            ProveCommand(),
+            ShowCommand(),
+            KompileCommand(),
+            RunCommand(),
+            GraphImportsCommand(),
+            CoverageCommand(),
+            KoreToJsonCommand(),
+            JsonToKoreCommand(),
+            ParseOuterCommand(),
+        ]
+    )
+    cli.get_and_exec_command()
-        if not is_top(constraint):
-            minimized_disjuncts.append(mlAnd([config, constraint], sort=GENERATED_TOP_CELL))
-        else:
-            minimized_disjuncts.append(config)
-        term = propagate_up_constraints(mlOr(minimized_disjuncts, sort=GENERATED_TOP_CELL))
-    options.output_file.write(printer.pretty_print(term))
-    _LOGGER.info(f'Wrote file: {options.output_file.name}')
+
+
+class PrintOptionsGroup(DefinitionOptionsGroup, OutputFileOptionsGroup, DisplayOptionsGroup, LoggingOptionsGroup):
+    term: IO[Any]
+    input: PrintInput
+    omit_labels: str | None
+    keep_cells: str | None
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.add_option(
+            Option('term', FileType('r'), help_str='Input term (in format specified with --input)', required=True)
+        )
+        self.add_option(
+            Option(
+                '--input',
+                PrintInput,
+                'input',
+                help_str='Input format',
+                choices=list(PrintInput),
+                default=PrintInput.KAST_JSON,
+            )
+        )
+        self.add_option(
+            Option(
+                '--omit-labels',
+                str,
+                'omit_labels',
+                help_str='List of labels to omit from output.',
+                nargs='?',
+                default=[],
+            )
+        )
+        self.add_option(
+            Option(
+                '--keep-cells',
+                str,
+                'keep_cells',
+                help_str='List of cells with primitive values to keep in output.',
+                nargs='?',
+                default=[],
+            )
+        )
+
+
-def exec_rpc_print(options: RPCPrintOptions) -> None:
-    kompiled_dir: Path = options.definition_dir
-    printer = KPrint(kompiled_dir)
-    input_dict = json.loads(options.input_file.read())
-    output_buffer = []
-
-    def pretty_print_request(request_params: dict[str, Any]) -> list[str]:
-        output_buffer = []
-        non_state_keys = set(request_params.keys()).difference(['state'])
-        for key in non_state_keys:
-            output_buffer.append(f'{key}: {request_params[key]}')
-        state = CTerm.from_kast(printer.kore_to_kast(kore_term(request_params['state'])))
-        output_buffer.append('State:')
-        output_buffer.append(printer.pretty_print(state.kast,
sort_collections=True)) - return output_buffer +class PrintCommand(Command[PrintOptionsGroup]): + def __init__(self) -> None: + super().__init__('print', 'Pretty print a term.', PrintOptionsGroup()) - def pretty_print_execute_response(execute_result: ExecuteResult) -> list[str]: - output_buffer = [] - output_buffer.append(f'Depth: {execute_result.depth}') - output_buffer.append(f'Stop reason: {execute_result.reason.value}') - if execute_result.reason == StopReason.TERMINAL_RULE or execute_result.reason == StopReason.CUT_POINT_RULE: - output_buffer.append(f'Stop rule: {execute_result.rule}') - output_buffer.append( - f'Number of next states: {len(execute_result.next_states) if execute_result.next_states is not None else 0}' - ) - state = CTerm.from_kast(printer.kore_to_kast(execute_result.state.kore)) - output_buffer.append('State:') - output_buffer.append(printer.pretty_print(state.kast, sort_collections=True)) - if execute_result.next_states is not None: - next_states = [CTerm.from_kast(printer.kore_to_kast(s.kore)) for s in execute_result.next_states] - for i, s in enumerate(next_states): - output_buffer.append(f'Next state #{i}:') - output_buffer.append(printer.pretty_print(s.kast, sort_collections=True)) - return output_buffer - - try: - if 'method' in input_dict: - output_buffer.append('JSON RPC request') - output_buffer.append(f'id: {input_dict["id"]}') - output_buffer.append(f'Method: {input_dict["method"]}') - try: - if 'state' in input_dict['params']: - output_buffer += pretty_print_request(input_dict['params']) - else: # this is an "add-module" request, skip trying to print state - for key in input_dict['params'].keys(): - output_buffer.append(f'{key}: {input_dict["params"][key]}') - except KeyError as e: - _LOGGER.critical(f'Could not find key {str(e)} in input JSON file') - exit(1) + def exec(self) -> None: + kompiled_dir: Path = self.options.definition_dir + printer = KPrint(kompiled_dir) + if self.options.input == PrintInput.KORE_JSON: + 
_LOGGER.info(f'Reading Kore JSON from file: {self.options.term.name}') + kore = Pattern.from_json(self.options.term.read()) + term = printer.kore_to_kast(kore) else: - if not 'result' in input_dict: - _LOGGER.critical('The input is neither a request not a resonse') - exit(1) - output_buffer.append('JSON RPC Response') - output_buffer.append(f'id: {input_dict["id"]}') - if list(input_dict['result'].keys()) == ['state']: # this is a "simplify" response - output_buffer.append('Method: simplify') - state = CTerm.from_kast(printer.kore_to_kast(kore_term(input_dict['result']['state']))) - output_buffer.append('State:') - output_buffer.append(printer.pretty_print(state.kast, sort_collections=True)) - elif list(input_dict['result'].keys()) == ['module']: # this is an "add-module" response - output_buffer.append('Method: add-module') - output_buffer.append('Module:') - output_buffer.append(input_dict['result']['module']) - else: - try: # assume it is an "execute" response - output_buffer.append('Method: execute') - execute_result = ExecuteResult.from_dict(input_dict['result']) - output_buffer += pretty_print_execute_response(execute_result) + _LOGGER.info(f'Reading Kast JSON from file: {self.options.term.name}') + term = KInner.from_json(self.options.term.read()) + if is_top(term): + self.options.output_file.write(printer.pretty_print(term)) + _LOGGER.info(f'Wrote file: {self.options.output_file.name}') + else: + if self.options.minimize: + if self.options.omit_labels is not None and self.options.keep_cells is not None: + raise ValueError('You cannot use both --omit-labels and --keep-cells.') + + abstract_labels = self.options.omit_labels.split(',') if self.options.omit_labels is not None else [] + keep_cells = self.options.keep_cells.split(',') if self.options.keep_cells is not None else [] + minimized_disjuncts = [] + + for disjunct in flatten_label('#Or', term): + try: + minimized = minimize_term(disjunct, abstract_labels=abstract_labels, keep_cells=keep_cells) + config, 
constraint = split_config_and_constraints(minimized) + except ValueError as err: + raise ValueError('The minimized term does not contain a config cell.') from err + + if not is_top(constraint): + minimized_disjuncts.append(mlAnd([config, constraint], sort=GENERATED_TOP_CELL)) + else: + minimized_disjuncts.append(config) + term = propagate_up_constraints(mlOr(minimized_disjuncts, sort=GENERATED_TOP_CELL)) + + self.options.output_file.write(printer.pretty_print(term)) + _LOGGER.info(f'Wrote file: {self.options.output_file.name}') + + +class RPCPrintOptionsGroup(DefinitionOptionsGroup, OutputFileOptionsGroup, LoggingOptionsGroup): + input_file: IO[Any] + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + 'input_file', + FileType('r'), + help_str='An input file containing the JSON RPC request or response with KoreJSON payload. ', + required=True, + ) + ) + + +class RPCPrintCommand(Command[RPCPrintOptionsGroup]): + def __init__(self) -> None: + super().__init__('rpc-print', 'Pretty-print an RPC request/response', RPCPrintOptionsGroup()) + + def exec(self) -> None: + kompiled_dir: Path = self.options.definition_dir + printer = KPrint(kompiled_dir) + input_dict = json.loads(self.options.input_file.read()) + output_buffer = [] + + def pretty_print_request(request_params: dict[str, Any]) -> list[str]: + output_buffer = [] + non_state_keys = set(request_params.keys()).difference(['state']) + for key in non_state_keys: + output_buffer.append(f'{key}: {request_params[key]}') + state = CTerm.from_kast(printer.kore_to_kast(kore_term(request_params['state']))) + output_buffer.append('State:') + output_buffer.append(printer.pretty_print(state.kast, sort_collections=True)) + return output_buffer + + def pretty_print_execute_response(execute_result: ExecuteResult) -> list[str]: + output_buffer = [] + output_buffer.append(f'Depth: {execute_result.depth}') + output_buffer.append(f'Stop reason: {execute_result.reason.value}') + if execute_result.reason == 
StopReason.TERMINAL_RULE or execute_result.reason == StopReason.CUT_POINT_RULE: + output_buffer.append(f'Stop rule: {execute_result.rule}') + output_buffer.append( + f'Number of next states: {len(execute_result.next_states) if execute_result.next_states is not None else 0}' + ) + state = CTerm.from_kast(printer.kore_to_kast(execute_result.state.kore)) + output_buffer.append('State:') + output_buffer.append(printer.pretty_print(state.kast, sort_collections=True)) + if execute_result.next_states is not None: + next_states = [CTerm.from_kast(printer.kore_to_kast(s.kore)) for s in execute_result.next_states] + for i, s in enumerate(next_states): + output_buffer.append(f'Next state #{i}:') + output_buffer.append(printer.pretty_print(s.kast, sort_collections=True)) + return output_buffer + + try: + if 'method' in input_dict: + output_buffer.append('JSON RPC request') + output_buffer.append(f'id: {input_dict["id"]}') + output_buffer.append(f'Method: {input_dict["method"]}') + try: + if 'state' in input_dict['params']: + output_buffer += pretty_print_request(input_dict['params']) + else: # this is an "add-module" request, skip trying to print state + for key in input_dict['params'].keys(): + output_buffer.append(f'{key}: {input_dict["params"][key]}') except KeyError as e: _LOGGER.critical(f'Could not find key {str(e)} in input JSON file') exit(1) - if options.output_file is not None: - options.output_file.write('\n'.join(output_buffer)) + else: + if not 'result' in input_dict: + _LOGGER.critical('The input is neither a request not a resonse') + exit(1) + output_buffer.append('JSON RPC Response') + output_buffer.append(f'id: {input_dict["id"]}') + if list(input_dict['result'].keys()) == ['state']: # this is a "simplify" response + output_buffer.append('Method: simplify') + state = CTerm.from_kast(printer.kore_to_kast(kore_term(input_dict['result']['state']))) + output_buffer.append('State:') + output_buffer.append(printer.pretty_print(state.kast, sort_collections=True)) + 
elif list(input_dict['result'].keys()) == ['module']: # this is an "add-module" response + output_buffer.append('Method: add-module') + output_buffer.append('Module:') + output_buffer.append(input_dict['result']['module']) + else: + try: # assume it is an "execute" response + output_buffer.append('Method: execute') + execute_result = ExecuteResult.from_dict(input_dict['result']) + output_buffer += pretty_print_execute_response(execute_result) + except KeyError as e: + _LOGGER.critical(f'Could not find key {str(e)} in input JSON file') + exit(1) + if self.options.output_file is not None: + self.options.output_file.write('\n'.join(output_buffer)) + else: + print('\n'.join(output_buffer)) + except ValueError as e: + # shorten and print the error message in case kore_to_kast throws ValueError + _LOGGER.critical(str(e)[:200]) + exit(1) + + +class RPCKastOptionsGroup(OutputFileOptionsGroup, LoggingOptionsGroup): + reference_request_file: IO[Any] + response_file: IO[Any] + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + 'reference_request_file', + FileType('r'), + help_str='An input file containing a JSON RPC request to serve as a reference for the new request.', + required=True, + ) + ) + self.add_option( + Option( + 'response_file', + FileType('r'), + help_str='An input file containing a JSON RPC response with KoreJSON payload.', + required=True, + ) + ) + + +class RPCKastCommand(Command[RPCKastOptionsGroup]): + def __init__(self) -> None: + super().__init__( + 'rpc-kast', + 'Convert an "execute" JSON RPC response to a new "execute" or "simplify" request, copying paraemeters from a reference request.', + RPCKastOptionsGroup(), + ) + + def exec(self) -> None: + """ + Convert an 'execute' JSON RPC response to a new 'execute' or 'simplify' request, + copying parameters from a reference request. 
+ """ + reference_request = json.loads(self.options.reference_request_file.read()) + input_dict = json.loads(self.options.response_file.read()) + execute_result = ExecuteResult.from_dict(input_dict['result']) + non_state_keys = set(reference_request['params'].keys()).difference(['state']) + request_params = {} + for key in non_state_keys: + request_params[key] = reference_request['params'][key] + request_params['state'] = {'format': 'KORE', 'version': 1, 'term': execute_result.state.kore.dict} + request = { + 'jsonrpc': reference_request['jsonrpc'], + 'id': reference_request['id'], + 'method': reference_request['method'], + 'params': request_params, + } + self.options.output_file.write(json.dumps(request)) + + +class ProveLegacyOptionsGroup(DefinitionOptionsGroup, OutputFileOptionsGroup, LoggingOptionsGroup): + main_file: Path + spec_file: Path + spec_module: str + k_args: Iterable[str] + + def __init__(self) -> None: + super().__init__() + + self.add_option(Option('main_file', str, help_str='Main file used for kompilation', required=True)) + self.add_option(Option('spec_file', str, help_str='File with the specification module.', required=True)) + self.add_option(Option('spec_module', str, help_str='Module with claims to be proven.', required=True)) + self.add_option(Option('kArgs', str, 'k_args', 'Module with claims to be proven.', required=True, nargs='*')) + + +class ProveLegacyCommand(Command[ProveLegacyOptionsGroup]): + def __init__(self) -> None: + super().__init__('prove-legacy', 'Prove an input specification (using kprovex).', ProveLegacyOptionsGroup()) + + def exec(self) -> None: + kompiled_dir: Path = self.options.definition_dir + kprover = KProve(kompiled_dir, self.options.main_file) + final_state = kprover.prove( + Path(self.options.spec_file), spec_module_name=self.options.spec_module, args=self.options.k_args + ) + self.options.output_file.write(json.dumps(mlOr([state.kast for state in final_state]).to_dict())) + _LOGGER.info(f'Wrote file: 
{self.options.output_file.name}') + + +class ProveOptionsGroup(LoggingOptionsGroup, SpecOptionsGroup): + failure_info: bool + kore_rpc_command: str | Iterable[str] | None + max_depth: int | None + max_iterations: int | None + show_kcfg: bool + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--failure-info', + bool, + 'failure_info', + 'Print out more information about proof failures.', + default=False, + action='store_true', + ) + ) + self.add_option( + Option( + '--show-kcfg', + bool, + 'show_kcfg', + 'Display the resulting proof KCFG.', + default=False, + action='store_true', + ) + ) + self.add_option( + Option( + '--max-depth', + int, + 'max_depth', + 'Maximum number of stepss to take in symbolic execution per basic block.', + default=None, + ) + ) + self.add_option( + Option( + '--max-iterations', + int, + 'max_iterations', + 'Maximum number of KCFG explorations to take in attempting to discharge proof.', + default=None, + ) + ) + self.add_option( + Option('--kore-rpc-command', str, 'kore_rpc_command', 'Custom command to start RPC server.', default=None) + ) + + +class ProveCommand(Command[ProveOptionsGroup]): + def __init__(self) -> None: + super().__init__('prove', 'Prove an input specification (using RPC based prover).', ProveOptionsGroup()) + + def exec(self) -> None: + kompiled_directory: Path + if self.options.definition_dir is None: + kompiled_directory = Kompile.default_directory() + _LOGGER.info(f'Using kompiled directory: {kompiled_directory}.') else: - print('\n'.join(output_buffer)) - except ValueError as e: - # shorten and print the error message in case kore_to_kast throws ValueError - _LOGGER.critical(str(e)[:200]) - exit(1) - - -def exec_rpc_kast(options: RPCKastOptions) -> None: - """ - Convert an 'execute' JSON RPC response to a new 'execute' or 'simplify' request, - copying parameters from a reference request. 
- """ - reference_request = json.loads(options.reference_request_file.read()) - input_dict = json.loads(options.response_file.read()) - execute_result = ExecuteResult.from_dict(input_dict['result']) - non_state_keys = set(reference_request['params'].keys()).difference(['state']) - request_params = {} - for key in non_state_keys: - request_params[key] = reference_request['params'][key] - request_params['state'] = {'format': 'KORE', 'version': 1, 'term': execute_result.state.kore.dict} - request = { - 'jsonrpc': reference_request['jsonrpc'], - 'id': reference_request['id'], - 'method': reference_request['method'], - 'params': request_params, - } - options.output_file.write(json.dumps(request)) - - -def exec_prove_legacy(options: ProveLegacyOptions) -> None: - kompiled_dir: Path = options.definition_dir - kprover = KProve(kompiled_dir, options.main_file) - final_state = kprover.prove(Path(options.spec_file), spec_module_name=options.spec_module, args=options.k_args) - options.output_file.write(json.dumps(mlOr([state.kast for state in final_state]).to_dict())) - _LOGGER.info(f'Wrote file: {options.output_file.name}') - - -def exec_prove(options: ProveOptions) -> None: - kompiled_directory: Path - if options.definition_dir is None: - kompiled_directory = Kompile.default_directory() - _LOGGER.info(f'Using kompiled directory: {kompiled_directory}.') - else: - kompiled_directory = options.definition_dir - - kprove = KProve(kompiled_directory, use_directory=options.temp_directory) - - @contextmanager - def explore_context() -> Iterator[KCFGExplore]: - with cterm_symbolic( - definition=kprove.definition, - kompiled_kore=kprove.kompiled_kore, - definition_dir=kprove.definition_dir, - ) as cts: - yield KCFGExplore(cts) - - prove_rpc = ProveRpc(kprove, explore_context) - - try: - proofs = prove_rpc.prove_rpc(options=options) - except RuntimeError as err: - _, _, _, cpe = err.args - exit_with_process_error(cpe) - for proof in sorted(proofs, key=lambda p: p.id): - 
print('\n'.join(proof.summary.lines)) - if proof.failed and options.failure_info: - failure_info = proof.failure_info - if type(failure_info) is APRFailureInfo: - print('\n'.join(failure_info.print())) - if options.show_kcfg and type(proof) is APRProof: - node_printer = APRProofNodePrinter(proof, kprove, full_printer=True, minimize=False) - show = APRProofShow(kprove, node_printer=node_printer) - print('\n'.join(show.show(proof))) - sys.exit(len([p.id for p in proofs if not p.passed])) - - -def exec_show(options: ProveOptions) -> None: - options.max_iterations = 0 - options.show_kcfg = True - exec_prove(options) - - -def exec_kompile(options: KompileCommandOptions) -> None: - main_file = Path(options.main_file) - check_file_path(main_file) - - kompiled_directory: Path - if options.definition_dir is None: - kompiled_directory = Path(f'{main_file.stem}-kompiled') - ensure_dir_path(kompiled_directory) - else: - kompiled_directory = options.definition_dir - - kompile_dict: dict[str, Any] = { - 'main_file': main_file, - 'backend': options.backend.value, - 'syntax_module': options.syntax_module, - 'main_module': options.main_module, - 'md_selector': options.md_selector, - 'include_dirs': (Path(include) for include in options.includes), - 'emit_json': options.emit_json, - 'coverage': options.coverage, - 'gen_bison_parser': options.gen_bison_parser, - 'gen_glr_bison_parser': options.gen_glr_bison_parser, - 'bison_lists': options.bison_lists, - } - if options.backend == KompileBackend.LLVM: - kompile_dict['ccopts'] = options.ccopts - kompile_dict['enable_search'] = options.enable_search - kompile_dict['llvm_kompile_type'] = options.llvm_kompile_type - kompile_dict['llvm_kompile_output'] = options.llvm_kompile_output - kompile_dict['llvm_proof_hint_instrumentation'] = options.llvm_proof_hint_instrumentation - elif len(options.ccopts) > 0: - raise ValueError(f'Option `-ccopt` requires `--backend llvm`, not: --backend {options.backend.value}') - elif options.enable_search: - 
raise ValueError(f'Option `--enable-search` requires `--backend llvm`, not: --backend {options.backend.value}') - elif options.llvm_kompile_type: - raise ValueError( - f'Option `--llvm-kompile-type` requires `--backend llvm`, not: --backend {options.backend.value}' + kompiled_directory = self.options.definition_dir + + kprove = KProve(kompiled_directory, use_directory=self.options.temp_directory) + + @contextmanager + def explore_context() -> Iterator[KCFGExplore]: + with cterm_symbolic( + definition=kprove.definition, + kompiled_kore=kprove.kompiled_kore, + definition_dir=kprove.definition_dir, + ) as cts: + yield KCFGExplore(cts) + + prove_rpc = ProveRpc(kprove, explore_context) + + try: + proofs = prove_rpc.prove_rpc(options=self.options) + except RuntimeError as err: + _, _, _, cpe = err.args + exit_with_process_error(cpe) + for proof in sorted(proofs, key=lambda p: p.id): + print('\n'.join(proof.summary.lines)) + if proof.failed and self.options.failure_info: + failure_info = proof.failure_info + if type(failure_info) is APRFailureInfo: + print('\n'.join(failure_info.print())) + if self.options.show_kcfg and type(proof) is APRProof: + node_printer = APRProofNodePrinter(proof, kprove, full_printer=True, minimize=False) + show = APRProofShow(kprove, node_printer=node_printer) + print('\n'.join(show.show(proof))) + sys.exit(len([p.id for p in proofs if not p.passed])) + + +class ShowCommand(ProveCommand): + def __init__(self) -> None: + Command.__init__(self, 'show', 'Display the status of a given proof', ProveOptionsGroup()) + + def exec(self) -> None: + self.options.max_iterations = 0 + self.options.show_kcfg = True + super().exec() + + +class KompileCommandOptionsGroup( + LoggingOptionsGroup, WarningOptionsGroup, KDefinitionOptionsGroup, KompileOptionsGroup +): + definition_dir: Path | None + main_file: str + backend: KompileBackend + type_inference_mode: TypeInferenceMode | None + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( 
+ '--output-definition', + ensure_dir_path, + 'definition_dir', + 'Path to definition to use.', + toml_name='definition', + aliases=['--definition'], + ) + ) + self.add_option(Option('main_file', str, required=True, help_str='File with the specification module.')) + self.add_option( + Option( + '--backend', + KompileBackend, + 'backend', + 'K backend to target with compilation.', + choices=list(KompileBackend), + default=KompileBackend.LLVM, + ) ) - elif options.llvm_kompile_output: - raise ValueError( - f'Option `--llvm-kompile-output` requires `--backend llvm`, not: --backend {options.backend.value}' + self.add_option( + Option( + '--type-inference-mode', + TypeInferenceMode, + 'type_inference_mode', + 'Mode for doing K rule type inference in.', + choices=list(TypeInferenceMode), + default=None, + ) ) - elif options.llvm_proof_hint_instrumentation: - raise ValueError( - f'Option `--llvm-proof-hint-intrumentation` requires `--backend llvm`, not: --backend {options.backend.value}' + + +class KompileCommand(Command[KompileCommandOptionsGroup]): + def __init__(self) -> None: + super().__init__('kompile', 'Kompile the K specification', KompileCommandOptionsGroup()) + + def exec(self) -> None: + main_file = Path(self.options.main_file) + check_file_path(main_file) + + kompiled_directory: Path + if self.options.definition_dir is None: + kompiled_directory = Path(f'{main_file.stem}-kompiled') + ensure_dir_path(kompiled_directory) + else: + kompiled_directory = self.options.definition_dir + + kompile_dict: dict[str, Any] = { + 'main_file': main_file, + 'backend': self.options.backend.value, + 'syntax_module': self.options.syntax_module, + 'main_module': self.options.main_module, + 'md_selector': self.options.md_selector, + 'include_dirs': (Path(include) for include in self.options.includes), + 'emit_json': self.options.emit_json, + 'coverage': self.options.coverage, + 'gen_bison_parser': self.options.gen_bison_parser, + 'gen_glr_bison_parser': 
self.options.gen_glr_bison_parser, + 'bison_lists': self.options.bison_lists, + } + if self.options.backend == KompileBackend.LLVM: + kompile_dict['ccopts'] = self.options.ccopts + kompile_dict['enable_search'] = self.options.enable_search + kompile_dict['llvm_kompile_type'] = self.options.llvm_kompile_type + kompile_dict['llvm_kompile_output'] = self.options.llvm_kompile_output + kompile_dict['llvm_proof_hint_instrumentation'] = self.options.llvm_proof_hint_instrumentation + elif len(self.options.ccopts) > 0: + raise ValueError(f'Option `-ccopt` requires `--backend llvm`, not: --backend {self.options.backend.value}') + elif self.options.enable_search: + raise ValueError( + f'Option `--enable-search` requires `--backend llvm`, not: --backend {self.options.backend.value}' + ) + elif self.options.llvm_kompile_type: + raise ValueError( + f'Option `--llvm-kompile-type` requires `--backend llvm`, not: --backend {self.options.backend.value}' + ) + elif self.options.llvm_kompile_output: + raise ValueError( + f'Option `--llvm-kompile-output` requires `--backend llvm`, not: --backend {self.options.backend.value}' + ) + elif self.options.llvm_proof_hint_instrumentation: + raise ValueError( + f'Option `--llvm-proof-hint-intrumentation` requires `--backend llvm`, not: --backend {self.options.backend.value}' + ) + + try: + Kompile.from_dict(kompile_dict)( + output_dir=kompiled_directory, + type_inference_mode=self.options.type_inference_mode, + warnings=self.options.warnings, + warnings_to_errors=self.options.warnings_to_errors, + no_exc_wrap=self.options.no_exc_wrap, + ) + except RuntimeError as err: + _, _, _, _, cpe = err.args + exit_with_process_error(cpe) + + +class RunOptionsGroup(LoggingOptionsGroup): + pgm_file: str + definition_dir: Path | None + + def __init__(self) -> None: + super().__init__() + self.add_option(Option('pgm_file', str, required=True, help_str='File program to run in it.')) + self.add_option(Option('--definition', dir_path, 'definition_dir', 'Path to 
definition to use', default=None)) + + +class RunCommand(Command[RunOptionsGroup]): + def __init__(self) -> None: + super().__init__('run', 'Run a given program using the K definition.', RunOptionsGroup()) + + def exec(self) -> None: + pgm_file = Path(self.options.pgm_file) + check_file_path(pgm_file) + kompiled_directory: Path + if self.options.definition_dir is None: + kompiled_directory = Kompile.default_directory() + _LOGGER.info(f'Using kompiled directory: {kompiled_directory}.') + else: + kompiled_directory = self.options.definition_dir + krun = KRun(kompiled_directory) + rc, res = krun.krun(pgm_file) + print(krun.pretty_print(res)) + sys.exit(rc) + + +class GraphImportsOptionsGroup(DefinitionOptionsGroup, LoggingOptionsGroup): ... + + +class GraphImportsCommand(Command[GraphImportsOptionsGroup]): + def __init__(self) -> None: + super().__init__('graph-imports', 'Graph the imports of a given definition', GraphImportsOptionsGroup()) + + def exec(self) -> None: + kompiled_dir: Path = self.options.definition_dir + kprinter = KPrint(kompiled_dir) + definition = kprinter.definition + import_graph = Digraph() + graph_file = kompiled_dir / 'import-graph' + for module in definition.modules: + module_name = module.name + import_graph.node(module_name) + for module_import in module.imports: + import_graph.edge(module_name, module_import.name) + import_graph.render(graph_file) + _LOGGER.info(f'Wrote file: {graph_file}') + + +class CoverageOptionsGroup(DefinitionOptionsGroup, OutputFileOptionsGroup, LoggingOptionsGroup): + coverage_file: IO[Any] + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option('coverage_file', FileType('r'), required=True, help_str='Coverage file to build log for.') ) - try: - Kompile.from_dict(kompile_dict)( - output_dir=kompiled_directory, - type_inference_mode=options.type_inference_mode, - warnings=options.warnings, - warnings_to_errors=options.warnings_to_errors, - no_exc_wrap=options.no_exc_wrap, + +class 
CoverageCommand(Command[CoverageOptionsGroup]):
+    def __init__(self) -> None:
+        super().__init__('coverage', 'Convert coverage file to human readable log.', CoverageOptionsGroup())
+
+    def exec(self) -> None:
+        kompiled_dir: Path = self.options.definition_dir
+        definition = remove_source_map(read_kast_definition(kompiled_dir / 'compiled.json'))
+        pretty_printer = PrettyPrinter(definition)
+        for rid in self.options.coverage_file:
+            rule = minimize_rule(strip_coverage_logger(get_rule_by_id(definition, rid.strip())))
+            self.options.output_file.write('\n\n')
+            self.options.output_file.write('Rule: ' + rid.strip())
+            self.options.output_file.write('\nUnparsed:\n')
+            self.options.output_file.write(pretty_printer.print(rule))
+        _LOGGER.info(f'Wrote file: {self.options.output_file.name}')
+
+
+class KoreToJsonCommand(Command[LoggingOptionsGroup]):
+    def __init__(self) -> None:
+        super().__init__('kore-to-json', 'convert textual KORE to JSON', LoggingOptionsGroup())
+
+    def exec(self) -> None:
+        text = sys.stdin.read()
+        kore = KoreParser(text).pattern()
+        print(kore.json)
+
+
+class JsonToKoreCommand(Command[LoggingOptionsGroup]):
+    def __init__(self) -> None:
+        super().__init__('json-to-kore', 'convert JSON to textual KORE', LoggingOptionsGroup())
+
+    def exec(self) -> None:
+        text = sys.stdin.read()
+        kore = Pattern.from_json(text)
+        kore.write(sys.stdout)
+        sys.stdout.write('\n')
+
+
+class ParseOuterOptionsGroup(LoggingOptionsGroup):
+    main_file: Path
+    md_selector: str
+    includes: Iterable[str]
+    output_file: IO[Any]
+    main_module: str | None
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.add_option(Option('main_file', file_path, required=True, help_str='File with the K definition.'))
+        self.add_option(
+            Option(
+                '--md-selector',
+                str,
+                'md_selector',
+                'Code selector expression to use when reading markdown.',
+                default='k',
+            )
+        )
+        self.add_option(
+            Option('-I', str, 'includes', 'Directories to lookup K definitions in.', action='append',
default=[]) + ) + self.add_option( + Option( + '--output-file', + FileType('w'), + 'output_file', + 'Write output to file instead of stdout.', + default=sys.stdout, + ) ) - except RuntimeError as err: - _, _, _, _, cpe = err.args - exit_with_process_error(cpe) - - -def exec_run(options: RunOptions) -> None: - pgm_file = Path(options.pgm_file) - check_file_path(pgm_file) - kompiled_directory: Path - if options.definition_dir is None: - kompiled_directory = Kompile.default_directory() - _LOGGER.info(f'Using kompiled directory: {kompiled_directory}.') - else: - kompiled_directory = options.definition_dir - krun = KRun(kompiled_directory) - rc, res = krun.krun(pgm_file) - print(krun.pretty_print(res)) - sys.exit(rc) - - -def exec_graph_imports(options: GraphImportsOptions) -> None: - kompiled_dir: Path = options.definition_dir - kprinter = KPrint(kompiled_dir) - definition = kprinter.definition - import_graph = Digraph() - graph_file = kompiled_dir / 'import-graph' - for module in definition.modules: - module_name = module.name - import_graph.node(module_name) - for module_import in module.imports: - import_graph.edge(module_name, module_import.name) - import_graph.render(graph_file) - _LOGGER.info(f'Wrote file: {graph_file}') - - -def exec_coverage(options: CoverageOptions) -> None: - kompiled_dir: Path = options.definition_dir - definition = remove_source_map(read_kast_definition(kompiled_dir / 'compiled.json')) - pretty_printer = PrettyPrinter(definition) - for rid in options.coverage_file: - rule = minimize_rule(strip_coverage_logger(get_rule_by_id(definition, rid.strip()))) - options.output_file.write('\n\n') - options.output_file.write('Rule: ' + rid.strip()) - options.output_file.write('\nUnparsed:\n') - options.output_file.write(pretty_printer.print(rule)) - _LOGGER.info(f'Wrote file: {options.output_file.name}') - - -def exec_kore_to_json(options: KoreToJsonOptions) -> None: - text = sys.stdin.read() - kore = KoreParser(text).pattern() - print(kore.json) - - 
-def exec_json_to_kore(options: JsonToKoreOptions) -> None: - text = sys.stdin.read() - kore = Pattern.from_json(text) - kore.write(sys.stdout) - sys.stdout.write('\n') - - -def exec_parse_outer(options: ParseOuterOptions) -> None: - definition_file = options.main_file.resolve() - search_paths = [definition_file.parent] - for include in getattr(options, 'includes', []): - include_path = Path(include) + self.add_option( + Option('--main-module', str, 'main_module', 'The name of the main module for the definition.', default=None) + ) + + +class ParseOuterCommand(Command[ParseOuterOptionsGroup]): + def __init__(self) -> None: + super().__init__('parse-outer', 'Parse an outer K definition into JSON', ParseOuterOptionsGroup()) + + def exec(self) -> None: + definition_file = self.options.main_file.resolve() + search_paths = [definition_file.parent] + for include in getattr(self.options, 'includes', []): + include_path = Path(include) + try: + check_dir_path(include_path) + except ValueError: + _LOGGER.warning(f"Could not find directory '{include}' passed to -I") + search_paths.append(include_path.resolve()) + + main_module_name = getattr(self.options, 'main_module', definition_file.stem.upper()) + try: + final_definition = parse_outer(definition_file, main_module_name, search_paths, self.options.md_selector) + except Exception as e: + _LOGGER.critical(e) + exit(1) + + result_text = json.dumps(final_definition.to_dict()) try: - check_dir_path(include_path) - except ValueError: - _LOGGER.warning(f"Could not find directory '{include}' passed to -I") - search_paths.append(include_path.resolve()) - - main_module_name = getattr(options, 'main_module', definition_file.stem.upper()) - try: - final_definition = parse_outer(definition_file, main_module_name, search_paths, options.md_selector) - except Exception as e: - _LOGGER.critical(e) - exit(1) - - result_text = json.dumps(final_definition.to_dict()) - try: - options.output_file.write(result_text) - except AttributeError: - 
sys.stdout.write(f'{result_text}\n') + self.options.output_file.write(result_text) + except AttributeError: + sys.stdout.write(f'{result_text}\n') if __name__ == '__main__': diff --git a/pyk/src/pyk/cli/args.py b/pyk/src/pyk/cli/args.py index 41bbdbe7b3b..caf2e3b40e3 100644 --- a/pyk/src/pyk/cli/args.py +++ b/pyk/src/pyk/cli/args.py @@ -1,13 +1,13 @@ from __future__ import annotations import sys -from argparse import ArgumentParser +from argparse import ArgumentParser, FileType from functools import cached_property from pathlib import Path from typing import IO, TYPE_CHECKING, Any from ..ktool.kompile import KompileBackend, LLVMKompileType, TypeInferenceMode, Warnings -from .cli import Options +from .cli import Option, Options, OptionsGroup from .utils import bug_report_arg, dir_path, ensure_dir_path, file_path if TYPE_CHECKING: @@ -16,6 +16,15 @@ from ..utils import BugReport +class LoggingOptionsGroup(OptionsGroup): + def __init__(self) -> None: + super().__init__() + self.add_option(Option('--debug', bool, 'debug', 'Debug output.', default=False, action='store_true')) + self.add_option( + Option('--verbose', bool, 'verbose', 'Verbose output', default=False, action='store_true', aliases=['-v']) + ) + + class LoggingOptions(Options): debug: bool verbose: bool @@ -34,6 +43,36 @@ def from_option_string() -> dict[str, Any]: } +class WarningOptionsGroup(OptionsGroup): + warnings: Warnings | None + warnings_to_errors: bool + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--warnings-to-errors', + bool, + 'warnings_to_errors', + 'Turn warnings into errors (no effect on pyk, only subcommands).', + default=False, + action='store_true', + toml_name='w2e', + ) + ) + self.add_option( + Option( + '--warnings', + Warnings, + 'warnings', + 'Warnings to print about (no effect on pyk, only subcommands).', + default=None, + toml_name='w', + choices=list(Warnings), + ) + ) + + class WarningOptions(Options): warnings: Warnings | None 
warnings_to_errors: bool @@ -53,6 +92,22 @@ def from_option_string() -> dict[str, Any]: } +class OutputFileOptionsGroup(OptionsGroup): + output_file: IO[Any] + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--output-file', + FileType('w'), + 'output_file', + 'Output File', + default=sys.stdout, + ) + ) + + class OutputFileOptions(Options): output_file: IO[Any] @@ -63,17 +118,33 @@ def default() -> dict[str, Any]: } +class DefinitionOptionsGroup(OptionsGroup): + definition_dir: Path + + def __init__(self) -> None: + super().__init__() + self.add_option(Option('--definition', dir_path, 'definition_dir', 'Path to definition to use.', default=None)) + + class DefinitionOptions(Options): definition_dir: Path @staticmethod - def default() -> dict[str, Any]: + def from_option_string() -> dict[str, Any]: return { 'output-definition': 'definition_dir', 'definition': 'definition_dir', } +class DisplayOptionsGroup(OptionsGroup): + minimize: bool + + def __init__(self) -> None: + super().__init__() + self.add_option(Option('--minimize', bool, 'minimize', 'Minimize output.', action='store_true', default=True)) + + class DisplayOptions(Options): minimize: bool @@ -84,6 +155,53 @@ def default() -> dict[str, Any]: } +class KDefinitionOptionsGroup(OptionsGroup): + includes: list[str] + main_module: str | None + syntax_module: str | None + md_selector: str + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '-I', + str, + 'includes', + 'Directories to lookup K definitions in.', + action='append', + default=[], + ) + ) + self.add_option( + Option( + '--main-module', + str, + 'main_module', + 'Name of the main module.', + default=None, + ) + ) + self.add_option( + Option( + '--syntax-module', + str, + 'syntax_module', + 'Name of the syntax module.', + default=None, + ) + ) + self.add_option( + Option( + '--md-selector', + str, + 'md_selector', + 'Code selector expression to use when reading markdown.', + default='k', 
+ ) + ) + + class KDefinitionOptions(Options): includes: list[str] main_module: str | None @@ -101,12 +219,6 @@ def default() -> dict[str, Any]: 'includes': [], } - @staticmethod - def from_option_string() -> dict[str, str]: - return { - 'includes': 'includes', - } - class SaveDirOptions(Options): save_directory: Path | None @@ -120,6 +232,88 @@ def default() -> dict[str, Any]: } +class SpecOptionsGroup(OptionsGroup): + spec_file: Path + spec_module: str | None + claim_labels: Iterable[str] | None + exclude_claim_labels: Iterable[str] | None + definition_dir: Path | None + type_inference_mode: TypeInferenceMode + save_directory: Path | None + temp_directory: Path | None + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + 'spec_file', + file_path, + help_str='Path to spec_file.', + required=True, + ) + ) + self.add_option( + Option( + '--spec-module', + str, + 'spec_module', + 'Module with claims to be proven.', + default=None, + ) + ) + self.add_option( + Option( + '--claim', + str, + 'claim_labels', + 'Only prove listed claims, MODULE_NAME.claim-id', + action='append', + default=None, + toml_name='claim', + ) + ) + self.add_option( + Option( + '--exclude-claim', + str, + 'exclude_claim_labels', + 'Skip listed claims, MODULE_NAME.claim-id', + action='append', + default=None, + toml_name='exclude-claim', + ) + ) + self.add_option(Option('--definition', dir_path, 'definition_dir', 'Path to definition to use.')) + self.add_option( + Option( + '--type-inference-mode', + TypeInferenceMode, + 'type_inference_mode', + 'Mode for doing K rule type inference in.', + choices=list(TypeInferenceMode), + default=TypeInferenceMode.DEFAULT, + ) + ) + self.add_option( + Option( + '--save-directory', + ensure_dir_path, + 'save_directory', + 'Directory to save proof artifacts at for reuse.', + default=None, + ) + ) + self.add_option( + Option( + '--temp-directory', + ensure_dir_path, + 'temp_directory', + 'Directory to save proof intermediate 
temporaries to.', + default=None, + ) + ) + + class SpecOptions(SaveDirOptions): spec_file: Path spec_module: str | None @@ -142,6 +336,177 @@ def from_option_string() -> dict[str, str]: } +class KompileOptionsGroup(OptionsGroup): + emit_json: bool + llvm_kompile: bool + llvm_library: bool + enable_llvm_debug: bool + llvm_kompile_type: LLVMKompileType | None + llvm_kompile_output: Path | None + llvm_proof_hint_instrumentation: bool + read_only: bool + o0: bool + o1: bool + o2: bool + o3: bool + ccopts: list[str] + enable_search: bool + coverage: bool + gen_bison_parser: bool + gen_glr_bison_parser: bool + bison_lists: bool + no_exc_wrap: bool + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--emit-json', + bool, + 'emit_json', + 'Emit JSON definition after compilation.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--no-llvm-kompile', + bool, + 'llvm_kompile', + 'Do not run llvm-kompile process.', + action='store_false', + default=True, + ) + ) + self.add_option( + Option( + '--with-llvm-library', + bool, + 'llvm_library', + 'Make kompile generate a dynamic llvm library.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--enable-llvm-debug', + bool, + 'enable_llvm_debug', + 'Make kompile generate debug symbols for llvm.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--llvm-kompile-type', + LLVMKompileType, + 'llvm_kompile_type', + 'Mode to kompile LLVM backend in.', + default=None, + choices=list(LLVMKompileType), + ) + ) + self.add_option( + Option( + '--llvm-kompile-output', + ensure_dir_path, + 'llvm_kompile_output', + 'Location to put kompiled LLVM backend at.', + default=None, + ) + ) + self.add_option( + Option( + '--llvm-proof-hint-instrumentation', + bool, + 'llvm_proof_hint_instrumentation', + 'Enable proof hint generation in LLVM backend kompilation', + action='store_true', + default=False, + ) + ) + self.add_option( + 
Option( + '--read-only-kompiled-directory', + bool, + 'read_only', + 'Generate a kompiled directory that K will not attempt to write to afterwards.', + action='store_true', + default=False, + ) + ) + self.add_option(Option('-O0', bool, 'o0', 'Optimization level 0', action='store_true', default=False)) + self.add_option(Option('-O1', bool, 'o1', 'Optimization level 1', action='store_true', default=False)) + self.add_option(Option('-O2', bool, 'o2', 'Optimization level 2', action='store_true', default=False)) + self.add_option(Option('-O3', bool, 'o3', 'Optimization level 3', action='store_true', default=False)) + self.add_option( + Option('-ccopt', str, 'ccopts', 'Additional arguments to pass to llvm-kompile', action='append', default=[]) + ) + self.add_option( + Option( + '--enable-search', + bool, + 'enable_search', + 'Enable search mode on LLVM backend krun', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--coverage', + bool, + 'coverage', + 'Enable logging semantics rule coverage measurement', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--gen-bison-parser', + bool, + 'gen_bison_parser', + 'Generate standalone Bison parser for program sort.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--gen-glr-bison-parser', + bool, + 'gen_glr_bison_parser', + 'Generate standalone GLR Bison parser for program sort.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--bison-lists', + bool, + 'bison_lists', + 'Disable List{Sort} parsing to make grammar LR(1) for Bison parser.', + action='store_true', + default=False, + ) + ) + self.add_option( + Option( + '--no-exc-wrap', + bool, + 'no_exc_wrap', + 'Do not wrap the output on the CLI.', + action='store_true', + default=False, + ) + ) + + class KompileOptions(Options): emit_json: bool llvm_kompile: bool @@ -200,6 +565,24 @@ def from_option_string() -> dict[str, str]: } + +class 
ParallelOptionsGroup(OptionsGroup): + workers: int + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--workers', + int, + 'workers', + 'Number of processes to run in parallel', + aliases=['-j'], + default=1, + toml_name='j', + ) + ) + + class ParallelOptions(Options): workers: int @@ -216,6 +599,22 @@ def from_option_string() -> dict[str, str]: } +class BugReportOptionsGroup(OptionsGroup): + bug_report: BugReport | None + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option( + '--bug-report', + bug_report_arg, + 'bug_report', + 'Generate bug report with given name.', + default=None, + ) + ) + + class BugReportOptions(Options): bug_report: BugReport | None @@ -224,6 +623,36 @@ def default() -> dict[str, Any]: return {'bug_report': None} +class SMTOptionsGroup(OptionsGroup): + smt_timeout: int + smt_retry_limit: int + smt_tactic: str | None + + def __init__(self) -> None: + super().__init__() + self.add_option( + Option('--smt-timeout', int, 'smt_timeout', 'Timeout in ms to use for SMT queries.', default=300) + ) + self.add_option( + Option( + '--smt-retry-limit', + int, + 'smt_retry_limit', + 'Number of times to retry SMT queries with scaling timeouts.', + default=10, + ) + ) + self.add_option( + Option( + '--smt-tactic', + str, + 'smt_tactic', + 'Z3 tactic to use with checking satisfiability. 
Example: (check-sat-using-smt)', + default=None, + ) + ) + + class SMTOptions(Options): smt_timeout: int smt_retry_limit: int diff --git a/pyk/src/pyk/cli/cli.py b/pyk/src/pyk/cli/cli.py index c606824d575..fc4fb07aab9 100644 --- a/pyk/src/pyk/cli/cli.py +++ b/pyk/src/pyk/cli/cli.py @@ -1,8 +1,276 @@ from __future__ import annotations -from typing import Any +import logging +from abc import abstractmethod +from argparse import ArgumentParser +from pathlib import Path +# from enum import Enum +from typing import TYPE_CHECKING, Any, Final, Generic, TypeVar +import tomli + +from ..cli.utils import file_path + +if TYPE_CHECKING: + from collections.abc import Callable, Iterable + +T = TypeVar('T') +OG = TypeVar('OG', bound='OptionsGroup') + +_LOGGER: Final = logging.getLogger(__name__) + +NO_DEFAULT: Final = object() + + +class CLI: + _commands: list[Command] + + def __init__(self, commands: Iterable[Command] = ()) -> None: + self._commands = list(commands) + + def add_command(self, command: Command) -> None: + self._commands.append(command) + + def create_argument_parser(self) -> ArgumentParser: + args = ArgumentParser() + subparsers = args.add_subparsers(dest='command', required=True) + for command in self._commands: + command_args = subparsers.add_parser(name=command.name, help=command.help_str) + for option in command._options_group.options: + option.add_arg(command_args) + return args + + def get_command(self, args: dict[str, Any]) -> Command: + command_name = args['command'].lower() + for command in self._commands: + if command.name.lower() == command_name: + return command + raise ValueError(f'Command {command_name} not found.') + + def get_and_exec_command(self) -> None: + parser = self.create_argument_parser() + args = parser.parse_args() + stripped_args = {key: val for (key, val) in vars(args).items() if val != NO_DEFAULT} + cmd = self.get_command(stripped_args) + cmd._options_group.extract(stripped_args, cmd.name) + cmd.exec() + + +class Option: + _name: str
+ _aliases: list[str] + _dest: str + _toml_name: str + _action: str | None + _choices: list[str] | None + _const: Any | None + _default: Any | None + _help_str: str | None + _metavar: str | None + _nargs: int | str | None + _required: bool + _type: Callable[[str], Any] + + def __init__( + self, + name: str, + arg_type: Callable[[str], Any], + dest: str | None = None, + help_str: str | None = None, + action: str | None = None, + choices: list[str] | None = None, + const: Any | None = None, + aliases: Iterable[str] = (), + default: Any | str = NO_DEFAULT, + metavar: str | None = None, + nargs: int | str | None = None, + required: bool = False, + toml_name: str | None = None, + ) -> None: + self._name = name + self._aliases = list(aliases) + self._dest = dest or name + self._toml_name = (toml_name or dest) or name + self._action = action + self._choices = choices + self._const = const + self._default = default + self._help_str = help_str + self._metavar = metavar + self._nargs = nargs + self._required = required + self._type = arg_type + + self.set_default(default) + + def add_arg(self, args: ArgumentParser) -> None: + + params: dict[str, Any] = {} + params['dest'] = self._dest + params['type'] = self._type + if self._action is not None: + params['action'] = self._action + if self._choices is not None: + params['choices'] = self._choices + if self._const is not None: + params['const'] = self._const + if self._default is not None: + params['default'] = self._default + if self._help_str is not None: + params['help'] = self._help_str + if self._metavar is not None: + params['metavar'] = self._metavar + if self._nargs is not None: + params['nargs'] = self._nargs + if self._required is not None: + params['required'] = self._required + + args.add_argument(self._name, *(self._aliases), **params) + + def set_default(self, default: Any) -> None: + self._default = default + + @property + def name(self) -> str: + return self._name + + @property + def dest(self) -> str: + return 
self._dest + + @property + def toml_name(self) -> str: + return self._toml_name + + @property + def default(self) -> Any: + return self._default + + @property + def is_optional(self) -> bool: + return not self._required + + +class Command(Generic[OG]): + _options_group: OG + _name: str + _help_str: str + + def __init__(self, name: str, help_str: str, options_group: OG) -> None: + self._name = name + self._help_str = help_str + self._options_group = options_group + + @property + def name(self) -> str: + return self._name + + @property + def help_str(self) -> str: + return self._help_str + + @property + def options(self) -> OG: + return self._options_group + + @abstractmethod + def exec(self) -> None: ... + + +def parse_toml_args(args: OptionsGroup, command: str) -> dict[str, Any]: + def get_profile(toml_profile: dict[str, Any], profile_list: list[str]) -> dict[str, Any]: + if len(profile_list) == 0 or profile_list[0] not in toml_profile: + return {k: v for k, v in toml_profile.items() if type(v) is not dict} + elif len(profile_list) == 1: + return {k: v for k, v in toml_profile[profile_list[0]].items() if type(v) is not dict} + return get_profile(toml_profile[profile_list[0]], profile_list[1:]) + + toml_args = {} + if args.config_file.is_file(): + with open(args.config_file, 'rb') as config_file: + try: + toml_args = tomli.load(config_file) + except tomli.TOMLDecodeError: + _LOGGER.error( + 'Input config file is not in TOML format, ignoring the file and carrying on with the provided command line arguments' + ) + + toml_args = get_profile(toml_args[command], args.config_profile.split('.')) if command in toml_args else {} + toml_args = {args.get_toml_name_destination(k): v for k, v in toml_args.items()} + for k, v in list(toml_args.items()): + if k[:3] == 'no-' and (v == 'true' or v == 'false'): + del toml_args[k] + toml_args[k[3:]] = 'false' if v == 'true' else 'true' + if k == 'optimization-level': + level = toml_args[k] if toml_args[k] >= 0 else 0 + level = level if 
toml_args[k] <= 3 else 3 + del toml_args[k] + toml_args['-o' + str(level)] = 'true' + + return toml_args + + +class OptionsGroup: + _options: dict[str, Option] + config_file: Path + config_profile: str + + def __init__(self) -> None: + self._options = {} + self.add_option( + Option('--config-file', file_path, 'config_file', 'Path to Pyk config file.', default=Path('./pyk.toml')) + ) + self.add_option( + Option('--config-profile', str, 'config_profile', 'Config profile to be used.', default='default') + ) + + def extract(self, args: dict[str, Any], command: str) -> None: + + config_file = args['config_file'] if 'config_file' in args else self.option_by_name('config_file').default + self.__setattr__('config_file', config_file) + + config_profile = ( + args['config_profile'] if 'config_profile' in args else self.option_by_name('config_profile').default + ) + self.__setattr__('config_profile', config_profile) + + toml_args = parse_toml_args(self, command) + + for option in self.options: + if option.name in args: + self.__setattr__(option.dest, args[option.name]) + # TODO elif option exists in TOML file, set it to the value from there + elif option.name in toml_args: + self.__setattr__(option.dest, toml_args[option.name]) + else: + self.__setattr__(option.dest, option.default) + + def add_option(self, option: Option) -> None: + self._options[option.dest] = option + + def override_default(self, option_name: str, value: T) -> None: + if not self._options[option_name].is_optional: + raise ValueError(f'Cannot provide a default value for a required parameter: {option_name}') + if option_name not in self._options: + raise ValueError(f'Cannot find option with name: {option_name}') + self._options[option_name].set_default(value) + + @property + def options(self) -> list[Option]: + return list(self._options.values()) + + def option_by_name(self, name: str) -> Option: + return self._options[name] + + def get_toml_name_destination(self, option_string: str) -> str: + for option in 
self.options: + if option.toml_name == option_string: + return option.name + raise ValueError(f'Cannot find option with toml_name {option_string}.') + + +# TODO remove once all implementing semantics use `CLI` system class Options: def __init__(self, args: dict[str, Any]) -> None: # Get defaults from this and all superclasses that define them, preferring the most specific class diff --git a/pyk/src/pyk/cli/pyk.py b/pyk/src/pyk/cli/pyk.py index b94bb26e042..f0d650b6711 100644 --- a/pyk/src/pyk/cli/pyk.py +++ b/pyk/src/pyk/cli/pyk.py @@ -3,100 +3,47 @@ import logging from argparse import ArgumentParser, FileType from enum import Enum -from typing import IO, TYPE_CHECKING, Any - -import tomli - -from ..ktool.kompile import KompileBackend -from .args import ( - ConfigArgs, - DefinitionOptions, - DisplayOptions, - KCLIArgs, - KDefinitionOptions, - KompileOptions, - LoggingOptions, - OutputFileOptions, - SaveDirOptions, - SpecOptions, - WarningOptions, -) +from typing import TYPE_CHECKING + +from .args import ConfigArgs, KCLIArgs from .utils import dir_path, file_path if TYPE_CHECKING: - from argparse import Namespace - from collections.abc import Iterable - from pathlib import Path from typing import Final - from pyk.ktool import TypeInferenceMode - _LOGGER: Final = logging.getLogger(__name__) -def generate_options(args: dict[str, Any]) -> LoggingOptions: - command = args['command'] - match command: - case 'json-to-kore': - return JsonToKoreOptions(args) - case 'kore-to-json': - return KoreToJsonOptions(args) - case 'coverage': - return CoverageOptions(args) - case 'graph-imports': - return GraphImportsOptions(args) - case 'rpc-kast': - return RPCKastOptions(args) - case 'rpc-print': - return RPCPrintOptions(args) - case 'print': - return PrintOptions(args) - case 'prove-legacy': - return ProveLegacyOptions(args) - case 'prove': - return ProveOptions(args) - case 'show': - return ProveOptions(args) - case 'kompile': - return KompileCommandOptions(args) - case 'run': - 
return RunOptions(args) - case 'parse-outer': - return ParseOuterOptions(args) - case _: - raise ValueError(f'Unrecognized command: {command}') - - -def get_option_string_destination(command: str, option_string: str) -> str: - option_string_destinations = {} - match command: - case 'json-to-kore': - option_string_destinations = JsonToKoreOptions.from_option_string() - case 'kore-to-json': - option_string_destinations = KoreToJsonOptions.from_option_string() - case 'coverage': - option_string_destinations = CoverageOptions.from_option_string() - case 'graph-imports': - option_string_destinations = GraphImportsOptions.from_option_string() - case 'rpc-kast': - option_string_destinations = RPCKastOptions.from_option_string() - case 'rpc-print': - option_string_destinations = RPCPrintOptions.from_option_string() - case 'print': - option_string_destinations = PrintOptions.from_option_string() - case 'prove-legacy': - option_string_destinations = ProveLegacyOptions.from_option_string() - case 'prove': - option_string_destinations = ProveOptions.from_option_string() - case 'kompile': - option_string_destinations = KompileCommandOptions.from_option_string() - case 'run': - option_string_destinations = RunOptions.from_option_string() - - if option_string in option_string_destinations: - return option_string_destinations[option_string] - else: - return option_string.replace('-', '_') +# def get_option_string_destination(command: str, option_string: str) -> str: +# option_string_destinations = {} +# match command: +# case 'json-to-kore': +# option_string_destinations = JsonToKoreOptions.from_option_string() +# case 'kore-to-json': +# option_string_destinations = KoreToJsonOptions.from_option_string() +# case 'coverage': +# option_string_destinations = CoverageOptions.from_option_string() +# case 'graph-imports': +# option_string_destinations = GraphImportsOptions.from_option_string() +# case 'rpc-kast': +# option_string_destinations = RPCKastOptions.from_option_string() +# 
case 'rpc-print': +# option_string_destinations = RPCPrintOptions.from_option_string() +# case 'print': +# option_string_destinations = PrintOptions.from_option_string() +# case 'prove-legacy': +# option_string_destinations = ProveLegacyOptions.from_option_string() +# case 'prove': +# option_string_destinations = ProveOptions.from_option_string() +# case 'kompile': +# option_string_destinations = KompileCommandOptions.from_option_string() +# case 'run': +# option_string_destinations = RunOptions.from_option_string() +# +# if option_string in option_string_destinations: +# return option_string_destinations[option_string] +# else: +# return option_string.replace('-', '_') class PrintInput(Enum): @@ -104,170 +51,171 @@ class PrintInput(Enum): KAST_JSON = 'kast-json' -class JsonToKoreOptions(LoggingOptions): ... - - -class KoreToJsonOptions(LoggingOptions): ... - - -class CoverageOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): - coverage_file: IO[Any] - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - DefinitionOptions.from_option_string() - | OutputFileOptions.from_option_string() - | LoggingOptions.from_option_string() - ) - - -class GraphImportsOptions(DefinitionOptions, LoggingOptions): - @staticmethod - def from_option_string() -> dict[str, str]: - return DefinitionOptions.from_option_string() | LoggingOptions.from_option_string() - - -class RPCKastOptions(OutputFileOptions, LoggingOptions): - reference_request_file: IO[Any] - response_file: IO[Any] - - @staticmethod - def from_option_string() -> dict[str, str]: - return OutputFileOptions.from_option_string() | LoggingOptions.from_option_string() - - -class RPCPrintOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): - input_file: IO[Any] - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - DefinitionOptions.from_option_string() - | OutputFileOptions.from_option_string() - | LoggingOptions.from_option_string() - ) - - -class 
PrintOptions(DefinitionOptions, OutputFileOptions, DisplayOptions, LoggingOptions): - term: IO[Any] - input: PrintInput - minimize: bool - omit_labels: str | None - keep_cells: str | None - - @staticmethod - def default() -> dict[str, Any]: - return { - 'input': PrintInput.KAST_JSON, - 'omit_labels': None, - 'keep_cells': None, - } - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - DefinitionOptions.from_option_string() - | OutputFileOptions.from_option_string() - | DisplayOptions.from_option_string() - | LoggingOptions.from_option_string() - ) - - -class ProveLegacyOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): - main_file: Path - spec_file: Path - spec_module: str - k_args: Iterable[str] - - @staticmethod - def default() -> dict[str, Any]: - return { - 'k_args': [], - } - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - DefinitionOptions.from_option_string() - | OutputFileOptions.from_option_string() - | LoggingOptions.from_option_string() - | {'kArgs': 'k_args'} - ) - - -class KompileCommandOptions(LoggingOptions, WarningOptions, KDefinitionOptions, KompileOptions): - definition_dir: Path | None - main_file: str - backend: KompileBackend - type_inference_mode: TypeInferenceMode | None - - @staticmethod - def default() -> dict[str, Any]: - return { - 'definition_dir': None, - 'backend': KompileBackend.LLVM, - 'type_inference_mode': None, - } - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - KDefinitionOptions.from_option_string() - | KompileOptions.from_option_string() - | LoggingOptions.from_option_string() - | {'definition': 'definition_dir'} - ) - - -class ProveOptions(LoggingOptions, SpecOptions, SaveDirOptions): - definition_dir: Path | None - type_inference_mode: TypeInferenceMode | None - failure_info: bool - kore_rpc_command: str | Iterable[str] | None - max_depth: int | None - max_iterations: int | None - show_kcfg: bool - - @staticmethod - def default() -> 
dict[str, Any]: - return { - 'definition_dir': None, - 'type_inference_mode': None, - 'failure_info': False, - 'kore_rpc_command': None, - 'max_depth': None, - 'max_iterations': None, - 'show_kcfg': False, - } - - @staticmethod - def from_option_string() -> dict[str, str]: - return ( - KDefinitionOptions.from_option_string() - | KompileOptions.from_option_string() - | LoggingOptions.from_option_string() - | {'definition': 'definition_dir'} - ) - - -class RunOptions(LoggingOptions): - pgm_file: str - definition_dir: Path | None - - @staticmethod - def default() -> dict[str, Any]: - return { - 'definition_dir': None, - } - - -class ParseOuterOptions(LoggingOptions): - main_file: Path - md_selector: str - includes: Iterable[str] - output_file: IO[Any] - main_module: str +# class JsonToKoreOptions(LoggingOptions): ... + + +# class KoreToJsonOptions(LoggingOptions): ... + + +# class CoverageOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): +# coverage_file: IO[Any] +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# DefinitionOptions.from_option_string() +# | OutputFileOptions.from_option_string() +# | LoggingOptions.from_option_string() +# ) + + +# class GraphImportsOptions(DefinitionOptions, LoggingOptions): +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return DefinitionOptions.from_option_string() | LoggingOptions.from_option_string() + + +# class RPCKastOptions(OutputFileOptions, LoggingOptions): +# reference_request_file: IO[Any] +# response_file: IO[Any] +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return OutputFileOptions.from_option_string() | LoggingOptions.from_option_string() +# +# +# class RPCPrintOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): +# input_file: IO[Any] +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# DefinitionOptions.from_option_string() +# | OutputFileOptions.from_option_string() +# | 
LoggingOptions.from_option_string() +# ) +# +# +# class PrintOptions(DefinitionOptions, OutputFileOptions, DisplayOptions, LoggingOptions): +# term: IO[Any] +# input: PrintInput +# minimize: bool +# omit_labels: str | None +# keep_cells: str | None +# +# @staticmethod +# def default() -> dict[str, Any]: +# return { +# 'input': PrintInput.KAST_JSON, +# 'omit_labels': None, +# 'keep_cells': None, +# } +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# DefinitionOptions.from_option_string() +# | OutputFileOptions.from_option_string() +# | DisplayOptions.from_option_string() +# | LoggingOptions.from_option_string() +# ) + + +# class ProveLegacyOptions(DefinitionOptions, OutputFileOptions, LoggingOptions): +# main_file: Path +# spec_file: Path +# spec_module: str +# k_args: Iterable[str] +# +# @staticmethod +# def default() -> dict[str, Any]: +# return { +# 'k_args': [], +# } +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# DefinitionOptions.from_option_string() +# | OutputFileOptions.from_option_string() +# | LoggingOptions.from_option_string() +# | {'kArgs': 'k_args'} +# ) + + +# class KompileCommandOptions(LoggingOptions, WarningOptions, KDefinitionOptions, KompileOptions): +# definition_dir: Path | None +# main_file: str +# backend: KompileBackend +# type_inference_mode: TypeInferenceMode | None +# +# @staticmethod +# def default() -> dict[str, Any]: +# return { +# 'definition_dir': None, +# 'backend': KompileBackend.LLVM, +# 'type_inference_mode': None, +# } +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# KDefinitionOptions.from_option_string() +# | KompileOptions.from_option_string() +# | LoggingOptions.from_option_string() +# | {'definition': 'definition_dir'} +# ) +# + + +# class ProveOptions(LoggingOptions, SpecOptions, SaveDirOptions): +# definition_dir: Path | None +# type_inference_mode: TypeInferenceMode | None +# failure_info: bool +# kore_rpc_command: str | 
Iterable[str] | None +# max_depth: int | None +# max_iterations: int | None +# show_kcfg: bool +# +# @staticmethod +# def default() -> dict[str, Any]: +# return { +# 'definition_dir': None, +# 'type_inference_mode': None, +# 'failure_info': False, +# 'kore_rpc_command': None, +# 'max_depth': None, +# 'max_iterations': None, +# 'show_kcfg': False, +# } +# +# @staticmethod +# def from_option_string() -> dict[str, str]: +# return ( +# KDefinitionOptions.from_option_string() +# | KompileOptions.from_option_string() +# | LoggingOptions.from_option_string() +# | {'definition': 'definition_dir'} +# ) + + +# class RunOptions(LoggingOptions): +# pgm_file: str +# definition_dir: Path | None +# +# @staticmethod +# def default() -> dict[str, Any]: +# return { +# 'definition_dir': None, +# } + + +# class ParseOuterOptions(LoggingOptions): +# main_file: Path +# md_selector: str +# includes: Iterable[str] +# output_file: IO[Any] +# main_module: str def create_argument_parser() -> ArgumentParser: @@ -445,36 +393,36 @@ def create_argument_parser() -> ArgumentParser: return pyk_args -def parse_toml_args(args: Namespace) -> dict[str, Any | Iterable]: - def get_profile(toml_profile: dict[str, Any], profile_list: list[str]) -> dict[str, Any]: - if len(profile_list) == 0 or profile_list[0] not in toml_profile: - return {k: v for k, v in toml_profile.items() if type(v) is not dict} - elif len(profile_list) == 1: - return {k: v for k, v in toml_profile[profile_list[0]].items() if type(v) is not dict} - return get_profile(toml_profile[profile_list[0]], profile_list[1:]) - - toml_args = {} - if args.config_file.is_file(): - with open(args.config_file, 'rb') as config_file: - try: - toml_args = tomli.load(config_file) - except tomli.TOMLDecodeError: - _LOGGER.error( - 'Input config file is not in TOML format, ignoring the file and carrying on with the provided command line agruments' - ) - - toml_args = ( - get_profile(toml_args[args.command], args.config_profile.split('.')) if args.command 
in toml_args else {} - ) - toml_args = {get_option_string_destination(args.command, k): v for k, v in toml_args.items()} - for k, v in toml_args.items(): - if k[:3] == 'no-' and (v == 'true' or v == 'false'): - del toml_args[k] - toml_args[k[3:]] = 'false' if v == 'true' else 'true' - if k == 'optimization-level': - level = toml_args[k] if toml_args[k] >= 0 else 0 - level = level if toml_args[k] <= 3 else 3 - del toml_args[k] - toml_args['-o' + str(level)] = 'true' - - return toml_args +# def parse_toml_args(args: Namespace) -> dict[str, Any | Iterable]: +# def get_profile(toml_profile: dict[str, Any], profile_list: list[str]) -> dict[str, Any]: +# if len(profile_list) == 0 or profile_list[0] not in toml_profile: +# return {k: v for k, v in toml_profile.items() if type(v) is not dict} +# elif len(profile_list) == 1: +# return {k: v for k, v in toml_profile[profile_list[0]].items() if type(v) is not dict} +# return get_profile(toml_profile[profile_list[0]], profile_list[1:]) +# +# toml_args = {} +# if args.config_file.is_file(): +# with open(args.config_file, 'rb') as config_file: +# try: +# toml_args = tomli.load(config_file) +# except tomli.TOMLDecodeError: +# _LOGGER.error( +# 'Input config file is not in TOML format, ignoring the file and carrying on with the provided command line agruments' +# ) +# +# toml_args = ( +# get_profile(toml_args[args.command], args.config_profile.split('.')) if args.command in toml_args else {} +# ) +# toml_args = {get_option_string_destination(args.command, k): v for k, v in toml_args.items()} +# for k, v in toml_args.items(): +# if k[:3] == 'no-' and (v == 'true' or v == 'false'): +# del toml_args[k] +# toml_args[k[3:]] = 'false' if v == 'true' else 'true' +# if k == 'optimization-level': +# level = toml_args[k] if toml_args[k] >= 0 else 0 +# level = level if toml_args[k] <= 3 else 3 +# del toml_args[k] +# toml_args['-o' + str(level)] = 'true' +# +# return toml_args diff --git a/pyk/src/pyk/ktool/kprove.py 
b/pyk/src/pyk/ktool/kprove.py index db350daa009..df39abe6574 100644 --- a/pyk/src/pyk/ktool/kprove.py +++ b/pyk/src/pyk/ktool/kprove.py @@ -29,7 +29,7 @@ from subprocess import CompletedProcess from typing import ContextManager, Final - from ..cli.pyk import ProveOptions + from ..__main__ import ProveOptionsGroup from ..kast.outer import KClaim, KRule, KRuleLike from ..kast.pretty import SymbolTable from ..kcfg import KCFGExplore @@ -427,7 +427,7 @@ def __init__( self._kprove = kprove self._explore_context = explore_context - def prove_rpc(self, options: ProveOptions) -> list[Proof]: + def prove_rpc(self, options: ProveOptionsGroup) -> list[Proof]: all_claims = self._kprove.get_claims( options.spec_file, spec_module_name=options.spec_module, diff --git a/pyk/src/tests/integration/ktool/test_imp.py b/pyk/src/tests/integration/ktool/test_imp.py index a3828f7a69d..bb18e1b5174 100644 --- a/pyk/src/tests/integration/ktool/test_imp.py +++ b/pyk/src/tests/integration/ktool/test_imp.py @@ -7,7 +7,7 @@ import pytest -from pyk.cli.pyk import ProveOptions +from pyk.__main__ import ProveOptionsGroup from pyk.kast.inner import KApply, KSequence, KVariable from pyk.kcfg.semantics import KCFGSemantics from pyk.ktool.kprove import ProveRpc @@ -174,21 +174,22 @@ def test_prove_rpc( claim_id: str, proof_status: ProofStatus, ) -> None: + + options = ProveOptionsGroup() + options.extract( + { + 'spec_file': Path(spec_file), + 'spec_module': spec_module, + 'claim_labels': [claim_id], + }, + 'prove', + ) + # Given prove_rpc = ProveRpc(kprove, lambda: nullcontext(kcfg_explore)) # When - proof = single( - prove_rpc.prove_rpc( - ProveOptions( - { - 'spec_file': Path(spec_file), - 'spec_module': spec_module, - 'claim_labels': [claim_id], - } - ), - ) - ) + proof = single(prove_rpc.prove_rpc(options)) # Then assert proof.status == proof_status diff --git a/pyk/src/tests/unit/test_toml_args.py b/pyk/src/tests/unit/test_toml_args.py index 2f10fbc5a4a..522d84bc336 100644 --- 
a/pyk/src/tests/unit/test_toml_args.py +++ b/pyk/src/tests/unit/test_toml_args.py @@ -2,8 +2,6 @@ from typing import TYPE_CHECKING -from pyk.cli.pyk import create_argument_parser, parse_toml_args - from .utils import TEST_DATA_DIR if TYPE_CHECKING: @@ -12,44 +10,44 @@ TEST_TOML: Final = TEST_DATA_DIR / 'pyk_toml_test.toml' -def test_continue_when_default_toml_absent() -> None: - parser = create_argument_parser() - cmd_args = ['coverage', '.', str(TEST_TOML)] - args = parser.parse_args(cmd_args) - assert hasattr(args, 'config_file') - assert str(args.config_file) == 'pyk.toml' - assert hasattr(args, 'config_profile') - assert str(args.config_profile) == 'default' - args_dict = parse_toml_args(args) - assert len(args_dict) == 0 - - -def test_toml_read() -> None: - parser = create_argument_parser() - cmd_args = ['coverage', '--config-file', str(TEST_TOML), '.', str(TEST_TOML), '--verbose'] - args = parser.parse_args(cmd_args) - args_dict = parse_toml_args(args) - assert 'output' in args_dict - assert args_dict['output'] == 'default-file' - assert hasattr(args, 'verbose') - assert args.verbose - - -def test_toml_profiles() -> None: - parser = create_argument_parser() - cmd_args = [ - 'coverage', - '--config-file', - str(TEST_TOML), - '--config-profile', - 'a_profile', - '.', - str(TEST_TOML), - '--verbose', - ] - args = parser.parse_args(cmd_args) - args_dict = parse_toml_args(args) - assert 'verbose' in args_dict - assert args_dict['verbose'] - assert 'output' in args_dict - assert args_dict['output'] == 'a-profile-file' +# def test_continue_when_default_toml_absent() -> None: +# parser = create_argument_parser() +# cmd_args = ['coverage', '.', str(TEST_TOML)] +# args = parser.parse_args(cmd_args) +# assert hasattr(args, 'config_file') +# assert str(args.config_file) == 'pyk.toml' +# assert hasattr(args, 'config_profile') +# assert str(args.config_profile) == 'default' +# args_dict = parse_toml_args(args) +# assert len(args_dict) == 0 +# +# +# def test_toml_read() -> 
None: +# parser = create_argument_parser() +# cmd_args = ['coverage', '--config-file', str(TEST_TOML), '.', str(TEST_TOML), '--verbose'] +# args = parser.parse_args(cmd_args) +# args_dict = parse_toml_args(args) +# assert 'output' in args_dict +# assert args_dict['output'] == 'default-file' +# assert hasattr(args, 'verbose') +# assert args.verbose +# +# +# def test_toml_profiles() -> None: +# parser = create_argument_parser() +# cmd_args = [ +# 'coverage', +# '--config-file', +# str(TEST_TOML), +# '--config-profile', +# 'a_profile', +# '.', +# str(TEST_TOML), +# '--verbose', +# ] +# args = parser.parse_args(cmd_args) +# args_dict = parse_toml_args(args) +# assert 'verbose' in args_dict +# assert args_dict['verbose'] +# assert 'output' in args_dict +# assert args_dict['output'] == 'a-profile-file'