diff --git a/.github/workflows/msbuild.yml b/.github/workflows/msbuild.yml index 60bb5048658..1a975b5d25e 100644 --- a/.github/workflows/msbuild.yml +++ b/.github/workflows/msbuild.yml @@ -34,6 +34,9 @@ jobs: - name: Restore tools working-directory: dafny run: dotnet tool restore + - name: Check whitespace and style + working-directory: dafny + run: dotnet format whitespace Source/Dafny.sln --verify-no-changes --exclude Source/DafnyCore/Scanner.cs --exclude Source/DafnyCore/Parser.cs --exclude Source/DafnyCore/GeneratedFromDafny.cs --exclude Source/DafnyRuntime/DafnyRuntimeSystemModule.cs - name: Get Boogie Version run: | sudo apt-get update @@ -51,9 +54,6 @@ jobs: - name: Build Dafny with local Boogie working-directory: dafny run: dotnet build Source/Dafny.sln - - name: Check whitespace and style - working-directory: dafny - run: dotnet tool run dotnet-format -w -s error --check Source/Dafny.sln --exclude DafnyCore/Scanner.cs --exclude DafnyCore/Parser.cs --exclude DafnyCore/GeneratedFromDafny.cs --exclude DafnyRuntime/DafnyRuntimeSystemModule.cs - name: Create NuGet package (just to make sure it works) run: dotnet pack --no-build dafny/Source/Dafny.sln - name: Check uniformity of integration tests that exercise backends diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7dff2b96253..9c4a8290fd2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,5 +4,5 @@ repos: - id: dotnet-format name: dotnet-format language: system - entry: dotnet tool run dotnet-format --folder -w --include + entry: dotnet format whitespace Source/Dafny.sln --include types_or: ["c#"] diff --git a/Makefile b/Makefile index 5739a538310..7de9d5483db 100644 --- a/Makefile +++ b/Makefile @@ -62,7 +62,7 @@ z3-ubuntu: chmod +x ${DIR}/Binaries/z3/bin/z3-* format: - dotnet tool run dotnet-format -w -s error Source/Dafny.sln --exclude DafnyCore/Scanner.cs --exclude DafnyCore/Parser.cs --exclude DafnyCore/GeneratedFromDafny.cs + dotnet format whitespace Source/Dafny.sln --exclude Source/DafnyCore/Scanner.cs --exclude Source/DafnyCore/Parser.cs --exclude Source/DafnyCore/GeneratedFromDafny.cs --exclude Source/DafnyRuntime/DafnyRuntimeSystemModule.cs clean: (cd ${DIR}; cd Source; rm -rf Dafny/bin Dafny/obj DafnyDriver/bin DafnyDriver/obj DafnyRuntime/obj DafnyRuntime/bin DafnyServer/bin DafnyServer/obj DafnyPipeline/obj DafnyPipeline/bin DafnyCore/obj DafnyCore/bin) diff --git a/Source/DafnyCore.Test/DafnyProjectTest.cs b/Source/DafnyCore.Test/DafnyProjectTest.cs index ce1c35265be..ffc1167bcb6 100644 --- a/Source/DafnyCore.Test/DafnyProjectTest.cs +++ b/Source/DafnyCore.Test/DafnyProjectTest.cs @@ -1,6 +1,6 @@ using Microsoft.Dafny; -namespace DafnyCore.Test; +namespace DafnyCore.Test; public class DafnyProjectTest { [Fact] diff --git a/Source/DafnyCore/AST/Members/Function.cs b/Source/DafnyCore/AST/Members/Function.cs index dbdf504c98f..475c43d7596 100644 --- a/Source/DafnyCore/AST/Members/Function.cs +++ b/Source/DafnyCore/AST/Members/Function.cs @@ -28,6 +28,8 @@ public string GetFunctionDeclarationKeywords(DafnyOptions options) { k = WhatKind; } + // If this function is opaque due to the opaque keyword (rather than an {:opaque} attribute), include "opaque" among the declaration keywords. + k = (IsOpaque && !Attributes.Contains(Attributes, "opaque")) ? "opaque " + k : k; return HasStaticKeyword ? 
"static " + k : k; } diff --git a/Source/DafnyCore/AST/Members/ICanAutoReveal.cs b/Source/DafnyCore/AST/Members/ICanAutoReveal.cs index 46af7903b1c..ef7a4c9cb76 100644 --- a/Source/DafnyCore/AST/Members/ICanAutoReveal.cs +++ b/Source/DafnyCore/AST/Members/ICanAutoReveal.cs @@ -1,4 +1,4 @@ -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public interface ICanAutoRevealDependencies { public void AutoRevealDependencies(AutoRevealFunctionDependencies Rewriter, DafnyOptions Options, ErrorReporter Reporter); diff --git a/Source/DafnyCore/Compilers/Dafny/WrapException.cs b/Source/DafnyCore/Compilers/Dafny/WrapException.cs index d0a58ea628b..15340517734 100644 --- a/Source/DafnyCore/Compilers/Dafny/WrapException.cs +++ b/Source/DafnyCore/Compilers/Dafny/WrapException.cs @@ -1,4 +1,4 @@ -namespace Microsoft.Dafny.Compilers; +namespace Microsoft.Dafny.Compilers; public class WrapException { diff --git a/Source/DafnyCore/Compilers/Library/LibraryBackend.cs b/Source/DafnyCore/Compilers/Library/LibraryBackend.cs index 9bcbbf9f865..d245ce4941a 100644 --- a/Source/DafnyCore/Compilers/Library/LibraryBackend.cs +++ b/Source/DafnyCore/Compilers/Library/LibraryBackend.cs @@ -6,7 +6,7 @@ using System.Linq; using DafnyCore; -namespace Microsoft.Dafny.Compilers; +namespace Microsoft.Dafny.Compilers; public class LibraryBackend : ExecutableBackend { public LibraryBackend(DafnyOptions options) : base(options) { diff --git a/Source/DafnyCore/Compilers/SinglePassCompiler.cs b/Source/DafnyCore/Compilers/SinglePassCompiler.cs index 1ddf0facbb3..85f90102a1d 100644 --- a/Source/DafnyCore/Compilers/SinglePassCompiler.cs +++ b/Source/DafnyCore/Compilers/SinglePassCompiler.cs @@ -5741,21 +5741,18 @@ private ConcreteSyntaxTree MaybeInjectSubsetConstraint(IVariable boundVar, Type IToken tok, ConcreteSyntaxTree wr, bool isReturning = false, bool elseReturnValue = false, bool isSubfiltering = false) { if (!boundVarType.Equals(collectionElementType, true) && - boundVarType.NormalizeExpand(true) is UserDefinedType - { + boundVarType.NormalizeExpand(true) is UserDefinedType { TypeArgs: var typeArgs, ResolvedClass: - SubsetTypeDecl - { + SubsetTypeDecl { TypeArgs: var typeParametersArgs, Var: var variable, Constraint: var constraint } }) { - if (variable.Type.NormalizeExpandKeepConstraints() is UserDefinedType - { - ResolvedClass: SubsetTypeDecl - } normalizedVariableType) { + if (variable.Type.NormalizeExpandKeepConstraints() is UserDefinedType { + ResolvedClass: SubsetTypeDecl + } normalizedVariableType) { wr = MaybeInjectSubsetConstraint(boundVar, normalizedVariableType, collectionElementType, inLetExprBody, tok, wr, isReturning, elseReturnValue, true); } diff --git a/Source/DafnyCore/CoverageReport/CoverageReport.cs b/Source/DafnyCore/CoverageReport/CoverageReport.cs index bccce063507..3e2bb0c5cce 100644 --- a/Source/DafnyCore/CoverageReport/CoverageReport.cs +++ b/Source/DafnyCore/CoverageReport/CoverageReport.cs @@ -5,7 +5,7 @@ using System.Linq; using System.Threading; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public class CoverageReport { diff --git a/Source/DafnyCore/CoverageReport/CoverageSpan.cs b/Source/DafnyCore/CoverageReport/CoverageSpan.cs index a3ae066d427..a6bef7345a2 100644 --- a/Source/DafnyCore/CoverageReport/CoverageSpan.cs +++ b/Source/DafnyCore/CoverageReport/CoverageSpan.cs @@ -2,7 +2,7 @@ using System; using System.Diagnostics.Contracts; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public class CoverageSpan : IComparable { diff --git 
a/Source/DafnyCore/DafnyGeneratedFromDafny.sh b/Source/DafnyCore/DafnyGeneratedFromDafny.sh index 0d0f711e104..752739c1d28 100755 --- a/Source/DafnyCore/DafnyGeneratedFromDafny.sh +++ b/Source/DafnyCore/DafnyGeneratedFromDafny.sh @@ -28,4 +28,4 @@ with open ('$output.cs', 'r' ) as f: with open('$output.cs', 'w') as w: w.write(content_new) " -dotnet tool run dotnet-format -w --include $output.cs \ No newline at end of file +dotnet format whitespace --include $output.cs \ No newline at end of file diff --git a/Source/DafnyCore/DooFile.cs b/Source/DafnyCore/DooFile.cs index 11e6d8fd1ef..b4f05dd9a53 100644 --- a/Source/DafnyCore/DooFile.cs +++ b/Source/DafnyCore/DooFile.cs @@ -9,7 +9,7 @@ using Microsoft.Dafny; using Tomlyn; -namespace DafnyCore; +namespace DafnyCore; // Model class for the .doo file format for Dafny libraries. // Contains the validation logic for safely consuming libraries as well. diff --git a/Source/DafnyCore/Generic/TomlUtil.cs b/Source/DafnyCore/Generic/TomlUtil.cs index b98d92da3f0..c3f4fbf9e6b 100644 --- a/Source/DafnyCore/Generic/TomlUtil.cs +++ b/Source/DafnyCore/Generic/TomlUtil.cs @@ -4,7 +4,7 @@ using System.Linq; using Tomlyn.Model; -namespace DafnyCore.Generic; +namespace DafnyCore.Generic; public static class TomlUtil { diff --git a/Source/DafnyCore/Options/DafnyProject.cs b/Source/DafnyCore/Options/DafnyProject.cs index 3e076a474d0..36d689a243e 100644 --- a/Source/DafnyCore/Options/DafnyProject.cs +++ b/Source/DafnyCore/Options/DafnyProject.cs @@ -16,7 +16,7 @@ using Tomlyn; using Tomlyn.Model; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public class DafnyProject : IEquatable<DafnyProject> { public const string FileName = "dfyconfig.toml"; diff --git a/Source/DafnyCore/Plugins/CompilerInstrumenter.cs b/Source/DafnyCore/Plugins/CompilerInstrumenter.cs index 4b8e5ca0b56..65ac3472621 100644 --- a/Source/DafnyCore/Plugins/CompilerInstrumenter.cs +++ b/Source/DafnyCore/Plugins/CompilerInstrumenter.cs @@ -1,6 +1,6 @@ using Microsoft.Dafny.Compilers; -namespace Microsoft.Dafny.Plugins; +namespace Microsoft.Dafny.Plugins; /// <summary> /// A hook for plugins to customize some of the code generation of other IExecutableBackends. diff --git a/Source/DafnyCore/Plugins/ErrorReportingBase.cs b/Source/DafnyCore/Plugins/ErrorReportingBase.cs index a86b30a8933..ac390293ec4 100644 --- a/Source/DafnyCore/Plugins/ErrorReportingBase.cs +++ b/Source/DafnyCore/Plugins/ErrorReportingBase.cs @@ -1,6 +1,6 @@ using System.Diagnostics.Contracts; -namespace Microsoft.Dafny.Plugins; +namespace Microsoft.Dafny.Plugins; /// <summary> /// Abstract base class for plugin interfaces that may report errors. 
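As a quick reference for the formatting change above: the check that CI now runs can be reproduced locally. A minimal sketch, assuming a checkout of the dafny repository as the working directory and a .NET SDK recent enough to include the built-in dotnet format command:

  dotnet format whitespace Source/Dafny.sln --verify-no-changes \
    --exclude Source/DafnyCore/Scanner.cs --exclude Source/DafnyCore/Parser.cs \
    --exclude Source/DafnyCore/GeneratedFromDafny.cs \
    --exclude Source/DafnyRuntime/DafnyRuntimeSystemModule.cs

Running the same command without --verify-no-changes rewrites the files in place, which is what the updated "make format" target now does.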
diff --git a/Source/DafnyCore/Resolver/FuelAdjustment.cs b/Source/DafnyCore/Resolver/FuelAdjustment.cs index 0d26a0ad15f..c848a95764f 100644 --- a/Source/DafnyCore/Resolver/FuelAdjustment.cs +++ b/Source/DafnyCore/Resolver/FuelAdjustment.cs @@ -2,7 +2,7 @@ using System.Diagnostics.Contracts; using System.Numerics; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public static class FuelAdjustment { diff --git a/Source/DafnyCore/Resolver/SubsetConstraintGhostChecker.cs b/Source/DafnyCore/Resolver/SubsetConstraintGhostChecker.cs index 2155b0a2f95..488d2fdab07 100644 --- a/Source/DafnyCore/Resolver/SubsetConstraintGhostChecker.cs +++ b/Source/DafnyCore/Resolver/SubsetConstraintGhostChecker.cs @@ -85,8 +85,7 @@ public override bool Traverse(Expression expr, [CanBeNull] string field, [CanBeN if (e is ForallExpr || e is ExistsExpr || e is SetComprehension || e is MapComprehension) { foreach (var boundVar in e.BoundVars) { - if (boundVar.Type.AsSubsetType is - { + if (boundVar.Type.AsSubsetType is { Constraint: var constraint, ConstraintIsCompilable: false and var constraintIsCompilable } and var subsetTypeDecl diff --git a/Source/DafnyCore/Rewriters/RewriterCollection.cs b/Source/DafnyCore/Rewriters/RewriterCollection.cs index 8749b85c92e..10643dccb04 100644 --- a/Source/DafnyCore/Rewriters/RewriterCollection.cs +++ b/Source/DafnyCore/Rewriters/RewriterCollection.cs @@ -1,6 +1,6 @@ using System.Collections.Generic; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public static class RewriterCollection { diff --git a/Source/DafnyCore/Verifier/BoogieGenerator.Decreases.cs b/Source/DafnyCore/Verifier/BoogieGenerator.Decreases.cs index 0944ac4f940..7856b853455 100644 --- a/Source/DafnyCore/Verifier/BoogieGenerator.Decreases.cs +++ b/Source/DafnyCore/Verifier/BoogieGenerator.Decreases.cs @@ -22,7 +22,7 @@ using PODesc = Microsoft.Dafny.ProofObligationDescription; using static Microsoft.Dafny.GenericErrors; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public partial class BoogieGenerator { diff --git a/Source/DafnyCore/Verifier/BoogieGenerator.Extremes.cs b/Source/DafnyCore/Verifier/BoogieGenerator.Extremes.cs index 5353182e40f..35a478f46b6 100644 --- a/Source/DafnyCore/Verifier/BoogieGenerator.Extremes.cs +++ b/Source/DafnyCore/Verifier/BoogieGenerator.Extremes.cs @@ -22,7 +22,7 @@ using PODesc = Microsoft.Dafny.ProofObligationDescription; using static Microsoft.Dafny.GenericErrors; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public partial class BoogieGenerator { diff --git a/Source/DafnyCore/Verifier/BoogieGenerator.Functions.cs b/Source/DafnyCore/Verifier/BoogieGenerator.Functions.cs index 3a52915210c..973077e08c8 100644 --- a/Source/DafnyCore/Verifier/BoogieGenerator.Functions.cs +++ b/Source/DafnyCore/Verifier/BoogieGenerator.Functions.cs @@ -9,7 +9,7 @@ using PODesc = Microsoft.Dafny.ProofObligationDescription; using static Microsoft.Dafny.GenericErrors; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public partial class BoogieGenerator { diff --git a/Source/DafnyCore/Verifier/BoogieGenerator.Types.cs b/Source/DafnyCore/Verifier/BoogieGenerator.Types.cs index debd2e97e55..1e9b9104dd5 100644 --- a/Source/DafnyCore/Verifier/BoogieGenerator.Types.cs +++ b/Source/DafnyCore/Verifier/BoogieGenerator.Types.cs @@ -22,7 +22,7 @@ using PODesc = Microsoft.Dafny.ProofObligationDescription; using static Microsoft.Dafny.GenericErrors; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public partial class BoogieGenerator { diff --git 
a/Source/DafnyCore/Verifier/BoogieGenerator.cs b/Source/DafnyCore/Verifier/BoogieGenerator.cs index 649215fbbdc..55562430c5e 100644 --- a/Source/DafnyCore/Verifier/BoogieGenerator.cs +++ b/Source/DafnyCore/Verifier/BoogieGenerator.cs @@ -3994,8 +3994,7 @@ Bpl.Expr GetSubrangeCheck(Bpl.Expr bSource, Type sourceType, Type targetType, ou "it may have read effects"); } else { desc = new PODesc.SubrangeCheck(errorMessagePrefix, sourceType.ToString(), targetType.ToString(), - targetType.NormalizeExpandKeepConstraints() is UserDefinedType - { + targetType.NormalizeExpandKeepConstraints() is UserDefinedType { ResolvedClass: SubsetTypeDecl or NewtypeDecl { Var: { } } }, false, null); diff --git a/Source/DafnyDriver/Commands/CoverageReportCommand.cs b/Source/DafnyDriver/Commands/CoverageReportCommand.cs index b5b538384c3..953186cf3c6 100644 --- a/Source/DafnyDriver/Commands/CoverageReportCommand.cs +++ b/Source/DafnyDriver/Commands/CoverageReportCommand.cs @@ -9,7 +9,7 @@ using System.Threading.Tasks; using DafnyCore; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; static class CoverageReportCommand { diff --git a/Source/DafnyDriver/CoverageReporter.cs b/Source/DafnyDriver/CoverageReporter.cs index 7cfe2157862..f1f3671f10d 100644 --- a/Source/DafnyDriver/CoverageReporter.cs +++ b/Source/DafnyDriver/CoverageReporter.cs @@ -7,7 +7,7 @@ using DafnyCore.Verifier; using Microsoft.Boogie; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public class CoverageReporter { diff --git a/Source/DafnyLanguageServer.Test/Diagnostics/VerificationDiagnostics.cs b/Source/DafnyLanguageServer.Test/Diagnostics/VerificationDiagnostics.cs index 223ed4967c3..8854a6c5f84 100644 --- a/Source/DafnyLanguageServer.Test/Diagnostics/VerificationDiagnostics.cs +++ b/Source/DafnyLanguageServer.Test/Diagnostics/VerificationDiagnostics.cs @@ -6,7 +6,7 @@ using Xunit; using Xunit.Abstractions; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; public class VerificationDiagnostics : ClientBasedLanguageServerTest { diff --git a/Source/DafnyLanguageServer.Test/MultipleFilesNoProjectTest.cs b/Source/DafnyLanguageServer.Test/MultipleFilesNoProjectTest.cs index 6bf1cbbffd9..86b56af37e0 100644 --- a/Source/DafnyLanguageServer.Test/MultipleFilesNoProjectTest.cs +++ b/Source/DafnyLanguageServer.Test/MultipleFilesNoProjectTest.cs @@ -6,7 +6,7 @@ using Xunit; using Xunit.Abstractions; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest; public class MultipleFilesNoProjectTest : ClientBasedLanguageServerTest { [Fact] diff --git a/Source/DafnyLanguageServer.Test/Performance/ThreadUsageTest.cs b/Source/DafnyLanguageServer.Test/Performance/ThreadUsageTest.cs index 377bf85fa35..c84a43a5bd0 100644 --- a/Source/DafnyLanguageServer.Test/Performance/ThreadUsageTest.cs +++ b/Source/DafnyLanguageServer.Test/Performance/ThreadUsageTest.cs @@ -8,7 +8,7 @@ using Xunit.Abstractions; using Range = OmniSharp.Extensions.LanguageServer.Protocol.Models.Range; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Performance; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Performance; [Collection("Sequential Collection")] // Seems to deadlock when run in parallel public class ThreadUsageTest : ClientBasedLanguageServerTest { diff --git a/Source/DafnyLanguageServer.Test/ProjectFiles/CompetingProjectFilesTest.cs 
b/Source/DafnyLanguageServer.Test/ProjectFiles/CompetingProjectFilesTest.cs index f083d6c2651..07ef2777481 100644 --- a/Source/DafnyLanguageServer.Test/ProjectFiles/CompetingProjectFilesTest.cs +++ b/Source/DafnyLanguageServer.Test/ProjectFiles/CompetingProjectFilesTest.cs @@ -10,7 +10,7 @@ using Xunit.Abstractions; using Range = OmniSharp.Extensions.LanguageServer.Protocol.Models.Range; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.ProjectFiles; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.ProjectFiles; public class CompetingProjectFilesTest : ClientBasedLanguageServerTest { diff --git a/Source/DafnyLanguageServer.Test/ProjectFiles/ProjectFilesTest.cs b/Source/DafnyLanguageServer.Test/ProjectFiles/ProjectFilesTest.cs index 6cd5da33c46..c68954e4823 100644 --- a/Source/DafnyLanguageServer.Test/ProjectFiles/ProjectFilesTest.cs +++ b/Source/DafnyLanguageServer.Test/ProjectFiles/ProjectFilesTest.cs @@ -11,7 +11,7 @@ using Xunit.Abstractions; using Range = OmniSharp.Extensions.LanguageServer.Protocol.Models.Range; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest; public class ProjectFilesTest : ClientBasedLanguageServerTest { diff --git a/Source/DafnyLanguageServer.Test/Refinement/RefinementTests.cs b/Source/DafnyLanguageServer.Test/Refinement/RefinementTests.cs index e5916108c42..2f988f8e84d 100644 --- a/Source/DafnyLanguageServer.Test/Refinement/RefinementTests.cs +++ b/Source/DafnyLanguageServer.Test/Refinement/RefinementTests.cs @@ -6,7 +6,7 @@ using Xunit; using Xunit.Abstractions; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Refinement; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Refinement; public class RefinementTests : ClientBasedLanguageServerTest { [Fact] diff --git a/Source/DafnyLanguageServer.Test/Synchronization/CachingTest.cs b/Source/DafnyLanguageServer.Test/Synchronization/CachingTest.cs index acc1896295b..260b1d17435 100644 --- a/Source/DafnyLanguageServer.Test/Synchronization/CachingTest.cs +++ b/Source/DafnyLanguageServer.Test/Synchronization/CachingTest.cs @@ -15,7 +15,7 @@ using Xunit.Abstractions; using Range = OmniSharp.Extensions.LanguageServer.Protocol.Models.Range; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; public class CachingTest : ClientBasedLanguageServerTest { private InMemorySink sink; diff --git a/Source/DafnyLanguageServer.Test/Synchronization/ProjectManagerDatabaseTest.cs b/Source/DafnyLanguageServer.Test/Synchronization/ProjectManagerDatabaseTest.cs index 588ad2f009a..158c0ec2690 100644 --- a/Source/DafnyLanguageServer.Test/Synchronization/ProjectManagerDatabaseTest.cs +++ b/Source/DafnyLanguageServer.Test/Synchronization/ProjectManagerDatabaseTest.cs @@ -9,7 +9,7 @@ using Xunit; using Xunit.Abstractions; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Synchronization; [Collection("Sequential Collection")] public class ProjectManagerDatabaseTest : ClientBasedLanguageServerTest { diff --git a/Source/DafnyLanguageServer.Test/Unit/ByteArrayEqualityTest.cs b/Source/DafnyLanguageServer.Test/Unit/ByteArrayEqualityTest.cs index 509f942eebe..24ca25042c4 100644 --- a/Source/DafnyLanguageServer.Test/Unit/ByteArrayEqualityTest.cs +++ b/Source/DafnyLanguageServer.Test/Unit/ByteArrayEqualityTest.cs @@ -2,7 +2,7 @@ using Microsoft.Dafny.LanguageServer.Language; 
using Xunit; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; public class ByteArrayEqualityTest { [Fact] diff --git a/Source/DafnyLanguageServer.Test/Unit/CompilationManagerTest.cs b/Source/DafnyLanguageServer.Test/Unit/CompilationManagerTest.cs index ffcbdb21525..07571fd446c 100644 --- a/Source/DafnyLanguageServer.Test/Unit/CompilationManagerTest.cs +++ b/Source/DafnyLanguageServer.Test/Unit/CompilationManagerTest.cs @@ -7,7 +7,7 @@ using Moq; using Xunit; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; public class CompilationManagerTest { [Fact] diff --git a/Source/DafnyLanguageServer.Test/Unit/TextReaderFromCharArraysTest.cs b/Source/DafnyLanguageServer.Test/Unit/TextReaderFromCharArraysTest.cs index 4049b445c23..43cd7185c8f 100644 --- a/Source/DafnyLanguageServer.Test/Unit/TextReaderFromCharArraysTest.cs +++ b/Source/DafnyLanguageServer.Test/Unit/TextReaderFromCharArraysTest.cs @@ -2,7 +2,7 @@ using Microsoft.Dafny.LanguageServer.Language; using Xunit; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Unit; public class TextReaderFromCharArraysTest { diff --git a/Source/DafnyLanguageServer.Test/Util/StringifyTest.cs b/Source/DafnyLanguageServer.Test/Util/StringifyTest.cs index 7306abaa247..f0b678f03ac 100644 --- a/Source/DafnyLanguageServer.Test/Util/StringifyTest.cs +++ b/Source/DafnyLanguageServer.Test/Util/StringifyTest.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using Xunit; -namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Util; +namespace Microsoft.Dafny.LanguageServer.IntegrationTest.Util; public class StringifyTest { diff --git a/Source/DafnyLanguageServer/Caching/PruneIfNotUsedSinceLastPruneCache.cs b/Source/DafnyLanguageServer/Caching/PruneIfNotUsedSinceLastPruneCache.cs index a90940af640..b9cd078fcf9 100644 --- a/Source/DafnyLanguageServer/Caching/PruneIfNotUsedSinceLastPruneCache.cs +++ b/Source/DafnyLanguageServer/Caching/PruneIfNotUsedSinceLastPruneCache.cs @@ -5,7 +5,7 @@ using System.Linq; using Microsoft.Dafny.LanguageServer.Workspace; -namespace Microsoft.Dafny; +namespace Microsoft.Dafny; public class PruneIfNotUsedSinceLastPruneCache where TValue : class diff --git a/Source/DafnyLanguageServer/Util/PositionExtensions.cs b/Source/DafnyLanguageServer/Util/PositionExtensions.cs index 7580181849d..eda5f16ab2e 100644 --- a/Source/DafnyLanguageServer/Util/PositionExtensions.cs +++ b/Source/DafnyLanguageServer/Util/PositionExtensions.cs @@ -1,7 +1,7 @@ using Microsoft.Dafny.LanguageServer.Language; using OmniSharp.Extensions.LanguageServer.Protocol.Models; -namespace Microsoft.Dafny.LanguageServer.Util; +namespace Microsoft.Dafny.LanguageServer.Util; public static class PositionExtensions { public static DafnyPosition ToDafnyPosition(this IToken token) { diff --git a/Source/DafnyStandardLibraries/README.md b/Source/DafnyStandardLibraries/README.md index 31afbc61ff4..5ba0478af5a 100644 --- a/Source/DafnyStandardLibraries/README.md +++ b/Source/DafnyStandardLibraries/README.md @@ -42,6 +42,7 @@ The sections below describe how to use each library: - [DafnyStdLibs.Wrappers](src/DafnyStdLibs/Wrappers) -- simple datatypes to support common patterns, such as optional values or the result of operations that can fail - [DafnyStdLibs.Relations](src/DafnyStdLibs/Relations) -- properties of relations - 
[DafnyStdLibs.Functions](src/DafnyStdLibs/Functions) -- properties of functions +- [DafnyStdLibs.Base64](src/DafnyStdLibs/Base64) -- base-64 encoding and decoding We are in the process of importing many more libraries, in particular from the existing [`dafny-lang/libraries`](https://github.com/dafny-lang/libraries) GitHub repository. diff --git a/Source/DafnyStandardLibraries/binaries/DafnyStandardLibraries.doo b/Source/DafnyStandardLibraries/binaries/DafnyStandardLibraries.doo index 9f607dad172..f9d0a7b7ced 100644 Binary files a/Source/DafnyStandardLibraries/binaries/DafnyStandardLibraries.doo and b/Source/DafnyStandardLibraries/binaries/DafnyStandardLibraries.doo differ diff --git a/Source/DafnyStandardLibraries/examples/Base64Examples.dfy b/Source/DafnyStandardLibraries/examples/Base64Examples.dfy new file mode 100644 index 00000000000..ebdc3874b3c --- /dev/null +++ b/Source/DafnyStandardLibraries/examples/Base64Examples.dfy @@ -0,0 +1,47 @@ +module Base64Examples { + import opened DafnyStdLibs.Base64 + import opened DafnyStdLibs.BoundedInts + import opened DafnyStdLibs.Wrappers + + method CheckEncodeDecode(uints: seq, bytes: seq) { + expect Decode(Encode(uints)) == Success(uints); + expect DecodeBV(EncodeBV(bytes)) == Success(bytes); + } + + method {:test} TestRoundTripEmpty() { + CheckEncodeDecode([], []); + } + + method {:test} TestRoundTripOne() { + CheckEncodeDecode([0], [0]); + } + + method {:test} TestRoundTripTwo() { + CheckEncodeDecode([1, 2], [3, 4]); + } + + method {:test} TestRoundTripThree() { + CheckEncodeDecode([112, 234], [43, 76]); + } + + method {:test} TestRoundTripMedium() { + var medUints := seq(512, _ => 22); + var medBytes := seq(512, _ => 22); + CheckEncodeDecode(medUints, medBytes); + } + + // TODO: even this size is too big to practically test for Go and JS + /* + method {:test} TestRoundTripBig() { + // Larger sizes are unfortunately pretty slow. An + // optimized implementation seems worthwhile. + var bigUints := seq(438530, _ => 76); + var bigBytes := seq(438530, _ => 45); + CheckEncodeDecode(bigUints, bigBytes); + } + */ + + method {:test} TestDecodeFail() { + expect Decode("$&^*(_)").Failure?; + } +} diff --git a/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.dfy b/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.dfy new file mode 100644 index 00000000000..e463c71b82b --- /dev/null +++ b/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.dfy @@ -0,0 +1,1515 @@ +/******************************************************************************* + * Copyright by the contributors to the Dafny Project + * SPDX-License-Identifier: MIT + *******************************************************************************/ + +/** An encoder and decoder for the Base64 encoding scheme. 
*/ +module DafnyStdLibs.Base64 { + import opened Wrappers + import opened BoundedInts + + export + reveals + // Functions mentioned in contracts of functions provided below + IsBase64Char, + index, + CharToIndex, + IndexToChar, + IsBase64String, + IsUnpaddedBase64String, + Is1Padding, + Is2Padding + provides + // uint8-based API + Encode, + Decode, + // bv8-based API + EncodeBV, + DecodeBV, + // Lemmas + EncodeDecode, + DecodeEncode, + EncodeDecodeBV, + DecodeEncodeBV, + // Dependencies + BoundedInts, + Wrappers + + export Internals + reveals * + + // The maximum index for Base64 is less than 64 (0x40) + type index = bv6 + + opaque predicate IsBase64Char(c: char) { + // char values can be compared using standard relational operators + // http://leino.science/papers/krml243.html#sec-char + c == '+' || c == '/' || '0' <= c <= '9' || 'A' <= c <= 'Z' || 'a' <= c <= 'z' + } + + lemma Base64CharIs7Bit(c: char) + requires IsBase64Char(c) + ensures c < 128 as char + { + reveal IsBase64Char(); + } + + opaque predicate IsUnpaddedBase64String(s: string) { + // A Base64 encoded string will use 4 ASCII characters for every 3 bytes of data ==> length is divisible by 4 + |s| % 4 == 0 && forall k :: k in s ==> IsBase64Char(k) + } + + opaque function IndexToChar(i: index): (c: char) + ensures IsBase64Char(c) + { + reveal IsBase64Char(); + // Based on the Base64 index table + if i == 63 then '/' + else if i == 62 then '+' + // Dafny 1.9.9 added support for char to int conversion + // https://github.com/dafny-lang/dafny/releases/tag/v1.9.9 + // 0 - 9 + else if 52 <= i <= 61 then (i - 4) as char + // a - z + else if 26 <= i <= 51 then i as char + 71 as char + // A - Z + else i as char + 65 as char + } + + lemma IndexToCharIsBase64(i: index) + ensures IsBase64Char(IndexToChar(i)) + { + reveal IndexToChar(); + reveal IsBase64Char(); + } + + opaque function CharToIndex(c: char): (i: index) + // This is actually required for the function to be total, + // and that requirement propagates to many places. 
+ requires IsBase64Char(c) + { + reveal IsBase64Char(); + reveal IndexToChar(); + // Perform the inverse operations of IndexToChar + if c == '/' then 63 + else if c == '+' then 62 + else if '0' <= c <= '9' then (c + 4 as char) as index + else if 'a' <= c <= 'z' then (c - 71 as char) as index + else (c - 65 as char) as index + } + + lemma {:vcs_split_on_every_assert} CharToIndexToChar(c: char) + requires IsBase64Char(c) + ensures IndexToChar(CharToIndex(c)) == c + { + // TODO: reduce resource use, brittleness + Base64CharIs7Bit(c); + reveal IsBase64Char(); + reveal IndexToChar(); + reveal CharToIndex(); + if c == '/' { + assert IndexToChar(CharToIndex(c)) == c; + } else if c == '+' { + assert IndexToChar(CharToIndex(c)) == c; + } else if '0' <= c <= '9' { + assert IndexToChar(CharToIndex(c)) == c; + } else if 'a' <= c < 'm' { + assert IndexToChar(CharToIndex(c)) == c; + } else if 'm' <= c <= 'z' { + assert IndexToChar(CharToIndex(c)) == c; + } else { + assert IndexToChar(CharToIndex(c)) == c; + } + } + + lemma {:vcs_split_on_every_assert} IndexToCharToIndex(i: index) + ensures (IndexToCharIsBase64(i); CharToIndex(IndexToChar(i)) == i) + { + // TODO: reduce resource use, brittleness + reveal IsBase64Char(); + reveal IndexToChar(); + reveal CharToIndex(); + IndexToCharIsBase64(i); + if i == 63 { + assert CharToIndex(IndexToChar(i)) == i; + } else if i == 62 { + assert CharToIndex(IndexToChar(i)) == i; + } else if 52 <= i <= 61 { + assert CharToIndex(IndexToChar(i)) == i; + } else if 26 <= i <= 51 { + assert CharToIndex(IndexToChar(i)) == i; + } else { + assert CharToIndex(IndexToChar(i)) == i; + } + } + + lemma IndexToCharToIndexAuto() + ensures forall x :: (IndexToCharIsBase64(x); CharToIndex(IndexToChar(x)) == x) + { + forall x: index + ensures (IndexToCharIsBase64(x); CharToIndex(IndexToChar(x)) == x) + { + IndexToCharToIndex(x); + } + } + + lemma CharToIndexToCharAuto() + ensures forall c | IsBase64Char(c) :: IndexToChar(CharToIndex(c)) == c + { + forall c: char | IsBase64Char(c) + ensures IndexToChar(CharToIndex(c)) == c + { + CharToIndexToChar(c); + } + } + + opaque function BV24ToSeq(x: bv24): (ret: seq) + ensures |ret| == 3 + { + var b0 := ((x >> 16) & 0x0000FF) as bv8; + var b1 := ((x >> 8) & 0x0000FF) as bv8; + var b2 := ((x ) & 0x0000FF) as bv8; + [b0, b1, b2] + } + + opaque function SeqToBV24(x: seq): (ret: bv24) + requires |x| == 3 + { + (x[0] as bv24 << 16) | (x[1] as bv24 << 8) | x[2] as bv24 + } + + lemma BV24ToSeqToBV24(x: bv24) + ensures SeqToBV24(BV24ToSeq(x)) == x + { + reveal BV24ToSeq(); + reveal SeqToBV24(); + } + + lemma SeqToBV24ToSeq(s: seq) + requires |s| == 3 + ensures BV24ToSeq(SeqToBV24(s)) == s + { + reveal SeqToBV24(); + reveal BV24ToSeq(); + } + + opaque function BV24ToIndexSeq(x: bv24): (ret: seq) + ensures |ret| == 4 + { + var b0 := ((x >> 18) & 0x00003F) as index; + var b1 := ((x >> 12) & 0x00003F) as index; + var b2 := ((x >> 6) & 0x00003F) as index; + var b3 := ((x ) & 0x00003F) as index; + [b0, b1, b2, b3] + } + + opaque function IndexSeqToBV24(x: seq): (ret: bv24) + requires |x| == 4 + { + (x[0] as bv24 << 18) | + (x[1] as bv24 << 12) | + (x[2] as bv24 << 6) | + (x[3] as bv24 ) + } + + lemma BV24ToIndexSeqToBV24(x: bv24) + ensures IndexSeqToBV24(BV24ToIndexSeq(x)) == x + { + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + + lemma IndexSeqToBV24ToIndexSeq(s: seq) + requires |s| == 4 + ensures BV24ToIndexSeq(IndexSeqToBV24(s)) == s + { + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + + opaque function DecodeBlock(s: seq): (ret: seq) + 
requires |s| == 4 + ensures |ret| == 3 + { + BV24ToSeq(IndexSeqToBV24(s)) + } + + opaque function EncodeBlock(s: seq): (ret: seq) + requires |s| == 3 + ensures |ret| == 4 + { + BV24ToIndexSeq(SeqToBV24(s)) + } + + lemma EncodeDecodeBlock(s: seq) + requires |s| == 3 + ensures DecodeBlock(EncodeBlock(s)) == s + { + reveal EncodeBlock(); + reveal DecodeBlock(); + var b := SeqToBV24(s); + BV24ToIndexSeqToBV24(b); + SeqToBV24ToSeq(s); + } + + lemma DecodeEncodeBlock(s: seq) + requires |s| == 4 + ensures EncodeBlock(DecodeBlock(s)) == s + { + reveal EncodeBlock(); + reveal DecodeBlock(); + var b := IndexSeqToBV24(s); + BV24ToSeqToBV24(b); + IndexSeqToBV24ToIndexSeq(s); + } + + opaque function {:vcs_split_on_every_assert} DecodeRecursively(s: seq): (b: seq) + requires |s| % 4 == 0 + decreases |s| + { + if |s| == 0 then [] + else DecodeBlock(s[..4]) + DecodeRecursively(s[4..]) + } by method { + var resultLength := |s|/4 * 3; + var result := new bv8[resultLength](i => 0); + var i := |s|; + var j := resultLength; + + reveal DecodeRecursively(); + while i > 0 + invariant i % 4 == 0 + invariant 0 <= i <= |s| + invariant i * 3 == j * 4 + invariant 0 <= j <= resultLength + invariant result[j..] == DecodeRecursively(s[i..]) + { + i := i - 4; + j := j - 3; + var block := DecodeBlock(s[i..i+4]); + result[j] := block[0]; + result[j+1] := block[1]; + result[j+2] := block[2]; + //assert result[j+3..] == DecodeRecursively(s[i+4..]); + assert s[i..][..4] == s[i..i+4]; + assert s[i..][4..] == s[i+4..]; + assert result[j..j+3] == block; + calc { + DecodeBlock(s[i..i+4]) + DecodeRecursively(s[i+4..]); + DecodeBlock(s[i..][..4]) + DecodeRecursively(s[i..][4..]); + DecodeRecursively(s[i..]); + } + } + b := result[..]; + } + + lemma DecodeRecursivelyBounds(s: seq) + requires |s| % 4 == 0 + ensures |DecodeRecursively(s)| == |s| / 4 * 3 + ensures |DecodeRecursively(s)| % 3 == 0 + ensures |DecodeRecursively(s)| == 0 ==> |s| == 0 + { + reveal DecodeRecursively(); + } + + lemma DecodeRecursivelyBlock(s: seq) + requires |s| % 4 == 0 + ensures + (DecodeRecursivelyBounds(s); + var b := DecodeRecursively(s); + |b| != 0 ==> EncodeBlock(b[..3]) == s[..4]) + { + DecodeRecursivelyBounds(s); + if |s| == 0 {} + else { + DecodeEncodeBlock(s[..4]); + reveal DecodeRecursively(); + } + } + + opaque function {:vcs_split_on_every_assert} EncodeRecursively(b: seq): (s: seq) + requires |b| % 3 == 0 + { + if |b| == 0 then [] + else EncodeBlock(b[..3]) + EncodeRecursively(b[3..]) + } by method { + var resultLength := |b|/3 * 4; + var result := new index[resultLength](i => 0); + var i := |b|; + var j := resultLength; + + reveal EncodeRecursively(); + while i > 0 + invariant i % 3 == 0 + invariant 0 <= i <= |b| + invariant i * 4 == j * 3 + invariant 0 <= j <= resultLength + invariant result[j..] == EncodeRecursively(b[i..]) + { + i := i - 3; + j := j - 4; + var block := EncodeBlock(b[i..i+3]); + result[j] := block[0]; + result[j+1] := block[1]; + result[j+2] := block[2]; + result[j+3] := block[3]; + assert b[i..][..3] == b[i..i+3]; + assert b[i..][3..] 
== b[i+3..]; + assert result[j..j+4] == block; + calc { + EncodeBlock(b[i..i+3]) + EncodeRecursively(b[i+3..]); + EncodeBlock(b[i..][..3]) + EncodeRecursively(b[i..][3..]); + EncodeRecursively(b[i..]); + } + } + s := result[..]; + } + + lemma EncodeRecursivelyBounds(b: seq) + requires |b| % 3 == 0 + ensures |EncodeRecursively(b)| == |b| / 3 * 4 + ensures |EncodeRecursively(b)| % 4 == 0 + ensures |EncodeRecursively(b)| == 0 ==> |b| == 0 + { + reveal EncodeRecursively(); + } + + lemma EncodeRecursivelyBlock(b: seq) + requires |b| % 3 == 0 + ensures (EncodeRecursivelyBounds(b); + var s := EncodeRecursively(b); + |s| != 0 ==> DecodeBlock(s[..4]) == b[..3]) + { + EncodeRecursivelyBounds(b); + if |b| == 0 {} + else { + EncodeDecodeBlock(b[..3]); + reveal EncodeRecursively(); + } + } + + lemma EncodeDecodeRecursively(b: seq) + requires |b| % 3 == 0 + ensures (EncodeRecursivelyBounds(b); DecodeRecursively(EncodeRecursively(b)) == b) + { + var s := EncodeRecursively(b); + EncodeRecursivelyBounds(b); + DecodeRecursivelyBounds(s); + if |b| == 0 { + } else { + calc { + DecodeRecursively(EncodeRecursively(b)); + == + DecodeRecursively(s); + == { reveal DecodeRecursively(); } + DecodeBlock(s[..4]) + DecodeRecursively(s[4..]); + == { EncodeRecursivelyBlock(b); } + b[..3] + DecodeRecursively(s[4..]); + == { reveal EncodeRecursively(); } + b[..3] + DecodeRecursively(EncodeRecursively(b[3..])); + == { EncodeDecodeRecursively(b[3..]); } + b[..3] + b[3..]; + == + b; + } + } + } + + lemma DecodeEncodeRecursively(s: seq) + requires |s| % 4 == 0 + ensures (DecodeRecursivelyBounds(s); EncodeRecursively(DecodeRecursively(s)) == s) + { + var b := DecodeRecursively(s); + DecodeRecursivelyBounds(s); + EncodeRecursivelyBounds(b); + if |s| == 0 { + } else { + calc { + EncodeRecursively(DecodeRecursively(s)); + == + EncodeRecursively(b); + == { reveal EncodeRecursively(); } + EncodeBlock(b[..3]) + EncodeRecursively(b[3..]); + == { DecodeRecursivelyBlock(s); } + s[..4] + EncodeRecursively(b[3..]); + == { reveal DecodeRecursively(); } + s[..4] + EncodeRecursively(DecodeRecursively(s[4..])); + == { DecodeEncodeRecursively(s[4..]); } + s[..4] + s[4..]; + == + s; + } + } + } + + opaque function FromCharsToIndices(s: seq): (b: seq) + requires forall k :: k in s ==> IsBase64Char(k) + ensures |b| == |s| + { + seq(|s|, i requires 0 <= i < |s| => CharToIndex(s[i])) + } + + opaque function FromIndicesToChars(b: seq): (s: seq) + ensures forall k :: k in s ==> IsBase64Char(k) + ensures |s| == |b| + { + seq(|b|, i requires 0 <= i < |b| => IndexToChar(b[i])) + } + + lemma FromCharsToIndicesToChars(s: seq) + requires forall k :: k in s ==> IsBase64Char(k) + ensures FromIndicesToChars(FromCharsToIndices(s)) == s + { + reveal FromIndicesToChars(); + reveal FromCharsToIndices(); + CharToIndexToCharAuto(); + } + + lemma FromIndicesToCharsToIndices(b: seq) + ensures FromCharsToIndices(FromIndicesToChars(b)) == b + { + reveal FromIndicesToChars(); + reveal FromCharsToIndices(); + IndexToCharToIndexAuto(); + } + + opaque function DecodeUnpadded(s: seq): (b: seq) + requires IsUnpaddedBase64String(s) + { + reveal IsUnpaddedBase64String(); + DecodeRecursively(FromCharsToIndices(s)) + } + + lemma DecodeUnpaddedBounds(s: seq) + requires IsUnpaddedBase64String(s) + ensures |DecodeUnpadded(s)| == |s| / 4 * 3 + ensures |DecodeUnpadded(s)| % 3 == 0 + { + reveal DecodeUnpadded(); + reveal IsUnpaddedBase64String(); + reveal IsBase64String(); + var indices := FromCharsToIndices(s); + assert |indices| == |s|; + DecodeRecursivelyBounds(indices); + } + + 
opaque function EncodeUnpadded(b: seq): (s: seq) + requires |b| % 3 == 0 + { + EncodeDecodeRecursively(b); + FromIndicesToChars(EncodeRecursively(b)) + } + + lemma EncodeUnpaddedNotPadded(b: seq) + requires |b| % 3 == 0 + requires b != [] + ensures (EncodeUnpaddedBounds(b); + var s := EncodeUnpadded(b); + !Is1Padding(s[(|s| - 4)..]) && !Is2Padding(s[(|s| - 4)..])) + { + var s := EncodeUnpadded(b); + EncodeUnpaddedBounds(b); + var suffix := s[(|s| - 4)..]; + reveal EncodeUnpadded(); + assert forall c :: c in s ==> IsBase64Char(c); + assert IsBase64Char(s[|s| - 1]); + assert s[|s| - 1] != '=' by { reveal IsBase64Char(); } + reveal Is1Padding(); + reveal Is2Padding(); + } + + lemma EncodeUnpaddedBounds(b: seq) + requires |b| % 3 == 0 + ensures |EncodeUnpadded(b)| == |b| / 3 * 4 + ensures |EncodeUnpadded(b)| % 4 == 0 + { + reveal EncodeUnpadded(); + EncodeRecursivelyBounds(b); + } + + lemma EncodeUnpaddedBase64(b: seq) + requires |b| % 3 == 0 + ensures IsUnpaddedBase64String(EncodeUnpadded(b)) + { + reveal EncodeUnpadded(); + EncodeRecursivelyBounds(b); + reveal IsUnpaddedBase64String(); + } + + lemma EncodeDecodeUnpadded(b: seq) + requires |b| % 3 == 0 + ensures (EncodeUnpaddedBounds(b); EncodeUnpaddedBase64(b); DecodeUnpadded(EncodeUnpadded(b)) == b) + { + EncodeUnpaddedBase64(b); + calc { + DecodeUnpadded(EncodeUnpadded(b)); + == { reveal EncodeUnpadded(); } + DecodeUnpadded(FromIndicesToChars(EncodeRecursively(b))); + == { reveal DecodeUnpadded(); EncodeRecursivelyBounds(b); } + DecodeRecursively(FromCharsToIndices(FromIndicesToChars(EncodeRecursively(b)))); + == { FromIndicesToCharsToIndices(EncodeRecursively(b)); } + DecodeRecursively(EncodeRecursively(b)); + == { EncodeDecodeRecursively(b); } + b; + } + } + + lemma DecodeEncodeUnpadded(s: seq) + requires |s| % 4 == 0 + requires IsUnpaddedBase64String(s) + ensures (DecodeUnpaddedBounds(s); EncodeUnpadded(DecodeUnpadded(s)) == s) + { + DecodeUnpaddedBounds(s); + reveal IsUnpaddedBase64String(); + var fromCharsToIndicesS := FromCharsToIndices(s); + calc { + EncodeUnpadded(DecodeUnpadded(s)); + == { reveal DecodeUnpadded(); } + EncodeUnpadded(DecodeRecursively(FromCharsToIndices(s))); + == + EncodeUnpadded(DecodeRecursively(fromCharsToIndicesS)); + == { reveal EncodeUnpadded(); } + assert |fromCharsToIndicesS| % 4 == 0; + FromIndicesToChars(EncodeRecursively(DecodeRecursively(fromCharsToIndicesS))); + == { DecodeEncodeRecursively(fromCharsToIndicesS); } + FromIndicesToChars(fromCharsToIndicesS); + == + FromIndicesToChars(FromCharsToIndices(s)); + == { FromCharsToIndicesToChars(s); } + s; + } + } + + opaque predicate Is1Padding(s: seq) { + |s| == 4 && + IsBase64Char(s[0]) && + IsBase64Char(s[1]) && + IsBase64Char(s[2]) && + // This is a result of the padded 0's in the sextet in the final element before the = + CharToIndex(s[2]) & 0x3 == 0 && // Using & instead of % here makes the BV proofs easier + s[3] == '=' + } + + opaque function Decode1Padding(s: seq): (b: seq) + requires Is1Padding(s) + // Padding with 1 = implies the sequence represents 2 bytes + ensures |b| == 2 + { + reveal Is1Padding(); + var d := DecodeBlock([CharToIndex(s[0]), CharToIndex(s[1]), CharToIndex(s[2]), 0]); + [d[0], d[1]] + } + + opaque function Encode1Padding(b: seq): (s: seq) + requires |b| == 2 + ensures |s| % 4 == 0 + ensures |s| == 4 + { + // 0 is used to ensure that the final element doesn't affect the EncodeBlock conversion for b + var e := EncodeBlock([b[0], b[1], 0]); + IndexToCharIsBase64(e[0]); + IndexToCharIsBase64(e[1]); + IndexToCharIsBase64(e[2]); + 
[IndexToChar(e[0]), IndexToChar(e[1]), IndexToChar(e[2]), '='] + } + + lemma EncodeDecodeBlock1Padding(b: seq) + requires |b| == 2 + ensures + var e := EncodeBlock([b[0], b[1], 0]); + var d := DecodeBlock([e[0], e[1], e[2], 0]); + [d[0], d[1]] == b + { + reveal EncodeBlock(); + reveal DecodeBlock(); + reveal BV24ToSeq(); + reveal SeqToBV24(); + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + + lemma Encode1PaddingIs1Padding(b: seq) + requires |b| == 2 + ensures Is1Padding(Encode1Padding(b)) + { + // TODO: reduce resource use, brittleness + var s := Encode1Padding(b); + var e := EncodeBlock([b[0], b[1], 0]); + assert s == [IndexToChar(e[0]), IndexToChar(e[1]), IndexToChar(e[2]), '='] by { + reveal Encode1Padding(); + } + IndexToCharIsBase64(e[0]); + IndexToCharIsBase64(e[1]); + IndexToCharIsBase64(e[2]); + assert CharToIndex(s[2]) & 0x3 == 0 by { + // TODO: simplify + reveal Encode1Padding(); + reveal EncodeBlock(); + reveal IndexToChar(); + reveal CharToIndex(); + reveal BV24ToIndexSeq(); + reveal SeqToBV24(); + } + assert Is1Padding(s) by { + reveal Is1Padding(); + } + } + + lemma EncodeDecode1Padding(b: seq) + requires |b| == 2 + ensures (Encode1PaddingIs1Padding(b); Decode1Padding(Encode1Padding(b)) == b) + { + Encode1PaddingIs1Padding(b); + var e := EncodeBlock([b[0], b[1], 0]); + var s := [CharToIndex(IndexToChar(e[0])), CharToIndex(IndexToChar(e[1])), CharToIndex(IndexToChar(e[2])), 0]; + var s' := [e[0], e[1], e[2], 0]; + var d := DecodeBlock(s); + var d' := DecodeBlock(s'); + calc { + Decode1Padding(Encode1Padding(b)); + == { reveal Encode1Padding(); } + Decode1Padding([IndexToChar(e[0]), IndexToChar(e[1]), IndexToChar(e[2]), '=']); + == { reveal Decode1Padding(); } + [d[0], d[1]]; + == { IndexToCharToIndex(e[0]); IndexToCharToIndex(e[1]); IndexToCharToIndex(e[2]); } + [d'[0], d'[1]]; + == { EncodeDecodeBlock1Padding(b); } + b; + } + } + + lemma {:vcs_split_on_every_assert} DecodeEncode1Padding(s: seq) + requires Is1Padding(s) + ensures Encode1Padding(Decode1Padding(s)) == s + { + reveal Is1Padding(); + var i := [CharToIndex(s[0]), CharToIndex(s[1]), CharToIndex(s[2]), 0]; + var d := DecodeBlock(i); + var e := EncodeBlock([d[0], d[1], 0]); + var d' := [IndexToChar(e[0]), IndexToChar(e[1]), IndexToChar(e[2]), '=']; + calc { + Encode1Padding(Decode1Padding(s)); + == { reveal Decode1Padding(); } + Encode1Padding([d[0], d[1]]); + == { reveal Encode1Padding(); } + d'; + == { + // This argument is easiest to make by just automating it + // However, the mix between % and & in padding constraints + // makes it a little difficult + reveal EncodeBlock(); + reveal DecodeBlock(); + reveal BV24ToSeq(); + reveal SeqToBV24(); + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + [IndexToChar(CharToIndex(s[0])), IndexToChar(CharToIndex(s[1])), IndexToChar(CharToIndex(s[2])), '=']; + == { CharToIndexToChar(s[0]); CharToIndexToChar(s[1]); CharToIndexToChar(s[2]); } + s; + } + } + + opaque predicate Is2Padding(s: seq) { + |s| == 4 && + IsBase64Char(s[0]) && + IsBase64Char(s[1]) && + // This is a result of the padded 0's in the sextet in the final element before the two = + CharToIndex(s[1]) % 0x10 == 0 && + s[2] == '=' && + s[3] == '=' + } + + opaque function Decode2Padding(s: seq): (b: seq) + requires Is2Padding(s) + // Padding with 2 = implies the sequence represents 1 byte + ensures |b| == 1 + { + reveal Is2Padding(); + var d := DecodeBlock([CharToIndex(s[0]), CharToIndex(s[1]), 0, 0]); + [d[0]] + } + + opaque function Encode2Padding(b: seq): (s: seq) + // Padding with 2 = implies 
the sequence represents 1 bytes + requires |b| == 1 + ensures |s| % 4 == 0 + ensures |s| == 4 + { + // 0 is used to ensure that the final two elements don't affect the EncodeBlock conversion for b + var e := EncodeBlock([b[0], 0, 0]); + IndexToCharIsBase64(e[0]); + IndexToCharIsBase64(e[1]); + [IndexToChar(e[0]), IndexToChar(e[1]), '=', '='] + } + + lemma Encode2PaddingIs2Padding(b: seq) + requires |b| == 1 + ensures Is2Padding(Encode2Padding(b)) + { + reveal IndexToChar(); + reveal Is2Padding(); + reveal CharToIndex(); + reveal Encode2Padding(); + reveal EncodeBlock(); + reveal BV24ToSeq(); + reveal SeqToBV24(); + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + reveal IsBase64Char(); + } + + lemma DecodeEncodeBlock2Padding(b: seq) + requires |b| == 1 + ensures + var e := EncodeBlock([b[0], 0, 0]); + var d := DecodeBlock([e[0], e[1], 0, 0]); + [d[0]] == b + { + reveal EncodeBlock(); + reveal DecodeBlock(); + reveal BV24ToSeq(); + reveal SeqToBV24(); + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + + lemma EncodeDecode2Padding(b: seq) + requires |b| == 1 + ensures (Encode2PaddingIs2Padding(b); Decode2Padding(Encode2Padding(b)) == b) + { + Encode2PaddingIs2Padding(b); + var e := EncodeBlock([b[0], 0, 0]); + calc { + Decode2Padding(Encode2Padding(b)); + == { reveal Encode2Padding(); } + Decode2Padding([IndexToChar(e[0]), IndexToChar(e[1]), '=', '=']); + == { reveal Decode2Padding(); } + [DecodeBlock([CharToIndex(IndexToChar(e[0])), CharToIndex(IndexToChar(e[1])), 0, 0])[0]]; + == { IndexToCharToIndex(e[0]); IndexToCharToIndex(e[1]); } + [DecodeBlock([e[0], e[1], 0, 0])[0]]; + == { DecodeEncodeBlock2Padding(b); } + b; + } + } + + lemma DecodeEncode2Padding(s: seq) + requires Is2Padding(s) + ensures Encode2Padding(Decode2Padding(s)) == s + { + reveal Is2Padding(); + var i := [CharToIndex(s[0]), CharToIndex(s[1]), 0, 0]; + var d := DecodeBlock(i); + var e := EncodeBlock([d[0], 0, 0]); + var d' := [IndexToChar(e[0]), IndexToChar(e[1]), '=', '=']; + calc { + Encode2Padding(Decode2Padding(s)); + == { reveal Decode2Padding(); } + Encode2Padding([d[0]]); + == { reveal Encode2Padding(); } + d'; + == { + // This argument is easiest to make by just automating it + reveal EncodeBlock(); + reveal DecodeBlock(); + reveal BV24ToSeq(); + reveal SeqToBV24(); + reveal IndexSeqToBV24(); + reveal BV24ToIndexSeq(); + } + [IndexToChar(CharToIndex(s[0])), IndexToChar(CharToIndex(s[1])), '=', '=']; + == { CharToIndexToChar(s[0]); CharToIndexToChar(s[1]); } + s; + } + } + + opaque predicate IsBase64String(s: string) { + // All Base64 strings are unpadded until the final block of 4 elements, where a padded seq could exist + reveal IsUnpaddedBase64String(); + reveal Is2Padding(); + var finalBlockStart := |s| - 4; + (|s| % 4 == 0) && + (IsUnpaddedBase64String(s) || + (IsUnpaddedBase64String(s[..finalBlockStart]) && (Is1Padding(s[finalBlockStart..]) || Is2Padding(s[finalBlockStart..])))) + } + + opaque function DecodeValid(s: seq): (b: seq) + requires IsBase64String(s) + { + reveal IsUnpaddedBase64String(); + reveal IsBase64String(); + if s == [] then [] else + var finalBlockStart := |s| - 4; + var prefix, suffix := s[..finalBlockStart], s[finalBlockStart..]; + if Is1Padding(suffix) then + DecodeUnpadded(prefix) + Decode1Padding(suffix) + else if Is2Padding(suffix) then + DecodeUnpadded(prefix) + Decode2Padding(suffix) + else + DecodeUnpadded(s) + } + + lemma AboutDecodeValid(s: seq, b: seq) + requires IsBase64String(s) && b == DecodeValid(s) + ensures 4 <= |s| ==> var finalBlockStart := |s| - 4; + var 
prefix, suffix := s[..finalBlockStart], s[finalBlockStart..]; + && (Is1Padding(suffix) && IsUnpaddedBase64String(prefix) <==> (|b| % 3 == 2 && |b| > 1)) + && (Is2Padding(suffix) && IsUnpaddedBase64String(prefix) <==> (|b| % 3 == 1 && |b| > 0)) + && (!Is1Padding(suffix) && !Is2Padding(suffix) && IsUnpaddedBase64String(s) <==> (|b| % 3 == 0 && |b| > 1)) + { + reveal DecodeValid(); + reveal IsUnpaddedBase64String(); + reveal IsBase64String(); + + if 4 <= |s| { + var finalBlockStart := |s| - 4; + var prefix, suffix := s[..finalBlockStart], s[finalBlockStart..]; + + if s == [] { + } else if Is1Padding(suffix) { + assert !Is2Padding(suffix) by { + reveal IsBase64Char(); + reveal Is1Padding(); + reveal Is2Padding(); + } + var x, y := DecodeUnpadded(prefix), Decode1Padding(suffix); + assert b == x + y; + assert |x| == |x| / 3 * 3 && |y| == 2 && |b| > 1 by { + DecodeUnpaddedBounds(prefix); + } + Mod3(|x| / 3, |y|, |b|); + } else if Is2Padding(suffix) { + var x, y := DecodeUnpadded(prefix), Decode2Padding(suffix); + assert b == x + y; + assert |x| == |x| / 3 * 3 && |y| == 1 && |b| > 0 by { + DecodeUnpaddedBounds(prefix); + } + Mod3(|x| / 3, |y|, |b|); + } else { + assert b == DecodeUnpadded(s); + assert |b| % 3 == 0 && |b| > 1 by { + DecodeUnpaddedBounds(s); + } + } + } + } + + lemma Mod3(x: nat, k: nat, n: nat) + requires k < 3 && n == 3 * x + k + ensures n % 3 == k + { + } + + opaque function DecodeBV(s: seq): (b: Result, string>) + ensures IsBase64String(s) ==> b.Success? // Hard to use without this + { + if IsBase64String(s) then Success(DecodeValid(s)) else Failure("The encoding is malformed") + } + + lemma DecodeBVFailure(s: seq) + ensures !IsBase64String(s) ==> DecodeBV(s).Failure? + { + reveal DecodeBV(); + } + + opaque ghost predicate StringIs7Bit(s: string) { + forall c :: c in s ==> c < 128 as char + } + + lemma UnpaddedBase64StringIs7Bit(s: string) + requires IsUnpaddedBase64String(s) + ensures StringIs7Bit(s) + { + reveal IsUnpaddedBase64String(); + reveal IsBase64Char(); + reveal StringIs7Bit(); + } + + lemma Is7Bit1Padding(s: string) + requires Is1Padding(s) + ensures StringIs7Bit(s) + { + reveal IsBase64Char(); + reveal Is1Padding(); + reveal StringIs7Bit(); + } + + lemma Is7Bit2Padding(s: string) + requires Is2Padding(s) + ensures StringIs7Bit(s) + { + reveal IsBase64Char(); + reveal Is2Padding(); + reveal StringIs7Bit(); + } + + opaque function EncodeBV(b: seq): (s: seq) + // Rather than ensure Decode(s) == Success(b) directly, lemmas are used to verify this property + { + if |b| % 3 == 0 then + EncodeUnpaddedBounds(b); + EncodeUnpadded(b) + else if |b| % 3 == 1 then + assert |b| >= 1; + EncodeUnpaddedBounds(b[..(|b| - 1)]); + var s1, s2 := EncodeUnpadded(b[..(|b| - 1)]), Encode2Padding(b[(|b| - 1)..]); + s1 + s2 + else + assert |b| % 3 == 2; + assert |b| >= 2; + EncodeUnpaddedBounds(b[..(|b| - 2)]); + var s1, s2 := EncodeUnpadded(b[..(|b| - 2)]), Encode1Padding(b[(|b| - 2)..]); + s1 + s2 + } + + lemma EncodeBVIsUnpadded(b: seq) + requires |b| % 3 == 0 + ensures EncodeBV(b) == EncodeUnpadded(b) + { reveal EncodeBV(); } + + lemma EncodeBVIs2Padded(b: seq) + requires |b| % 3 == 1 + ensures EncodeBV(b) == EncodeUnpadded(b[..(|b| - 1)]) + Encode2Padding(b[(|b| - 1)..]) + { reveal EncodeBV(); } + + lemma EncodeBVIs1Padded(b: seq) + requires |b| % 3 == 2 + ensures EncodeBV(b) == EncodeUnpadded(b[..(|b| - 2)]) + Encode1Padding(b[(|b| - 2)..]) + { reveal EncodeBV(); } + + lemma EncodeBVLengthCongruentToZeroMod4(b: seq) + ensures |EncodeBV(b)| % 4 == 0 + { + reveal EncodeBV(); + if |b| % 3 
== 0 { + EncodeUnpaddedBounds(b); + } else if |b| % 3 == 1 { + EncodeUnpaddedBounds(b[..(|b| - 1)]); + } else { + EncodeUnpaddedBounds(b[..(|b| - 2)]); + } + } + + lemma EncodeBVIsBase64(b: seq) + ensures IsBase64String(EncodeBV(b)) + { + reveal EncodeBV(); + reveal IsBase64String(); + EncodeBVLengthExact(b); + if |EncodeBV(b)| < 4 { + reveal IsUnpaddedBase64String(); + } else if |b| % 3 == 0 { + EncodeUnpaddedBase64(b); + } else if |b| % 3 == 1 { + var bStart := b[..(|b| - 1)]; + var bEnd := b[(|b| - 1)..]; + EncodeUnpaddedBase64(bStart); + Encode2PaddingIs2Padding(bEnd); + } else { + var bStart := b[..(|b| - 2)]; + var bEnd := b[(|b| - 2)..]; + EncodeUnpaddedBase64(bStart); + Encode1PaddingIs1Padding(bEnd); + } + } + + lemma EncodeBVLengthExact(b: seq) + ensures var s := EncodeBV(b); + && (|b| % 3 == 0 ==> |s| == |b| / 3 * 4) + && (|b| % 3 != 0 ==> |s| == |b| / 3 * 4 + 4) + { + reveal EncodeBV(); + reveal Is1Padding(); + reveal Is2Padding(); + + var s := EncodeBV(b); + if |b| % 3 == 0 { + assert s == EncodeUnpadded(b); + EncodeUnpaddedBounds(b); + assert |s| == |b| / 3 * 4; + } else if |b| % 3 == 1 { + EncodeUnpaddedBounds(b[..(|b| - 1)]); + assert s == EncodeUnpadded(b[..(|b| - 1)]) + Encode2Padding(b[(|b| - 1)..]); + calc { + |s|; + == + |EncodeUnpadded(b[..(|b| - 1)])| + |Encode2Padding(b[(|b| - 1)..])|; + == { assert |Encode2Padding(b[(|b| - 1)..])| == 4; } + |EncodeUnpadded(b[..(|b| - 1)])| + 4; + == { assert |EncodeUnpadded(b[..(|b| - 1)])| == |b[..(|b| - 1)]| / 3 * 4; } + |b[..(|b| - 1)]| / 3 * 4 + 4; + == { assert |b[..(|b| - 1)]| == |b| - 1; } + (|b| - 1) / 3 * 4 + 4; + == { assert (|b| - 1) / 3 == |b| / 3; } + |b| / 3 * 4 + 4; + } + } else { + EncodeUnpaddedBounds(b[..(|b| - 2)]); + assert s == EncodeUnpadded(b[..(|b| - 2)]) + Encode1Padding(b[(|b| - 2)..]); + Encode1PaddingIs1Padding(b[(|b| - 2)..]); + calc { + |s|; + == + |EncodeUnpadded(b[..(|b| - 2)])| + |Encode1Padding(b[(|b| - 2)..])|; + == { assert |Encode1Padding(b[(|b| - 2)..])| == 4; } + |EncodeUnpadded(b[..(|b| - 2)])| + 4; + == { assert |EncodeUnpadded(b[..(|b| - 2)])| == |b[..(|b| - 2)]| / 3 * 4; } + |b[..(|b| - 2)]| / 3 * 4 + 4; + == { assert |b[..(|b| - 2)]| == |b| - 2; } + (|b| - 2) / 3 * 4 + 4; + == { assert (|b| - 2) / 3 == |b| / 3; } + |b| / 3 * 4 + 4; + } + } + } + + lemma EncodeBVLengthBound(b: seq) + ensures var s := EncodeBV(b); + |s| <= |b| / 3 * 4 + 4 + { + EncodeBVLengthExact(b); + } + + lemma SeqPartsMakeWhole(s: seq, i: nat) + requires i <= |s| + ensures s[..i] + s[i..] 
== s + {} + + lemma DecodeValidEncodeEmpty(s: seq) + requires s == [] + ensures reveal IsUnpaddedBase64String(); reveal IsBase64String(); EncodeBV(DecodeValid(s)) == s + { + assert IsBase64String(s) by { reveal IsBase64String(); reveal IsUnpaddedBase64String(); } + var b := DecodeValid(s); + assert b == [] by { reveal DecodeValid(); } + assert EncodeBV(b) == [] by { + reveal EncodeBV(); + reveal EncodeUnpadded(); + reveal EncodeRecursively(); + reveal FromIndicesToChars(); + } + } + + lemma EncodeDecodeValidEmpty(b: seq) + requires b == [] + ensures (EncodeBVIsBase64(b) ; DecodeValid(EncodeBV(b)) == b) + { + assert EncodeBV(b) == [] by { + reveal EncodeBV(); + reveal EncodeUnpadded(); + reveal EncodeRecursively(); + reveal FromIndicesToChars(); + } + EncodeBVIsBase64(b); + assert DecodeValid([]) == [] by { + reveal DecodeValid(); + } + } + + lemma DecodeValidEncodeUnpadded(s: seq) + requires IsBase64String(s) + requires |s| >= 4 + requires !Is1Padding(s[(|s| - 4)..]) + requires !Is2Padding(s[(|s| - 4)..]) + ensures EncodeBV(DecodeValid(s)) == s + { + reveal EncodeBV(); + reveal DecodeValid(); + reveal IsBase64String(); + DecodeUnpaddedBounds(s); + calc { + EncodeBV(DecodeValid(s)); + == + EncodeBV(DecodeUnpadded(s)); + == + EncodeUnpadded(DecodeUnpadded(s)); + == { DecodeEncodeUnpadded(s); } + s; + } + } + + lemma EncodeDecodeValidUnpadded(b: seq) + requires |b| % 3 == 0 + requires b != [] + ensures + var s := EncodeBV(b); + && IsUnpaddedBase64String(s) + && |s| >= 4 + && !Is1Padding(s[(|s| - 4)..]) + && !Is2Padding(s[(|s| - 4)..]) + && s == EncodeUnpadded(b) + { + EncodeUnpaddedBase64(b); + EncodeUnpaddedBounds(b); + var s := EncodeBV(b); + assert s == EncodeUnpadded(b) by { EncodeBVIsUnpadded(b); } + assert !Is1Padding(s[(|s| - 4)..]) by { EncodeUnpaddedNotPadded(b); } + assert !Is2Padding(s[(|s| - 4)..]) by { EncodeUnpaddedNotPadded(b); } + } + + lemma EncodeDecodeValid2Padded(b: seq) + requires |b| % 3 == 1 + ensures + var s := EncodeBV(b); + && s == (EncodeUnpadded(b[..(|b| - 1)]) + Encode2Padding(b[(|b| - 1)..])) + && Is2Padding(s[(|s| - 4)..]) + { + EncodeUnpaddedBase64(b[..(|b| - 1)]); + EncodeUnpaddedBounds(b[..(|b| - 1)]); + reveal EncodeBV(); + var s := EncodeBV(b); + Encode2PaddingIs2Padding(b[(|b| - 1)..]); + assert Is2Padding(s[(|s| - 4)..]); + } + + lemma EncodeDecodeValid1Padded(b: seq) + requires |b| % 3 == 2 + ensures + var s := EncodeBV(b); + && s == (EncodeUnpadded(b[..(|b| - 2)]) + Encode1Padding(b[(|b| - 2)..])) + && |s| >= 4 + && IsUnpaddedBase64String(s[..(|s| - 4)]) + && Is1Padding(s[(|s| - 4)..]) + { + EncodeUnpaddedBase64(b[..(|b| - 2)]); + EncodeUnpaddedBounds(b[..(|b| - 2)]); + reveal EncodeBV(); + var s := EncodeBV(b); + Encode1PaddingIs1Padding(b[(|b| - 2)..]); + assert Is1Padding(s[(|s| - 4)..]); + } + + + lemma DecodeValidUnpaddedPartialFrom1PaddedSeq(s: seq) + requires IsBase64String(s) + requires |s| >= 4 + requires Is1Padding(s[(|s| - 4)..]) + ensures reveal IsUnpaddedBase64String(); reveal IsBase64String(); reveal DecodeValid(); DecodeValid(s)[..(|DecodeValid(s)| - 2)] == DecodeUnpadded(s[..(|s| - 4)]) + { + reveal IsBase64String(); + reveal DecodeValid(); + } + + lemma DecodeValid1PaddedPartialFrom1PaddedSeq(s: seq) + requires IsBase64String(s) + requires |s| >= 4 + requires Is1Padding(s[(|s| - 4)..]) + ensures reveal DecodeValid(); DecodeValid(s)[(|DecodeValid(s)| - 2)..] 
== Decode1Padding(s[(|s| - 4)..]) + { + reveal DecodeValid(); + } + + lemma DecodeValid1PaddingLengthMod3(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is1Padding(s[(|s| - 4)..]) + ensures |DecodeValid(s)| % 3 == 2 + { + assert IsUnpaddedBase64String(s[..(|s| - 4)]) by { + UnpaddedBase64Prefix(s); + } + AboutDecodeValid(s, DecodeValid(s)); + } + + lemma DecodeValidEncode1Padding(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is1Padding(s[(|s| - 4)..]) + ensures EncodeBV(DecodeValid(s)) == s + { + assert |DecodeValid(s)| % 3 == 2 by { DecodeValid1PaddingLengthMod3(s); } + calc { + EncodeBV(DecodeValid(s)); + == { reveal EncodeBV(); } + EncodeUnpadded(DecodeValid(s)[..(|DecodeValid(s)| - 2)]) + Encode1Padding(DecodeValid(s)[(|DecodeValid(s)| - 2)..]); + == { DecodeValidUnpaddedPartialFrom1PaddedSeq(s); reveal IsBase64String(); reveal IsUnpaddedBase64String(); } + EncodeUnpadded(DecodeUnpadded(s[..(|s| - 4)])) + Encode1Padding(DecodeValid(s)[(|DecodeValid(s)| - 2)..]); + == { reveal IsUnpaddedBase64String(); DecodeEncodeUnpadded(s[..(|s| - 4)]); } + s[..(|s| - 4)] + Encode1Padding(DecodeValid(s)[(|DecodeValid(s)| - 2)..]); + == { DecodeValid1PaddedPartialFrom1PaddedSeq(s); } + s[..(|s| - 4)] + Encode1Padding(Decode1Padding(s[(|s| - 4)..])); + == { DecodeEncode1Padding(s[(|s| - 4)..]); } + s[..(|s| - 4)] + s[(|s| - 4)..]; + == { SeqPartsMakeWhole(s, (|s| - 4)); } + s; + } + } + + lemma DecodeValidPartialsFrom2PaddedSeq(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is2Padding(s[(|s| - 4)..]) + ensures + (reveal IsUnpaddedBase64String(); + reveal DecodeValid(); + reveal IsBase64String(); + var b := DecodeValid(s); + b[..(|b| - 1)] == DecodeUnpadded(s[..(|s| - 4)]) && + b[(|b| - 1)..] == Decode2Padding(s[(|s| - 4)..])) + { + reveal IsUnpaddedBase64String(); + reveal IsBase64String(); + reveal DecodeValid(); + AboutDecodeValid(s, DecodeValid(s)); + assert Is2Padding(s[(|s| - 4)..]); + } + + lemma DecodeValidPartialsFrom1PaddedSeq(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is1Padding(s[(|s| - 4)..]) + ensures + (reveal IsUnpaddedBase64String(); + reveal DecodeValid(); + reveal IsBase64String(); + var b := DecodeValid(s); + b[..(|b| - 2)] == DecodeUnpadded(s[..(|s| - 4)]) && + b[(|b| - 2)..]
== Decode1Padding(s[(|s| - 4)..])) + { + reveal IsUnpaddedBase64String(); + reveal DecodeValid(); + reveal IsBase64String(); + AboutDecodeValid(s, DecodeValid(s)); + } + + lemma UnpaddedBase64Prefix(s: string) + requires IsBase64String(s) + requires |s| >= 4 + ensures IsUnpaddedBase64String(s[..(|s| - 4)]) + { + reveal IsBase64String(); + reveal IsUnpaddedBase64String(); + } + + lemma DecodeValid2PaddingLengthMod3(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is2Padding(s[(|s| - 4)..]) + ensures |DecodeValid(s)| % 3 == 1 + { + assert IsUnpaddedBase64String(s[..(|s| - 4)]) by { + UnpaddedBase64Prefix(s); + } + AboutDecodeValid(s, DecodeValid(s)); + } + + lemma DecodeValidEncode2Padding(s: seq<char>) + requires IsBase64String(s) + requires |s| >= 4 + requires Is2Padding(s[(|s| - 4)..]) + ensures EncodeBV(DecodeValid(s)) == s + { + assert |DecodeValid(s)| % 3 == 1 by { DecodeValid2PaddingLengthMod3(s); } + calc { + EncodeBV(DecodeValid(s)); + == { reveal EncodeBV(); } + EncodeUnpadded(DecodeValid(s)[..(|DecodeValid(s)| - 1)]) + Encode2Padding(DecodeValid(s)[(|DecodeValid(s)| - 1)..]); + == { DecodeValidPartialsFrom2PaddedSeq(s); reveal IsUnpaddedBase64String(); reveal IsBase64String(); } + EncodeUnpadded(DecodeUnpadded(s[..(|s| - 4)])) + Encode2Padding(DecodeValid(s)[(|DecodeValid(s)| - 1)..]); + == { reveal IsBase64String(); DecodeEncodeUnpadded(s[..(|s| - 4)]); } + s[..(|s| - 4)] + Encode2Padding(DecodeValid(s)[(|DecodeValid(s)| - 1)..]); + == { DecodeValidPartialsFrom2PaddedSeq(s); } + s[..(|s| - 4)] + Encode2Padding(Decode2Padding(s[(|s| - 4)..])); + == { DecodeEncode2Padding(s[(|s| - 4)..]); } + s[..(|s| - 4)] + s[(|s| - 4)..]; + == { SeqPartsMakeWhole(s, (|s| - 4)); } + s; + } + } + + lemma DecodeValidEncode(s: seq<char>) + requires IsBase64String(s) + ensures EncodeBV(DecodeValid(s)) == s + { + reveal IsBase64String(); + if s == [] { + calc { + EncodeBV(DecodeValid(s)); + == { DecodeValidEncodeEmpty(s); } + s; + } + } else if |s| >= 4 && Is1Padding(s[(|s| - 4)..]) { + calc { + EncodeBV(DecodeValid(s)); + == { DecodeValidEncode1Padding(s); } + s; + } + } else if |s| >= 4 && Is2Padding(s[(|s| - 4)..]) { + calc { + EncodeBV(DecodeValid(s)); + == { DecodeValidEncode2Padding(s); } + s; + } + } else { + calc { + EncodeBV(DecodeValid(s)); + == { DecodeValidEncodeUnpadded(s); } + s; + } + } + } + + lemma EncodeDecodeValid(b: seq<bv8>) + ensures (EncodeBVIsBase64(b); DecodeValid(EncodeBV(b)) == b) + { + EncodeBVIsBase64(b); + var s := EncodeBV(b); + if b == [] { + calc { + DecodeValid(EncodeBV(b)); + == { EncodeDecodeValidEmpty(b); } + b; + } + } else if |b| % 3 == 0 { + calc { + DecodeValid(EncodeBV(b)); + == { EncodeBVIsUnpadded(b); } + DecodeValid(EncodeUnpadded(b)); + == { EncodeDecodeValidUnpadded(b); reveal DecodeValid(); } + DecodeUnpadded(EncodeUnpadded(b)); + == { EncodeDecodeUnpadded(b); } + b; + } + } else if |b| % 3 == 1 { + EncodeDecodeValid2Padded(b); + var prefix := b[..(|b| - 1)]; + var suffix := b[(|b| - 1)..]; + EncodeUnpaddedBase64(prefix); + + calc { + DecodeValid(EncodeBV(b)); + == { reveal EncodeBV(); } + DecodeValid(EncodeUnpadded(prefix) + Encode2Padding(suffix)); + == { reveal DecodeValid(); DecodeValidPartialsFrom2PaddedSeq(s); } + DecodeUnpadded(EncodeUnpadded(prefix)) + Decode2Padding(Encode2Padding(suffix)); + == { EncodeDecodeUnpadded(prefix); EncodeDecode2Padding(suffix); } + prefix + suffix; + == + b; + } + } else if |b| % 3 == 2 { + EncodeDecodeValid1Padded(b); + var prefix := b[..(|b| - 2)]; + var suffix := b[(|b| - 2)..]; + EncodeUnpaddedBase64(prefix); +
+ calc { + DecodeValid(EncodeBV(b)); + == { reveal EncodeBV(); } + DecodeValid(EncodeUnpadded(prefix) + Encode1Padding(suffix)); + == { reveal DecodeValid(); DecodeValidPartialsFrom1PaddedSeq(s); } + DecodeUnpadded(EncodeUnpadded(prefix)) + Decode1Padding(Encode1Padding(suffix)); + == { EncodeDecodeUnpadded(prefix); EncodeDecode1Padding(suffix); } + prefix + suffix; + == + b; + } + } + } + + lemma DecodeEncodeBV(s: seq<char>) + requires IsBase64String(s) + ensures EncodeBV(DecodeBV(s).value) == s + { + reveal DecodeBV(); + calc { + EncodeBV(DecodeBV(s).value); + == { DecodeValidEncode(s); } + s; + } + } + + lemma EncodeDecodeBV(b: seq<bv8>) + ensures DecodeBV(EncodeBV(b)) == Success(b) + { + reveal DecodeBV(); + EncodeBVIsBase64(b); + calc { + DecodeBV(EncodeBV(b)); + == { assert IsBase64String(EncodeBV(b)); } + Success(DecodeValid(EncodeBV(b))); + == { EncodeDecodeValid(b); } + Success(b); + } + } + + opaque function UInt8sToBVs(u: seq<uint8>): (r: seq<bv8>) + ensures |r| == |u| + ensures forall i :: 0 <= i < |u| ==> r[i] == u[i] as bv8 + { + seq(|u|, i requires 0 <= i < |u| => u[i] as bv8) + } + + opaque function BVsToUInt8s(b: seq<bv8>): (r: seq<uint8>) + ensures |r| == |b| + ensures forall i :: 0 <= i < |b| ==> r[i] == b[i] as uint8 + { + seq(|b|, i requires 0 <= i < |b| => b[i] as uint8) + } + + lemma UInt8sToBVsToUInt8s(u: seq<uint8>) + ensures BVsToUInt8s(UInt8sToBVs(u)) == u + { + // TODO: reduce resource use + var b := UInt8sToBVs(u); + assert |b| == |u|; + var u' := BVsToUInt8s(b); + assert |u'| == |b|; + } + + lemma BVsToUInt8sToBVs(b: seq<bv8>) + ensures UInt8sToBVs(BVsToUInt8s(b)) == b + { + var u := BVsToUInt8s(b); + assert |b| == |u|; + var b' := UInt8sToBVs(u); + assert |b'| == |u|; + } + + opaque function Encode(u: seq<uint8>): seq<char> { + EncodeBV(UInt8sToBVs(u)) + } + + opaque function Decode(s: seq<char>): (b: Result<seq<uint8>, string>) + ensures IsBase64String(s) ==> b.Success? // Hard to use without this + { + if IsBase64String(s) + then + var b := DecodeValid(s); + Success(BVsToUInt8s(b)) + else Failure("The encoding is malformed") + } + + lemma EncodeDecode(b: seq<uint8>) + ensures Decode(Encode(b)) == Success(b) + { + var bvs := UInt8sToBVs(b); + var s := EncodeBV(bvs); + assert Encode(b) == s by { reveal Encode(); } + assert IsBase64String(s) by { EncodeBVIsBase64(bvs); } + var b' := DecodeValid(s); + assert b' == bvs by { EncodeDecodeValid(bvs); } + var us := BVsToUInt8s(b'); + assert Decode(s) == Success(us) by { + reveal Decode(); + } + assert b' == bvs; + assert b == us by { UInt8sToBVsToUInt8s(b); } + } + + lemma DecodeEncode(s: seq<char>) + requires IsBase64String(s) + ensures Encode(Decode(s).value) == s + { + var b := DecodeValid(s); + var u := BVsToUInt8s(b); + assert Decode(s) == Success(u) by { reveal Decode(); } + var s' := EncodeBV(UInt8sToBVs(u)); + assert s' == Encode(u) by { reveal Encode(); } + assert UInt8sToBVs(BVsToUInt8s(b)) == b by { + BVsToUInt8sToBVs(b); + } + assert s == s' by { + DecodeValidEncode(s); + } + } +} diff --git a/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.md b/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.md new file mode 100644 index 00000000000..090dc49aadb --- /dev/null +++ b/Source/DafnyStandardLibraries/src/DafnyStdLibs/Base64.md @@ -0,0 +1,7 @@ +## The `Base64` module + +The `DafnyStdLibs.Base64` module contains an encoder and decoder for Base64, which allows arbitrary byte sequences to be represented in sequences of characters drawn entirely from 7-bit ASCII. The encoder and decoder have two interfaces: one uses `uint8` to represent arbitrary bytes, and the other uses `bv8`. The encoder and decoder are proved to be inverses of each other for both interfaces. + +The `uint8`-based interface consists of the `Encode` and `Decode` functions along with the `EncodeDecode` and `DecodeEncode` lemmas. The `bv8`-based interface consists of the `EncodeBV` and `DecodeBV` functions along with the `EncodeDecodeBV` and `DecodeEncodeBV` lemmas. The `uint8`-based interface is a wrapper on top of the `bv8`-based interface, because Base64 encoding is more straightforward to specify and verify using bit vectors. + +The implementation currently uses a recursive approach with sequence concatenation, which limits its performance on large sequences. A more efficient implementation will, however, be possible without changing the public interface.
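For illustration, a round trip through the `uint8` interface might be verified as in the following minimal sketch. It assumes the `Base64` module is opened along with the standard library's `Wrappers` result type and `uint8` definition; the method name and byte values are hypothetical, not part of the module.

```dafny
method Base64RoundTripExample() {
  var bytes: seq<uint8> := [0x44, 0x61, 0x66, 0x6E, 0x79];
  var text: seq<char> := Encode(bytes);   // Base64 text, drawn from 7-bit ASCII
  EncodeDecode(bytes);                    // invoke the round-trip lemma for this input
  assert Decode(text) == Success(bytes);  // follows from the lemma's postcondition
}
```

The `bv8`-based interface is used the same way, with `EncodeBV`, `DecodeBV`, and `EncodeDecodeBV` in place of the `uint8` names.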
diff --git a/Source/DafnyTestGeneration.Test/ShortCircuitRemoval.cs b/Source/DafnyTestGeneration.Test/ShortCircuitRemoval.cs index a0b8cd3790e..9ee5ad8db65 100644 --- a/Source/DafnyTestGeneration.Test/ShortCircuitRemoval.cs +++ b/Source/DafnyTestGeneration.Test/ShortCircuitRemoval.cs @@ -13,7 +13,7 @@ using Xunit; using Xunit.Abstractions; -namespace DafnyTestGeneration.Test; +namespace DafnyTestGeneration.Test; public class ShortCircuitRemoval : Setup { diff --git a/Source/DafnyTestGeneration/Inlining/FunctionCallToMethodCallRewriter.cs b/Source/DafnyTestGeneration/Inlining/FunctionCallToMethodCallRewriter.cs index cdad5c293a4..3a42c113966 100644 --- a/Source/DafnyTestGeneration/Inlining/FunctionCallToMethodCallRewriter.cs +++ b/Source/DafnyTestGeneration/Inlining/FunctionCallToMethodCallRewriter.cs @@ -11,7 +11,7 @@ using Program = Microsoft.Dafny.Program; -namespace DafnyTestGeneration.Inlining; +namespace DafnyTestGeneration.Inlining; /// /// Change by-method function calls to method calls (after resolution) diff --git a/Source/DafnyTestGeneration/Inlining/InliningTranslator.cs b/Source/DafnyTestGeneration/Inlining/InliningTranslator.cs index deba96032a4..9be33d91d8b 100644 --- a/Source/DafnyTestGeneration/Inlining/InliningTranslator.cs +++ b/Source/DafnyTestGeneration/Inlining/InliningTranslator.cs @@ -8,7 +8,7 @@ using Microsoft.Dafny; using Program = Microsoft.Dafny.Program; -namespace DafnyTestGeneration.Inlining; +namespace DafnyTestGeneration.Inlining; public static class InliningTranslator { private static bool ShouldProcessForInlining(MemberDecl memberDecl) { diff --git a/Source/XUnitExtensions/Lit/NonUniformTestCommand.cs b/Source/XUnitExtensions/Lit/NonUniformTestCommand.cs index 70e68419d84..59968fb3449 100644 --- a/Source/XUnitExtensions/Lit/NonUniformTestCommand.cs +++ b/Source/XUnitExtensions/Lit/NonUniformTestCommand.cs @@ -1,7 +1,7 @@ using System; using System.IO; -namespace XUnitExtensions.Lit; +namespace XUnitExtensions.Lit; public class NonUniformTestCommand : ILitCommand { private NonUniformTestCommand(string reason) { diff --git a/Source/XUnitExtensions/Lit/OutputCheckCommand.cs b/Source/XUnitExtensions/Lit/OutputCheckCommand.cs index 9eed590c2ed..7b2f44f951f 100644 --- a/Source/XUnitExtensions/Lit/OutputCheckCommand.cs +++ b/Source/XUnitExtensions/Lit/OutputCheckCommand.cs @@ -21,237 +21,237 @@ public readonly record struct OutputCheckCommand(OutputCheckOptions options) : I public abstract record CheckDirective(string File, int LineNumber) { - private static readonly Dictionary> DirectiveParsers = new(); - static CheckDirective() { - DirectiveParsers.Add("CHECK:", CheckRegexp.Parse); - DirectiveParsers.Add("CHECK-L:", CheckLiteral.Parse); - DirectiveParsers.Add("CHECK-NEXT:", CheckNextRegexp.Parse); -
DirectiveParsers.Add("CHECK-NEXT-L:", CheckNextLiteral.Parse); - DirectiveParsers.Add("CHECK-NOT:", CheckNotRegexp.Parse); - DirectiveParsers.Add("CHECK-NOT-L:", CheckNotLiteral.Parse); - } + private static readonly Dictionary> DirectiveParsers = new(); + static CheckDirective() { + DirectiveParsers.Add("CHECK:", CheckRegexp.Parse); + DirectiveParsers.Add("CHECK-L:", CheckLiteral.Parse); + DirectiveParsers.Add("CHECK-NEXT:", CheckNextRegexp.Parse); + DirectiveParsers.Add("CHECK-NEXT-L:", CheckNextLiteral.Parse); + DirectiveParsers.Add("CHECK-NOT:", CheckNotRegexp.Parse); + DirectiveParsers.Add("CHECK-NOT-L:", CheckNotLiteral.Parse); + } - public static CheckDirective? Parse(string file, int lineNumber, string line) { - foreach (var (keyword, parser) in DirectiveParsers) { - var index = line.IndexOf(keyword); - if (index >= 0) { - var arguments = line[(index + keyword.Length)..].Trim(); - return parser(file, lineNumber, arguments); + public static CheckDirective? Parse(string file, int lineNumber, string line) { + foreach (var (keyword, parser) in DirectiveParsers) { + var index = line.IndexOf(keyword); + if (index >= 0) { + var arguments = line[(index + keyword.Length)..].Trim(); + return parser(file, lineNumber, arguments); + } } + return null; } - return null; - } - - public abstract bool FindMatch(IEnumerator lines); - } - private record CheckRegexp(string File, int LineNumber, Regex Pattern) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckRegexp(file, lineNumber, new Regex(arguments)); + public abstract bool FindMatch(IEnumerator lines); } - public override bool FindMatch(IEnumerator lines) { - while (lines.MoveNext()) { - if (Pattern.IsMatch(lines.Current)) { - return true; - } + private record CheckRegexp(string File, int LineNumber, Regex Pattern) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckRegexp(file, lineNumber, new Regex(arguments)); } - return false; - } - public override string ToString() { - return $"Check Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; - } - } + public override bool FindMatch(IEnumerator lines) { + while (lines.MoveNext()) { + if (Pattern.IsMatch(lines.Current)) { + return true; + } + } + return false; + } - private record CheckNextRegexp(string File, int LineNumber, Regex Pattern) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckNextRegexp(file, lineNumber, new Regex(arguments)); + public override string ToString() { + return $"Check Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; + } } - public override bool FindMatch(IEnumerator lines) { - if (!lines.MoveNext()) { - throw new Exception("No more lines to match against"); + private record CheckNextRegexp(string File, int LineNumber, Regex Pattern) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckNextRegexp(file, lineNumber, new Regex(arguments)); } - return Pattern.IsMatch(lines.Current); - } + public override bool FindMatch(IEnumerator lines) { + if (!lines.MoveNext()) { + throw new Exception("No more lines to match against"); + } - public override string ToString() { - return $"CheckNext Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; - } - } + return Pattern.IsMatch(lines.Current); + } - private 
record CheckLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckLiteral(file, lineNumber, arguments); + public override string ToString() { + return $"CheckNext Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; + } } - public override bool FindMatch(IEnumerator lines) { - while (lines.MoveNext()) { - if (Literal == lines.Current.Trim()) { - return true; + private record CheckLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckLiteral(file, lineNumber, arguments); + } + + public override bool FindMatch(IEnumerator lines) { + while (lines.MoveNext()) { + if (Literal == lines.Current.Trim()) { + return true; + } } + return false; } - return false; - } - public override string ToString() { - return $"CheckLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; + public override string ToString() { + return $"CheckLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; + } } - } - private record CheckNextLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckNextLiteral(file, lineNumber, arguments); - } + private record CheckNextLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckNextLiteral(file, lineNumber, arguments); + } + + public override bool FindMatch(IEnumerator lines) { + if (!lines.MoveNext()) { + throw new Exception("No more lines to match against"); + } - public override bool FindMatch(IEnumerator lines) { - if (!lines.MoveNext()) { - throw new Exception("No more lines to match against"); + return Literal == lines.Current.Trim(); } - return Literal == lines.Current.Trim(); + public override string ToString() { + return $"CheckNextLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; + } } - public override string ToString() { - return $"CheckNextLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; - } - } + private class CheckingEnumerator : IEnumerator { + private readonly IEnumerator Wrapped; + private readonly Action Check; - private class CheckingEnumerator : IEnumerator { - private readonly IEnumerator Wrapped; - private readonly Action Check; + public CheckingEnumerator(IEnumerator wrapped, Action check) { + Wrapped = wrapped; + Check = check; + } - public CheckingEnumerator(IEnumerator wrapped, Action check) { - Wrapped = wrapped; - Check = check; - } + public bool MoveNext() { + var result = Wrapped.MoveNext(); + if (result) { + Check.Invoke(Wrapped.Current); + } + return result; + } - public bool MoveNext() { - var result = Wrapped.MoveNext(); - if (result) { - Check.Invoke(Wrapped.Current); + public void Reset() { + throw new NotImplementedException(); } - return result; - } - public void Reset() { - throw new NotImplementedException(); - } + object IEnumerator.Current => Current; - object IEnumerator.Current => Current; + public string Current => Wrapped.Current; - public string Current => Wrapped.Current; + public void Dispose() => Wrapped.Dispose(); + } - public void Dispose() => Wrapped.Dispose(); - } + private record CheckNotRegexp(string File, int LineNumber, 
Regex Pattern) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckNotRegexp(file, lineNumber, new Regex(arguments)); + } - private record CheckNotRegexp(string File, int LineNumber, Regex Pattern) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckNotRegexp(file, lineNumber, new Regex(arguments)); - } + public override bool FindMatch(IEnumerator lines) { + // This directive is tested using a CheckingEnumerator instead. + throw new NotImplementedException(); + } - public override bool FindMatch(IEnumerator lines) { - // This directive is tested using a CheckingEnumerator instead. - throw new NotImplementedException(); + public override string ToString() { + return $"CheckNot Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; + } } - public override string ToString() { - return $"CheckNot Directive ({File}:{LineNumber} Pattern: '{Pattern}')"; - } - } + private record CheckNotLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { + public new static CheckDirective Parse(string file, int lineNumber, string arguments) { + return new CheckNotLiteral(file, lineNumber, arguments); + } - private record CheckNotLiteral(string File, int LineNumber, string Literal) : CheckDirective(File, LineNumber) { - public new static CheckDirective Parse(string file, int lineNumber, string arguments) { - return new CheckNotLiteral(file, lineNumber, arguments); - } + public override bool FindMatch(IEnumerator lines) { + // This directive is tested using a CheckingEnumerator instead. + throw new NotImplementedException(); + } - public override bool FindMatch(IEnumerator lines) { - // This directive is tested using a CheckingEnumerator instead. - throw new NotImplementedException(); + public override string ToString() { + return $"CheckNotLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; + } } - public override string ToString() { - return $"CheckNotLiteral Directive ({File}:{LineNumber} Literal: '{Literal}')"; + public static ILitCommand Parse(IEnumerable args, LitTestConfiguration config) { + ILitCommand? result = null; + Parser.Default.ParseArguments(args).WithParsed(o => { + result = new OutputCheckCommand(o); + }); + return result!; } - } - public static ILitCommand Parse(IEnumerable args, LitTestConfiguration config) { - ILitCommand? 
result = null; - Parser.Default.ParseArguments(args).WithParsed(o => { - result = new OutputCheckCommand(o); - }); - return result!; - } + public static IList ParseCheckFile(string fileName) { + var result = File.ReadAllLines(fileName) + .Select((line, index) => CheckDirective.Parse(fileName, index + 1, line)) + .Where(e => e != null) + .Cast() + .ToList(); + if (!result.Any()) { + throw new ArgumentException($"'{fileName}' does not contain any CHECK directives"); + } - public static IList ParseCheckFile(string fileName) { - var result = File.ReadAllLines(fileName) - .Select((line, index) => CheckDirective.Parse(fileName, index + 1, line)) - .Where(e => e != null) - .Cast() - .ToList(); - if (!result.Any()) { - throw new ArgumentException($"'{fileName}' does not contain any CHECK directives"); + return result; } - return result; - } + public (int, string, string) Execute(TextReader inputReader, + TextWriter outputWriter, TextWriter errorWriter) { + if (options.FileToCheck == null) { + return (0, "", ""); + } - public (int, string, string) Execute(TextReader inputReader, - TextWriter outputWriter, TextWriter errorWriter) { - if (options.FileToCheck == null) { - return (0, "", ""); - } + var linesToCheck = File.ReadAllLines(options.FileToCheck).ToList(); + var fileName = options.CheckFile; + if (fileName == null) { + return (0, "", ""); + } + var checkDirectives = ParseCheckFile(options.CheckFile!); - var linesToCheck = File.ReadAllLines(options.FileToCheck).ToList(); - var fileName = options.CheckFile; - if (fileName == null) { - return (0, "", ""); + return Execute(linesToCheck, checkDirectives); } - var checkDirectives = ParseCheckFile(options.CheckFile!); - - return Execute(linesToCheck, checkDirectives); - } - public static (int, string, string) Execute(IEnumerable linesToCheck, IEnumerable checkDirectives) { - IEnumerator lineEnumerator = linesToCheck.GetEnumerator(); - IEnumerator? notCheckingEnumerator = null; - foreach (var directive in checkDirectives) { - // CHECK-NOT[-L] directives apply up until the next directive, so we handle - // them by wrapping the line enumerator for the next time through the loop. - if (directive is CheckNotRegexp(var _, var _, var pattern)) { - notCheckingEnumerator = new CheckingEnumerator(lineEnumerator, line => { - if (pattern.IsMatch(line)) { - throw new Exception($"Match found for {directive}: {line}"); + public static (int, string, string) Execute(IEnumerable linesToCheck, IEnumerable checkDirectives) { + IEnumerator lineEnumerator = linesToCheck.GetEnumerator(); + IEnumerator? notCheckingEnumerator = null; + foreach (var directive in checkDirectives) { + // CHECK-NOT[-L] directives apply up until the next directive, so we handle + // them by wrapping the line enumerator for the next time through the loop. + if (directive is CheckNotRegexp(var _, var _, var pattern)) { + notCheckingEnumerator = new CheckingEnumerator(lineEnumerator, line => { + if (pattern.IsMatch(line)) { + throw new Exception($"Match found for {directive}: {line}"); + } + }); + } else if (directive is CheckNotLiteral(var _, var _, var literal)) { + notCheckingEnumerator = new CheckingEnumerator(lineEnumerator, line => { + if (literal == line.Trim()) { + throw new Exception($"Match found for {directive}: {line}"); + } + }); + } else { + var enumerator = notCheckingEnumerator ?? 
lineEnumerator; + if (!directive.FindMatch(enumerator)) { + return (1, "", $"ERROR: Could not find a match for {directive}"); } - }); - } else if (directive is CheckNotLiteral(var _, var _, var literal)) { - notCheckingEnumerator = new CheckingEnumerator(lineEnumerator, line => { - if (literal == line.Trim()) { - throw new Exception($"Match found for {directive}: {line}"); - } - }); - } else { - var enumerator = notCheckingEnumerator ?? lineEnumerator; - if (!directive.FindMatch(enumerator)) { - return (1, "", $"ERROR: Could not find a match for {directive}"); + + notCheckingEnumerator = null; } + } - notCheckingEnumerator = null; + if (notCheckingEnumerator != null) { + // Traverse the rest of the enumerator to make sure the CHECK-NOT[-L] directive is fully tested. + while (notCheckingEnumerator.MoveNext()) { } } - } - if (notCheckingEnumerator != null) { - // Traverse the rest of the enumerator to make sure the CHECK-NOT[-L] directive is fully tested. - while (notCheckingEnumerator.MoveNext()) { } + return (0, "", ""); } - return (0, "", ""); - } - - public override string ToString() { - return $"OutputCheck --file-to-check {options.FileToCheck} {options.CheckFile}"; + public override string ToString() { + return $"OutputCheck --file-to-check {options.FileToCheck} {options.CheckFile}"; + } } } -} diff --git a/dotnet-tools.json b/dotnet-tools.json index b50d45ebe2b..1c8397c7c3b 100644 --- a/dotnet-tools.json +++ b/dotnet-tools.json @@ -8,12 +8,6 @@ "coco" ] }, - "dotnet-format": { - "version": "5.1.250801", - "commands": [ - "dotnet-format" - ] - }, "boogie": { "version": "3.0.3", "commands": [