Commit 2fe447a

Merge branch 'master' into issue-4939

RustanLeino committed Jan 19, 2024
2 parents 8079558 + 8480133
Showing 43 changed files with 129 additions and 60 deletions.
19 changes: 18 additions & 1 deletion .github/workflows/publish-release-reusable.yml
@@ -141,13 +141,16 @@ jobs:
# Additionally, since the refman build scripts expect to find Dafny in its usual Binaries/ folder (not in
# a platform-specific directory), we build Dafny once here.
- name: Re-build Dafny
if: ${{ !inputs.prerelease }}
run: dotnet build dafny/Source/Dafny.sln
- name: Build reference manual
if: ${{ !inputs.prerelease }}
run: |
eval "$(/usr/libexec/path_helper)"
make -C dafny/docs/DafnyRef
- name: Create GitHub release
- name: Create GitHub release (release)
if: ${{ !inputs.prerelease }}
uses: softprops/action-gh-release@v1
with:
name: Dafny ${{ inputs.name }}
@@ -159,3 +162,17 @@ jobs:
dafny/Package/dafny-${{ inputs.name }}*
dafny/docs/DafnyRef/DafnyRef.pdf
fail_on_unmatched_files: true

# This step is separate from the release one because the refman is omitted
- name: Create GitHub release (prerelease)
if: ${{ inputs.prerelease }}
uses: softprops/action-gh-release@v1
with:
name: Dafny ${{ inputs.name }}
tag_name: ${{ inputs.tag_name }}
body: ${{ inputs.release_notes }}
draft: ${{ inputs.draft }}
prerelease: ${{ inputs.prerelease }}
files: |
dafny/Package/dafny-${{ inputs.name }}*
fail_on_unmatched_files: true
19 changes: 19 additions & 0 deletions Source/DafnyCore/AST/Modules/ModuleDefinition.cs
@@ -689,6 +689,22 @@ public class ModuleDefinition : RangeNode, IAttributeBearingDeclaration, IClonea
return prefixNameModule with { Parts = rest };
}

private static readonly List<(string, string)> incompatibleAttributePairs =
new() {
("rlimit", "resource_limit")
};

private void CheckIncompatibleAttributes(ModuleResolver resolver, Attributes attrs) {
foreach (var pair in incompatibleAttributePairs) {
var attr1 = Attributes.Find(attrs, pair.Item1);
var attr2 = Attributes.Find(attrs, pair.Item2);
if (attr1 is not null && attr2 is not null) {
resolver.reporter.Error(MessageSource.Resolver, attr1.tok,
$"the {pair.Item1} and {pair.Item2} attributes cannot be used together");
}
}
}

public ModuleSignature RegisterTopLevelDecls(ModuleResolver resolver, bool useImports) {
Contract.Requires(this != null);
var sig = new ModuleSignature();
@@ -767,6 +783,9 @@ public class ModuleDefinition : RangeNode, IAttributeBearingDeclaration, IClonea

foreach (MemberDecl m in members.Values) {
Contract.Assert(!m.HasStaticKeyword || Attributes.Contains(m.Attributes, "opaque_reveal"));

CheckIncompatibleAttributes(resolver, m.Attributes);

if (m is Function or Method or ConstantField) {
sig.StaticMembers[m.Name] = m;
}
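The resolver check added above rejects a member that carries both spellings of the limit attribute. A minimal hypothetical Dafny sketch of the kind of declaration it flags (illustrative only, not part of this commit); resolving it would now report "the rlimit and resource_limit attributes cannot be used together":

// Hypothetical input, for illustration only: mixing the old and new
// attribute names on one member now triggers the resolver error above.
lemma {:rlimit 1000} {:resource_limit 1000000} BothLimits()
  ensures true
{
}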
5 changes: 5 additions & 0 deletions Source/DafnyCore/DafnyOptions.cs
@@ -7,6 +7,7 @@
using System.Collections.ObjectModel;
using System.CommandLine;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.IO;
using System.Reflection;
@@ -358,6 +359,10 @@ public enum IncludesModes {
).ToArray();
}

public static bool TryParseResourceCount(string value, out uint result) {
return uint.TryParse(value, NumberStyles.AllowExponent, null, out result);
}

/// <summary>
/// Automatic shallow-copy constructor
/// </summary>
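TryParseResourceCount above parses an unsigned integer with NumberStyles.AllowExponent, so a count such as "1e6" is read as 1000000. A hedged Dafny sketch, assuming string-valued :resource_limit arguments are routed through this parser (see the expression-translator change further down); the exponent spelling here is an illustrative assumption, not code from this commit:

// Assumed to be equivalent: "1e6" parses to 1000000 via TryParseResourceCount,
// so both lemmas would request the same solver resource budget.
lemma {:resource_limit "1e6"} CompactLimit()
  ensures true
{
}
lemma {:resource_limit 1000000} ExplicitLimit()
  ensures true
{
}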
15 changes: 13 additions & 2 deletions Source/DafnyCore/Options/BoogieOptionBag.cs
@@ -63,7 +63,18 @@ public static class BoogieOptionBag {
new("--error-limit", () => 5, "Set the maximum number of errors to report (0 for unlimited).");

public static readonly Option<uint> SolverResourceLimit = new("--resource-limit",
@"Specify the maximum resource limit (rlimit) value to pass to Z3. A resource limit is a deterministic alternative to a time limit. The output produced by `--log-format csv` includes the resource use of each proof effort, which you can use to determine an appropriate limit for your program. Multiplied by 1000 before sending to Z3.");
result => {
var value = result.Tokens[^1].Value;
if (DafnyOptions.TryParseResourceCount(value, out var number)) {
return number;
}
result.ErrorMessage = $"Cannot parse resource limit: {value}";
return 0;
},
isDefault: false,
@"Specify the maximum resource limit (rlimit) value to pass to Z3. A resource limit is a deterministic alternative to a time limit. The output produced by `--log-format csv` includes the resource use of each proof effort, which you can use to determine an appropriate limit for your program.");
public static readonly Option<string> SolverPlugin = new("--solver-plugin",
@"Dafny uses Boogie as part of its verification process. This option allows customising that part using a Boogie plugin. More information about Boogie can be found at https://github.com/boogie-org/boogie. Information on how to construct Boogie plugins can be found by looking at the code in https://github.com/boogie-org/boogie/blob/v2.16.3/Source/Provers/SMTLib/ProverUtil.cs#L316");

@@ -120,7 +131,7 @@ public static class BoogieOptionBag {
o.TheProverFactory = ProverFactory.Load(o.ProverDllName);
}
});
DafnyOptions.RegisterLegacyBinding(SolverResourceLimit, (o, v) => o.ResourceLimit = Boogie.Util.BoundedMultiply(v, 1000));
DafnyOptions.RegisterLegacyBinding(SolverResourceLimit, (o, v) => o.ResourceLimit = v);
DafnyOptions.RegisterLegacyBinding(SolverLog, (o, v) => o.ProverLogFilePath = v);
DafnyOptions.RegisterLegacyBinding(SolverOption, (o, v) => {
if (v is not null) {
13 changes: 13 additions & 0 deletions Source/DafnyCore/Verifier/BoogieGenerator.ExpressionTranslator.cs
@@ -1959,6 +1959,19 @@ public ExpressionTranslator(ExpressionTranslator etran, Boogie.Expr heap)
BigNum.FromUInt(Boogie.Util.BoundedMultiply((uint)litExpr.asBigNum.ToIntSafe, 1000)),
litExpr.Immutable);
}

// Do this after the above multiplication because :resource_limit should not be multiplied.
if (name == "resource_limit") {
name = "rlimit";
if (parms[0] is string str) {
if (DafnyOptions.TryParseResourceCount(str, out var resourceLimit)) {
parms[0] = new Boogie.LiteralExpr(attr.tok, BigNum.FromUInt(resourceLimit), true);
} else {
BoogieGenerator.reporter.Error(MessageSource.Verifier, attr.tok,
$"failed to parse resource count: {parms[0]}");
}
}
}
kv = new Boogie.QKeyValue(Token.NoToken, name, parms, kv);
}
return kv;
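As this hunk shows, an :rlimit value is still multiplied by 1000 before it reaches Boogie, while :resource_limit is renamed to :rlimit and passed through unchanged, which is why the test and library files below scale their old values by 1000. A small sketch of that equivalence (illustrative example, not from this commit):

// These two declarations should end up with the same effective Z3 budget:
// {:rlimit 2000} is scaled by 1000, {:resource_limit 2000000} is passed as-is.
lemma {:rlimit 2000} OldStyle()
  ensures true
{
}
lemma {:resource_limit 2000000} NewStyle()
  ensures true
{
}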
@@ -27,7 +27,7 @@ namespace Microsoft.Dafny.LanguageServer.IntegrationTest {
public class DafnyLanguageServerTestBase : LanguageProtocolTestBase {

protected readonly string SlowToVerify = @"
lemma {:rlimit 100} SquareRoot2NotRational(p: nat, q: nat)
lemma {:resource_limit 100000} SquareRoot2NotRational(p: nat, q: nat)
requires p > 0 && q > 0
ensures (p * p) != 2 * (q * q)
{
@@ -42,7 +42,7 @@ public class DafnyLanguageServerTestBase : LanguageProtocolTestBase {
}
}".TrimStart();

protected string SlowToVerifyNoLimit => SlowToVerify.Replace(" {:rlimit 100}", "");
protected string SlowToVerifyNoLimit => SlowToVerify.Replace(" {:resource_limit 100000}", "");

protected readonly string NeverVerifies = @"
lemma {:neverVerify} HasNeverVerifyAttribute(p: nat, q: nat)
@@ -127,7 +127,7 @@ import Producer
await client.OpenDocumentAndWaitAsync(documentItem, CancellationToken);
var diagnostics = await GetLastDiagnostics(documentItem);
Assert.Contains("Verification out of resource", diagnostics[0].Message);
Assert.Equal(new Range(0, 20, 0, 42), diagnostics[0].Range);
Assert.Equal(new Range(0, 31, 0, 53), diagnostics[0].Range);
}

[Fact]
@@ -70,7 +70,7 @@ public class SimpleLinearVerificationGutterStatusTester : LinearVerificationGutt
| : if n <= 1 then n else fib(n-1) + fib(n-2)
| :}
| :
[ ]:method {:rlimit 1} Test(s: seq<nat>)
[ ]:method {:resource_limit 1000} Test(s: seq<nat>)
[=]: requires |s| >= 1 && s[0] >= 0 { assert fib(10) == 1; assert {:split_here} s[0] >= 0;
[ ]:}", true, intermediates: false);
}
@@ -488,7 +488,7 @@ method Test(i: int)
var documentItem = await GetDocumentItem(@"
ghost function f(i:nat, j:nat):int {if i == 0 then 0 else f(i - 1, i * j + 1) + f(i - 1, 2 * i * j)}
lemma{:rlimit 10000} L()
lemma{:resource_limit 10000000} L()
{
assert f(10, 5) == 0;
} ", "testfileSlow.dfy", true);
@@ -430,7 +430,7 @@ public class VerificationStatusTest : ClientBasedLanguageServerTest {

var successfulRun = await client.RunSymbolVerification(new TextDocumentIdentifier(documentItem.Uri), methodHeader, CancellationToken);
Assert.True(successfulRun);
var range = new Range(0, 20, 0, 42);
var range = new Range(0, 31, 0, 53);
await WaitForStatus(range, PublishedVerificationStatus.Running, CancellationToken);
await WaitForStatus(range, PublishedVerificationStatus.Error, CancellationToken);

Binary file modified Source/DafnyStandardLibraries/binaries/DafnyStandardLibraries.doo
@@ -11,7 +11,7 @@ module WriteBytesToFile {
theMain("build/../build/fileioexamples", "");
}

method {:rlimit 2000} theMain(outputDir: string, expectedErrorPrefix: string) {
method {:resource_limit 2000000} theMain(outputDir: string, expectedErrorPrefix: string) {

// Happy paths: write files to the output dir. (The %diff LIT commands check that we wrote the correct content.)
{
6 changes: 3 additions & 3 deletions Source/DafnyStandardLibraries/examples/JSON/JSONExamples.dfy
@@ -15,7 +15,7 @@ module {:options "-functionSyntax:4"} AbstractSyntax {
The high-level API works with fairly simple datatype values that contain
native Dafny strings.
*/
method {:test} {:rlimit 100000} Test() {
method {:test} {:resource_limit 100000000} Test() {

/**
Use `API.Deserialize` to deserialize a byte string.
@@ -138,7 +138,7 @@ module {:options "-functionSyntax:4"} ConcreteSyntax {
encoding: each node contains pointers to parts of a string, such that
concatenating the fields of all nodes reconstructs the serialized value.
*/
method {:test} {:rlimit 100000} Test() {
method {:test} {:resource_limit 100000000} Test() {

/**
The low-level API exposes the same functions and methods as the high-level
@@ -270,4 +270,4 @@ module {:options "-functionSyntax:4"} ConcreteSyntax {

sq'
}
}
}
2 changes: 1 addition & 1 deletion Source/DafnyStandardLibraries/examples/dfyconfig.toml
@@ -5,7 +5,7 @@ standard-libraries = true
# but consumers don't have to have this option set
# to use the standard libraries in general.
reads-clauses-on-methods = true
resource-limit = 1000
resource-limit = 1000000
warn-as-errors = true
warn-redundant-assumptions = true
warn-contradictory-assumptions = true
@@ -230,7 +230,7 @@ module {:disableNonlinearArithmetic} Std.Arithmetic.ModInternals {
LemmaModAutoMinus(n);
}

lemma {:rlimit 2000} LemmaModAutoMinus(n: int)
lemma {:resource_limit 2000000} LemmaModAutoMinus(n: int)
requires n > 0
ensures ModAutoMinus(n)
{
6 changes: 3 additions & 3 deletions Source/DafnyStandardLibraries/src/Std/Base64.dfy
@@ -98,7 +98,7 @@ module Std.Base64 {
else (c - 65 as char) as index
}

lemma {:rlimit 2000} {:vcs_split_on_every_assert} CharToIndexToChar(c: char)
lemma {:resource_limit 2000000} {:vcs_split_on_every_assert} CharToIndexToChar(c: char)
requires IsBase64Char(c)
ensures IndexToChar(CharToIndex(c)) == c
{
@@ -1213,7 +1213,7 @@ module Std.Base64 {
AboutDecodeValid(s, DecodeValid(s));
}

lemma {:rlimit 12000} DecodeValidEncode1Padding(s: seq<char>)
lemma {:resource_limit 12000000} DecodeValidEncode1Padding(s: seq<char>)
requires IsBase64String(s)
requires |s| >= 4
requires Is1Padding(s[(|s| - 4)..])
@@ -1449,7 +1449,7 @@ module Std.Base64 {
seq(|b|, i requires 0 <= i < |b| => b[i] as uint8)
}

lemma {:vcs_split_on_every_assert} {:rlimit 1000000} UInt8sToBVsToUInt8s(u: seq<uint8>)
lemma {:vcs_split_on_every_assert} {:resource_limit 1000000000} UInt8sToBVsToUInt8s(u: seq<uint8>)
ensures BVsToUInt8s(UInt8sToBVs(u)) == u
{
// TODO: reduce resource use
2 changes: 1 addition & 1 deletion Source/DafnyStandardLibraries/src/Std/Collections/Seq.dfy
@@ -749,7 +749,7 @@ module Std.Collections.Seq {
/* Applying a function to a sequence is distributive over concatenation. That is, concatenating
two sequences and then applying Map is the same as applying Map to each sequence separately,
and then concatenating the two resulting sequences. */
lemma {:opaque} {:rlimit 1000000} LemmaMapDistributesOverConcat<T,R>(f: (T ~> R), xs: seq<T>, ys: seq<T>)
lemma {:opaque} {:resource_limit 1000000000} LemmaMapDistributesOverConcat<T,R>(f: (T ~> R), xs: seq<T>, ys: seq<T>)
requires forall i {:trigger xs[i]}:: 0 <= i < |xs| ==> f.requires(xs[i])
requires forall j {:trigger ys[j]}:: 0 <= j < |ys| ==> f.requires(ys[j])
ensures Map(f, xs + ys) == Map(f, xs) + Map(f, ys)
@@ -36,7 +36,7 @@ module Std.JSON.ConcreteSyntax.SpecProperties {
ensures Spec.ConcatBytes(ts, pt0) == Spec.ConcatBytes(ts, pt1)
{}

lemma {:induction ts0} {:rlimit 10000} ConcatBytes_Linear<T>(ts0: seq<T>, ts1: seq<T>, pt: T --> bytes)
lemma {:induction ts0} {:resource_limit 10000000} ConcatBytes_Linear<T>(ts0: seq<T>, ts1: seq<T>, pt: T --> bytes)
requires forall d | d in ts0 :: pt.requires(d)
requires forall d | d in ts1 :: pt.requires(d)
ensures Spec.ConcatBytes(ts0 + ts1, pt) == Spec.ConcatBytes(ts0, pt) + Spec.ConcatBytes(ts1, pt)
@@ -47,4 +47,4 @@
assert ts0 + ts1 == [ts0[0]] + (ts0[1..] + ts1);
}
}
}
}
4 changes: 2 additions & 2 deletions Source/DafnyStandardLibraries/src/Std/JSON/Serializer.dfy
@@ -64,7 +64,7 @@ module Std.JSON.Serializer {
Failure(o.error)
}

function {:vcs_split_on_every_assert} {:rlimit 1000} Number(dec: Values.Decimal): Result<jnumber> {
function {:vcs_split_on_every_assert} {:resource_limit 1000000} Number(dec: Values.Decimal): Result<jnumber> {
var minus: jminus := Sign(dec.n);
var num: jnum :- Int(Math.Abs(dec.n));
var frac: Maybe<jfrac> := Empty();
@@ -133,4 +133,4 @@ module Std.JSON.Serializer {
var val :- Value(js);
Success(MkStructural(val))
}
}
}
@@ -59,7 +59,7 @@ module Std.JSON.ZeroCopy.Deserializer {
return Cursor(cs.s, cs.beg, point', cs.end).Split();
}

opaque function {:vcs_split_on_every_assert} {:rlimit 1000000} Structural<T>(cs: FreshCursor, parser: Parser<T>)
opaque function {:vcs_split_on_every_assert} {:resource_limit 1000000000} Structural<T>(cs: FreshCursor, parser: Parser<T>)
: (pr: ParseResult<Structural<T>>)
requires forall cs :: parser.fn.requires(cs)
ensures pr.Success? ==> pr.value.StrictlySplitFrom?(cs, st => Spec.Structural(st, parser.spec))
@@ -72,7 +72,7 @@

type jopt = v: Vs.View | v.Length() <= 1 witness Vs.View.OfBytes([])

function {:rlimit 100000} TryStructural(cs: FreshCursor)
function {:resource_limit 100000000} TryStructural(cs: FreshCursor)
: (sp: Split<Structural<jopt>>)
ensures sp.SplitFrom?(cs, st => Spec.Structural(st, SpecView))
{
@@ -244,11 +244,11 @@ module Std.JSON.ZeroCopy.Deserializer {
elems'
}

opaque function {:rlimit 10000} {:vcs_split_on_every_assert} AppendLast(ghost cs0: FreshCursor,
ghost json: ValueParser,
elems: Split<seq<TSuffixedElement>>,
elem: Split<TElement>,
sep: Split<Structural<jclose>>)
opaque function {:resource_limit 10000000} {:vcs_split_on_every_assert} AppendLast(ghost cs0: FreshCursor,
ghost json: ValueParser,
elems: Split<seq<TSuffixedElement>>,
elem: Split<TElement>,
sep: Split<Structural<jclose>>)
: (elems': Split<seq<TSuffixedElement>>)
requires elems.cs.StrictlySplitFrom?(json.cs)
requires elems.SplitFrom?(cs0, SuffixedElementsSpec)
@@ -280,7 +280,7 @@ module Std.JSON.ZeroCopy.Deserializer {
elems'
}

lemma {:rlimit 10000} AboutTryStructural(cs: FreshCursor)
lemma {:resource_limit 10000000} AboutTryStructural(cs: FreshCursor)
ensures
var sp := Core.TryStructural(cs);
var s0 := sp.t.t.Peek();
@@ -476,7 +476,7 @@ module Std.JSON.ZeroCopy.Deserializer {
case OtherError(err) => err
}

opaque function {:vcs_split_on_every_assert} {:rlimit 10000} JSON(cs: Cursors.FreshCursor) : (pr: DeserializationResult<Cursors.Split<JSON>>)
opaque function {:vcs_split_on_every_assert} {:resource_limit 10000000} JSON(cs: Cursors.FreshCursor) : (pr: DeserializationResult<Cursors.Split<JSON>>)
ensures pr.Success? ==> pr.value.StrictlySplitFrom?(cs, Spec.JSON)
{
Core.Structural(cs, Parsers.Parser(Values.Value, Spec.Value)).MapFailure(LiftCursorError)
@@ -694,7 +694,7 @@ module Std.JSON.ZeroCopy.Deserializer {
Success(cs.Split())
}

opaque function {:rlimit 10000} String(cs: FreshCursor): (pr: ParseResult<jstring>)
opaque function {:resource_limit 10000000} String(cs: FreshCursor): (pr: ParseResult<jstring>)
ensures pr.Success? ==> pr.value.StrictlySplitFrom?(cs, Spec.String)
{
var SP(lq, cs) :- Quote(cs);
@@ -755,7 +755,7 @@ module Std.JSON.ZeroCopy.Deserializer {
else Success(sp)
}

opaque function {:vcs_split_on_every_assert} {:rlimit 100000} Exp(cs: FreshCursor) : (pr: ParseResult<Maybe<jexp>>)
opaque function {:vcs_split_on_every_assert} {:resource_limit 100000000} Exp(cs: FreshCursor) : (pr: ParseResult<Maybe<jexp>>)
ensures pr.Success? ==> pr.value.SplitFrom?(cs, exp => Spec.Maybe(exp, Spec.Exp))
{
var SP(e, cs) :=
@@ -974,7 +974,7 @@ module Std.JSON.ZeroCopy.Deserializer {
}
}

opaque function {:vcs_split_on_every_assert} {:rlimit 10000} Object(cs: FreshCursor, json: ValueParser)
opaque function {:vcs_split_on_every_assert} {:resource_limit 10000000} Object(cs: FreshCursor, json: ValueParser)
: (pr: ParseResult<jobject>)
requires cs.SplitFrom?(json.cs)
ensures pr.Success? ==> pr.value.StrictlySplitFrom?(cs, Spec.Object)
@@ -985,4 +985,4 @@
Success(sp)
}
}
}
}