Commit a10e3b5

pkg/workflows/sdk: add WorkflowSpec.FormatChart for mermaid flowcharts
1 parent 34e8551 commit a10e3b5

11 files changed: +519 -148 lines

pkg/capabilities/capabilities.go (+21)

```diff
@@ -1,6 +1,7 @@
 package capabilities
 
 import (
+	"cmp"
 	"context"
 	"fmt"
 	"regexp"
@@ -53,6 +54,26 @@ func (c CapabilityType) IsValid() error {
 	return fmt.Errorf("invalid capability type: %s", c)
 }
 
+func (c CapabilityType) cmpOrder() int {
+	switch c {
+	case CapabilityTypeTrigger:
+		return 0
+	case CapabilityTypeAction:
+		return 1
+	case CapabilityTypeConsensus:
+		return 2
+	case CapabilityTypeTarget:
+		return 3
+	case CapabilityTypeUnknown:
+		return 4
+	default:
+		return 5
+	}
+}
+func (c CapabilityType) Compare(c2 CapabilityType) int {
+	return cmp.Compare(c.cmpOrder(), c2.cmpOrder())
+}
+
 // CapabilityResponse is a struct for the Execute response of a capability.
 type CapabilityResponse struct {
 	Value *values.Map
```

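The new Compare orders capability types trigger → action → consensus → target → unknown, giving callers a stable pipeline ordering (presumably what the chart formatter uses to emit nodes in a predictable sequence). A minimal usage sketch, not part of the commit; the slice contents are illustrative:

```go
package main

import (
	"fmt"
	"slices"

	"github.com/smartcontractkit/chainlink-common/pkg/capabilities"
)

func main() {
	types := []capabilities.CapabilityType{
		capabilities.CapabilityTypeTarget,
		capabilities.CapabilityTypeTrigger,
		capabilities.CapabilityTypeConsensus,
		capabilities.CapabilityTypeAction,
	}

	// Sort using the Compare method added above:
	// trigger < action < consensus < target < unknown.
	slices.SortFunc(types, func(a, b capabilities.CapabilityType) int {
		return a.Compare(b)
	})

	fmt.Println(types) // ordered trigger, action, consensus, target
}
```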

pkg/workflows/models_yaml_test.go (+1 -1)

```diff
@@ -49,7 +49,7 @@ var transformJSON = cmp.FilterValues(func(x, y []byte) bool {
 	return out
 }))
 
-func TestWorkflowSpecMarshalling(t *testing.T) {
+func TestWorkflowSpecYamlMarshalling(t *testing.T) {
 	t.Parallel()
 	fixtureReader := yamlFixtureReaderBytes(t, "marshalling")
```

pkg/workflows/sdk/builder_test.go (+1 -76)

```diff
@@ -7,7 +7,6 @@ import (
 	"github.com/stretchr/testify/require"
 	"sigs.k8s.io/yaml"
 
-	"github.com/smartcontractkit/chainlink-common/pkg/capabilities"
 	ocr3 "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap"
 	"github.com/smartcontractkit/chainlink-common/pkg/capabilities/targets/chainwriter"
 	"github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams"
@@ -205,81 +204,7 @@ func TestBuilder_ValidSpec(t *testing.T) {
 		actual, err := factory.Spec()
 		require.NoError(t, err)
 
-		expected := sdk.WorkflowSpec{
-			Name:  "notccipethsep",
-			Owner: "0x00000000000000000000000000000000000000aa",
-			Triggers: []sdk.StepDefinition{
-				{
-					Ref:            "trigger",
-					Inputs:         sdk.StepInputs{},
-					Config:         map[string]any{"maxFrequencyMs": 5000},
-					CapabilityType: capabilities.CapabilityTypeTrigger,
-				},
-			},
-			Actions: make([]sdk.StepDefinition, 0),
-			Consensus: []sdk.StepDefinition{
-				{
-					Ref: "data-feeds-report",
-					Inputs: sdk.StepInputs{
-						Mapping: map[string]any{"observations": []map[string]any{
-							{
-								"Metadata": map[string]any{
-									"MinRequiredSignatures": 1,
-									"Signers":               []string{"$(trigger.outputs.Metadata.Signer)"},
-								},
-								"Payload": []map[string]any{
-									{
-										"BenchmarkPrice":       "$(trigger.outputs.Payload.BuyPrice)",
-										"FeedID":               anyFakeFeedID,
-										"FullReport":           "$(trigger.outputs.Payload.FullReport)",
-										"ObservationTimestamp": "$(trigger.outputs.Payload.ObservationTimestamp)",
-										"ReportContext":        "$(trigger.outputs.Payload.ReportContext)",
-										"Signatures":           []string{"$(trigger.outputs.Payload.Signature)"},
-									},
-								},
-								"Timestamp": "$(trigger.outputs.Timestamp)",
-							},
-						}},
-					},
-					Config: map[string]any{
-						"aggregation_config": ocr3.DataFeedsConsensusConfigAggregationConfig{
-							AllowedPartialStaleness: "0.5",
-							Feeds: map[string]ocr3.FeedValue{
-								anyFakeFeedID: {
-									Deviation: "0.5",
-									Heartbeat: 3600,
-								},
-							},
-						},
-						"aggregation_method": "data_feeds",
-						"encoder":            "EVM",
-						"encoder_config": ocr3.EncoderConfig{
-							"Abi": "(bytes32 FeedID, uint224 Price, uint32 Timestamp)[] Reports",
-						},
-						"report_id": "0001",
-					},
-					CapabilityType: capabilities.CapabilityTypeConsensus,
-				},
-			},
-			Targets: []sdk.StepDefinition{
-				{
-					Inputs: sdk.StepInputs{
-						Mapping: map[string]any{"signed_report": "$(data-feeds-report.outputs)"},
-					},
-					Config: map[string]any{
-						"address":    "0xE0082363396985ae2FdcC3a9F816A586Eed88416",
-						"deltaStage": "45s",
-						"schedule":   "oneAtATime",
-					},
-					CapabilityType: capabilities.CapabilityTypeTarget,
-				},
-			},
-		}
-
-		testutils.AssertWorkflowSpec(t, expected, actual)
+		testutils.AssertWorkflowSpec(t, notStreamSepoliaWorkflowSpec, actual)
 	})
 
 	t.Run("duplicate names causes errors", func(t *testing.T) {
```


pkg/workflows/sdk/compute_test.go (+2 -65)

```diff
@@ -39,71 +39,8 @@ func TestCompute(t *testing.T) {
 
 		spec, err2 := workflow.Spec()
 		require.NoError(t, err2)
-		expectedSpec := sdk.WorkflowSpec{
-			Name:  "name",
-			Owner: "owner",
-			Triggers: []sdk.StepDefinition{
-				{
-					Ref:            "trigger",
-					Inputs:         sdk.StepInputs{},
-					Config:         map[string]any{"maxFrequencyMs": 5000},
-					CapabilityType: capabilities.CapabilityTypeTrigger,
-				},
-			},
-			Actions: []sdk.StepDefinition{
-				{
-					Ref: "Compute",
-					Inputs: sdk.StepInputs{
-						Mapping: map[string]any{"Arg0": "$(trigger.outputs)"},
-					},
-					Config:         map[string]any{},
-					CapabilityType: capabilities.CapabilityTypeAction,
-				},
-			},
-			Consensus: []sdk.StepDefinition{
-				{
-					Ref: "data-feeds-report",
-					Inputs: sdk.StepInputs{
-						Mapping: map[string]any{"observations": "$(Compute.outputs.Value)"},
-					},
-					Config: map[string]any{
-						"aggregation_config": ocr3.DataFeedsConsensusConfigAggregationConfig{
-							AllowedPartialStaleness: "false",
-							Feeds: map[string]ocr3.FeedValue{
-								anyFakeFeedID: {
-									Deviation: "0.5",
-									Heartbeat: 3600,
-								},
-							},
-						},
-						"aggregation_method": "data_feeds",
-						"encoder":            ocr3.EncoderEVM,
-						"encoder_config":     ocr3.EncoderConfig{},
-						"report_id":          "0001",
-					},
-					CapabilityType: capabilities.CapabilityTypeConsensus,
-				},
-			},
-			Targets: []sdk.StepDefinition{
-				{
-					Inputs: sdk.StepInputs{
-						Mapping: map[string]any{"signed_report": "$(data-feeds-report.outputs)"},
-					},
-					Config: map[string]any{
-						"address":    "0xE0082363396985ae2FdcC3a9F816A586Eed88416",
-						"deltaStage": "45s",
-						"schedule":   "oneAtATime",
-					},
-					CapabilityType: capabilities.CapabilityTypeTarget,
-				},
-			},
-		}
 
-		testutils.AssertWorkflowSpec(t, expectedSpec, spec)
+		testutils.AssertWorkflowSpec(t, serialWorkflowSpec, spec)
 	})
 
 	t.Run("compute runs the function and returns the value", func(t *testing.T) {
@@ -130,7 +67,7 @@ func TestCompute(t *testing.T) {
 func createWorkflow(fn func(_ sdk.Runtime, inputFeed notstreams.Feed) ([]streams.Feed, error)) *sdk.WorkflowSpecFactory {
 	workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{
 		Owner: "owner",
-		Name:  "name",
+		Name:  "serial",
 	})
 
 	trigger := notstreams.TriggerConfig{MaxFrequencyMs: 5000}.New(workflow)
```

New file (+12 lines)

```mermaid
flowchart

trigger[\"<b>trigger</b><br><small>trigger<br><i>(notstreams[at]1.0.0)</i></small>"/]

data-feeds-report[["<b>data-feeds-report</b><br><small>consensus<br><i>(offchain_reporting[at]1.0.0)</i></small>"]]
trigger -- Metadata.Signer<br>Payload.BuyPrice<br>Payload.FullReport<br>Payload.ObservationTimestamp<br>Payload.ReportContext<br>Payload.Signature<br>Timestamp --> data-feeds-report

unnamed2[/"<small>target<br><i>(write_ethereum-testnet-sepolia[at]1.0.0)</i></small>"\]
data-feeds-report --> unnamed2

```

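This chart is the kind of output the commit's new WorkflowSpec.FormatChart produces; the method's implementation and its direct tests are among the files not expanded in this view. A rough sketch of how it might be invoked, assuming FormatChart returns the mermaid text and an error (the signature, the fence wrapping, and the helper itself are assumptions, not taken from this diff):

```go
package main

import (
	"fmt"
	"os"

	"github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk"
)

// writeWorkflowChart is a hypothetical helper. FormatChart's return signature is
// assumed here; whether it already wraps its output in mermaid fences is also unknown.
func writeWorkflowChart(spec sdk.WorkflowSpec, path string) error {
	chart, err := spec.FormatChart() // assumed: returns the mermaid flowchart text
	if err != nil {
		return err
	}
	md := "```mermaid\n" + chart + "\n```\n"
	return os.WriteFile(path, []byte(md), 0o644)
}

func main() {
	var spec sdk.WorkflowSpec // in practice, built via sdk.NewWorkflowSpecFactory and factory.Spec()
	if err := writeWorkflowChart(spec, "chart.md"); err != nil {
		fmt.Println(err)
	}
}
```
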
New file (+29 lines)

```mermaid
flowchart

trigger-chain-event[\"<b>trigger-chain-event</b><br><small>trigger<br><i>(chain_reader[at]1.0.0)</i></small>"/]

compute-bar["<b>compute-bar</b><br><small>action<br><i>(custom_compute[at]1.0.0)</i></small>"]
get-bar --> compute-bar

compute-foo["<b>compute-foo</b><br><small>action<br><i>(custom_compute[at]1.0.0)</i></small>"]
get-foo --> compute-foo

get-bar["<b>get-bar</b><br><small>action<br><i>(http[at]1.0.0)</i></small>"]
trigger-chain-event --> get-bar

get-foo["<b>get-foo</b><br><small>action<br><i>(http[at]1.0.0)</i></small>"]
trigger-chain-event --> get-foo

read-token-price["<b>read-token-price</b><br><small>action<br><i>(chain_reader[at]1.0.0)</i></small>"]
trigger-chain-event --> read-token-price

data-feeds-report[["<b>data-feeds-report</b><br><small>consensus<br><i>(offchain_reporting[at]1.0.0)</i></small>"]]
compute-bar -- Value --> data-feeds-report
compute-foo -- Value --> data-feeds-report
read-token-price -- Value --> data-feeds-report

unnamed7[/"<small>target<br><i>(write_ethereum-testnet-sepolia[at]1.0.0)</i></small>"\]
data-feeds-report --> unnamed7

```

New file (+31 lines)

```mermaid
flowchart

trigger-chain-event[\"<b>trigger-chain-event</b><br><small>trigger<br><i>(chain_reader[at]1.0.0)</i></small>"/]

compute-bar["<b>compute-bar</b><br><small>action<br><i>(custom_compute[at]1.0.0)</i></small>"]
get-bar --> compute-bar

compute-foo["<b>compute-foo</b><br><small>action<br><i>(custom_compute[at]1.0.0)</i></small>"]
get-foo --> compute-foo

get-bar["<b>get-bar</b><br><small>action<br><i>(http[at]1.0.0)</i></small>"]
trigger-chain-event --> get-bar

get-foo["<b>get-foo</b><br><small>action<br><i>(http[at]1.0.0)</i></small>"]
trigger-chain-event --> get-foo

read-token-price["<b>read-token-price</b><br><small>action<br><i>(chain_reader[at]1.0.0)</i></small>"]
trigger-chain-event --> read-token-price

data-feeds-report[["<b>data-feeds-report</b><br><small>consensus<br><i>(offchain_reporting[at]1.0.0)</i></small>"]]
compute-bar -- Value --> data-feeds-report
compute-foo -- Value --> data-feeds-report
read-token-price -- Value --> data-feeds-report

unnamed7[/"<small>target<br><i>(write_ethereum-testnet-sepolia[at]1.0.0)</i></small>"\]
data-feeds-report --> unnamed7

compute-foo -..-> get-bar
compute-bar -..-> read-token-price
```

New file (+15 lines)

```mermaid
flowchart

trigger[\"<b>trigger</b><br><small>trigger<br><i>(notstreams[at]1.0.0)</i></small>"/]

Compute["<b>Compute</b><br><small>action<br><i>(__internal__custom_compute[at]1.0.0)</i></small>"]
trigger --> Compute

data-feeds-report[["<b>data-feeds-report</b><br><small>consensus<br><i>(offchain_reporting[at]1.0.0)</i></small>"]]
Compute -- Value --> data-feeds-report

unnamed3[/"<small>target<br><i>(write_ethereum-testnet-sepolia[at]1.0.0)</i></small>"\]
data-feeds-report --> unnamed3

```

pkg/workflows/sdk/testutils/utils.go (+10 -5)

```diff
@@ -1,6 +1,7 @@
 package testutils
 
 import (
+	"bytes"
 	"encoding/json"
 	"testing"
 
@@ -11,11 +12,15 @@ import (
 )
 
 func AssertWorkflowSpec(t *testing.T, expectedSpec, testWorkflowSpec sdk.WorkflowSpec) {
-	expected, err := json.Marshal(expectedSpec)
-	require.NoError(t, err)
+	var b bytes.Buffer
+	e := json.NewEncoder(&b)
+	e.SetIndent("", " ")
+	require.NoError(t, e.Encode(expectedSpec))
+	expected := b.String()
 
-	actual, err := json.Marshal(testWorkflowSpec)
-	require.NoError(t, err)
+	b.Reset()
+	require.NoError(t, e.Encode(testWorkflowSpec))
+	actual := b.String()
 
-	assert.Equal(t, string(expected), string(actual))
+	assert.Equal(t, expected, actual)
 }
```

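Switching from json.Marshal to an indented json.Encoder means a failing assert.Equal prints a readable line-by-line diff of the two specs instead of two single-line JSON blobs. A small self-contained illustration of the difference (the struct here is a stand-in, not the real sdk.WorkflowSpec):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// step is a stand-in type; the real assertion encodes sdk.WorkflowSpec values.
type step struct {
	Ref    string         `json:"ref"`
	Config map[string]any `json:"config"`
}

func main() {
	s := step{Ref: "trigger", Config: map[string]any{"maxFrequencyMs": 5000}}

	compact, _ := json.Marshal(s)
	fmt.Println(string(compact)) // one long line: hard to read when an assertion fails

	var b bytes.Buffer
	enc := json.NewEncoder(&b)
	enc.SetIndent("", "  ")
	_ = enc.Encode(s)
	fmt.Print(b.String()) // multi-line output, so a failing assert.Equal shows a per-line diff
}
```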
