Compare commits
12 Commits
| SHA1 |
|---|
| 5e76853b55 |
| 2eb4de02ee |
| 342ad6a51a |
| 568f053723 |
| 8f12a6c947 |
| 83fb85f702 |
| 3daf313205 |
| 7c5400d75b |
| 929ea6df75 |
| f6a8a0e643 |
| 556fd20aed |
| a8298365fe |
.gitea/workflows/test.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
+name: checks
+on:
+  - push
+  - pull_request
+
+env:
+  GOPROXY: https://goproxy.io,direct
+  GOPATH: /go_path
+  GOCACHE: /go_cache
+
+jobs:
+  lint:
+    name: check and test
+    runs-on: ubuntu-latest
+    steps:
+      - name: cache go path
+        id: cache-go-path
+        uses: https://github.com/actions/cache@v3
+        with:
+          path: /go_path
+          key: go_path-${{ github.repository }}-${{ github.ref_name }}
+          restore-keys: |
+            go_path-${{ github.repository }}-
+            go_path-
+      - name: cache go cache
+        id: cache-go-cache
+        uses: https://github.com/actions/cache@v3
+        with:
+          path: /go_cache
+          key: go_cache-${{ github.repository }}-${{ github.ref_name }}
+          restore-keys: |
+            go_cache-${{ github.repository }}-
+            go_cache-
+      - uses: actions/setup-go@v3
+        with:
+          go-version: 1.20
+      - uses: actions/checkout@v3
+      - name: vet checks
+        run: go vet -v ./...
+      - name: build
+        run: go build -v ./...
+      - name: test
+        run: go test -v ./pkg/jobparser
+        # TODO test more packages
go.mod (2 lines changed)
@@ -80,4 +80,4 @@ require (
     gopkg.in/yaml.v2 v2.4.0 // indirect
 )
 
-replace github.com/go-git/go-git/v5 => github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220315170230-29ec1bc1e5db
+replace github.com/go-git/go-git/v5 => github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220224134545-c785af3f4559
go.sum (4 lines changed)
@@ -14,8 +14,8 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDe
 github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo=
 github.com/ProtonMail/go-crypto v0.0.0-20220404123522-616f957b79ad h1:K3cVQxnwoVf5R2XLZknct3+tJWocEuJUmF7ZGwB2FK8=
 github.com/ProtonMail/go-crypto v0.0.0-20220404123522-616f957b79ad/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo=
-github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220315170230-29ec1bc1e5db h1:b0xyxkCQ0PQEH7gFQ8D+xa9lb+bur6RgVsRBodaqza4=
-github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220315170230-29ec1bc1e5db/go.mod h1:U7oc8MDRtQhVD6StooNkBMVsh/Y4J/2Vl36Mo4IclvM=
+github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220224134545-c785af3f4559 h1:N+UKYPjQ7xkYzbzKWUkDGW5XrhhQDMD4lkwRJCUUA8w=
+github.com/ZauberNerd/go-git/v5 v5.4.3-0.20220224134545-c785af3f4559/go.mod h1:U7oc8MDRtQhVD6StooNkBMVsh/Y4J/2Vl36Mo4IclvM=
 github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk=
 github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4=
 github.com/adrg/xdg v0.4.0 h1:RzRqFcjH4nE5C6oTAxhBtoE2IRyjBSa62SCbyPidvls=
@@ -152,6 +152,8 @@ func (impl *interperterImpl) evaluateVariable(variableNode *actionlint.VariableN
     switch strings.ToLower(variableNode.Name) {
     case "github":
         return impl.env.Github, nil
+    case "gitea": // compatible with Gitea
+        return impl.env.Github, nil
     case "env":
         return impl.env.Env, nil
     case "job":
@@ -41,11 +41,12 @@ func NewInterpeter(
     jobs := run.Workflow.Jobs
     jobNeeds := run.Job().Needs()
 
-    using := map[string]map[string]map[string]string{}
+    using := map[string]exprparser.Needs{}
     for _, need := range jobNeeds {
         if v, ok := jobs[need]; ok {
-            using[need] = map[string]map[string]string{
-                "outputs": v.Outputs,
+            using[need] = exprparser.Needs{
+                Outputs: v.Outputs,
+                Result:  v.Result,
             }
         }
     }
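The hunk above replaces the nested string maps that backed the `needs` context with a typed `exprparser.Needs` value carrying both the outputs and the result of the needed job. A minimal standalone sketch of the resulting shape; the field types are an assumption read off the diff, and the real `exprparser.Needs` may differ:

    package main

    import "fmt"

    // Needs mirrors the shape implied by the diff above; the field types are
    // assumed, the real exprparser.Needs may differ.
    type Needs struct {
        Outputs map[string]string
        Result  string
    }

    func main() {
        // Hypothetical data, for illustration only.
        using := map[string]Needs{
            "build": {
                Outputs: map[string]string{"artifact": "app.tar.gz"},
                Result:  "success",
            },
        }
        // Both needs.build.outputs.artifact and needs.build.result can now be
        // resolved from the same typed entry.
        fmt.Println(using["build"].Outputs["artifact"], using["build"].Result)
    }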
@@ -36,7 +36,12 @@ func Parse(content []byte, options ...ParseOption) ([]*SingleWorkflow, error) {
     }
 
     var ret []*SingleWorkflow
-    for id, job := range workflow.Jobs {
+    ids, jobs, err := workflow.jobs()
+    if err != nil {
+        return nil, fmt.Errorf("invalid jobs: %w", err)
+    }
+    for i, id := range ids {
+        job := jobs[i]
         for _, matrix := range getMatrixes(origin.GetJob(id)) {
             job := job.Clone()
             if job.Name == "" {
@@ -50,17 +55,18 @@ func Parse(content []byte, options ...ParseOption) ([]*SingleWorkflow, error) {
                 runsOn[i] = evaluator.Interpolate(v)
             }
             job.RawRunsOn = encodeRunsOn(runsOn)
-            job.EraseNeeds() // there will be only one job in SingleWorkflow, it cannot have needs
-            ret = append(ret, &SingleWorkflow{
+            swf := &SingleWorkflow{
                 Name:     workflow.Name,
                 RawOn:    workflow.RawOn,
                 Env:      workflow.Env,
-                Jobs:     map[string]*Job{id: job},
                 Defaults: workflow.Defaults,
-            })
+            }
+            if err := swf.SetJob(id, job); err != nil {
+                return nil, fmt.Errorf("SetJob: %w", err)
+            }
+            ret = append(ret, swf)
         }
     }
-    sortWorkflows(ret)
     return ret, nil
 }
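Taken together with the previous hunk, Parse now walks the jobs in declaration order, produces one SingleWorkflow per job via SetJob, and no longer sorts the result (sortWorkflows is removed in the next hunk). A usage sketch, with the import path assumed:

    package main

    import (
        "fmt"

        "github.com/nektos/act/pkg/jobparser" // import path assumed
    )

    func main() {
        content := []byte("name: test\njobs:\n  zzz:\n    runs-on: linux\n    steps:\n      - run: uname -a\n  aaa:\n    runs-on: linux\n    steps:\n      - run: uname -a\n")
        workflows, err := jobparser.Parse(content)
        if err != nil {
            panic(err)
        }
        // One single-job workflow per job, kept in declaration order:
        // zzz first, then aaa (no alphabetical sorting anymore).
        for _, wf := range workflows {
            id, job := wf.Job()
            fmt.Println(id, job.Name)
        }
    }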
@@ -135,19 +141,3 @@ func matrixName(m map[string]interface{}) string {
 
     return fmt.Sprintf("(%s)", strings.Join(vs, ", "))
 }
-
-func sortWorkflows(wfs []*SingleWorkflow) {
-    sort.Slice(wfs, func(i, j int) bool {
-        ki := ""
-        for k := range wfs[i].Jobs {
-            ki = k
-            break
-        }
-        kj := ""
-        for k := range wfs[j].Jobs {
-            kj = k
-            break
-        }
-        return ki < kj
-    })
-}
@@ -1,8 +1,6 @@
 package jobparser
 
 import (
-    "embed"
-    "path/filepath"
     "strings"
     "testing"
 
@@ -13,9 +11,6 @@ import (
     "gopkg.in/yaml.v3"
 )
 
-//go:embed testdata
-var f embed.FS
-
 func TestParse(t *testing.T) {
     tests := []struct {
         name string
@@ -40,10 +35,8 @@ func TestParse(t *testing.T) {
     }
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
-            content, err := f.ReadFile(filepath.Join("testdata", tt.name+".in.yaml"))
-            require.NoError(t, err)
-            want, err := f.ReadFile(filepath.Join("testdata", tt.name+".out.yaml"))
-            require.NoError(t, err)
+            content := ReadTestdata(t, tt.name+".in.yaml")
+            want := ReadTestdata(t, tt.name+".out.yaml")
             got, err := Parse(content, tt.options...)
             if tt.wantErr {
                 require.Error(t, err)
@@ -57,7 +50,10 @@ func TestParse(t *testing.T) {
                 }
                 encoder := yaml.NewEncoder(builder)
                 encoder.SetIndent(2)
-                _ = encoder.Encode(v)
+                require.NoError(t, encoder.Encode(v))
+
+                id, job := v.Job()
+                assert.NotEmpty(t, id)
+                assert.NotNil(t, job)
             }
             assert.Equal(t, string(want), builder.String())
         })
@@ -12,17 +12,63 @@ type SingleWorkflow struct {
     Name     string            `yaml:"name,omitempty"`
     RawOn    yaml.Node         `yaml:"on,omitempty"`
     Env      map[string]string `yaml:"env,omitempty"`
-    Jobs     map[string]*Job   `yaml:"jobs,omitempty"`
+    RawJobs  yaml.Node         `yaml:"jobs,omitempty"`
     Defaults Defaults          `yaml:"defaults,omitempty"`
 }
 
 func (w *SingleWorkflow) Job() (string, *Job) {
-    for k, v := range w.Jobs {
-        return k, v
+    ids, jobs, _ := w.jobs()
+    if len(ids) >= 1 {
+        return ids[0], jobs[0]
     }
     return "", nil
 }
 
+func (w *SingleWorkflow) jobs() ([]string, []*Job, error) {
+    var ids []string
+    var jobs []*Job
+    expectKey := true
+    for _, item := range w.RawJobs.Content {
+        if expectKey {
+            if item.Kind != yaml.ScalarNode {
+                return nil, nil, fmt.Errorf("invalid job id: %v", item.Value)
+            }
+            ids = append(ids, item.Value)
+            expectKey = false
+        } else {
+            job := &Job{}
+            if err := item.Decode(job); err != nil {
+                return nil, nil, fmt.Errorf("yaml.Unmarshal: %w", err)
+            }
+            jobs = append(jobs, job)
+            expectKey = true
+        }
+    }
+    if len(ids) != len(jobs) {
+        return nil, nil, fmt.Errorf("invalid jobs: %v", w.RawJobs.Value)
+    }
+    return ids, jobs, nil
+}
+
+func (w *SingleWorkflow) SetJob(id string, job *Job) error {
+    m := map[string]*Job{
+        id: job,
+    }
+    out, err := yaml.Marshal(m)
+    if err != nil {
+        return err
+    }
+    node := yaml.Node{}
+    if err := yaml.Unmarshal(out, &node); err != nil {
+        return err
+    }
+    if len(node.Content) != 1 || node.Content[0].Kind != yaml.MappingNode {
+        return fmt.Errorf("can not set job: %q", out)
+    }
+    w.RawJobs = *node.Content[0]
+    return nil
+}
+
 func (w *SingleWorkflow) Marshal() ([]byte, error) {
     return yaml.Marshal(w)
 }
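The switch from `Jobs map[string]*Job` to `RawJobs yaml.Node` is what lets jobs() return the job ids in declaration order: a YAML mapping decoded into a yaml.Node keeps its keys and values as alternating entries of Content, whereas a Go map loses ordering. A small standalone sketch of that gopkg.in/yaml.v3 behaviour:

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v3"
    )

    func main() {
        src := []byte("jobs:\n  zzz: {}\n  aaa: {}\n")
        var doc struct {
            Jobs yaml.Node `yaml:"jobs"`
        }
        if err := yaml.Unmarshal(src, &doc); err != nil {
            panic(err)
        }
        // Content holds key, value, key, value, ... in document order.
        for i := 0; i < len(doc.Jobs.Content); i += 2 {
            fmt.Println(doc.Jobs.Content[i].Value) // prints zzz, then aaa
        }
    }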
@@ -68,8 +114,9 @@ func (j *Job) Needs() []string {
     return (&model.Job{RawNeeds: j.RawNeeds}).Needs()
 }
 
-func (j *Job) EraseNeeds() {
+func (j *Job) EraseNeeds() *Job {
     j.RawNeeds = yaml.Node{}
+    return j
 }
 
 func (j *Job) RunsOn() []string {
@@ -126,7 +173,20 @@ type RunDefaults struct {
 
 type Event struct {
     Name string
-    Acts map[string][]string
+    acts      map[string][]string
+    schedules []map[string]string
+}
+
+func (evt *Event) IsSchedule() bool {
+    return evt.schedules != nil
+}
+
+func (evt *Event) Acts() map[string][]string {
+    return evt.acts
+}
+
+func (evt *Event) Schedules() []map[string]string {
+    return evt.schedules
 }
 
 func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
@@ -164,16 +224,23 @@ func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
         }
         res := make([]*Event, 0, len(val))
         for k, v := range val {
+            if v == nil {
+                res = append(res, &Event{
+                    Name: k,
+                    acts: map[string][]string{},
+                })
+                continue
+            }
             switch t := v.(type) {
             case string:
                 res = append(res, &Event{
                     Name: k,
-                    Acts: map[string][]string{},
+                    acts: map[string][]string{},
                 })
             case []string:
                 res = append(res, &Event{
                     Name: k,
-                    Acts: map[string][]string{},
+                    acts: map[string][]string{},
                 })
             case map[string]interface{}:
                 acts := make(map[string][]string, len(t))
@@ -186,7 +253,10 @@ func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
                 case []interface{}:
                     acts[act] = make([]string, len(b))
                     for i, v := range b {
-                        acts[act][i] = v.(string)
+                        var ok bool
+                        if acts[act][i], ok = v.(string); !ok {
+                            return nil, fmt.Errorf("unknown on type: %#v", branches)
+                        }
                     }
                 default:
                     return nil, fmt.Errorf("unknown on type: %#v", branches)
@@ -194,7 +264,29 @@ func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
             }
             res = append(res, &Event{
                 Name: k,
-                Acts: acts,
+                acts: acts,
+            })
+        case []interface{}:
+            if k != "schedule" {
+                return nil, fmt.Errorf("unknown on type: %#v", v)
+            }
+            schedules := make([]map[string]string, len(t))
+            for i, tt := range t {
+                vv, ok := tt.(map[string]interface{})
+                if !ok {
+                    return nil, fmt.Errorf("unknown on type: %#v", v)
+                }
+                schedules[i] = make(map[string]string, len(vv))
+                for k, vvv := range vv {
+                    var ok bool
+                    if schedules[i][k], ok = vvv.(string); !ok {
+                        return nil, fmt.Errorf("unknown on type: %#v", v)
+                    }
+                }
+            }
+            res = append(res, &Event{
+                Name:      k,
+                schedules: schedules,
             })
         default:
             return nil, fmt.Errorf("unknown on type: %#v", v)
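With the new `case []interface{}` branch, `on.schedule` entries are parsed into the unexported schedules field and exposed through the accessors added to Event above. A usage sketch, with the jobparser import path assumed:

    package main

    import (
        "fmt"

        "github.com/nektos/act/pkg/jobparser" // import path assumed
        "gopkg.in/yaml.v3"
    )

    func main() {
        src := []byte("on:\n  schedule:\n    - cron: '20 6 * * *'\n")
        var wf struct {
            RawOn yaml.Node `yaml:"on"`
        }
        if err := yaml.Unmarshal(src, &wf); err != nil {
            panic(err)
        }
        events, err := jobparser.ParseRawOn(&wf.RawOn)
        if err != nil {
            panic(err)
        }
        for _, evt := range events {
            if evt.IsSchedule() {
                // prints: schedule [map[cron:20 6 * * *]]
                fmt.Println(evt.Name, evt.Schedules())
            }
        }
    }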
@@ -6,7 +6,10 @@ import (
     "testing"
 
     "github.com/nektos/act/pkg/model"
+
     "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+    "gopkg.in/yaml.v3"
 )
 
 func TestParseRawOn(t *testing.T) {
@@ -47,7 +50,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "push",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "branches": {
                             "master",
                         },
@@ -60,7 +63,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "branch_protection_rule",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "types": {
                             "created",
                             "deleted",
@@ -74,7 +77,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "project",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "types": {
                             "created",
                             "deleted",
@@ -83,7 +86,7 @@ func TestParseRawOn(t *testing.T) {
                 },
                 {
                     Name: "milestone",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "types": {
                             "opened",
                             "deleted",
@@ -97,7 +100,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "pull_request",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "types": {
                             "opened",
                         },
@@ -113,7 +116,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "push",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "branches": {
                             "main",
                         },
@@ -121,7 +124,7 @@ func TestParseRawOn(t *testing.T) {
                 },
                 {
                     Name: "pull_request",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "types": {
                             "opened",
                         },
@@ -137,7 +140,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "push",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "branches": {
                             "main",
                             "releases/**",
@@ -151,7 +154,7 @@ func TestParseRawOn(t *testing.T) {
             result: []*Event{
                 {
                     Name: "push",
-                    Acts: map[string][]string{
+                    acts: map[string][]string{
                         "tags": {
                             "v1.**",
                         },
@@ -170,6 +173,19 @@ func TestParseRawOn(t *testing.T) {
                 },
             },
         },
+        {
+            input: "on:\n schedule:\n - cron: '20 6 * * *'",
+            result: []*Event{
+                {
+                    Name: "schedule",
+                    schedules: []map[string]string{
+                        {
+                            "cron": "20 6 * * *",
+                        },
+                    },
+                },
+            },
+        },
     }
     for _, kase := range kases {
         t.Run(kase.input, func(t *testing.T) {
@@ -182,3 +198,25 @@ func TestParseRawOn(t *testing.T) {
         })
     }
 }
+
+func TestSingleWorkflow_SetJob(t *testing.T) {
+    t.Run("erase needs", func(t *testing.T) {
+        content := ReadTestdata(t, "erase_needs.in.yaml")
+        want := ReadTestdata(t, "erase_needs.out.yaml")
+        swf, err := Parse(content)
+        require.NoError(t, err)
+        builder := &strings.Builder{}
+        for _, v := range swf {
+            id, job := v.Job()
+            require.NoError(t, v.SetJob(id, job.EraseNeeds()))
+
+            if builder.Len() > 0 {
+                builder.WriteString("---\n")
+            }
+            encoder := yaml.NewEncoder(builder)
+            encoder.SetIndent(2)
+            require.NoError(t, encoder.Encode(v))
+        }
+        assert.Equal(t, string(want), builder.String())
+    })
+}
pkg/jobparser/testdata/erase_needs.in.yaml (new file, 16 lines, vendored)
@@ -0,0 +1,16 @@
+name: test
+jobs:
+  job1:
+    runs-on: linux
+    steps:
+      - run: uname -a
+  job2:
+    runs-on: linux
+    steps:
+      - run: uname -a
+    needs: job1
+  job3:
+    runs-on: linux
+    steps:
+      - run: uname -a
+    needs: [job1, job2]
pkg/jobparser/testdata/erase_needs.out.yaml (new file, 23 lines, vendored)
@@ -0,0 +1,23 @@
+name: test
+jobs:
+  job1:
+    name: job1
+    runs-on: linux
+    steps:
+      - run: uname -a
+---
+name: test
+jobs:
+  job2:
+    name: job2
+    runs-on: linux
+    steps:
+      - run: uname -a
+---
+name: test
+jobs:
+  job3:
+    name: job3
+    runs-on: linux
+    steps:
+      - run: uname -a
pkg/jobparser/testdata/has_needs.out.yaml (2 lines changed, vendored)
@@ -10,6 +10,7 @@ name: test
 jobs:
   job2:
     name: job2
+    needs: job1
     runs-on: linux
     steps:
       - run: uname -a
@@ -18,6 +19,7 @@ name: test
 jobs:
   job3:
     name: job3
+    needs: [job1, job2]
     runs-on: linux
     steps:
       - run: uname -a
pkg/jobparser/testdata/multiple_jobs.in.yaml (8 lines changed, vendored)
@@ -1,5 +1,9 @@
 name: test
 jobs:
+  zzz:
+    runs-on: linux
+    steps:
+      - run: echo zzz
   job1:
     runs-on: linux
     steps:
@@ -12,3 +16,7 @@ jobs:
     runs-on: linux
     steps:
       - run: uname -a && go version
+  aaa:
+    runs-on: linux
+    steps:
+      - run: uname -a && go version
pkg/jobparser/testdata/multiple_jobs.out.yaml (16 lines changed, vendored)
@@ -1,4 +1,12 @@
 name: test
+jobs:
+  zzz:
+    name: zzz
+    runs-on: linux
+    steps:
+      - run: echo zzz
+---
+name: test
 jobs:
   job1:
     name: job1
@@ -21,3 +29,11 @@ jobs:
     runs-on: linux
     steps:
       - run: uname -a && go version
+---
+name: test
+jobs:
+  aaa:
+    name: aaa
+    runs-on: linux
+    steps:
+      - run: uname -a && go version
pkg/jobparser/testdata_test.go (new file, 18 lines)
@@ -0,0 +1,18 @@
+package jobparser
+
+import (
+    "embed"
+    "path/filepath"
+    "testing"
+
+    "github.com/stretchr/testify/require"
+)
+
+//go:embed testdata
+var testdata embed.FS
+
+func ReadTestdata(t *testing.T, name string) []byte {
+    content, err := testdata.ReadFile(filepath.Join("testdata", name))
+    require.NoError(t, err)
+    return content
+}
@@ -506,8 +506,12 @@ func (j JobType) String() string {
 func (j *Job) Type() JobType {
     if strings.HasPrefix(j.Uses, "./.github/workflows") && (strings.HasSuffix(j.Uses, ".yml") || strings.HasSuffix(j.Uses, ".yaml")) {
         return JobTypeReusableWorkflowLocal
+    } else if strings.HasPrefix(j.Uses, "./.gitea/workflows") && (strings.HasSuffix(j.Uses, ".yml") || strings.HasSuffix(j.Uses, ".yaml")) {
+        return JobTypeReusableWorkflowLocal
     } else if !strings.HasPrefix(j.Uses, "./") && strings.Contains(j.Uses, ".github/workflows") && (strings.Contains(j.Uses, ".yml@") || strings.Contains(j.Uses, ".yaml@")) {
         return JobTypeReusableWorkflowRemote
+    } else if !strings.HasPrefix(j.Uses, "./") && strings.Contains(j.Uses, ".gitea/workflows") && (strings.Contains(j.Uses, ".yml@") || strings.Contains(j.Uses, ".yaml@")) {
+        return JobTypeReusableWorkflowRemote
     }
     return JobTypeDefault
 }
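The Type() change above simply extends the existing GitHub path checks to the .gitea workflows directory. A standalone sketch of the local-reusable-workflow rule; the helper name below is hypothetical and only mirrors the condition from the diff:

    package main

    import (
        "fmt"
        "strings"
    )

    // isLocalReusable is a hypothetical helper mirroring the condition above:
    // both ./.github/workflows and ./.gitea/workflows references count as
    // local reusable workflows when they point at a .yml or .yaml file.
    func isLocalReusable(uses string) bool {
        return (strings.HasPrefix(uses, "./.github/workflows") || strings.HasPrefix(uses, "./.gitea/workflows")) &&
            (strings.HasSuffix(uses, ".yml") || strings.HasSuffix(uses, ".yaml"))
    }

    func main() {
        fmt.Println(isLocalReusable("./.gitea/workflows/ci.yml")) // true
        fmt.Println(isLocalReusable("./.gitea/workflows/ci.txt")) // false
    }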
@@ -176,6 +176,8 @@ func runActionImpl(step actionStep, actionDir string, remoteAction *remoteAction
         return err
     }
 
+    rc.ApplyExtraPath(ctx, step.getEnv())
+
     execFileName := fmt.Sprintf("%s.out", action.Runs.Main)
     buildArgs := []string{"go", "build", "-o", execFileName, action.Runs.Main}
     execArgs := []string{filepath.Join(containerActionDir, execFileName)}
@@ -554,6 +556,8 @@ func runPreStep(step actionStep) common.Executor {
         return err
     }
 
+    rc.ApplyExtraPath(ctx, step.getEnv())
+
     execFileName := fmt.Sprintf("%s.out", action.Runs.Pre)
     buildArgs := []string{"go", "build", "-o", execFileName, action.Runs.Pre}
     execArgs := []string{filepath.Join(containerActionDir, execFileName)}
@@ -657,6 +661,7 @@ func runPostStep(step actionStep) common.Executor {
 
     case model.ActionRunsUsingGo:
         populateEnvsFromSavedState(step.getEnv(), step, rc)
+        rc.ApplyExtraPath(ctx, step.getEnv())
 
         execFileName := fmt.Sprintf("%s.out", action.Runs.Post)
         buildArgs := []string{"go", "build", "-o", execFileName, action.Runs.Post}
@@ -77,7 +77,8 @@ func (rc *RunContext) commandHandler(ctx context.Context) common.LineHandler {
             logger.Infof(" \U00002753 %s", line)
         }
 
-        return false
+        // return true to let gitea's logger handle these special outputs also
+        return true
     }
 }
 
@@ -8,6 +8,7 @@ import (
     "os"
     "path"
     "regexp"
+    "strings"
     "sync"
 
     "github.com/nektos/act/pkg/common"
@@ -16,15 +17,14 @@
 )
 
 func newLocalReusableWorkflowExecutor(rc *RunContext) common.Executor {
-    return newReusableWorkflowExecutor(rc, rc.Config.Workdir, rc.Run.Job().Uses)
-}
-
-func newRemoteReusableWorkflowExecutor(rc *RunContext) common.Executor {
-    uses := rc.Run.Job().Uses
-
-    remoteReusableWorkflow := newRemoteReusableWorkflow(uses)
+    // ./.gitea/workflows/wf.yml -> .gitea/workflows/wf.yml
+    trimmedUses := strings.TrimPrefix(rc.Run.Job().Uses, "./")
+    // uses string format is {owner}/{repo}/.{git_platform}/workflows/{filename}@{ref}
+    uses := fmt.Sprintf("%s/%s@%s", rc.Config.PresetGitHubContext.Repository, trimmedUses, rc.Config.PresetGitHubContext.Sha)
+
+    remoteReusableWorkflow := newRemoteReusableWorkflowWithPlat(uses)
     if remoteReusableWorkflow == nil {
-        return common.NewErrorExecutor(fmt.Errorf("expected format {owner}/{repo}/.github/workflows/{filename}@{ref}. Actual '%s' Input string was not in a correct format", uses))
+        return common.NewErrorExecutor(fmt.Errorf("expected format {owner}/{repo}/.{git_platform}/workflows/{filename}@{ref}. Actual '%s' Input string was not in a correct format", uses))
     }
     remoteReusableWorkflow.URL = rc.Config.GitHubInstance
 
@@ -32,7 +32,24 @@ func newRemoteReusableWorkflowExecutor(rc *RunContext) common.Executor {
 
     return common.NewPipelineExecutor(
         newMutexExecutor(cloneIfRequired(rc, *remoteReusableWorkflow, workflowDir)),
-        newReusableWorkflowExecutor(rc, workflowDir, fmt.Sprintf("./.github/workflows/%s", remoteReusableWorkflow.Filename)),
+        newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
+    )
+}
+
+func newRemoteReusableWorkflowExecutor(rc *RunContext) common.Executor {
+    uses := rc.Run.Job().Uses
+
+    remoteReusableWorkflow := newRemoteReusableWorkflowWithPlat(uses)
+    if remoteReusableWorkflow == nil {
+        return common.NewErrorExecutor(fmt.Errorf("expected format {owner}/{repo}/.{git_platform}/workflows/{filename}@{ref}. Actual '%s' Input string was not in a correct format", uses))
+    }
+    remoteReusableWorkflow.URL = rc.Config.GitHubInstance
+
+    workflowDir := fmt.Sprintf("%s/%s", rc.ActionCacheDir(), safeFilename(uses))
+
+    return common.NewPipelineExecutor(
+        newMutexExecutor(cloneIfRequired(rc, *remoteReusableWorkflow, workflowDir)),
+        newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
     )
 }
 
@@ -105,12 +122,40 @@ type remoteReusableWorkflow struct {
     Repo     string
     Filename string
     Ref      string
+
+    GitPlatform string
 }
 
 func (r *remoteReusableWorkflow) CloneURL() string {
+    // In Gitea, r.URL always has the protocol prefix, we don't need to add extra prefix in this case.
+    if strings.HasPrefix(r.URL, "http://") || strings.HasPrefix(r.URL, "https://") {
+        return fmt.Sprintf("%s/%s/%s", r.URL, r.Org, r.Repo)
+    }
     return fmt.Sprintf("https://%s/%s/%s", r.URL, r.Org, r.Repo)
 }
 
+func (r *remoteReusableWorkflow) FilePath() string {
+    return fmt.Sprintf("./.%s/workflows/%s", r.GitPlatform, r.Filename)
+}
+
+func newRemoteReusableWorkflowWithPlat(uses string) *remoteReusableWorkflow {
+    // GitHub docs:
+    // https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_iduses
+    r := regexp.MustCompile(`^([^/]+)/([^/]+)/\.([^/]+)/workflows/([^@]+)@(.*)$`)
+    matches := r.FindStringSubmatch(uses)
+    if len(matches) != 6 {
+        return nil
+    }
+    return &remoteReusableWorkflow{
+        Org:         matches[1],
+        Repo:        matches[2],
+        GitPlatform: matches[3],
+        Filename:    matches[4],
+        Ref:         matches[5],
+    }
+}
+
+// deprecated: use newRemoteReusableWorkflowWithPlat
 func newRemoteReusableWorkflow(uses string) *remoteReusableWorkflow {
     // GitHub docs:
     // https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_iduses
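The regular expression in newRemoteReusableWorkflowWithPlat captures the platform directory (.github or .gitea) as its third group, which FilePath() then uses to rebuild the workflow path. A standalone sketch of the same pattern, with illustrative owner/repo values:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // Same pattern as in the diff above.
        r := regexp.MustCompile(`^([^/]+)/([^/]+)/\.([^/]+)/workflows/([^@]+)@(.*)$`)
        for _, uses := range []string{
            "owner/repo/.gitea/workflows/build.yml@main",
            "owner/repo/.github/workflows/build.yml@v1",
        } {
            m := r.FindStringSubmatch(uses)
            // m[1]=org, m[2]=repo, m[3]=git platform, m[4]=filename, m[5]=ref
            fmt.Printf("org=%s repo=%s platform=%s file=%s ref=%s\n", m[1], m[2], m[3], m[4], m[5])
        }
    }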