Merge tag 'nektos/v0.2.43'
Conflicts:
	pkg/container/docker_run.go
	pkg/runner/action.go
	pkg/runner/logger.go
	pkg/runner/run_context.go
	pkg/runner/runner.go
	pkg/runner/step_action_remote_test.go
@@ -14,6 +14,7 @@ import (
"strings"

"github.com/kballard/go-shellquote"

"github.com/nektos/act/pkg/common"
"github.com/nektos/act/pkg/container"
"github.com/nektos/act/pkg/model"
@@ -29,10 +30,9 @@ type actionStep interface {

type readAction func(ctx context.Context, step *model.Step, actionDir string, actionPath string, readFile actionYamlReader, writeFile fileWriter) (*model.Action, error)

type (
actionYamlReader func(filename string) (io.Reader, io.Closer, error)
fileWriter func(filename string, data []byte, perm fs.FileMode) error
)
type actionYamlReader func(filename string) (io.Reader, io.Closer, error)

type fileWriter func(filename string, data []byte, perm fs.FileMode) error

type runAction func(step actionStep, actionDir string, remoteAction *remoteAction) common.Executor

@@ -156,6 +156,8 @@ func runActionImpl(step actionStep, actionDir string, remoteAction *remoteAction
containerArgs := []string{"node", path.Join(containerActionDir, action.Runs.Main)}
logger.Debugf("executing remote job container: %s", containerArgs)

rc.ApplyExtraPath(ctx, step.getEnv())

return rc.execJobContainer(containerArgs, *step.getEnv(), "", "")(ctx)
case model.ActionRunsUsingDocker:
location := actionLocation
@@ -235,14 +237,17 @@ func execAsDocker(ctx context.Context, step actionStep, actionName string, based

var prepImage common.Executor
var image string
forcePull := false
if strings.HasPrefix(action.Runs.Image, "docker://") {
image = strings.TrimPrefix(action.Runs.Image, "docker://")
// Apply forcePull only for prebuilt docker images
forcePull = rc.Config.ForcePull
} else {
// "-dockeraction" ensures that "./", "./test " won't get converted to "act-:latest", "act-test-:latest", which are invalid docker image names
image = fmt.Sprintf("%s-dockeraction:%s", regexp.MustCompile("[^a-zA-Z0-9]").ReplaceAllString(actionName, "-"), "latest")
image = fmt.Sprintf("act-%s", strings.TrimLeft(image, "-"))
image = strings.ToLower(image)
contextDir := filepath.Join(basedir, action.Runs.Main)
contextDir, fileName := filepath.Split(filepath.Join(basedir, action.Runs.Image))

anyArchExists, err := container.ImageExistsLocally(ctx, image, "any")
if err != nil {
@@ -272,6 +277,7 @@ func execAsDocker(ctx context.Context, step actionStep, actionName string, based
}
prepImage = container.NewDockerBuildExecutor(container.NewDockerBuildExecutorInput{
ContextDir: contextDir,
Dockerfile: fileName,
ImageTag: image,
Container: actionContainer,
Platform: rc.Config.ContainerArchitecture,
@@ -303,7 +309,7 @@ func execAsDocker(ctx context.Context, step actionStep, actionName string, based
stepContainer := newStepContainer(ctx, step, image, cmd, entrypoint)
return common.NewPipelineExecutor(
prepImage,
stepContainer.Pull(rc.Config.ForcePull),
stepContainer.Pull(forcePull),
stepContainer.Remove().IfBool(!rc.Config.ReuseContainers),
stepContainer.Create(rc.Config.ContainerCapAdd, rc.Config.ContainerCapDrop),
stepContainer.Start(true),
@@ -364,7 +370,10 @@ func newStepContainer(ctx context.Context, step step, image string, cmd []string
envList = append(envList, fmt.Sprintf("%s=%s", "RUNNER_TEMP", "/tmp"))

binds, mounts := rc.GetBindsAndMounts()

networkMode := fmt.Sprintf("container:%s", rc.jobContainerName())
if rc.IsHostEnv(ctx) {
networkMode = "default"
}
stepContainer := container.NewContainer(&container.NewContainerInput{
Cmd: cmd,
Entrypoint: entrypoint,
@@ -375,22 +384,23 @@ func newStepContainer(ctx context.Context, step step, image string, cmd []string
Name: createSimpleContainerName(rc.jobContainerName(), "STEP-"+stepModel.ID),
Env: envList,
Mounts: mounts,
NetworkMode: fmt.Sprintf("container:%s", rc.jobContainerName()),
NetworkMode: networkMode,
Binds: binds,
Stdout: logWriter,
Stderr: logWriter,
Privileged: rc.Config.Privileged,
UsernsMode: rc.Config.UsernsMode,
Platform: rc.Config.ContainerArchitecture,
Options: rc.Config.ContainerOptions,
AutoRemove: rc.Config.AutoRemove,
})
return stepContainer
}

func populateEnvsFromSavedState(env *map[string]string, step actionStep, rc *RunContext) {
stepResult := rc.StepResults[step.getStepModel().ID]
if stepResult != nil {
for name, value := range stepResult.State {
state, ok := rc.IntraActionState[step.getStepModel().ID]
if ok {
for name, value := range state {
envName := fmt.Sprintf("STATE_%s", name)
(*env)[envName] = value
}
@@ -503,6 +513,8 @@ func runPreStep(step actionStep) common.Executor {
containerArgs := []string{"node", path.Join(containerActionDir, action.Runs.Pre)}
logger.Debugf("executing remote job container: %s", containerArgs)

rc.ApplyExtraPath(ctx, step.getEnv())

return rc.execJobContainer(containerArgs, *step.getEnv(), "", "")(ctx)

case model.ActionRunsUsingComposite:
@@ -510,7 +522,10 @@ func runPreStep(step actionStep) common.Executor {
step.getCompositeRunContext(ctx)
}

return step.getCompositeSteps().pre(ctx)
if steps := step.getCompositeSteps(); steps != nil && steps.pre != nil {
return steps.pre(ctx)
}
return fmt.Errorf("missing steps in composite action")

case model.ActionRunsUsingGo:
// defaults in pre steps were missing, however provided inputs are available
@@ -626,6 +641,8 @@ func runPostStep(step actionStep) common.Executor {
containerArgs := []string{"node", path.Join(containerActionDir, action.Runs.Post)}
logger.Debugf("executing remote job container: %s", containerArgs)

rc.ApplyExtraPath(ctx, step.getEnv())

return rc.execJobContainer(containerArgs, *step.getEnv(), "", "")(ctx)

case model.ActionRunsUsingComposite:
@@ -633,7 +650,10 @@ func runPostStep(step actionStep) common.Executor {
return err
}

return step.getCompositeSteps().post(ctx)
if steps := step.getCompositeSteps(); steps != nil && steps.post != nil {
return steps.post(ctx)
}
return fmt.Errorf("missing steps in composite action")

case model.ActionRunsUsingGo:
populateEnvsFromSavedState(step.getEnv(), step, rc)
@@ -66,6 +66,7 @@ func newCompositeRunContext(ctx context.Context, parent *RunContext, step action
JobContainer: parent.JobContainer,
ActionPath: actionPath,
Env: env,
GlobalEnv: parent.GlobalEnv,
Masks: parent.Masks,
ExtraPath: parent.ExtraPath,
Parent: parent,
@@ -85,6 +86,10 @@ func execAsComposite(step actionStep) common.Executor {

steps := step.getCompositeSteps()

if steps == nil || steps.main == nil {
return fmt.Errorf("missing steps in composite action")
}

ctx = WithCompositeLogger(ctx, &compositeRC.Masks)

err := steps.main(ctx)
@@ -99,6 +104,14 @@ func execAsComposite(step actionStep) common.Executor {

rc.Masks = append(rc.Masks, compositeRC.Masks...)
rc.ExtraPath = compositeRC.ExtraPath
// compositeRC.Env is dirty, contains INPUT_ and merged step env, only rely on compositeRC.GlobalEnv
for k, v := range compositeRC.GlobalEnv {
rc.Env[k] = v
if rc.GlobalEnv == nil {
rc.GlobalEnv = map[string]string{}
}
rc.GlobalEnv[k] = v
}

return err
}
@@ -201,10 +201,11 @@ func TestActionRunner(t *testing.T) {
},
CurrentStep: "post-step",
StepResults: map[string]*model.StepResult{
"step": {},
},
IntraActionState: map[string]map[string]string{
"step": {
State: map[string]string{
"name": "state value",
},
"name": "state value",
},
},
},
pkg/runner/command.go: 66 lines changed (Executable file → Normal file)
@@ -16,22 +16,27 @@ func init() {
commandPatternADO = regexp.MustCompile("^##\\[([^ ]+)( (.+))?]([^\r\n]*)[\r\n]+$")
}

func tryParseRawActionCommand(line string) (command string, kvPairs map[string]string, arg string, ok bool) {
if m := commandPatternGA.FindStringSubmatch(line); m != nil {
command = m[1]
kvPairs = parseKeyValuePairs(m[3], ",")
arg = m[4]
ok = true
} else if m := commandPatternADO.FindStringSubmatch(line); m != nil {
command = m[1]
kvPairs = parseKeyValuePairs(m[3], ";")
arg = m[4]
ok = true
}
return
}

func (rc *RunContext) commandHandler(ctx context.Context) common.LineHandler {
logger := common.Logger(ctx)
resumeCommand := ""
return func(line string) bool {
var command string
var kvPairs map[string]string
var arg string
if m := commandPatternGA.FindStringSubmatch(line); m != nil {
command = m[1]
kvPairs = parseKeyValuePairs(m[3], ",")
arg = m[4]
} else if m := commandPatternADO.FindStringSubmatch(line); m != nil {
command = m[1]
kvPairs = parseKeyValuePairs(m[3], ";")
arg = m[4]
} else {
command, kvPairs, arg, ok := tryParseRawActionCommand(line)
if !ok {
return true
}

@@ -66,6 +71,8 @@ func (rc *RunContext) commandHandler(ctx context.Context) common.LineHandler {
case "save-state":
logger.Infof(" \U0001f4be %s", line)
rc.saveState(ctx, kvPairs, arg)
case "add-matcher":
logger.Infof(" \U00002753 add-matcher %s", arg)
default:
logger.Infof(" \U00002753 %s", line)
}
@@ -75,11 +82,17 @@ func (rc *RunContext) commandHandler(ctx context.Context) common.LineHandler {
}

func (rc *RunContext) setEnv(ctx context.Context, kvPairs map[string]string, arg string) {
common.Logger(ctx).Infof(" \U00002699 ::set-env:: %s=%s", kvPairs["name"], arg)
name := kvPairs["name"]
common.Logger(ctx).Infof(" \U00002699 ::set-env:: %s=%s", name, arg)
if rc.Env == nil {
rc.Env = make(map[string]string)
}
rc.Env[kvPairs["name"]] = arg
rc.Env[name] = arg
// for composite action GITHUB_ENV and set-env passing
if rc.GlobalEnv == nil {
rc.GlobalEnv = map[string]string{}
}
rc.GlobalEnv[name] = arg
}
func (rc *RunContext) setOutput(ctx context.Context, kvPairs map[string]string, arg string) {
logger := common.Logger(ctx)
@@ -101,7 +114,13 @@ func (rc *RunContext) setOutput(ctx context.Context, kvPairs map[string]string,
}
func (rc *RunContext) addPath(ctx context.Context, arg string) {
common.Logger(ctx).Infof(" \U00002699 ::add-path:: %s", arg)
rc.ExtraPath = append(rc.ExtraPath, arg)
extraPath := []string{arg}
for _, v := range rc.ExtraPath {
if v != arg {
extraPath = append(extraPath, v)
}
}
rc.ExtraPath = extraPath
}

func parseKeyValuePairs(kvPairs string, separator string) map[string]string {
@@ -147,13 +166,16 @@ func unescapeKvPairs(kvPairs map[string]string) map[string]string {
}

func (rc *RunContext) saveState(ctx context.Context, kvPairs map[string]string, arg string) {
if rc.CurrentStep != "" {
stepResult := rc.StepResults[rc.CurrentStep]
if stepResult != nil {
if stepResult.State == nil {
stepResult.State = map[string]string{}
}
stepResult.State[kvPairs["name"]] = arg
stepID := rc.CurrentStep
if stepID != "" {
if rc.IntraActionState == nil {
rc.IntraActionState = map[string]map[string]string{}
}
state, ok := rc.IntraActionState[stepID]
if !ok {
state = map[string]string{}
rc.IntraActionState[stepID] = state
}
state[kvPairs["name"]] = arg
}
}
@@ -64,7 +64,7 @@ func TestAddpath(t *testing.T) {
a.Equal("/zoo", rc.ExtraPath[0])

handler("::add-path::/boo\n")
a.Equal("/boo", rc.ExtraPath[1])
a.Equal("/boo", rc.ExtraPath[0])
}

func TestStopCommands(t *testing.T) {
@@ -102,7 +102,7 @@ func TestAddpathADO(t *testing.T) {
a.Equal("/zoo", rc.ExtraPath[0])

handler("##[add-path]/boo\n")
a.Equal("/boo", rc.ExtraPath[1])
a.Equal("/boo", rc.ExtraPath[0])
}

func TestAddmask(t *testing.T) {
@@ -177,11 +177,7 @@ func TestAddmaskUsemask(t *testing.T) {
func TestSaveState(t *testing.T) {
rc := &RunContext{
CurrentStep: "step",
StepResults: map[string]*model.StepResult{
"step": {
State: map[string]string{},
},
},
StepResults: map[string]*model.StepResult{},
}

ctx := context.Background()
@@ -189,5 +185,5 @@ func TestSaveState(t *testing.T) {
handler := rc.commandHandler(ctx)
handler("::save-state name=state-name::state-value\n")

assert.Equal(t, "state-value", rc.StepResults["step"].State["state-name"])
assert.Equal(t, "state-value", rc.IntraActionState["step"]["state-name"])
}
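Taken together, the command.go and action.go hunks above move saved state out of StepResults and into RunContext.IntraActionState, from where it is re-exposed to pre/post steps as STATE_* variables. A minimal sketch of that round trip, using only types visible in this diff (the step ID and value are illustrative, not taken from the commit):

rc := &RunContext{CurrentStep: "step", StepResults: map[string]*model.StepResult{"step": {}}}
handler := rc.commandHandler(context.Background())
handler("::save-state name=pid::1234\n")
// rc.IntraActionState["step"]["pid"] == "1234"; populateEnvsFromSavedState later copies it
// into the step environment as STATE_pid, which the post step can read.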
@@ -2,6 +2,7 @@ package runner

import (
"context"
"io"

"github.com/nektos/act/pkg/common"
"github.com/nektos/act/pkg/container"
@@ -49,11 +50,6 @@ func (cm *containerMock) UpdateFromImageEnv(env *map[string]string) common.Execu
return args.Get(0).(func(context.Context) error)
}

func (cm *containerMock) UpdateFromPath(env *map[string]string) common.Executor {
args := cm.Called(env)
return args.Get(0).(func(context.Context) error)
}

func (cm *containerMock) Copy(destPath string, files ...*container.FileEntry) common.Executor {
args := cm.Called(destPath, files)
return args.Get(0).(func(context.Context) error)
@@ -63,7 +59,17 @@ func (cm *containerMock) CopyDir(destPath string, srcPath string, useGitIgnore b
args := cm.Called(destPath, srcPath, useGitIgnore)
return args.Get(0).(func(context.Context) error)
}

func (cm *containerMock) Exec(command []string, env map[string]string, user, workdir string) common.Executor {
args := cm.Called(command, env, user, workdir)
return args.Get(0).(func(context.Context) error)
}

func (cm *containerMock) GetContainerArchive(ctx context.Context, srcPath string) (io.ReadCloser, error) {
args := cm.Called(ctx, srcPath)
err, hasErr := args.Get(1).(error)
if !hasErr {
err = nil
}
return args.Get(0).(io.ReadCloser), err
}
@@ -21,8 +21,14 @@ type ExpressionEvaluator interface {
|
||||
|
||||
// NewExpressionEvaluator creates a new evaluator
|
||||
func (rc *RunContext) NewExpressionEvaluator(ctx context.Context) ExpressionEvaluator {
|
||||
return rc.NewExpressionEvaluatorWithEnv(ctx, rc.GetEnv())
|
||||
}
|
||||
|
||||
func (rc *RunContext) NewExpressionEvaluatorWithEnv(ctx context.Context, env map[string]string) ExpressionEvaluator {
|
||||
var workflowCallResult map[string]*model.WorkflowCallResult
|
||||
|
||||
// todo: cleanup EvaluationEnvironment creation
|
||||
using := make(map[string]map[string]map[string]string)
|
||||
using := make(map[string]exprparser.Needs)
|
||||
strategy := make(map[string]interface{})
|
||||
if rc.Run != nil {
|
||||
job := rc.Run.Job()
|
||||
@@ -35,8 +41,26 @@ func (rc *RunContext) NewExpressionEvaluator(ctx context.Context) ExpressionEval
|
||||
jobNeeds := rc.Run.Job().Needs()
|
||||
|
||||
for _, needs := range jobNeeds {
|
||||
using[needs] = map[string]map[string]string{
|
||||
"outputs": jobs[needs].Outputs,
|
||||
using[needs] = exprparser.Needs{
|
||||
Outputs: jobs[needs].Outputs,
|
||||
Result: jobs[needs].Result,
|
||||
}
|
||||
}
|
||||
|
||||
// only setup jobs context in case of workflow_call
|
||||
// and existing expression evaluator (this means, jobs are at
|
||||
// least ready to run)
|
||||
if rc.caller != nil && rc.ExprEval != nil {
|
||||
workflowCallResult = map[string]*model.WorkflowCallResult{}
|
||||
|
||||
for jobName, job := range jobs {
|
||||
result := model.WorkflowCallResult{
|
||||
Outputs: map[string]string{},
|
||||
}
|
||||
for k, v := range job.Outputs {
|
||||
result.Outputs[k] = v
|
||||
}
|
||||
workflowCallResult[jobName] = &result
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -46,12 +70,13 @@ func (rc *RunContext) NewExpressionEvaluator(ctx context.Context) ExpressionEval
|
||||
|
||||
ee := &exprparser.EvaluationEnvironment{
|
||||
Github: ghc,
|
||||
Env: rc.GetEnv(),
|
||||
Env: env,
|
||||
Job: rc.getJobContext(),
|
||||
Jobs: &workflowCallResult,
|
||||
// todo: should be unavailable
|
||||
// but required to interpolate/evaluate the step outputs on the job
|
||||
Steps: rc.getStepsContext(),
|
||||
Secrets: rc.Config.Secrets,
|
||||
Secrets: getWorkflowSecrets(ctx, rc),
|
||||
Strategy: strategy,
|
||||
Matrix: rc.Matrix,
|
||||
Needs: using,
|
||||
@@ -82,10 +107,11 @@ func (rc *RunContext) NewStepExpressionEvaluator(ctx context.Context, step step)
|
||||
jobs := rc.Run.Workflow.Jobs
|
||||
jobNeeds := rc.Run.Job().Needs()
|
||||
|
||||
using := make(map[string]map[string]map[string]string)
|
||||
using := make(map[string]exprparser.Needs)
|
||||
for _, needs := range jobNeeds {
|
||||
using[needs] = map[string]map[string]string{
|
||||
"outputs": jobs[needs].Outputs,
|
||||
using[needs] = exprparser.Needs{
|
||||
Outputs: jobs[needs].Outputs,
|
||||
Result: jobs[needs].Result,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,7 +123,7 @@ func (rc *RunContext) NewStepExpressionEvaluator(ctx context.Context, step step)
|
||||
Env: *step.getEnv(),
|
||||
Job: rc.getJobContext(),
|
||||
Steps: rc.getStepsContext(),
|
||||
Secrets: rc.Config.Secrets,
|
||||
Secrets: getWorkflowSecrets(ctx, rc),
|
||||
Strategy: strategy,
|
||||
Matrix: rc.Matrix,
|
||||
Needs: using,
|
||||
@@ -311,6 +337,8 @@ func rewriteSubExpression(ctx context.Context, in string, forceFormat bool) (str
|
||||
func getEvaluatorInputs(ctx context.Context, rc *RunContext, step step, ghc *model.GithubContext) map[string]interface{} {
|
||||
inputs := map[string]interface{}{}
|
||||
|
||||
setupWorkflowInputs(ctx, &inputs, rc)
|
||||
|
||||
var env map[string]string
|
||||
if step != nil {
|
||||
env = *step.getEnv()
|
||||
@@ -343,3 +371,54 @@ func getEvaluatorInputs(ctx context.Context, rc *RunContext, step step, ghc *mod
|
||||
|
||||
return inputs
|
||||
}
|
||||
|
||||
func setupWorkflowInputs(ctx context.Context, inputs *map[string]interface{}, rc *RunContext) {
|
||||
if rc.caller != nil {
|
||||
config := rc.Run.Workflow.WorkflowCallConfig()
|
||||
|
||||
for name, input := range config.Inputs {
|
||||
value := rc.caller.runContext.Run.Job().With[name]
|
||||
if value != nil {
|
||||
if str, ok := value.(string); ok {
|
||||
// evaluate using the calling RunContext (outside)
|
||||
value = rc.caller.runContext.ExprEval.Interpolate(ctx, str)
|
||||
}
|
||||
}
|
||||
|
||||
if value == nil && config != nil && config.Inputs != nil {
|
||||
value = input.Default
|
||||
if rc.ExprEval != nil {
|
||||
if str, ok := value.(string); ok {
|
||||
// evaluate using the called RunContext (inside)
|
||||
value = rc.ExprEval.Interpolate(ctx, str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(*inputs)[name] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func getWorkflowSecrets(ctx context.Context, rc *RunContext) map[string]string {
|
||||
if rc.caller != nil {
|
||||
job := rc.caller.runContext.Run.Job()
|
||||
secrets := job.Secrets()
|
||||
|
||||
if secrets == nil && job.InheritSecrets() {
|
||||
secrets = rc.caller.runContext.Config.Secrets
|
||||
}
|
||||
|
||||
if secrets == nil {
|
||||
secrets = map[string]string{}
|
||||
}
|
||||
|
||||
for k, v := range secrets {
|
||||
secrets[k] = rc.caller.runContext.ExprEval.Interpolate(ctx, v)
|
||||
}
|
||||
|
||||
return secrets
|
||||
}
|
||||
|
||||
return rc.Config.Secrets
|
||||
}
|
||||
|
@@ -96,21 +96,18 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
||||
}
|
||||
|
||||
postExecutor = postExecutor.Finally(func(ctx context.Context) error {
|
||||
logger := common.Logger(ctx)
|
||||
jobError := common.JobError(ctx)
|
||||
if jobError != nil {
|
||||
info.result("failure")
|
||||
logger.WithField("jobResult", "failure").Infof("\U0001F3C1 Job failed")
|
||||
} else {
|
||||
err := info.stopContainer()(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
info.result("success")
|
||||
logger.WithField("jobResult", "success").Infof("\U0001F3C1 Job succeeded")
|
||||
var err error
|
||||
if rc.Config.AutoRemove || jobError == nil {
|
||||
// always allow 1 min for stopping and removing the runner, even if we were cancelled
|
||||
ctx, cancel := context.WithTimeout(common.WithLogger(context.Background(), common.Logger(ctx)), time.Minute)
|
||||
defer cancel()
|
||||
err = info.stopContainer()(ctx)
|
||||
}
|
||||
setJobResult(ctx, info, rc, jobError == nil)
|
||||
setJobOutputs(ctx, rc)
|
||||
|
||||
return nil
|
||||
return err
|
||||
})
|
||||
|
||||
pipeline := make([]common.Executor, 0)
|
||||
@@ -123,7 +120,7 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
||||
if ctx.Err() == context.Canceled {
|
||||
// in case of an aborted run, we still should execute the
|
||||
// post steps to allow cleanup.
|
||||
ctx, cancel = context.WithTimeout(WithJobLogger(context.Background(), rc.Run.JobID, rc.String(), rc.Config, &rc.Masks, rc.Matrix), 5*time.Minute)
|
||||
ctx, cancel = context.WithTimeout(common.WithLogger(context.Background(), common.Logger(ctx)), 5*time.Minute)
|
||||
defer cancel()
|
||||
}
|
||||
return postExecutor(ctx)
|
||||
@@ -132,6 +129,49 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
||||
Finally(info.closeContainer()))
|
||||
}
|
||||
|
||||
func setJobResult(ctx context.Context, info jobInfo, rc *RunContext, success bool) {
|
||||
logger := common.Logger(ctx)
|
||||
|
||||
jobResult := "success"
|
||||
// we have only one result for a whole matrix build, so we need
|
||||
// to keep an existing result state if we run a matrix
|
||||
if len(info.matrix()) > 0 && rc.Run.Job().Result != "" {
|
||||
jobResult = rc.Run.Job().Result
|
||||
}
|
||||
|
||||
if !success {
|
||||
jobResult = "failure"
|
||||
}
|
||||
|
||||
info.result(jobResult)
|
||||
if rc.caller != nil {
|
||||
// set reusable workflow job result
|
||||
rc.caller.runContext.result(jobResult)
|
||||
}
|
||||
|
||||
jobResultMessage := "succeeded"
|
||||
if jobResult != "success" {
|
||||
jobResultMessage = "failed"
|
||||
}
|
||||
|
||||
logger.WithField("jobResult", jobResult).Infof("\U0001F3C1 Job %s", jobResultMessage)
|
||||
}
|
||||
|
||||
func setJobOutputs(ctx context.Context, rc *RunContext) {
|
||||
if rc.caller != nil {
|
||||
// map outputs for reusable workflows
|
||||
callerOutputs := make(map[string]string)
|
||||
|
||||
ee := rc.NewExpressionEvaluator(ctx)
|
||||
|
||||
for k, v := range rc.Run.Workflow.WorkflowCallConfig().Outputs {
|
||||
callerOutputs[k] = ee.Interpolate(ctx, ee.Interpolate(ctx, v.Value))
|
||||
}
|
||||
|
||||
rc.caller.runContext.Run.Job().Outputs = callerOutputs
|
||||
}
|
||||
}
|
||||
|
||||
func useStepLogger(rc *RunContext, stepModel *model.Step, stage stepStage, executor common.Executor) common.Executor {
|
||||
return func(ctx context.Context) error {
|
||||
ctx = withStepLogger(ctx, stepModel.Number, stepModel.ID, rc.ExprEval.Interpolate(ctx, stepModel.String()), stage.String())
|
||||
|
@@ -15,15 +15,15 @@ import (
|
||||
|
||||
func TestJobExecutor(t *testing.T) {
|
||||
tables := []TestJobFileInfo{
|
||||
{workdir, "uses-and-run-in-one-step", "push", "Invalid run/uses syntax for job:test step:Test", platforms},
|
||||
{workdir, "uses-github-empty", "push", "Expected format {org}/{repo}[/path]@ref", platforms},
|
||||
{workdir, "uses-github-noref", "push", "Expected format {org}/{repo}[/path]@ref", platforms},
|
||||
{workdir, "uses-github-root", "push", "", platforms},
|
||||
{workdir, "uses-github-path", "push", "", platforms},
|
||||
{workdir, "uses-docker-url", "push", "", platforms},
|
||||
{workdir, "uses-github-full-sha", "push", "", platforms},
|
||||
{workdir, "uses-github-short-sha", "push", "Unable to resolve action `actions/hello-world-docker-action@b136eb8`, the provided ref `b136eb8` is the shortened version of a commit SHA, which is not supported. Please use the full commit SHA `b136eb8894c5cb1dd5807da824be97ccdf9b5423` instead", platforms},
|
||||
{workdir, "job-nil-step", "push", "invalid Step 0: missing run or uses key", platforms},
|
||||
{workdir, "uses-and-run-in-one-step", "push", "Invalid run/uses syntax for job:test step:Test", platforms, secrets},
|
||||
{workdir, "uses-github-empty", "push", "Expected format {org}/{repo}[/path]@ref", platforms, secrets},
|
||||
{workdir, "uses-github-noref", "push", "Expected format {org}/{repo}[/path]@ref", platforms, secrets},
|
||||
{workdir, "uses-github-root", "push", "", platforms, secrets},
|
||||
{workdir, "uses-github-path", "push", "", platforms, secrets},
|
||||
{workdir, "uses-docker-url", "push", "", platforms, secrets},
|
||||
{workdir, "uses-github-full-sha", "push", "", platforms, secrets},
|
||||
{workdir, "uses-github-short-sha", "push", "Unable to resolve action `actions/hello-world-docker-action@b136eb8`, the provided ref `b136eb8` is the shortened version of a commit SHA, which is not supported. Please use the full commit SHA `b136eb8894c5cb1dd5807da824be97ccdf9b5423` instead", platforms, secrets},
|
||||
{workdir, "job-nil-step", "push", "invalid Step 0: missing run or uses key", platforms, secrets},
|
||||
}
|
||||
// These tests are sufficient to only check syntax.
|
||||
ctx := common.WithDryrun(context.Background(), true)
|
||||
|
@@ -57,38 +57,59 @@ func WithMasks(ctx context.Context, masks *[]string) context.Context {
|
||||
return context.WithValue(ctx, masksContextKeyVal, masks)
|
||||
}
|
||||
|
||||
type JobLoggerFactory interface {
|
||||
WithJobLogger() *logrus.Logger
|
||||
}
|
||||
|
||||
type jobLoggerFactoryContextKey string
|
||||
|
||||
var jobLoggerFactoryContextKeyVal = (jobLoggerFactoryContextKey)("jobloggerkey")
|
||||
|
||||
func WithJobLoggerFactory(ctx context.Context, factory JobLoggerFactory) context.Context {
|
||||
return context.WithValue(ctx, jobLoggerFactoryContextKeyVal, factory)
|
||||
}
|
||||
|
||||
// WithJobLogger attaches a new logger to context that is aware of steps
|
||||
func WithJobLogger(ctx context.Context, jobID string, jobName string, config *Config, masks *[]string, matrix map[string]interface{}) context.Context {
|
||||
mux.Lock()
|
||||
defer mux.Unlock()
|
||||
|
||||
var formatter logrus.Formatter
|
||||
if config.JSONLogger {
|
||||
formatter = &jobLogJSONFormatter{
|
||||
formatter: &logrus.JSONFormatter{},
|
||||
masker: valueMasker(config.InsecureSecrets, config.Secrets),
|
||||
}
|
||||
} else {
|
||||
formatter = &jobLogFormatter{
|
||||
color: colors[nextColor%len(colors)],
|
||||
masker: valueMasker(config.InsecureSecrets, config.Secrets),
|
||||
}
|
||||
}
|
||||
|
||||
nextColor++
|
||||
ctx = WithMasks(ctx, masks)
|
||||
|
||||
logger := logrus.New()
|
||||
if hook := common.LoggerHook(ctx); hook != nil {
|
||||
logger.AddHook(hook)
|
||||
}
|
||||
logger.SetFormatter(formatter)
|
||||
logger.SetOutput(os.Stdout)
|
||||
if config.JobLoggerLevel != nil {
|
||||
logger.SetLevel(*config.JobLoggerLevel)
|
||||
var logger *logrus.Logger
|
||||
if jobLoggerFactory, ok := ctx.Value(jobLoggerFactoryContextKeyVal).(JobLoggerFactory); ok && jobLoggerFactory != nil {
|
||||
logger = jobLoggerFactory.WithJobLogger()
|
||||
} else {
|
||||
logger.SetLevel(logrus.TraceLevel)
|
||||
var formatter logrus.Formatter
|
||||
if config.JSONLogger {
|
||||
formatter = &logrus.JSONFormatter{}
|
||||
} else {
|
||||
mux.Lock()
|
||||
defer mux.Unlock()
|
||||
nextColor++
|
||||
formatter = &jobLogFormatter{
|
||||
color: colors[nextColor%len(colors)],
|
||||
}
|
||||
}
|
||||
|
||||
logger = logrus.New()
|
||||
logger.SetOutput(os.Stdout)
|
||||
logger.SetLevel(logrus.GetLevel())
|
||||
logger.SetFormatter(formatter)
|
||||
}
|
||||
|
||||
{ // Adapt to Gitea
|
||||
if hook := common.LoggerHook(ctx); hook != nil {
|
||||
logger.AddHook(hook)
|
||||
}
|
||||
if config.JobLoggerLevel != nil {
|
||||
logger.SetLevel(*config.JobLoggerLevel)
|
||||
} else {
|
||||
logger.SetLevel(logrus.TraceLevel)
|
||||
}
|
||||
}
|
||||
|
||||
logger.SetFormatter(&maskedFormatter{
|
||||
Formatter: logger.Formatter,
|
||||
masker: valueMasker(config.InsecureSecrets, config.Secrets),
|
||||
})
|
||||
rtn := logger.WithFields(logrus.Fields{
|
||||
"job": jobName,
|
||||
"jobID": jobID,
|
||||
@@ -157,16 +178,22 @@ func valueMasker(insecureSecrets bool, secrets map[string]string) entryProcessor
|
||||
}
|
||||
}
|
||||
|
||||
type jobLogFormatter struct {
|
||||
color int
|
||||
type maskedFormatter struct {
|
||||
logrus.Formatter
|
||||
masker entryProcessor
|
||||
}
|
||||
|
||||
func (f *maskedFormatter) Format(entry *logrus.Entry) ([]byte, error) {
|
||||
return f.Formatter.Format(f.masker(entry))
|
||||
}
|
||||
|
||||
type jobLogFormatter struct {
|
||||
color int
|
||||
}
|
||||
|
||||
func (f *jobLogFormatter) Format(entry *logrus.Entry) ([]byte, error) {
|
||||
b := &bytes.Buffer{}
|
||||
|
||||
entry = f.masker(entry)
|
||||
|
||||
if f.isColored(entry) {
|
||||
f.printColored(b, entry)
|
||||
} else {
|
||||
@@ -233,12 +260,3 @@ func checkIfTerminal(w io.Writer) bool {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
type jobLogJSONFormatter struct {
|
||||
masker entryProcessor
|
||||
formatter *logrus.JSONFormatter
|
||||
}
|
||||
|
||||
func (f *jobLogJSONFormatter) Format(entry *logrus.Entry) ([]byte, error) {
|
||||
return f.formatter.Format(f.masker(entry))
|
||||
}
|
||||
|
pkg/runner/reusable_workflow.go: 129 lines added (new file)
@@ -0,0 +1,129 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"sync"
|
||||
|
||||
"github.com/nektos/act/pkg/common"
|
||||
"github.com/nektos/act/pkg/common/git"
|
||||
"github.com/nektos/act/pkg/model"
|
||||
)
|
||||
|
||||
func newLocalReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
||||
return newReusableWorkflowExecutor(rc, rc.Config.Workdir, rc.Run.Job().Uses)
|
||||
}
|
||||
|
||||
func newRemoteReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
||||
uses := rc.Run.Job().Uses
|
||||
|
||||
remoteReusableWorkflow := newRemoteReusableWorkflow(uses)
|
||||
if remoteReusableWorkflow == nil {
|
||||
return common.NewErrorExecutor(fmt.Errorf("expected format {owner}/{repo}/.github/workflows/{filename}@{ref}. Actual '%s' Input string was not in a correct format", uses))
|
||||
}
|
||||
remoteReusableWorkflow.URL = rc.Config.GitHubInstance
|
||||
|
||||
workflowDir := fmt.Sprintf("%s/%s", rc.ActionCacheDir(), safeFilename(uses))
|
||||
|
||||
return common.NewPipelineExecutor(
|
||||
newMutexExecutor(cloneIfRequired(rc, *remoteReusableWorkflow, workflowDir)),
|
||||
newReusableWorkflowExecutor(rc, workflowDir, fmt.Sprintf("./.github/workflows/%s", remoteReusableWorkflow.Filename)),
|
||||
)
|
||||
}
|
||||
|
||||
var (
|
||||
executorLock sync.Mutex
|
||||
)
|
||||
|
||||
func newMutexExecutor(executor common.Executor) common.Executor {
|
||||
return func(ctx context.Context) error {
|
||||
executorLock.Lock()
|
||||
defer executorLock.Unlock()
|
||||
|
||||
return executor(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
func cloneIfRequired(rc *RunContext, remoteReusableWorkflow remoteReusableWorkflow, targetDirectory string) common.Executor {
|
||||
return common.NewConditionalExecutor(
|
||||
func(ctx context.Context) bool {
|
||||
_, err := os.Stat(targetDirectory)
|
||||
notExists := errors.Is(err, fs.ErrNotExist)
|
||||
return notExists
|
||||
},
|
||||
git.NewGitCloneExecutor(git.NewGitCloneExecutorInput{
|
||||
URL: remoteReusableWorkflow.CloneURL(),
|
||||
Ref: remoteReusableWorkflow.Ref,
|
||||
Dir: targetDirectory,
|
||||
Token: rc.Config.Token,
|
||||
}),
|
||||
nil,
|
||||
)
|
||||
}
|
||||
|
||||
func newReusableWorkflowExecutor(rc *RunContext, directory string, workflow string) common.Executor {
|
||||
return func(ctx context.Context) error {
|
||||
planner, err := model.NewWorkflowPlanner(path.Join(directory, workflow), true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
plan, err := planner.PlanEvent("workflow_call")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
runner, err := NewReusableWorkflowRunner(rc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return runner.NewPlanExecutor(plan)(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
func NewReusableWorkflowRunner(rc *RunContext) (Runner, error) {
|
||||
runner := &runnerImpl{
|
||||
config: rc.Config,
|
||||
eventJSON: rc.EventJSON,
|
||||
caller: &caller{
|
||||
runContext: rc,
|
||||
},
|
||||
}
|
||||
|
||||
return runner.configure()
|
||||
}
|
||||
|
||||
type remoteReusableWorkflow struct {
|
||||
URL string
|
||||
Org string
|
||||
Repo string
|
||||
Filename string
|
||||
Ref string
|
||||
}
|
||||
|
||||
func (r *remoteReusableWorkflow) CloneURL() string {
|
||||
return fmt.Sprintf("https://%s/%s/%s", r.URL, r.Org, r.Repo)
|
||||
}
|
||||
|
||||
func newRemoteReusableWorkflow(uses string) *remoteReusableWorkflow {
|
||||
// GitHub docs:
|
||||
// https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_iduses
|
||||
r := regexp.MustCompile(`^([^/]+)/([^/]+)/.github/workflows/([^@]+)@(.*)$`)
|
||||
matches := r.FindStringSubmatch(uses)
|
||||
if len(matches) != 5 {
|
||||
return nil
|
||||
}
|
||||
return &remoteReusableWorkflow{
|
||||
Org: matches[1],
|
||||
Repo: matches[2],
|
||||
Filename: matches[3],
|
||||
Ref: matches[4],
|
||||
URL: "github.com",
|
||||
}
|
||||
}
|
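For reference, a sketch of the `uses` format the new reusable_workflow.go accepts, assuming the regular expression above (the owner, repository, and ref below are made-up examples):

w := newRemoteReusableWorkflow("octo-org/octo-repo/.github/workflows/reusable.yml@v1")
// w.Org == "octo-org", w.Repo == "octo-repo", w.Filename == "reusable.yml", w.Ref == "v1",
// and w.CloneURL() == "https://github.com/octo-org/octo-repo" until URL is overridden with
// rc.Config.GitHubInstance. Anything that does not match returns nil, which the executor
// reports as "expected format {owner}/{repo}/.github/workflows/{filename}@{ref}".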
@@ -1,12 +1,16 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bufio"
|
||||
"context"
|
||||
"crypto/rand"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
@@ -19,7 +23,6 @@ import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/nektos/act/pkg/common"
|
||||
"github.com/nektos/act/pkg/common/git"
|
||||
"github.com/nektos/act/pkg/container"
|
||||
"github.com/nektos/act/pkg/exprparser"
|
||||
"github.com/nektos/act/pkg/model"
|
||||
@@ -33,9 +36,11 @@ type RunContext struct {
|
||||
Run *model.Run
|
||||
EventJSON string
|
||||
Env map[string]string
|
||||
GlobalEnv map[string]string // to pass env changes of GITHUB_ENV and set-env correctly, due to dirty Env field
|
||||
ExtraPath []string
|
||||
CurrentStep string
|
||||
StepResults map[string]*model.StepResult
|
||||
IntraActionState map[string]map[string]string
|
||||
ExprEval ExpressionEvaluator
|
||||
JobContainer container.ExecutionsEnvironment
|
||||
OutputMappings map[MappableOutput]MappableOutput
|
||||
@@ -44,6 +49,7 @@ type RunContext struct {
|
||||
Parent *RunContext
|
||||
Masks []string
|
||||
cleanUpJobContainer common.Executor
|
||||
caller *caller // job calling this RunContext (reusable workflows)
|
||||
}
|
||||
|
||||
func (rc *RunContext) AddMask(mask string) {
|
||||
@@ -56,7 +62,13 @@ type MappableOutput struct {
|
||||
}
|
||||
|
||||
func (rc *RunContext) String() string {
|
||||
return fmt.Sprintf("%s/%s", rc.Run.Workflow.Name, rc.Name)
|
||||
name := fmt.Sprintf("%s/%s", rc.Run.Workflow.Name, rc.Name)
|
||||
if rc.caller != nil {
|
||||
// prefix the reusable workflow with the caller job
|
||||
// this is required to create unique container names
|
||||
name = fmt.Sprintf("%s/%s", rc.caller.runContext.Run.JobID, name)
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
// GetEnv returns the env for the context
|
||||
@@ -145,15 +157,15 @@ func (rc *RunContext) startHostEnvironment() common.Executor {
|
||||
_, _ = rand.Read(randBytes)
|
||||
miscpath := filepath.Join(cacheDir, hex.EncodeToString(randBytes))
|
||||
actPath := filepath.Join(miscpath, "act")
|
||||
if err := os.MkdirAll(actPath, 0777); err != nil {
|
||||
if err := os.MkdirAll(actPath, 0o777); err != nil {
|
||||
return err
|
||||
}
|
||||
path := filepath.Join(miscpath, "hostexecutor")
|
||||
if err := os.MkdirAll(path, 0777); err != nil {
|
||||
if err := os.MkdirAll(path, 0o777); err != nil {
|
||||
return err
|
||||
}
|
||||
runnerTmp := filepath.Join(miscpath, "tmp")
|
||||
if err := os.MkdirAll(runnerTmp, 0777); err != nil {
|
||||
if err := os.MkdirAll(runnerTmp, 0o777); err != nil {
|
||||
return err
|
||||
}
|
||||
toolCache := filepath.Join(cacheDir, "tool_cache")
|
||||
@@ -169,29 +181,28 @@ func (rc *RunContext) startHostEnvironment() common.Executor {
|
||||
StdOut: logWriter,
|
||||
}
|
||||
rc.cleanUpJobContainer = rc.JobContainer.Remove()
|
||||
rc.Env["RUNNER_TOOL_CACHE"] = toolCache
|
||||
rc.Env["RUNNER_OS"] = runtime.GOOS
|
||||
rc.Env["RUNNER_ARCH"] = runtime.GOARCH
|
||||
rc.Env["RUNNER_TEMP"] = runnerTmp
|
||||
for k, v := range rc.JobContainer.GetRunnerContext(ctx) {
|
||||
if v, ok := v.(string); ok {
|
||||
rc.Env[fmt.Sprintf("RUNNER_%s", strings.ToUpper(k))] = v
|
||||
}
|
||||
}
|
||||
for _, env := range os.Environ() {
|
||||
i := strings.Index(env, "=")
|
||||
if i > 0 {
|
||||
rc.Env[env[0:i]] = env[i+1:]
|
||||
if k, v, ok := strings.Cut(env, "="); ok {
|
||||
// don't override
|
||||
if _, ok := rc.Env[k]; !ok {
|
||||
rc.Env[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return common.NewPipelineExecutor(
|
||||
rc.JobContainer.Copy(rc.JobContainer.GetActPath()+"/", &container.FileEntry{
|
||||
Name: "workflow/event.json",
|
||||
Mode: 0644,
|
||||
Mode: 0o644,
|
||||
Body: rc.EventJSON,
|
||||
}, &container.FileEntry{
|
||||
Name: "workflow/envs.txt",
|
||||
Mode: 0666,
|
||||
Body: "",
|
||||
}, &container.FileEntry{
|
||||
Name: "workflow/paths.txt",
|
||||
Mode: 0666,
|
||||
Mode: 0o666,
|
||||
Body: "",
|
||||
}),
|
||||
)(ctx)
|
||||
@@ -226,6 +237,7 @@ func (rc *RunContext) startJobContainer() common.Executor {
|
||||
envList = append(envList, fmt.Sprintf("%s=%s", "RUNNER_OS", "Linux"))
|
||||
envList = append(envList, fmt.Sprintf("%s=%s", "RUNNER_ARCH", container.RunnerArch(ctx)))
|
||||
envList = append(envList, fmt.Sprintf("%s=%s", "RUNNER_TEMP", "/tmp"))
|
||||
envList = append(envList, fmt.Sprintf("%s=%s", "LANG", "C.UTF-8")) // Use same locale as GitHub Actions
|
||||
|
||||
ext := container.LinuxContainerEnvironmentExtensions{}
|
||||
binds, mounts := rc.GetBindsAndMounts()
|
||||
@@ -268,19 +280,13 @@ func (rc *RunContext) startJobContainer() common.Executor {
|
||||
rc.stopJobContainer(),
|
||||
rc.JobContainer.Create(rc.Config.ContainerCapAdd, rc.Config.ContainerCapDrop),
|
||||
rc.JobContainer.Start(false),
|
||||
rc.JobContainer.UpdateFromImageEnv(&rc.Env),
|
||||
rc.JobContainer.UpdateFromEnv("/etc/environment", &rc.Env),
|
||||
rc.JobContainer.Copy(rc.JobContainer.GetActPath()+"/", &container.FileEntry{
|
||||
Name: "workflow/event.json",
|
||||
Mode: 0644,
|
||||
Mode: 0o644,
|
||||
Body: rc.EventJSON,
|
||||
}, &container.FileEntry{
|
||||
Name: "workflow/envs.txt",
|
||||
Mode: 0666,
|
||||
Body: "",
|
||||
}, &container.FileEntry{
|
||||
Name: "workflow/paths.txt",
|
||||
Mode: 0666,
|
||||
Mode: 0o666,
|
||||
Body: "",
|
||||
}),
|
||||
)(ctx)
|
||||
@@ -293,6 +299,51 @@ func (rc *RunContext) execJobContainer(cmd []string, env map[string]string, user
|
||||
}
|
||||
}
|
||||
|
||||
func (rc *RunContext) ApplyExtraPath(ctx context.Context, env *map[string]string) {
|
||||
if rc.ExtraPath != nil && len(rc.ExtraPath) > 0 {
|
||||
path := rc.JobContainer.GetPathVariableName()
|
||||
if (*env)[path] == "" {
|
||||
cenv := map[string]string{}
|
||||
var cpath string
|
||||
if err := rc.JobContainer.UpdateFromImageEnv(&cenv)(ctx); err == nil {
|
||||
if p, ok := cenv[path]; ok {
|
||||
cpath = p
|
||||
}
|
||||
}
|
||||
if len(cpath) == 0 {
|
||||
cpath = rc.JobContainer.DefaultPathVariable()
|
||||
}
|
||||
(*env)[path] = cpath
|
||||
}
|
||||
(*env)[path] = rc.JobContainer.JoinPathVariable(append(rc.ExtraPath, (*env)[path])...)
|
||||
}
|
||||
}
|
||||
|
||||
func (rc *RunContext) UpdateExtraPath(ctx context.Context, githubEnvPath string) error {
|
||||
if common.Dryrun(ctx) {
|
||||
return nil
|
||||
}
|
||||
pathTar, err := rc.JobContainer.GetContainerArchive(ctx, githubEnvPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer pathTar.Close()
|
||||
|
||||
reader := tar.NewReader(pathTar)
|
||||
_, err = reader.Next()
|
||||
if err != nil && err != io.EOF {
|
||||
return err
|
||||
}
|
||||
s := bufio.NewScanner(reader)
|
||||
for s.Scan() {
|
||||
line := s.Text()
|
||||
if len(line) > 0 {
|
||||
rc.addPath(ctx, line)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// stopJobContainer removes the job container (if it exists) and its volume (if it exists) if !rc.Config.ReuseContainers
|
||||
func (rc *RunContext) stopJobContainer() common.Executor {
|
||||
return func(ctx context.Context) error {
|
||||
@@ -335,14 +386,18 @@ func (rc *RunContext) interpolateOutputs() common.Executor {
|
||||
|
||||
func (rc *RunContext) startContainer() common.Executor {
|
||||
return func(ctx context.Context) error {
|
||||
image := rc.platformImage(ctx)
|
||||
if strings.EqualFold(image, "-self-hosted") {
|
||||
if rc.IsHostEnv(ctx) {
|
||||
return rc.startHostEnvironment()(ctx)
|
||||
}
|
||||
return rc.startJobContainer()(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
func (rc *RunContext) IsHostEnv(ctx context.Context) bool {
|
||||
image := rc.platformImage(ctx)
|
||||
return strings.EqualFold(image, "-self-hosted")
|
||||
}
|
||||
|
||||
func (rc *RunContext) stopContainer() common.Executor {
|
||||
return rc.stopJobContainer()
|
||||
}
|
||||
@@ -370,16 +425,25 @@ func (rc *RunContext) steps() []*model.Step {
|
||||
|
||||
// Executor returns a pipeline executor for all the steps in the job
|
||||
func (rc *RunContext) Executor() common.Executor {
|
||||
var executor common.Executor
|
||||
|
||||
switch rc.Run.Job().Type() {
|
||||
case model.JobTypeDefault:
|
||||
executor = newJobExecutor(rc, &stepFactoryImpl{}, rc)
|
||||
case model.JobTypeReusableWorkflowLocal:
|
||||
executor = newLocalReusableWorkflowExecutor(rc)
|
||||
case model.JobTypeReusableWorkflowRemote:
|
||||
executor = newRemoteReusableWorkflowExecutor(rc)
|
||||
}
|
||||
|
||||
return func(ctx context.Context) error {
|
||||
isEnabled, err := rc.isEnabled(ctx)
|
||||
res, err := rc.isEnabled(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if isEnabled {
|
||||
return newJobExecutor(rc, &stepFactoryImpl{}, rc)(ctx)
|
||||
if res {
|
||||
return executor(ctx)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -421,7 +485,7 @@ func (rc *RunContext) options(ctx context.Context) string {
|
||||
job := rc.Run.Job()
|
||||
c := job.Container()
|
||||
if c == nil {
|
||||
return ""
|
||||
return rc.Config.ContainerOptions
|
||||
}
|
||||
|
||||
return c.Options
|
||||
@@ -439,6 +503,10 @@ func (rc *RunContext) isEnabled(ctx context.Context) (bool, error) {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
if job.Type() != model.JobTypeDefault {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
img := rc.platformImage(ctx)
|
||||
if img == "" {
|
||||
if job.RunsOn() == nil {
|
||||
@@ -466,26 +534,16 @@ func mergeMaps(maps ...map[string]string) map[string]string {
|
||||
|
||||
// deprecated: use createSimpleContainerName
|
||||
func createContainerName(parts ...string) string {
|
||||
name := make([]string, 0)
|
||||
name := strings.Join(parts, "-")
|
||||
pattern := regexp.MustCompile("[^a-zA-Z0-9]")
|
||||
partLen := (30 / len(parts)) - 1
|
||||
for i, part := range parts {
|
||||
if i == len(parts)-1 {
|
||||
name = append(name, pattern.ReplaceAllString(part, "-"))
|
||||
} else {
|
||||
// If any part has a '-<number>' on the end it is likely part of a matrix job.
|
||||
// Let's preserve the number to prevent clashes in container names.
|
||||
re := regexp.MustCompile("-[0-9]+$")
|
||||
num := re.FindStringSubmatch(part)
|
||||
if len(num) > 0 {
|
||||
name = append(name, trimToLen(pattern.ReplaceAllString(part, "-"), partLen-len(num[0])))
|
||||
name = append(name, num[0])
|
||||
} else {
|
||||
name = append(name, trimToLen(pattern.ReplaceAllString(part, "-"), partLen))
|
||||
}
|
||||
}
|
||||
}
|
||||
return strings.ReplaceAll(strings.Trim(strings.Join(name, "-"), "-"), "--", "-")
|
||||
name = pattern.ReplaceAllString(name, "-")
|
||||
name = strings.ReplaceAll(name, "--", "-")
|
||||
hash := sha256.Sum256([]byte(name))
|
||||
|
||||
// SHA256 is 64 hex characters. So trim name to 63 characters to make room for the hash and separator
|
||||
trimmedName := strings.Trim(trimToLen(name, 63), "-")
|
||||
|
||||
return fmt.Sprintf("%s-%x", trimmedName, hash)
|
||||
}
|
||||
|
||||
func createSimpleContainerName(parts ...string) string {
|
||||
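For illustration, what the rewritten createContainerName above yields for a hypothetical job (the parts are made up, not taken from the commit):

name := createContainerName("act", "WORKFLOW/job-matrix-1")
// Non-alphanumeric characters become "-", doubled dashes are collapsed, the name is trimmed
// to 63 characters, and a sha256 of the sanitized name is appended, so even very long
// workflow/job combinations map to unique, stable container names:
// "act-WORKFLOW-job-matrix-1-<64 hex digits>".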
@@ -542,11 +600,20 @@ func (rc *RunContext) getGithubContext(ctx context.Context) *model.GithubContext
|
||||
EventName: rc.Config.EventName,
|
||||
Action: rc.CurrentStep,
|
||||
Token: rc.Config.Token,
|
||||
Job: rc.Run.JobID,
|
||||
ActionPath: rc.ActionPath,
|
||||
RepositoryOwner: rc.Config.Env["GITHUB_REPOSITORY_OWNER"],
|
||||
RetentionDays: rc.Config.Env["GITHUB_RETENTION_DAYS"],
|
||||
RunnerPerflog: rc.Config.Env["RUNNER_PERFLOG"],
|
||||
RunnerTrackingID: rc.Config.Env["RUNNER_TRACKING_ID"],
|
||||
Repository: rc.Config.Env["GITHUB_REPOSITORY"],
|
||||
Ref: rc.Config.Env["GITHUB_REF"],
|
||||
Sha: rc.Config.Env["SHA_REF"],
|
||||
RefName: rc.Config.Env["GITHUB_REF_NAME"],
|
||||
RefType: rc.Config.Env["GITHUB_REF_TYPE"],
|
||||
BaseRef: rc.Config.Env["GITHUB_BASE_REF"],
|
||||
HeadRef: rc.Config.Env["GITHUB_HEAD_REF"],
|
||||
Workspace: rc.Config.Env["GITHUB_WORKSPACE"],
|
||||
}
|
||||
if rc.JobContainer != nil {
|
||||
ghc.EventPath = rc.JobContainer.GetActPath() + "/workflow/event.json"
|
||||
@@ -575,58 +642,45 @@ func (rc *RunContext) getGithubContext(ctx context.Context) *model.GithubContext
|
||||
ghc.Actor = "nektos/act"
|
||||
}
|
||||
|
||||
if preset := rc.Config.PresetGitHubContext; preset != nil {
|
||||
ghc.Event = preset.Event
|
||||
ghc.RunID = preset.RunID
|
||||
ghc.RunNumber = preset.RunNumber
|
||||
ghc.Actor = preset.Actor
|
||||
ghc.Repository = preset.Repository
|
||||
ghc.EventName = preset.EventName
|
||||
ghc.Sha = preset.Sha
|
||||
ghc.Ref = preset.Ref
|
||||
ghc.RefName = preset.RefName
|
||||
ghc.RefType = preset.RefType
|
||||
ghc.HeadRef = preset.HeadRef
|
||||
ghc.BaseRef = preset.BaseRef
|
||||
ghc.Token = preset.Token
|
||||
ghc.RepositoryOwner = preset.RepositoryOwner
|
||||
ghc.RetentionDays = preset.RetentionDays
|
||||
return ghc
|
||||
}
|
||||
|
||||
repoPath := rc.Config.Workdir
|
||||
repo, err := git.FindGithubRepo(ctx, repoPath, rc.Config.GitHubInstance, rc.Config.RemoteName)
|
||||
if err != nil {
|
||||
logger.Warningf("unable to get git repo: %v", err)
|
||||
} else {
|
||||
ghc.Repository = repo
|
||||
if ghc.RepositoryOwner == "" {
|
||||
ghc.RepositoryOwner = strings.Split(repo, "/")[0]
|
||||
{ // Adapt to Gitea
|
||||
if preset := rc.Config.PresetGitHubContext; preset != nil {
|
||||
ghc.Event = preset.Event
|
||||
ghc.RunID = preset.RunID
|
||||
ghc.RunNumber = preset.RunNumber
|
||||
ghc.Actor = preset.Actor
|
||||
ghc.Repository = preset.Repository
|
||||
ghc.EventName = preset.EventName
|
||||
ghc.Sha = preset.Sha
|
||||
ghc.Ref = preset.Ref
|
||||
ghc.RefName = preset.RefName
|
||||
ghc.RefType = preset.RefType
|
||||
ghc.HeadRef = preset.HeadRef
|
||||
ghc.BaseRef = preset.BaseRef
|
||||
ghc.Token = preset.Token
|
||||
ghc.RepositoryOwner = preset.RepositoryOwner
|
||||
ghc.RetentionDays = preset.RetentionDays
|
||||
return ghc
|
||||
}
|
||||
}
|
||||
|
||||
if rc.EventJSON != "" {
|
||||
err = json.Unmarshal([]byte(rc.EventJSON), &ghc.Event)
|
||||
err := json.Unmarshal([]byte(rc.EventJSON), &ghc.Event)
|
||||
if err != nil {
|
||||
logger.Errorf("Unable to Unmarshal event '%s': %v", rc.EventJSON, err)
|
||||
}
|
||||
}
|
||||
|
||||
if ghc.EventName == "pull_request" || ghc.EventName == "pull_request_target" {
|
||||
ghc.BaseRef = asString(nestedMapLookup(ghc.Event, "pull_request", "base", "ref"))
|
||||
ghc.HeadRef = asString(nestedMapLookup(ghc.Event, "pull_request", "head", "ref"))
|
||||
ghc.SetBaseAndHeadRef()
|
||||
repoPath := rc.Config.Workdir
|
||||
ghc.SetRepositoryAndOwner(ctx, rc.Config.GitHubInstance, rc.Config.RemoteName, repoPath)
|
||||
if ghc.Ref == "" {
|
||||
ghc.SetRef(ctx, rc.Config.DefaultBranch, repoPath)
|
||||
}
|
||||
if ghc.Sha == "" {
|
||||
ghc.SetSha(ctx, repoPath)
|
||||
}
|
||||
|
||||
ghc.SetRefAndSha(ctx, rc.Config.DefaultBranch, repoPath)
|
||||
|
||||
// https://docs.github.com/en/actions/learn-github-actions/environment-variables
|
||||
if strings.HasPrefix(ghc.Ref, "refs/tags/") {
|
||||
ghc.RefType = "tag"
|
||||
ghc.RefName = ghc.Ref[len("refs/tags/"):]
|
||||
} else if strings.HasPrefix(ghc.Ref, "refs/heads/") {
|
||||
ghc.RefType = "branch"
|
||||
ghc.RefName = ghc.Ref[len("refs/heads/"):]
|
||||
}
|
||||
ghc.SetRefTypeAndName()
|
||||
|
||||
return ghc
|
||||
}
|
||||
@@ -657,15 +711,6 @@ func isLocalCheckout(ghc *model.GithubContext, step *model.Step) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func asString(v interface{}) string {
|
||||
if v == nil {
|
||||
return ""
|
||||
} else if s, ok := v.(string); ok {
|
||||
return s
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func nestedMapLookup(m map[string]interface{}, ks ...string) (rval interface{}) {
|
||||
var ok bool
|
||||
|
||||
@@ -685,8 +730,6 @@ func nestedMapLookup(m map[string]interface{}, ks ...string) (rval interface{})
|
||||
|
||||
func (rc *RunContext) withGithubEnv(ctx context.Context, github *model.GithubContext, env map[string]string) map[string]string {
|
||||
env["CI"] = "true"
|
||||
env["GITHUB_ENV"] = rc.JobContainer.GetActPath() + "/workflow/envs.txt"
|
||||
env["GITHUB_PATH"] = rc.JobContainer.GetActPath() + "/workflow/paths.txt"
|
||||
env["GITHUB_WORKFLOW"] = github.Workflow
|
||||
env["GITHUB_RUN_ID"] = github.RunID
|
||||
env["GITHUB_RUN_NUMBER"] = github.RunNumber
|
||||
@@ -705,27 +748,45 @@ func (rc *RunContext) withGithubEnv(ctx context.Context, github *model.GithubCon
|
||||
env["GITHUB_REF_NAME"] = github.RefName
|
||||
env["GITHUB_REF_TYPE"] = github.RefType
|
||||
env["GITHUB_TOKEN"] = github.Token
|
||||
env["GITHUB_SERVER_URL"] = "https://github.com"
|
||||
env["GITHUB_API_URL"] = "https://api.github.com"
|
||||
env["GITHUB_GRAPHQL_URL"] = "https://api.github.com/graphql"
|
||||
env["GITHUB_BASE_REF"] = github.BaseRef
|
||||
env["GITHUB_HEAD_REF"] = github.HeadRef
|
||||
env["GITHUB_JOB"] = rc.JobName
|
||||
env["GITHUB_JOB"] = github.Job
|
||||
env["GITHUB_REPOSITORY_OWNER"] = github.RepositoryOwner
|
||||
env["GITHUB_RETENTION_DAYS"] = github.RetentionDays
|
||||
env["RUNNER_PERFLOG"] = github.RunnerPerflog
|
||||
env["RUNNER_TRACKING_ID"] = github.RunnerTrackingID
|
||||
env["GITHUB_BASE_REF"] = github.BaseRef
|
||||
env["GITHUB_HEAD_REF"] = github.HeadRef
|
||||
|
||||
defaultServerURL := "https://github.com"
|
||||
defaultAPIURL := "https://api.github.com"
|
||||
defaultGraphqlURL := "https://api.github.com/graphql"
|
||||
|
||||
if rc.Config.GitHubInstance != "github.com" {
|
||||
hasProtocol := strings.HasPrefix(rc.Config.GitHubInstance, "http://") || strings.HasPrefix(rc.Config.GitHubInstance, "https://")
|
||||
if hasProtocol {
|
||||
env["GITHUB_SERVER_URL"] = rc.Config.GitHubInstance
|
||||
env["GITHUB_API_URL"] = fmt.Sprintf("%s/api/v1", rc.Config.GitHubInstance)
|
||||
env["GITHUB_GRAPHQL_URL"] = "" // disable graphql url because Gitea doesn't support that
|
||||
} else {
|
||||
env["GITHUB_SERVER_URL"] = fmt.Sprintf("https://%s", rc.Config.GitHubInstance)
|
||||
env["GITHUB_API_URL"] = fmt.Sprintf("https://%s/api/v1", rc.Config.GitHubInstance)
|
||||
env["GITHUB_GRAPHQL_URL"] = "" // disable graphql url because Gitea doesn't support that
|
||||
defaultServerURL = fmt.Sprintf("https://%s", rc.Config.GitHubInstance)
|
||||
defaultAPIURL = fmt.Sprintf("https://%s/api/v3", rc.Config.GitHubInstance)
|
||||
defaultGraphqlURL = fmt.Sprintf("https://%s/api/graphql", rc.Config.GitHubInstance)
|
||||
}
|
||||
|
||||
{ // Adapt to Gitea
|
||||
instance := rc.Config.GitHubInstance
|
||||
if !strings.HasPrefix(instance, "http://") &&
|
||||
!strings.HasPrefix(instance, "https://") {
|
||||
instance = "https://" + instance
|
||||
}
|
||||
defaultServerURL = instance
|
||||
defaultAPIURL = instance + "/api/v1" // the version of Gitea is v1
|
||||
defaultGraphqlURL = "" // Gitea doesn't support graphql
|
||||
}
|
||||
|
||||
if env["GITHUB_SERVER_URL"] == "" {
|
||||
env["GITHUB_SERVER_URL"] = defaultServerURL
|
||||
}
|
||||
|
||||
if env["GITHUB_API_URL"] == "" {
|
||||
env["GITHUB_API_URL"] = defaultAPIURL
|
||||
}
|
||||
|
||||
if env["GITHUB_GRAPHQL_URL"] == "" {
|
||||
env["GITHUB_GRAPHQL_URL"] = defaultGraphqlURL
|
||||
}
|
||||
|
||||
if rc.Config.ArtifactServerPath != "" {
|
||||
@@ -754,7 +815,7 @@ func (rc *RunContext) withGithubEnv(ctx context.Context, github *model.GithubCon
|
||||
func setActionRuntimeVars(rc *RunContext, env map[string]string) {
|
||||
actionsRuntimeURL := os.Getenv("ACTIONS_RUNTIME_URL")
|
||||
if actionsRuntimeURL == "" {
|
||||
actionsRuntimeURL = fmt.Sprintf("http://%s:%s/", common.GetOutboundIP().String(), rc.Config.ArtifactServerPort)
|
||||
actionsRuntimeURL = fmt.Sprintf("http://%s:%s/", rc.Config.ArtifactServerAddr, rc.Config.ArtifactServerPort)
|
||||
}
|
||||
env["ACTIONS_RUNTIME_URL"] = actionsRuntimeURL
|
||||
|
||||
|
@@ -144,6 +144,7 @@ func TestRunContext_EvalBool(t *testing.T) {
// Check github context
{in: "github.actor == 'nektos/act'", out: true},
{in: "github.actor == 'unknown'", out: false},
{in: "github.job == 'job1'", out: true},
// The special ACT flag
{in: "${{ env.ACT }}", out: true},
{in: "${{ !env.ACT }}", out: false},
@@ -364,6 +365,7 @@ func TestGetGitHubContext(t *testing.T) {
StepResults: map[string]*model.StepResult{},
OutputMappings: map[MappableOutput]MappableOutput{},
}
rc.Run.JobID = "job1"

ghc := rc.getGithubContext(context.Background())

@@ -392,6 +394,7 @@ func TestGetGitHubContext(t *testing.T) {
assert.Equal(t, ghc.RepositoryOwner, owner)
assert.Equal(t, ghc.RunnerPerflog, "/dev/null")
assert.Equal(t, ghc.Token, rc.Config.Secrets["GITHUB_TOKEN"])
assert.Equal(t, ghc.Job, "job1")
}

func TestGetGithubContextRef(t *testing.T) {
@@ -410,7 +413,7 @@ func TestGetGithubContextRef(t *testing.T) {
{event: "pull_request_target", json: `{"pull_request":{"base":{"ref": "main"}}}`, ref: "refs/heads/main"},
{event: "deployment", json: `{"deployment": {"ref": "tag-name"}}`, ref: "tag-name"},
{event: "deployment_status", json: `{"deployment": {"ref": "tag-name"}}`, ref: "tag-name"},
{event: "release", json: `{"release": {"tag_name": "tag-name"}}`, ref: "tag-name"},
{event: "release", json: `{"release": {"tag_name": "tag-name"}}`, ref: "refs/tags/tag-name"},
}

for _, data := range table {
@@ -2,6 +2,7 @@ package runner
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
@@ -32,6 +33,7 @@ type Config struct {
|
||||
LogOutput bool // log the output from docker run
|
||||
JSONLogger bool // use json or text logger
|
||||
Env map[string]string // env for containers
|
||||
Inputs map[string]string // manually passed action inputs
|
||||
Secrets map[string]string // list of secrets
|
||||
Token string // GitHub token
|
||||
InsecureSecrets bool // switch hiding output when printing to terminal
|
||||
@@ -40,12 +42,14 @@ type Config struct {
|
||||
UsernsMode string // user namespace to use
|
||||
ContainerArchitecture string // Desired OS/architecture platform for running containers
|
||||
ContainerDaemonSocket string // Path to Docker daemon socket
|
||||
ContainerOptions string // Options for the job container
|
||||
UseGitIgnore bool // controls if paths in .gitignore should not be copied into container, default true
|
||||
GitHubInstance string // GitHub instance to use, default "github.com"
|
||||
ContainerCapAdd []string // list of kernel capabilities to add to the containers
|
||||
ContainerCapDrop []string // list of kernel capabilities to remove from the containers
|
||||
AutoRemove bool // controls if the container is automatically removed upon workflow completion
|
||||
ArtifactServerPath string // the path where the artifact server stores uploads
|
||||
ArtifactServerAddr string // the address the artifact server binds to
|
||||
ArtifactServerPort string // the port the artifact server binds to
|
||||
NoSkipCheckout bool // do not skip actions/checkout
|
||||
RemoteName string // remote name in local git repo config
|
||||
@@ -62,9 +66,14 @@ type Config struct {
|
||||
JobLoggerLevel *log.Level // the level of job logger
|
||||
}
|
||||
|
||||
type caller struct {
|
||||
runContext *RunContext
|
||||
}
|
||||
|
||||
type runnerImpl struct {
|
||||
config *Config
|
||||
eventJSON string
|
||||
caller *caller // the job calling this runner (caller of a reusable workflow)
|
||||
}
|
||||
|
||||
// New Creates a new Runner
|
||||
@@ -73,40 +82,46 @@ func New(runnerConfig *Config) (Runner, error) {
|
||||
config: runnerConfig,
|
||||
}
|
||||
|
||||
return runner.configure()
|
||||
}
|
||||
|
||||
func (runner *runnerImpl) configure() (Runner, error) {
|
||||
runner.eventJSON = "{}"
|
||||
if runnerConfig.EventJSON != "" {
|
||||
runner.eventJSON = runnerConfig.EventJSON
|
||||
} else if runnerConfig.EventPath != "" {
|
||||
if runner.config.EventJSON != "" {
|
||||
runner.eventJSON = runner.config.EventJSON
|
||||
} else if runner.config.EventPath != "" {
|
||||
log.Debugf("Reading event.json from %s", runner.config.EventPath)
|
||||
eventJSONBytes, err := os.ReadFile(runner.config.EventPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
runner.eventJSON = string(eventJSONBytes)
|
||||
} else if len(runner.config.Inputs) != 0 {
|
||||
eventMap := map[string]map[string]string{
|
||||
"inputs": runner.config.Inputs,
|
||||
}
|
||||
eventJSON, err := json.Marshal(eventMap)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
runner.eventJSON = string(eventJSON)
|
||||
}
|
||||
return runner, nil
|
||||
}
|
||||
|
||||
// NewPlanExecutor ...
|
||||
//
|
||||
//nolint:gocyclo
|
||||
func (runner *runnerImpl) NewPlanExecutor(plan *model.Plan) common.Executor {
|
||||
maxJobNameLen := 0
|
||||
|
||||
stagePipeline := make([]common.Executor, 0)
|
||||
for i := range plan.Stages {
|
||||
s := i
|
||||
stage := plan.Stages[i]
|
||||
stagePipeline = append(stagePipeline, func(ctx context.Context) error {
|
||||
pipeline := make([]common.Executor, 0)
|
||||
for r, run := range stage.Runs {
|
||||
for _, run := range stage.Runs {
|
||||
stageExecutor := make([]common.Executor, 0)
|
||||
job := run.Job()
|
||||
|
||||
if job.Uses != "" {
|
||||
return fmt.Errorf("reusable workflows are currently not supported (see https://github.com/nektos/act/issues/826 for updates)")
|
||||
}
|
||||
|
||||
if job.Strategy != nil {
|
||||
strategyRc := runner.newRunContext(ctx, run, nil)
|
||||
if err := strategyRc.NewExpressionEvaluator(ctx).EvaluateYamlNode(ctx, &job.Strategy.RawMatrix); err != nil {
|
||||
@@ -134,29 +149,8 @@ func (runner *runnerImpl) NewPlanExecutor(plan *model.Plan) common.Executor {
|
||||
maxJobNameLen = len(rc.String())
|
||||
}
|
||||
stageExecutor = append(stageExecutor, func(ctx context.Context) error {
|
||||
logger := common.Logger(ctx)
|
||||
jobName := fmt.Sprintf("%-*s", maxJobNameLen, rc.String())
|
||||
return rc.Executor().Finally(func(ctx context.Context) error {
|
||||
isLastRunningContainer := func(currentStage int, currentRun int) bool {
|
||||
return currentStage == len(plan.Stages)-1 && currentRun == len(stage.Runs)-1
|
||||
}
|
||||
|
||||
if runner.config.AutoRemove && isLastRunningContainer(s, r) {
|
||||
var cancel context.CancelFunc
|
||||
if ctx.Err() == context.Canceled {
|
||||
ctx, cancel = context.WithTimeout(context.Background(), 5*time.Minute)
|
||||
defer cancel()
|
||||
}
|
||||
|
||||
log.Infof("Cleaning up container for job %s", rc.JobName)
|
||||
|
||||
if err := rc.stopJobContainer()(ctx); err != nil {
|
||||
logger.Errorf("Error while cleaning container: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})(common.WithJobErrorContainer(WithJobLogger(ctx, rc.Run.JobID, jobName, rc.Config, &rc.Masks, matrix)))
|
||||
return rc.Executor()(common.WithJobErrorContainer(WithJobLogger(ctx, rc.Run.JobID, jobName, rc.Config, &rc.Masks, matrix)))
|
||||
})
|
||||
}
|
||||
pipeline = append(pipeline, common.NewParallelExecutor(maxParallel, stageExecutor...))
|
||||
@@ -196,8 +190,10 @@ func (runner *runnerImpl) newRunContext(ctx context.Context, run *model.Run, mat
|
||||
EventJSON: runner.eventJSON,
|
||||
StepResults: make(map[string]*model.StepResult),
|
||||
Matrix: matrix,
|
||||
caller: runner.caller,
|
||||
}
|
||||
rc.ExprEval = rc.NewExpressionEvaluator(ctx)
|
||||
rc.Name = rc.ExprEval.Interpolate(ctx, run.String())
|
||||
|
||||
return rc
|
||||
}
|
||||
|
@@ -1,8 +1,10 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
@@ -22,6 +24,7 @@ var (
|
||||
platforms map[string]string
|
||||
logLevel = log.DebugLevel
|
||||
workdir = "testdata"
|
||||
secrets map[string]string
|
||||
)
|
||||
|
||||
func init() {
|
||||
@@ -42,14 +45,103 @@ func init() {
|
||||
if wd, err := filepath.Abs(workdir); err == nil {
|
||||
workdir = wd
|
||||
}
|
||||
|
||||
secrets = map[string]string{}
|
||||
}
|
||||
|
||||
func TestNoWorkflowsFoundByPlanner(t *testing.T) {
|
||||
planner, err := model.NewWorkflowPlanner("res", true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
out := log.StandardLogger().Out
|
||||
var buf bytes.Buffer
|
||||
log.SetOutput(&buf)
|
||||
log.SetLevel(log.DebugLevel)
|
||||
plan, err := planner.PlanEvent("pull_request")
|
||||
assert.NotNil(t, plan)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, buf.String(), "no workflows found by planner")
|
||||
buf.Reset()
|
||||
plan, err = planner.PlanAll()
|
||||
assert.NotNil(t, plan)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, buf.String(), "no workflows found by planner")
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
func TestGraphMissingEvent(t *testing.T) {
|
||||
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/no-event.yml", true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
out := log.StandardLogger().Out
|
||||
var buf bytes.Buffer
|
||||
log.SetOutput(&buf)
|
||||
log.SetLevel(log.DebugLevel)
|
||||
|
||||
plan, err := planner.PlanEvent("push")
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, plan)
|
||||
assert.Equal(t, 0, len(plan.Stages))
|
||||
|
||||
assert.Contains(t, buf.String(), "no events found for workflow: no-event.yml")
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
func TestGraphMissingFirst(t *testing.T) {
|
||||
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/no-first.yml", true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
plan, err := planner.PlanEvent("push")
|
||||
assert.EqualError(t, err, "unable to build dependency graph for no first (no-first.yml)")
|
||||
assert.NotNil(t, plan)
|
||||
assert.Equal(t, 0, len(plan.Stages))
|
||||
}
|
||||
|
||||
func TestGraphWithMissing(t *testing.T) {
|
||||
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/missing.yml", true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
out := log.StandardLogger().Out
|
||||
var buf bytes.Buffer
|
||||
log.SetOutput(&buf)
|
||||
log.SetLevel(log.DebugLevel)
|
||||
|
||||
plan, err := planner.PlanEvent("push")
|
||||
assert.NotNil(t, plan)
|
||||
assert.Equal(t, 0, len(plan.Stages))
|
||||
assert.EqualError(t, err, "unable to build dependency graph for missing (missing.yml)")
|
||||
assert.Contains(t, buf.String(), "unable to build dependency graph for missing (missing.yml)")
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
func TestGraphWithSomeMissing(t *testing.T) {
|
||||
log.SetLevel(log.DebugLevel)
|
||||
|
||||
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/", true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
out := log.StandardLogger().Out
|
||||
var buf bytes.Buffer
|
||||
log.SetOutput(&buf)
|
||||
log.SetLevel(log.DebugLevel)
|
||||
|
||||
plan, err := planner.PlanAll()
|
||||
assert.Error(t, err, "unable to build dependency graph for no first (no-first.yml)")
|
||||
assert.NotNil(t, plan)
|
||||
assert.Equal(t, 1, len(plan.Stages))
|
||||
assert.Contains(t, buf.String(), "unable to build dependency graph for missing (missing.yml)")
|
||||
assert.Contains(t, buf.String(), "unable to build dependency graph for no first (no-first.yml)")
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
func TestGraphEvent(t *testing.T) {
|
||||
planner, err := model.NewWorkflowPlanner("testdata/basic", true)
|
||||
assert.Nil(t, err)
|
||||
assert.NoError(t, err)
|
||||
|
||||
plan := planner.PlanEvent("push")
|
||||
assert.Nil(t, err)
|
||||
plan, err := planner.PlanEvent("push")
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, plan)
|
||||
assert.NotNil(t, plan.Stages)
|
||||
assert.Equal(t, len(plan.Stages), 3, "stages")
|
||||
assert.Equal(t, len(plan.Stages[0].Runs), 1, "stage0.runs")
|
||||
assert.Equal(t, len(plan.Stages[1].Runs), 1, "stage1.runs")
|
||||
@@ -58,8 +150,10 @@ func TestGraphEvent(t *testing.T) {
|
||||
assert.Equal(t, plan.Stages[1].Runs[0].JobID, "build", "jobid")
|
||||
assert.Equal(t, plan.Stages[2].Runs[0].JobID, "test", "jobid")
|
||||
|
||||
plan = planner.PlanEvent("release")
|
||||
assert.Equal(t, len(plan.Stages), 0, "stages")
|
||||
plan, err = planner.PlanEvent("release")
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, plan)
|
||||
assert.Equal(t, 0, len(plan.Stages))
|
||||
}
|
||||
|
||||
type TestJobFileInfo struct {
|
||||
@@ -68,6 +162,7 @@ type TestJobFileInfo struct {
|
||||
eventName string
|
||||
errorMessage string
|
||||
platforms map[string]string
|
||||
secrets map[string]string
|
||||
}
|
||||
|
||||
func (j *TestJobFileInfo) runTest(ctx context.Context, t *testing.T, cfg *Config) {
|
||||
@@ -88,6 +183,7 @@ func (j *TestJobFileInfo) runTest(ctx context.Context, t *testing.T, cfg *Config
|
||||
ReuseContainers: false,
|
||||
Env: cfg.Env,
|
||||
Secrets: cfg.Secrets,
|
||||
Inputs: cfg.Inputs,
|
||||
GitHubInstance: "github.com",
|
||||
ContainerArchitecture: cfg.ContainerArchitecture,
|
||||
}
|
||||
@@ -98,13 +194,15 @@ func (j *TestJobFileInfo) runTest(ctx context.Context, t *testing.T, cfg *Config
|
||||
planner, err := model.NewWorkflowPlanner(fullWorkflowPath, true)
|
||||
assert.Nil(t, err, fullWorkflowPath)
|
||||
|
||||
plan := planner.PlanEvent(j.eventName)
|
||||
|
||||
err = runner.NewPlanExecutor(plan)(ctx)
|
||||
if j.errorMessage == "" {
|
||||
assert.Nil(t, err, fullWorkflowPath)
|
||||
} else {
|
||||
assert.Error(t, err, j.errorMessage)
|
||||
plan, err := planner.PlanEvent(j.eventName)
|
||||
assert.True(t, (err == nil) != (plan == nil), "PlanEvent should return either a plan or an error")
|
||||
if err == nil && plan != nil {
|
||||
err = runner.NewPlanExecutor(plan)(ctx)
|
||||
if j.errorMessage == "" {
|
||||
assert.Nil(t, err, fullWorkflowPath)
|
||||
} else {
|
||||
assert.Error(t, err, j.errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("::endgroup::")
|
||||
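For reference, a short sketch of the updated planner flow that runTest now exercises, in which PlanEvent returns an error alongside the plan (the wrapper name is hypothetical; the individual calls all appear in this diff):

// planAndRun is an illustrative helper, not part of this change.
func planAndRun(ctx context.Context, workflowDir string, cfg *Config) error {
	planner, err := model.NewWorkflowPlanner(workflowDir, true)
	if err != nil {
		return err
	}
	plan, err := planner.PlanEvent("push")
	if err != nil {
		return err // e.g. a job's dependency graph could not be built
	}
	r, err := New(cfg)
	if err != nil {
		return err
	}
	return r.NewPlanExecutor(plan)(ctx)
}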
@@ -119,81 +217,96 @@ func TestRunEvent(t *testing.T) {
|
||||
|
||||
tables := []TestJobFileInfo{
|
||||
// Shells
|
||||
{workdir, "shells/defaults", "push", "", platforms},
|
||||
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
||||
// TODO: figure out why it fails
|
||||
// {workdir, "shells/custom", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, }, // custom image with pwsh
|
||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}}, // custom image with pwsh
|
||||
{workdir, "shells/bash", "push", "", platforms},
|
||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}}, // slim doesn't have python
|
||||
{workdir, "shells/sh", "push", "", platforms},
|
||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
||||
{workdir, "shells/bash", "push", "", platforms, secrets},
|
||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
|
||||
{workdir, "shells/sh", "push", "", platforms, secrets},
|
||||
|
||||
// Local action
|
||||
{workdir, "local-action-docker-url", "push", "", platforms},
|
||||
{workdir, "local-action-dockerfile", "push", "", platforms},
|
||||
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms},
|
||||
{workdir, "local-action-js", "push", "", platforms},
|
||||
{workdir, "local-action-docker-url", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-js", "push", "", platforms, secrets},
|
||||
|
||||
// Uses
|
||||
{workdir, "uses-composite", "push", "", platforms},
|
||||
{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms},
|
||||
{workdir, "uses-nested-composite", "push", "", platforms},
|
||||
{workdir, "remote-action-composite-js-pre-with-defaults", "push", "", platforms},
|
||||
{workdir, "uses-workflow", "push", "reusable workflows are currently not supported (see https://github.com/nektos/act/issues/826 for updates)", platforms},
|
||||
{workdir, "uses-docker-url", "push", "", platforms},
|
||||
{workdir, "act-composite-env-test", "push", "", platforms},
|
||||
{workdir, "uses-composite", "push", "", platforms, secrets},
|
||||
{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms, secrets},
|
||||
{workdir, "uses-nested-composite", "push", "", platforms, secrets},
|
||||
{workdir, "remote-action-composite-js-pre-with-defaults", "push", "", platforms, secrets},
|
||||
{workdir, "uses-workflow", "push", "", platforms, map[string]string{"secret": "keep_it_private"}},
|
||||
{workdir, "uses-workflow", "pull_request", "", platforms, map[string]string{"secret": "keep_it_private"}},
|
||||
{workdir, "uses-docker-url", "push", "", platforms, secrets},
|
||||
{workdir, "act-composite-env-test", "push", "", platforms, secrets},
|
||||
|
||||
// Eval
|
||||
{workdir, "evalmatrix", "push", "", platforms},
|
||||
{workdir, "evalmatrixneeds", "push", "", platforms},
|
||||
{workdir, "evalmatrixneeds2", "push", "", platforms},
|
||||
{workdir, "evalmatrix-merge-map", "push", "", platforms},
|
||||
{workdir, "evalmatrix-merge-array", "push", "", platforms},
|
||||
{workdir, "issue-1195", "push", "", platforms},
|
||||
{workdir, "evalmatrix", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrixneeds", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrixneeds2", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrix-merge-map", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrix-merge-array", "push", "", platforms, secrets},
|
||||
{workdir, "issue-1195", "push", "", platforms, secrets},
|
||||
|
||||
{workdir, "basic", "push", "", platforms},
|
||||
{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms},
|
||||
{workdir, "runs-on", "push", "", platforms},
|
||||
{workdir, "checkout", "push", "", platforms},
|
||||
{workdir, "job-container", "push", "", platforms},
|
||||
{workdir, "job-container-non-root", "push", "", platforms},
|
||||
{workdir, "job-container-invalid-credentials", "push", "failed to handle credentials: failed to interpolate container.credentials.password", platforms},
|
||||
{workdir, "container-hostname", "push", "", platforms},
|
||||
{workdir, "remote-action-docker", "push", "", platforms},
|
||||
{workdir, "remote-action-js", "push", "", platforms},
|
||||
{workdir, "remote-action-js", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:runner-latest"}}, // Test if this works with non root container
|
||||
{workdir, "matrix", "push", "", platforms},
|
||||
{workdir, "matrix-include-exclude", "push", "", platforms},
|
||||
{workdir, "commands", "push", "", platforms},
|
||||
{workdir, "workdir", "push", "", platforms},
|
||||
{workdir, "defaults-run", "push", "", platforms},
|
||||
{workdir, "composite-fail-with-output", "push", "", platforms},
|
||||
{workdir, "issue-597", "push", "", platforms},
|
||||
{workdir, "issue-598", "push", "", platforms},
|
||||
{workdir, "if-env-act", "push", "", platforms},
|
||||
{workdir, "env-and-path", "push", "", platforms},
|
||||
{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms},
|
||||
{workdir, "outputs", "push", "", platforms},
|
||||
{workdir, "networking", "push", "", platforms},
|
||||
{workdir, "steps-context/conclusion", "push", "", platforms},
|
||||
{workdir, "steps-context/outcome", "push", "", platforms},
|
||||
{workdir, "job-status-check", "push", "job 'fail' failed", platforms},
|
||||
{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms},
|
||||
{workdir, "actions-environment-and-context-tests", "push", "", platforms},
|
||||
{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms},
|
||||
{workdir, "evalenv", "push", "", platforms},
|
||||
{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms},
|
||||
{workdir, "workflow_dispatch", "workflow_dispatch", "", platforms},
|
||||
{workdir, "workflow_dispatch_no_inputs_mapping", "workflow_dispatch", "", platforms},
|
||||
{workdir, "workflow_dispatch-scalar", "workflow_dispatch", "", platforms},
|
||||
{workdir, "workflow_dispatch-scalar-composite-action", "workflow_dispatch", "", platforms},
|
||||
{"../model/testdata", "strategy", "push", "", platforms}, // TODO: move all testdata into pkg so we can validate it with planner and runner
|
||||
{workdir, "basic", "push", "", platforms, secrets},
|
||||
{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms, secrets},
|
||||
{workdir, "runs-on", "push", "", platforms, secrets},
|
||||
{workdir, "checkout", "push", "", platforms, secrets},
|
||||
{workdir, "job-container", "push", "", platforms, secrets},
|
||||
{workdir, "job-container-non-root", "push", "", platforms, secrets},
|
||||
{workdir, "job-container-invalid-credentials", "push", "failed to handle credentials: failed to interpolate container.credentials.password", platforms, secrets},
|
||||
{workdir, "container-hostname", "push", "", platforms, secrets},
|
||||
{workdir, "remote-action-docker", "push", "", platforms, secrets},
|
||||
{workdir, "remote-action-js", "push", "", platforms, secrets},
|
||||
{workdir, "remote-action-js-node-user", "push", "", platforms, secrets}, // Test if this works with non root container
|
||||
{workdir, "matrix", "push", "", platforms, secrets},
|
||||
{workdir, "matrix-include-exclude", "push", "", platforms, secrets},
|
||||
{workdir, "matrix-exitcode", "push", "Job 'test' failed", platforms, secrets},
|
||||
{workdir, "commands", "push", "", platforms, secrets},
|
||||
{workdir, "workdir", "push", "", platforms, secrets},
|
||||
{workdir, "defaults-run", "push", "", platforms, secrets},
|
||||
{workdir, "composite-fail-with-output", "push", "", platforms, secrets},
|
||||
{workdir, "issue-597", "push", "", platforms, secrets},
|
||||
{workdir, "issue-598", "push", "", platforms, secrets},
|
||||
{workdir, "if-env-act", "push", "", platforms, secrets},
|
||||
{workdir, "env-and-path", "push", "", platforms, secrets},
|
||||
{workdir, "environment-files", "push", "", platforms, secrets},
|
||||
{workdir, "GITHUB_STATE", "push", "", platforms, secrets},
|
||||
{workdir, "environment-files-parser-bug", "push", "", platforms, secrets},
|
||||
{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms, secrets},
|
||||
{workdir, "outputs", "push", "", platforms, secrets},
|
||||
{workdir, "networking", "push", "", platforms, secrets},
|
||||
{workdir, "steps-context/conclusion", "push", "", platforms, secrets},
|
||||
{workdir, "steps-context/outcome", "push", "", platforms, secrets},
|
||||
{workdir, "job-status-check", "push", "job 'fail' failed", platforms, secrets},
|
||||
{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms, secrets},
|
||||
{workdir, "actions-environment-and-context-tests", "push", "", platforms, secrets},
|
||||
{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms, secrets},
|
||||
{workdir, "evalenv", "push", "", platforms, secrets},
|
||||
{workdir, "docker-action-custom-path", "push", "", platforms, secrets},
|
||||
{workdir, "GITHUB_ENV-use-in-env-ctx", "push", "", platforms, secrets},
|
||||
{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms, secrets},
|
||||
{workdir, "workflow_dispatch", "workflow_dispatch", "", platforms, secrets},
|
||||
{workdir, "workflow_dispatch_no_inputs_mapping", "workflow_dispatch", "", platforms, secrets},
|
||||
{workdir, "workflow_dispatch-scalar", "workflow_dispatch", "", platforms, secrets},
|
||||
{workdir, "workflow_dispatch-scalar-composite-action", "workflow_dispatch", "", platforms, secrets},
|
||||
{workdir, "job-needs-context-contains-result", "push", "", platforms, secrets},
|
||||
{"../model/testdata", "strategy", "push", "", platforms, secrets}, // TODO: move all testdata into pkg so we can validate it with planner and runner
|
||||
// {"testdata", "issue-228", "push", "", platforms, }, // TODO [igni]: Remove this once everything passes
|
||||
{"../model/testdata", "container-volumes", "push", "", platforms},
|
||||
{"../model/testdata", "container-volumes", "push", "", platforms, secrets},
|
||||
{workdir, "path-handling", "push", "", platforms, secrets},
|
||||
{workdir, "do-not-leak-step-env-in-composite", "push", "", platforms, secrets},
|
||||
{workdir, "set-env-step-env-override", "push", "", platforms, secrets},
|
||||
{workdir, "set-env-new-env-file-per-step", "push", "", platforms, secrets},
|
||||
{workdir, "no-panic-on-invalid-composite-action", "push", "jobs failed due to invalid action", platforms, secrets},
|
||||
}
|
||||
|
||||
for _, table := range tables {
|
||||
t.Run(table.workflowPath, func(t *testing.T) {
|
||||
config := &Config{}
|
||||
config := &Config{
|
||||
Secrets: table.secrets,
|
||||
}
|
||||
|
||||
eventFile := filepath.Join(workdir, table.workflowPath, "event.json")
|
||||
if _, err := os.Stat(eventFile); err == nil {
|
||||
@@ -221,51 +334,51 @@ func TestRunEventHostEnvironment(t *testing.T) {
|
||||
|
||||
tables = append(tables, []TestJobFileInfo{
|
||||
// Shells
|
||||
{workdir, "shells/defaults", "push", "", platforms},
|
||||
{workdir, "shells/pwsh", "push", "", platforms},
|
||||
{workdir, "shells/bash", "push", "", platforms},
|
||||
{workdir, "shells/python", "push", "", platforms},
|
||||
{workdir, "shells/sh", "push", "", platforms},
|
||||
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
||||
{workdir, "shells/pwsh", "push", "", platforms, secrets},
|
||||
{workdir, "shells/bash", "push", "", platforms, secrets},
|
||||
{workdir, "shells/python", "push", "", platforms, secrets},
|
||||
{workdir, "shells/sh", "push", "", platforms, secrets},
|
||||
|
||||
// Local action
|
||||
{workdir, "local-action-js", "push", "", platforms},
|
||||
{workdir, "local-action-js", "push", "", platforms, secrets},
|
||||
|
||||
// Uses
|
||||
{workdir, "uses-composite", "push", "", platforms},
|
||||
{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms},
|
||||
{workdir, "uses-nested-composite", "push", "", platforms},
|
||||
{workdir, "act-composite-env-test", "push", "", platforms},
|
||||
{workdir, "uses-composite", "push", "", platforms, secrets},
|
||||
{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms, secrets},
|
||||
{workdir, "uses-nested-composite", "push", "", platforms, secrets},
|
||||
{workdir, "act-composite-env-test", "push", "", platforms, secrets},
|
||||
|
||||
// Eval
|
||||
{workdir, "evalmatrix", "push", "", platforms},
|
||||
{workdir, "evalmatrixneeds", "push", "", platforms},
|
||||
{workdir, "evalmatrixneeds2", "push", "", platforms},
|
||||
{workdir, "evalmatrix-merge-map", "push", "", platforms},
|
||||
{workdir, "evalmatrix-merge-array", "push", "", platforms},
|
||||
{workdir, "issue-1195", "push", "", platforms},
|
||||
{workdir, "evalmatrix", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrixneeds", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrixneeds2", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrix-merge-map", "push", "", platforms, secrets},
|
||||
{workdir, "evalmatrix-merge-array", "push", "", platforms, secrets},
|
||||
{workdir, "issue-1195", "push", "", platforms, secrets},
|
||||
|
||||
{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms},
|
||||
{workdir, "runs-on", "push", "", platforms},
|
||||
{workdir, "checkout", "push", "", platforms},
|
||||
{workdir, "remote-action-js", "push", "", platforms},
|
||||
{workdir, "matrix", "push", "", platforms},
|
||||
{workdir, "matrix-include-exclude", "push", "", platforms},
|
||||
{workdir, "commands", "push", "", platforms},
|
||||
{workdir, "defaults-run", "push", "", platforms},
|
||||
{workdir, "composite-fail-with-output", "push", "", platforms},
|
||||
{workdir, "issue-597", "push", "", platforms},
|
||||
{workdir, "issue-598", "push", "", platforms},
|
||||
{workdir, "if-env-act", "push", "", platforms},
|
||||
{workdir, "env-and-path", "push", "", platforms},
|
||||
{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms},
|
||||
{workdir, "outputs", "push", "", platforms},
|
||||
{workdir, "steps-context/conclusion", "push", "", platforms},
|
||||
{workdir, "steps-context/outcome", "push", "", platforms},
|
||||
{workdir, "job-status-check", "push", "job 'fail' failed", platforms},
|
||||
{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms},
|
||||
{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms},
|
||||
{workdir, "evalenv", "push", "", platforms},
|
||||
{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms},
|
||||
{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms, secrets},
|
||||
{workdir, "runs-on", "push", "", platforms, secrets},
|
||||
{workdir, "checkout", "push", "", platforms, secrets},
|
||||
{workdir, "remote-action-js", "push", "", platforms, secrets},
|
||||
{workdir, "matrix", "push", "", platforms, secrets},
|
||||
{workdir, "matrix-include-exclude", "push", "", platforms, secrets},
|
||||
{workdir, "commands", "push", "", platforms, secrets},
|
||||
{workdir, "defaults-run", "push", "", platforms, secrets},
|
||||
{workdir, "composite-fail-with-output", "push", "", platforms, secrets},
|
||||
{workdir, "issue-597", "push", "", platforms, secrets},
|
||||
{workdir, "issue-598", "push", "", platforms, secrets},
|
||||
{workdir, "if-env-act", "push", "", platforms, secrets},
|
||||
{workdir, "env-and-path", "push", "", platforms, secrets},
|
||||
{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms, secrets},
|
||||
{workdir, "outputs", "push", "", platforms, secrets},
|
||||
{workdir, "steps-context/conclusion", "push", "", platforms, secrets},
|
||||
{workdir, "steps-context/outcome", "push", "", platforms, secrets},
|
||||
{workdir, "job-status-check", "push", "job 'fail' failed", platforms, secrets},
|
||||
{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms, secrets},
|
||||
{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms, secrets},
|
||||
{workdir, "evalenv", "push", "", platforms, secrets},
|
||||
{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms, secrets},
|
||||
}...)
|
||||
}
|
||||
if runtime.GOOS == "windows" {
|
||||
@@ -274,16 +387,22 @@ func TestRunEventHostEnvironment(t *testing.T) {
|
||||
}
|
||||
|
||||
tables = append(tables, []TestJobFileInfo{
|
||||
{workdir, "windows-prepend-path", "push", "", platforms},
|
||||
{workdir, "windows-add-env", "push", "", platforms},
|
||||
{workdir, "windows-prepend-path", "push", "", platforms, secrets},
|
||||
{workdir, "windows-add-env", "push", "", platforms, secrets},
|
||||
}...)
|
||||
} else {
|
||||
platforms := map[string]string{
|
||||
"self-hosted": "-self-hosted",
|
||||
"self-hosted": "-self-hosted",
|
||||
"ubuntu-latest": "-self-hosted",
|
||||
}
|
||||
|
||||
tables = append(tables, []TestJobFileInfo{
|
||||
{workdir, "nix-prepend-path", "push", "", platforms},
|
||||
{workdir, "nix-prepend-path", "push", "", platforms, secrets},
|
||||
{workdir, "inputs-via-env-context", "push", "", platforms, secrets},
|
||||
{workdir, "do-not-leak-step-env-in-composite", "push", "", platforms, secrets},
|
||||
{workdir, "set-env-step-env-override", "push", "", platforms, secrets},
|
||||
{workdir, "set-env-new-env-file-per-step", "push", "", platforms, secrets},
|
||||
{workdir, "no-panic-on-invalid-composite-action", "push", "jobs failed due to invalid action", platforms, secrets},
|
||||
}...)
|
||||
}
|
||||
|
||||
@@ -303,17 +422,17 @@ func TestDryrunEvent(t *testing.T) {
|
||||
|
||||
tables := []TestJobFileInfo{
|
||||
// Shells
|
||||
{workdir, "shells/defaults", "push", "", platforms},
|
||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}}, // custom image with pwsh
|
||||
{workdir, "shells/bash", "push", "", platforms},
|
||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}}, // slim doesn't have python
|
||||
{workdir, "shells/sh", "push", "", platforms},
|
||||
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
||||
{workdir, "shells/bash", "push", "", platforms, secrets},
|
||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
|
||||
{workdir, "shells/sh", "push", "", platforms, secrets},
|
||||
|
||||
// Local action
|
||||
{workdir, "local-action-docker-url", "push", "", platforms},
|
||||
{workdir, "local-action-dockerfile", "push", "", platforms},
|
||||
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms},
|
||||
{workdir, "local-action-js", "push", "", platforms},
|
||||
{workdir, "local-action-docker-url", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-js", "push", "", platforms, secrets},
|
||||
}
|
||||
|
||||
for _, table := range tables {
|
||||
@@ -323,6 +442,30 @@ func TestDryrunEvent(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestDockerActionForcePullForceRebuild(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
config := &Config{
|
||||
ForcePull: true,
|
||||
ForceRebuild: true,
|
||||
}
|
||||
|
||||
tables := []TestJobFileInfo{
|
||||
{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
|
||||
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
|
||||
}
|
||||
|
||||
for _, table := range tables {
|
||||
t.Run(table.workflowPath, func(t *testing.T) {
|
||||
table.runTest(ctx, t, config)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRunDifferentArchitecture(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
@@ -339,6 +482,17 @@ func TestRunDifferentArchitecture(t *testing.T) {
|
||||
tjfi.runTest(context.Background(), t, &Config{ContainerArchitecture: "linux/arm64"})
|
||||
}
|
||||
|
||||
type maskJobLoggerFactory struct {
|
||||
Output bytes.Buffer
|
||||
}
|
||||
|
||||
func (f *maskJobLoggerFactory) WithJobLogger() *log.Logger {
|
||||
logger := log.New()
|
||||
logger.SetOutput(io.MultiWriter(&f.Output, os.Stdout))
|
||||
logger.SetLevel(log.DebugLevel)
|
||||
return logger
|
||||
}
|
||||
|
||||
func TestMaskValues(t *testing.T) {
|
||||
assertNoSecret := func(text string, secret string) {
|
||||
index := strings.Index(text, "composite secret")
|
||||
@@ -362,9 +516,9 @@ func TestMaskValues(t *testing.T) {
|
||||
platforms: platforms,
|
||||
}
|
||||
|
||||
output := captureOutput(t, func() {
|
||||
tjfi.runTest(context.Background(), t, &Config{})
|
||||
})
|
||||
logger := &maskJobLoggerFactory{}
|
||||
tjfi.runTest(WithJobLoggerFactory(common.WithLogger(context.Background(), logger.WithJobLogger()), logger), t, &Config{})
|
||||
output := logger.Output.String()
|
||||
|
||||
assertNoSecret(output, "secret value")
|
||||
assertNoSecret(output, "YWJjCg==")
|
||||
@@ -392,6 +546,27 @@ func TestRunEventSecrets(t *testing.T) {
|
||||
tjfi.runTest(context.Background(), t, &Config{Secrets: secrets, Env: env})
|
||||
}
|
||||
|
||||
func TestRunActionInputs(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
workflowPath := "input-from-cli"
|
||||
|
||||
tjfi := TestJobFileInfo{
|
||||
workdir: workdir,
|
||||
workflowPath: workflowPath,
|
||||
eventName: "workflow_dispatch",
|
||||
errorMessage: "",
|
||||
platforms: platforms,
|
||||
}
|
||||
|
||||
inputs := map[string]string{
|
||||
"SOME_INPUT": "input",
|
||||
}
|
||||
|
||||
tjfi.runTest(context.Background(), t, &Config{Inputs: inputs})
|
||||
}
|
||||
|
||||
func TestRunEventPullRequest(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
|
@@ -44,16 +44,16 @@ func (s stepStage) String() string {
	return "Unknown"
}

func (s stepStage) getStepName(stepModel *model.Step) string {
	switch s {
	case stepStagePre:
		return fmt.Sprintf("pre-%s", stepModel.ID)
	case stepStageMain:
		return stepModel.ID
	case stepStagePost:
		return fmt.Sprintf("post-%s", stepModel.ID)
	}
	return "unknown"
}

func processRunnerEnvFileCommand(ctx context.Context, fileName string, rc *RunContext, setter func(context.Context, map[string]string, string)) error {
	env := map[string]string{}
	err := rc.JobContainer.UpdateFromEnv(path.Join(rc.JobContainer.GetActPath(), fileName), &env)(ctx)
	if err != nil {
		return err
	}
	for k, v := range env {
		setter(ctx, map[string]string{"name": k}, v)
	}
	return nil
}

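For orientation, the call pattern that this helper replaces the per-command loops with in runStepExecutor (the calls are taken from the hunks further below, shown together here):

	err = processRunnerEnvFileCommand(ctx, envFileCommand, rc, rc.setEnv)       // GITHUB_ENV
	err = processRunnerEnvFileCommand(ctx, stateFileCommand, rc, rc.saveState)  // GITHUB_STATE
	err = processRunnerEnvFileCommand(ctx, outputFileCommand, rc, rc.setOutput) // GITHUB_OUTPUT

(each call is followed by its own error check in the actual hunk)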
func runStepExecutor(step step, stage stepStage, executor common.Executor) common.Executor {
|
||||
@@ -63,13 +63,16 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
||||
stepModel := step.getStepModel()
|
||||
|
||||
ifExpression := step.getIfExpression(ctx, stage)
|
||||
rc.CurrentStep = stage.getStepName(stepModel)
|
||||
rc.CurrentStep = stepModel.ID
|
||||
|
||||
rc.StepResults[rc.CurrentStep] = &model.StepResult{
|
||||
stepResult := &model.StepResult{
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outputs: make(map[string]string),
|
||||
}
|
||||
if stage == stepStageMain {
|
||||
rc.StepResults[rc.CurrentStep] = stepResult
|
||||
}
|
||||
|
||||
err := setupEnv(ctx, step)
|
||||
if err != nil {
|
||||
@@ -78,15 +81,15 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
||||
|
||||
runStep, err := isStepEnabled(ctx, ifExpression, step, stage)
|
||||
if err != nil {
|
||||
rc.StepResults[rc.CurrentStep].Conclusion = model.StepStatusFailure
|
||||
rc.StepResults[rc.CurrentStep].Outcome = model.StepStatusFailure
|
||||
stepResult.Conclusion = model.StepStatusFailure
|
||||
stepResult.Outcome = model.StepStatusFailure
|
||||
return err
|
||||
}
|
||||
|
||||
if !runStep {
|
||||
rc.StepResults[rc.CurrentStep].Conclusion = model.StepStatusSkipped
|
||||
rc.StepResults[rc.CurrentStep].Outcome = model.StepStatusSkipped
|
||||
logger.WithField("stepResult", rc.StepResults[rc.CurrentStep].Outcome).Debugf("Skipping step '%s' due to '%s'", stepModel, ifExpression)
|
||||
stepResult.Conclusion = model.StepStatusSkipped
|
||||
stepResult.Outcome = model.StepStatusSkipped
|
||||
logger.WithField("stepResult", stepResult.Outcome).Debugf("Skipping step '%s' due to '%s'", stepModel, ifExpression)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -98,58 +101,79 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
||||
|
||||
// Prepare and clean Runner File Commands
|
||||
actPath := rc.JobContainer.GetActPath()
|
||||
|
||||
outputFileCommand := path.Join("workflow", "outputcmd.txt")
|
||||
stateFileCommand := path.Join("workflow", "statecmd.txt")
|
||||
(*step.getEnv())["GITHUB_OUTPUT"] = path.Join(actPath, outputFileCommand)
|
||||
|
||||
stateFileCommand := path.Join("workflow", "statecmd.txt")
|
||||
(*step.getEnv())["GITHUB_STATE"] = path.Join(actPath, stateFileCommand)
|
||||
|
||||
pathFileCommand := path.Join("workflow", "pathcmd.txt")
|
||||
(*step.getEnv())["GITHUB_PATH"] = path.Join(actPath, pathFileCommand)
|
||||
|
||||
envFileCommand := path.Join("workflow", "envs.txt")
|
||||
(*step.getEnv())["GITHUB_ENV"] = path.Join(actPath, envFileCommand)
|
||||
|
||||
summaryFileCommand := path.Join("workflow", "SUMMARY.md")
|
||||
(*step.getEnv())["GITHUB_STEP_SUMMARY"] = path.Join(actPath, summaryFileCommand)
|
||||
|
||||
_ = rc.JobContainer.Copy(actPath, &container.FileEntry{
|
||||
Name: outputFileCommand,
|
||||
Mode: 0666,
|
||||
Mode: 0o666,
|
||||
}, &container.FileEntry{
|
||||
Name: stateFileCommand,
|
||||
Mode: 0o666,
|
||||
}, &container.FileEntry{
|
||||
Name: pathFileCommand,
|
||||
Mode: 0o666,
|
||||
}, &container.FileEntry{
|
||||
Name: envFileCommand,
|
||||
Mode: 0666,
|
||||
}, &container.FileEntry{
|
||||
Name: summaryFileCommand,
|
||||
Mode: 0o666,
|
||||
})(ctx)
|
||||
|
||||
err = executor(ctx)
|
||||
|
||||
if err == nil {
|
||||
logger.WithField("stepResult", rc.StepResults[rc.CurrentStep].Outcome).Infof(" \u2705 Success - %s %s", stage, stepString)
|
||||
logger.WithField("stepResult", stepResult.Outcome).Infof(" \u2705 Success - %s %s", stage, stepString)
|
||||
} else {
|
||||
rc.StepResults[rc.CurrentStep].Outcome = model.StepStatusFailure
|
||||
stepResult.Outcome = model.StepStatusFailure
|
||||
|
||||
continueOnError, parseErr := isContinueOnError(ctx, stepModel.RawContinueOnError, step, stage)
|
||||
if parseErr != nil {
|
||||
rc.StepResults[rc.CurrentStep].Conclusion = model.StepStatusFailure
|
||||
stepResult.Conclusion = model.StepStatusFailure
|
||||
return parseErr
|
||||
}
|
||||
|
||||
if continueOnError {
|
||||
logger.Infof("Failed but continue next step")
|
||||
err = nil
|
||||
rc.StepResults[rc.CurrentStep].Conclusion = model.StepStatusSuccess
|
||||
stepResult.Conclusion = model.StepStatusSuccess
|
||||
} else {
|
||||
rc.StepResults[rc.CurrentStep].Conclusion = model.StepStatusFailure
|
||||
stepResult.Conclusion = model.StepStatusFailure
|
||||
}
|
||||
|
||||
logger.WithField("stepResult", rc.StepResults[rc.CurrentStep].Outcome).Errorf(" \u274C Failure - %s %s", stage, stepString)
|
||||
logger.WithField("stepResult", stepResult.Outcome).Errorf(" \u274C Failure - %s %s", stage, stepString)
|
||||
}
|
||||
// Process Runner File Commands
|
||||
orgerr := err
|
||||
state := map[string]string{}
|
||||
err = rc.JobContainer.UpdateFromEnv(path.Join(actPath, stateFileCommand), &state)(ctx)
|
||||
err = processRunnerEnvFileCommand(ctx, envFileCommand, rc, rc.setEnv)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for k, v := range state {
|
||||
rc.saveState(ctx, map[string]string{"name": k}, v)
|
||||
}
|
||||
output := map[string]string{}
|
||||
err = rc.JobContainer.UpdateFromEnv(path.Join(actPath, outputFileCommand), &output)(ctx)
|
||||
err = processRunnerEnvFileCommand(ctx, stateFileCommand, rc, rc.saveState)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for k, v := range output {
|
||||
rc.setOutput(ctx, map[string]string{"name": k}, v)
|
||||
err = processRunnerEnvFileCommand(ctx, outputFileCommand, rc, rc.setOutput)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = rc.UpdateExtraPath(ctx, path.Join(actPath, pathFileCommand))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if orgerr != nil {
|
||||
return orgerr
|
||||
@@ -162,24 +186,22 @@ func setupEnv(ctx context.Context, step step) error {
|
||||
rc := step.getRunContext()
|
||||
|
||||
mergeEnv(ctx, step)
|
||||
err := rc.JobContainer.UpdateFromImageEnv(step.getEnv())(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = rc.JobContainer.UpdateFromEnv((*step.getEnv())["GITHUB_ENV"], step.getEnv())(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = rc.JobContainer.UpdateFromPath(step.getEnv())(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// merge step env last, since it should not be overwritten
|
||||
mergeIntoMap(step.getEnv(), step.getStepModel().GetEnv())
|
||||
|
||||
exprEval := rc.NewExpressionEvaluator(ctx)
|
||||
for k, v := range *step.getEnv() {
|
||||
(*step.getEnv())[k] = exprEval.Interpolate(ctx, v)
|
||||
if !strings.HasPrefix(k, "INPUT_") {
|
||||
(*step.getEnv())[k] = exprEval.Interpolate(ctx, v)
|
||||
}
|
||||
}
|
||||
// after we have an evaluated step context, update the expressions evaluator with a new env context
|
||||
// you can use step level env in the with property of a uses construct
|
||||
exprEval = rc.NewExpressionEvaluatorWithEnv(ctx, *step.getEnv())
|
||||
for k, v := range *step.getEnv() {
|
||||
if strings.HasPrefix(k, "INPUT_") {
|
||||
(*step.getEnv())[k] = exprEval.Interpolate(ctx, v)
|
||||
}
|
||||
}
|
||||
|
||||
common.Logger(ctx).Debugf("setupEnv => %v", *step.getEnv())
|
||||
@@ -199,14 +221,6 @@ func mergeEnv(ctx context.Context, step step) {
|
||||
mergeIntoMap(env, rc.GetEnv())
|
||||
}
|
||||
|
||||
path := rc.JobContainer.GetPathVariableName()
|
||||
if (*env)[path] == "" {
|
||||
(*env)[path] = rc.JobContainer.DefaultPathVariable()
|
||||
}
|
||||
if rc.ExtraPath != nil && len(rc.ExtraPath) > 0 {
|
||||
(*env)[path] = rc.JobContainer.JoinPathVariable(append(rc.ExtraPath, (*env)[path])...)
|
||||
}
|
||||
|
||||
rc.withGithubEnv(ctx, step.getGithubContext(ctx), *env)
|
||||
}
|
||||
|
||||
|
@@ -1,7 +1,9 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -67,7 +69,7 @@ func TestStepActionLocalTest(t *testing.T) {
|
||||
salm.On("readAction", sal.Step, filepath.Clean("/tmp/path/to/action"), "", mock.Anything, mock.Anything).
|
||||
Return(&model.Action{}, nil)
|
||||
|
||||
cm.On("UpdateFromImageEnv", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
cm.On("Copy", "/var/run/act", mock.AnythingOfType("[]*container.FileEntry")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
@@ -75,14 +77,6 @@ func TestStepActionLocalTest(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromPath", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("Copy", "/var/run/act", mock.AnythingOfType("[]*container.FileEntry")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
@@ -91,6 +85,8 @@ func TestStepActionLocalTest(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)
|
||||
|
||||
salm.On("runAction", sal, filepath.Clean("/tmp/path/to/action"), (*remoteAction)(nil)).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
@@ -107,13 +103,12 @@ func TestStepActionLocalTest(t *testing.T) {
|
||||
|
||||
func TestStepActionLocalPost(t *testing.T) {
|
||||
table := []struct {
|
||||
name string
|
||||
stepModel *model.Step
|
||||
actionModel *model.Action
|
||||
initialStepResults map[string]*model.StepResult
|
||||
expectedPostStepResult *model.StepResult
|
||||
err error
|
||||
mocks struct {
|
||||
name string
|
||||
stepModel *model.Step
|
||||
actionModel *model.Action
|
||||
initialStepResults map[string]*model.StepResult
|
||||
err error
|
||||
mocks struct {
|
||||
env bool
|
||||
exec bool
|
||||
}
|
||||
@@ -138,11 +133,6 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -171,11 +161,6 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -204,16 +189,11 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSkipped,
|
||||
Outcome: model.StepStatusSkipped,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
}{
|
||||
env: true,
|
||||
env: false,
|
||||
exec: false,
|
||||
},
|
||||
},
|
||||
@@ -238,7 +218,6 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: nil,
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -277,11 +256,6 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
}
|
||||
sal.RunContext.ExprEval = sal.RunContext.NewExpressionEvaluator(ctx)
|
||||
|
||||
if tt.mocks.env {
|
||||
cm.On("UpdateFromImageEnv", &sal.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", &sal.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromPath", &sal.env).Return(func(ctx context.Context) error { return nil })
|
||||
}
|
||||
if tt.mocks.exec {
|
||||
suffixMatcher := func(suffix string) interface{} {
|
||||
return mock.MatchedBy(func(array []string) bool {
|
||||
@@ -294,6 +268,10 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
@@ -301,12 +279,14 @@ func TestStepActionLocalPost(t *testing.T) {
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/outputcmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)
|
||||
}
|
||||
|
||||
err := sal.post()(ctx)
|
||||
|
||||
assert.Equal(t, tt.err, err)
|
||||
assert.Equal(t, tt.expectedPostStepResult, sal.RunContext.StepResults["post-step"])
|
||||
assert.Equal(t, sal.RunContext.StepResults["post-step"], (*model.StepResult)(nil))
|
||||
cm.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
|
@@ -11,11 +11,11 @@ import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
gogit "github.com/go-git/go-git/v5"
|
||||
|
||||
"github.com/nektos/act/pkg/common"
|
||||
"github.com/nektos/act/pkg/common/git"
|
||||
"github.com/nektos/act/pkg/model"
|
||||
|
||||
gogit "github.com/go-git/go-git/v5"
|
||||
)
|
||||
|
||||
type stepActionRemote struct {
|
||||
@@ -197,6 +197,7 @@ func (sar *stepActionRemote) getCompositeRunContext(ctx context.Context) *RunCon
|
||||
// was already created during the pre stage)
|
||||
env := evaluateCompositeInputAndEnv(ctx, sar.RunContext, sar)
|
||||
sar.compositeRunContext.Env = env
|
||||
sar.compositeRunContext.ExtraPath = sar.RunContext.ExtraPath
|
||||
}
|
||||
return sar.compositeRunContext
|
||||
}
|
||||
|
@@ -1,18 +1,20 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/nektos/act/pkg/common"
|
||||
"github.com/nektos/act/pkg/common/git"
|
||||
"github.com/nektos/act/pkg/model"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/nektos/act/pkg/common"
|
||||
"github.com/nektos/act/pkg/common/git"
|
||||
"github.com/nektos/act/pkg/model"
|
||||
)
|
||||
|
||||
type stepActionRemoteMocks struct {
|
||||
@@ -163,11 +165,6 @@ func TestStepActionRemote(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
if tt.mocks.env {
|
||||
cm.On("UpdateFromImageEnv", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromPath", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
}
|
||||
if tt.mocks.read {
|
||||
sarm.On("readAction", sar.Step, suffixMatcher("act/remote-action@v1"), "", mock.Anything, mock.Anything).Return(&model.Action{}, nil)
|
||||
}
|
||||
@@ -178,6 +175,10 @@ func TestStepActionRemote(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
@@ -185,6 +186,8 @@ func TestStepActionRemote(t *testing.T) {
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/outputcmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)
|
||||
}
|
||||
|
||||
err := sar.pre()(ctx)
|
||||
@@ -412,14 +415,14 @@ func TestStepActionRemotePreThroughActionToken(t *testing.T) {
|
||||
|
||||
func TestStepActionRemotePost(t *testing.T) {
|
||||
table := []struct {
|
||||
name string
|
||||
stepModel *model.Step
|
||||
actionModel *model.Action
|
||||
initialStepResults map[string]*model.StepResult
|
||||
expectedEnv map[string]string
|
||||
expectedPostStepResult *model.StepResult
|
||||
err error
|
||||
mocks struct {
|
||||
name string
|
||||
stepModel *model.Step
|
||||
actionModel *model.Action
|
||||
initialStepResults map[string]*model.StepResult
|
||||
IntraActionState map[string]map[string]string
|
||||
expectedEnv map[string]string
|
||||
err error
|
||||
mocks struct {
|
||||
env bool
|
||||
exec bool
|
||||
}
|
||||
@@ -442,19 +445,16 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Outputs: map[string]string{},
|
||||
State: map[string]string{
|
||||
"key": "value",
|
||||
},
|
||||
},
|
||||
},
|
||||
IntraActionState: map[string]map[string]string{
|
||||
"step": {
|
||||
"key": "value",
|
||||
},
|
||||
},
|
||||
expectedEnv: map[string]string{
|
||||
"STATE_key": "value",
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -483,11 +483,6 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSuccess,
|
||||
Outcome: model.StepStatusSuccess,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -516,11 +511,6 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: &model.StepResult{
|
||||
Conclusion: model.StepStatusSkipped,
|
||||
Outcome: model.StepStatusSkipped,
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -550,7 +540,6 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
Outputs: map[string]string{},
|
||||
},
|
||||
},
|
||||
expectedPostStepResult: nil,
|
||||
mocks: struct {
|
||||
env bool
|
||||
exec bool
|
||||
@@ -582,18 +571,14 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
StepResults: tt.initialStepResults,
|
||||
StepResults: tt.initialStepResults,
|
||||
IntraActionState: tt.IntraActionState,
|
||||
},
|
||||
Step: tt.stepModel,
|
||||
action: tt.actionModel,
|
||||
}
|
||||
sar.RunContext.ExprEval = sar.RunContext.NewExpressionEvaluator(ctx)
|
||||
|
||||
if tt.mocks.env {
|
||||
cm.On("UpdateFromImageEnv", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
cm.On("UpdateFromPath", &sar.env).Return(func(ctx context.Context) error { return nil })
|
||||
}
|
||||
if tt.mocks.exec {
|
||||
cm.On("Exec", []string{"node", "/var/run/act/actions/remote-action@v1/post.js"}, sar.env, "", "").Return(func(ctx context.Context) error { return tt.err })
|
||||
|
||||
@@ -601,6 +586,10 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
@@ -608,6 +597,8 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
cm.On("UpdateFromEnv", "/var/run/act/workflow/outputcmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)
|
||||
}
|
||||
|
||||
err := sar.post()(ctx)
|
||||
@@ -618,7 +609,8 @@ func TestStepActionRemotePost(t *testing.T) {
|
||||
assert.Equal(t, value, sar.env[key])
|
||||
}
|
||||
}
|
||||
assert.Equal(t, tt.expectedPostStepResult, sar.RunContext.StepResults["post-step"])
// Ensure that StepResults is nil in this test
assert.Equal(t, sar.RunContext.StepResults["post-step"], (*model.StepResult)(nil))
|
||||
cm.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
@@ -1,7 +1,9 @@
package runner

import (
"bytes"
"context"
"io"
"testing"

"github.com/nektos/act/pkg/container"
@@ -55,18 +57,6 @@ func TestStepDockerMain(t *testing.T) {
}
sd.RunContext.ExprEval = sd.RunContext.NewExpressionEvaluator(ctx)

cm.On("UpdateFromImageEnv", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})

cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})

cm.On("UpdateFromPath", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})

cm.On("Pull", false).Return(func(ctx context.Context) error {
return nil
})
@@ -91,6 +81,10 @@ func TestStepDockerMain(t *testing.T) {
return nil
})

cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})

cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})
@@ -99,6 +93,8 @@ func TestStepDockerMain(t *testing.T) {
return nil
})

cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)

err := sd.main()(ctx)
assert.Nil(t, err)

@@ -6,6 +6,7 @@ import (
"strings"

"github.com/kballard/go-shellquote"

"github.com/nektos/act/pkg/common"
"github.com/nektos/act/pkg/container"
"github.com/nektos/act/pkg/model"
@@ -30,6 +31,7 @@ func (sr *stepRun) main() common.Executor {
return runStepExecutor(sr, stepStageMain, common.NewPipelineExecutor(
sr.setupShellCommandExecutor(),
func(ctx context.Context) error {
sr.getRunContext().ApplyExtraPath(ctx, &sr.env)
return sr.getRunContext().JobContainer.Exec(sr.cmd, sr.env, "", sr.Step.WorkingDirectory)(ctx)
},
))
@@ -71,7 +73,7 @@ func (sr *stepRun) setupShellCommandExecutor() common.Executor {
rc := sr.getRunContext()
return rc.JobContainer.Copy(rc.JobContainer.GetActPath(), &container.FileEntry{
Name: scriptName,
Mode: 0755,
Mode: 0o755,
Body: script,
})(ctx)
}
@@ -1,20 +1,23 @@
package runner

import (
"bytes"
"context"
"io"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"

"github.com/nektos/act/pkg/container"
"github.com/nektos/act/pkg/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)

func TestStepRun(t *testing.T) {
cm := &containerMock{}
fileEntry := &container.FileEntry{
Name: "workflow/1.sh",
Mode: 0755,
Mode: 0o755,
Body: "\ncmd\n",
}

@@ -53,7 +56,7 @@ func TestStepRun(t *testing.T) {
return nil
})

cm.On("UpdateFromImageEnv", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
cm.On("Copy", "/var/run/act", mock.AnythingOfType("[]*container.FileEntry")).Return(func(ctx context.Context) error {
return nil
})

@@ -61,14 +64,6 @@ func TestStepRun(t *testing.T) {
return nil
})

cm.On("UpdateFromPath", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})

cm.On("Copy", "/var/run/act", mock.AnythingOfType("[]*container.FileEntry")).Return(func(ctx context.Context) error {
return nil
})

cm.On("UpdateFromEnv", "/var/run/act/workflow/statecmd.txt", mock.AnythingOfType("*map[string]string")).Return(func(ctx context.Context) error {
return nil
})
@@ -79,6 +74,8 @@ func TestStepRun(t *testing.T) {

ctx := context.Background()

cm.On("GetContainerArchive", ctx, "/var/run/act/workflow/pathcmd.txt").Return(io.NopCloser(&bytes.Buffer{}), nil)

err := sr.main()(ctx)
assert.Nil(t, err)

@@ -134,7 +134,6 @@ func TestSetupEnv(t *testing.T) {
Env: map[string]string{
"RC_KEY": "rcvalue",
},
ExtraPath: []string{"/path/to/extra/file"},
JobContainer: cm,
}
step := &model.Step{
@@ -142,19 +141,13 @@ func TestSetupEnv(t *testing.T) {
"STEP_WITH": "with-value",
},
}
env := map[string]string{
"PATH": "",
}
env := map[string]string{}

sm.On("getRunContext").Return(rc)
sm.On("getGithubContext").Return(rc)
sm.On("getStepModel").Return(step)
sm.On("getEnv").Return(&env)

cm.On("UpdateFromImageEnv", &env).Return(func(ctx context.Context) error { return nil })
cm.On("UpdateFromEnv", "/var/run/act/workflow/envs.txt", &env).Return(func(ctx context.Context) error { return nil })
cm.On("UpdateFromPath", &env).Return(func(ctx context.Context) error { return nil })

err := setupEnv(context.Background(), sm)
assert.Nil(t, err)

@@ -178,13 +171,11 @@ func TestSetupEnv(t *testing.T) {
"GITHUB_ACTION_REPOSITORY": "",
"GITHUB_API_URL": "https:///api/v3",
"GITHUB_BASE_REF": "",
"GITHUB_ENV": "/var/run/act/workflow/envs.txt",
"GITHUB_EVENT_NAME": "",
"GITHUB_EVENT_PATH": "/var/run/act/workflow/event.json",
"GITHUB_GRAPHQL_URL": "https:///api/graphql",
"GITHUB_HEAD_REF": "",
"GITHUB_JOB": "",
"GITHUB_PATH": "/var/run/act/workflow/paths.txt",
"GITHUB_JOB": "1",
"GITHUB_RETENTION_DAYS": "0",
"GITHUB_RUN_ID": "runId",
"GITHUB_RUN_NUMBER": "1",
@@ -192,7 +183,6 @@ func TestSetupEnv(t *testing.T) {
"GITHUB_TOKEN": "",
"GITHUB_WORKFLOW": "",
"INPUT_STEP_WITH": "with-value",
"PATH": "/path/to/extra/file:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"RC_KEY": "rcvalue",
"RUNNER_PERFLOG": "/dev/null",
"RUNNER_TRACKING_ID": "",
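The expected PATH above ("/path/to/extra/file:/usr/local/sbin:...") shows the run context's ExtraPath entries being prepended to the container's default PATH. A rough sketch of that prepend, assuming a plain string join rather than act's actual ApplyExtraPath logic:

package runner

import "strings"

// prependExtraPath is a hypothetical illustration: ExtraPath entries go in front
// of whatever PATH the job container already reports, separated by ':'.
func prependExtraPath(extraPath []string, env map[string]string) {
	if len(extraPath) == 0 {
		return
	}
	path := strings.Join(extraPath, ":")
	if env["PATH"] != "" {
		path += ":" + env["PATH"]
	}
	env["PATH"] = path
}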
82 pkg/runner/testdata/.github/workflows/local-reusable-workflow.yml vendored Normal file
@@ -0,0 +1,82 @@
name: reusable

on:
workflow_call:
inputs:
string_required:
required: true
type: string
string_optional:
required: false
type: string
default: string
bool_required:
required: true
type: boolean
bool_optional:
required: false
type: boolean
default: true
number_required:
required: true
type: number
number_optional:
required: false
type: number
default: ${{ 1 }}
outputs:
output:
description: "A workflow output"
value: ${{ jobs.reusable_workflow_job.outputs.job-output }}

jobs:
reusable_workflow_job:
runs-on: ubuntu-latest
steps:
- name: test required string
run: |
echo inputs.string_required=${{ inputs.string_required }}
[[ "${{ inputs.string_required == 'string' }}" = "true" ]] || exit 1

- name: test optional string
run: |
echo inputs.string_optional=${{ inputs.string_optional }}
[[ "${{ inputs.string_optional == 'string' }}" = "true" ]] || exit 1

- name: test required bool
run: |
echo inputs.bool_required=${{ inputs.bool_required }}
[[ "${{ inputs.bool_required }}" = "true" ]] || exit 1

- name: test optional bool
run: |
echo inputs.bool_optional=${{ inputs.bool_optional }}
[[ "${{ inputs.bool_optional }}" = "true" ]] || exit 1

- name: test required number
run: |
echo inputs.number_required=${{ inputs.number_required }}
[[ "${{ inputs.number_required == 1 }}" = "true" ]] || exit 1

- name: test optional number
run: |
echo inputs.number_optional=${{ inputs.number_optional }}
[[ "${{ inputs.number_optional == 1 }}" = "true" ]] || exit 1

- name: test secret
run: |
echo secrets.secret=${{ secrets.secret }}
[[ "${{ secrets.secret == 'keep_it_private' }}" = "true" ]] || exit 1

- name: test github.event_name is never workflow_call
run: |
echo github.event_name=${{ github.event_name }}
[[ "${{ github.event_name != 'workflow_call' }}" = "true" ]] || exit 1

- name: test output
id: output_test
run: |
echo "value=${{ inputs.string_required }}" >> $GITHUB_OUTPUT

outputs:
job-output: ${{ steps.output_test.outputs.value }}
27 pkg/runner/testdata/GITHUB_ENV-use-in-env-ctx/push.yml vendored Normal file
@@ -0,0 +1,27 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
env:
MYGLOBALENV3: myglobalval3
steps:
- run: |
echo MYGLOBALENV1=myglobalval1 > $GITHUB_ENV
echo "::set-env name=MYGLOBALENV2::myglobalval2"
- uses: nektos/act-test-actions/script@main
with:
main: |
env
[[ "$MYGLOBALENV1" = "${{ env.MYGLOBALENV1 }}" ]]
[[ "$MYGLOBALENV1" = "${{ env.MYGLOBALENV1ALIAS }}" ]]
[[ "$MYGLOBALENV1" = "$MYGLOBALENV1ALIAS" ]]
[[ "$MYGLOBALENV2" = "${{ env.MYGLOBALENV2 }}" ]]
[[ "$MYGLOBALENV2" = "${{ env.MYGLOBALENV2ALIAS }}" ]]
[[ "$MYGLOBALENV2" = "$MYGLOBALENV2ALIAS" ]]
[[ "$MYGLOBALENV3" = "${{ env.MYGLOBALENV3 }}" ]]
[[ "$MYGLOBALENV3" = "${{ env.MYGLOBALENV3ALIAS }}" ]]
[[ "$MYGLOBALENV3" = "$MYGLOBALENV3ALIAS" ]]
env:
MYGLOBALENV1ALIAS: ${{ env.MYGLOBALENV1 }}
MYGLOBALENV2ALIAS: ${{ env.MYGLOBALENV2 }}
MYGLOBALENV3ALIAS: ${{ env.MYGLOBALENV3 }}
48 pkg/runner/testdata/GITHUB_STATE/push.yml vendored Normal file
@@ -0,0 +1,48 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
steps:
- uses: nektos/act-test-actions/script@main
with:
pre: |
env
echo mystate0=mystateval > $GITHUB_STATE
echo "::save-state name=mystate1::mystateval"
main: |
env
echo mystate2=mystateval > $GITHUB_STATE
echo "::save-state name=mystate3::mystateval"
post: |
env
[ "$STATE_mystate0" = "mystateval" ]
[ "$STATE_mystate1" = "mystateval" ]
[ "$STATE_mystate2" = "mystateval" ]
[ "$STATE_mystate3" = "mystateval" ]
test-id-collision-bug:
runs-on: ubuntu-latest
steps:
- uses: nektos/act-test-actions/script@main
id: script
with:
pre: |
env
echo mystate0=mystateval > $GITHUB_STATE
echo "::save-state name=mystate1::mystateval"
main: |
env
echo mystate2=mystateval > $GITHUB_STATE
echo "::save-state name=mystate3::mystateval"
post: |
env
[ "$STATE_mystate0" = "mystateval" ]
[ "$STATE_mystate1" = "mystateval" ]
[ "$STATE_mystate2" = "mystateval" ]
[ "$STATE_mystate3" = "mystateval" ]
- uses: nektos/act-test-actions/script@main
id: pre-script
with:
main: |
env
echo mystate0=mystateerror > $GITHUB_STATE
echo "::save-state name=mystate1::mystateerror"
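For context, the STATE_mystate* variables that the post steps assert on are read back from the intermediate state command file. A loose, hypothetical sketch of that mapping (not act's parser):

package runner

import (
	"bufio"
	"strings"
)

// exposeState illustrates how "name=value" lines written to $GITHUB_STATE could be
// surfaced to a post step as STATE_-prefixed environment variables, e.g.
// "mystate0=mystateval" becoming STATE_mystate0=mystateval. Illustrative only.
func exposeState(stateFileContent string, env map[string]string) {
	scanner := bufio.NewScanner(strings.NewReader(stateFileContent))
	for scanner.Scan() {
		if key, value, ok := strings.Cut(scanner.Text(), "="); ok && key != "" {
			env["STATE_"+key] = value
		}
	}
}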
@@ -11,3 +11,5 @@ jobs:
- uses: './actions-environment-and-context-tests/docker'
- uses: 'nektos/act-test-actions/js@main'
- uses: 'nektos/act-test-actions/docker@main'
- uses: 'nektos/act-test-actions/docker-file@main'
- uses: 'nektos/act-test-actions/docker-relative-context/action@main'
18 pkg/runner/testdata/do-not-leak-step-env-in-composite/push.yml vendored Normal file
@@ -0,0 +1,18 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
steps:
- run: |
runs:
using: composite
steps:
- run: exit 1
shell: bash
if: env.LEAK_ENV != 'val'
shell: cp {0} action.yml
- uses: ./
env:
LEAK_ENV: val
- run: exit 1
if: env.LEAK_ENV == 'val'
12 pkg/runner/testdata/docker-action-custom-path/push.yml vendored Normal file
@@ -0,0 +1,12 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
steps:
- run: |
FROM ubuntu:latest
ENV PATH="/opt/texlive/texdir/bin/x86_64-linuxmusl:${PATH}"
ENV ORG_PATH="${PATH}"
ENTRYPOINT [ "bash", "-c", "echo \"PATH=$PATH\" && echo \"ORG_PATH=$ORG_PATH\" && [[ \"$PATH\" = \"$ORG_PATH\" ]]" ]
shell: mv {0} Dockerfile
- uses: ./
13 pkg/runner/testdata/environment-files-parser-bug/push.yaml vendored Normal file
@@ -0,0 +1,13 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
steps:
- run: |
echo "test<<World" > $GITHUB_ENV
echo "x=Thats really Weird" >> $GITHUB_ENV
echo "World" >> $GITHUB_ENV
- if: env.test != 'x=Thats really Weird'
run: exit 1
- if: env.x == 'Thats really Weird' # This assert is triggered by the broken impl of act
run: exit 1
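The parser-bug workflow above exercises the heredoc form of $GITHUB_ENV: "test<<World" opens a block that only ends at a bare "World" line, so the middle line must become the value of test and must not define a variable x. A simplified, hypothetical parser showing that expected behaviour (not act's implementation):

package runner

import "strings"

// parseEnvFile reads GITHUB_ENV-style content: either "KEY=VALUE" on one line, or
// "KEY<<DELIM" followed by value lines up to a line equal to DELIM.
func parseEnvFile(content string) map[string]string {
	env := map[string]string{}
	lines := strings.Split(content, "\n")
	for i := 0; i < len(lines); i++ {
		line := lines[i]
		if line == "" {
			continue
		}
		// treat as heredoc only when "<<" comes before any "=" in the line
		if key, delim, ok := strings.Cut(line, "<<"); ok && !strings.Contains(key, "=") {
			var value []string
			for i++; i < len(lines) && lines[i] != delim; i++ {
				value = append(value, lines[i])
			}
			env[key] = strings.Join(value, "\n")
			continue
		}
		if key, value, ok := strings.Cut(line, "="); ok {
			env[key] = value
		}
	}
	return env
}

With this reading, the three echo lines yield test="x=Thats really Weird" and leave x unset, which is exactly what the two if: assertions check.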
101 pkg/runner/testdata/environment-files/push.yaml vendored Normal file
@@ -0,0 +1,101 @@
name: environment-files
on: push

jobs:
build:
runs-on: ubuntu-latest
steps:
- name: "Append to $GITHUB_PATH"
run: |
echo "$HOME/someFolder" >> $GITHUB_PATH
- name: "Append some more to $GITHUB_PATH"
run: |
echo "$HOME/someOtherFolder" >> $GITHUB_PATH
- name: "Check PATH"
run: |
echo "${PATH}"
if [[ ! "${PATH}" =~ .*"$HOME/"someOtherFolder.*"$HOME/"someFolder.* ]]; then
echo "${PATH} doesn't match .*someOtherFolder.*someFolder.*"
exit 1
fi
- name: "Prepend"
run: |
if ls | grep -q 'called ls' ; then
echo 'ls was overridden already?'
exit 2
fi
path_add=$(mktemp -d)
cat > $path_add/ls <<LS
#!/bin/sh
echo 'called ls'
LS
chmod +x $path_add/ls
echo $path_add >> $GITHUB_PATH
- name: "Verify prepend"
run: |
if ! ls | grep -q 'called ls' ; then
echo 'ls was not overridden'
exit 2
fi
- name: "Write single line env to $GITHUB_ENV"
run: |
echo "KEY=value" >> $GITHUB_ENV
- name: "Check single line env"
run: |
if [[ "${KEY}" != "value" ]]; then
echo "${KEY} doesn't == 'value'"
exit 1
fi
- name: "Write single line env with more than one 'equals' signs to $GITHUB_ENV"
run: |
echo "KEY=value=anothervalue" >> $GITHUB_ENV
- name: "Check single line env"
run: |
if [[ "${KEY}" != "value=anothervalue" ]]; then
echo "${KEY} doesn't == 'value=anothervalue'"
exit 1
fi
- name: "Write multiline env to $GITHUB_ENV"
run: |
echo 'KEY2<<EOF' >> $GITHUB_ENV
echo value2 >> $GITHUB_ENV
echo 'EOF' >> $GITHUB_ENV
- name: "Check multiline line env"
run: |
if [[ "${KEY2}" != "value2" ]]; then
echo "${KEY2} doesn't == 'value'"
exit 1
fi
- name: "Write multiline env with UUID to $GITHUB_ENV"
run: |
echo 'KEY3<<ghadelimiter_b8273c6d-d535-419a-a010-b0aaac240e36' >> $GITHUB_ENV
echo value3 >> $GITHUB_ENV
echo 'ghadelimiter_b8273c6d-d535-419a-a010-b0aaac240e36' >> $GITHUB_ENV
- name: "Check multiline env with UUID to $GITHUB_ENV"
run: |
if [[ "${KEY3}" != "value3" ]]; then
echo "${KEY3} doesn't == 'value3'"
exit 1
fi
- name: "Write single line output to $GITHUB_OUTPUT"
id: write-single-output
run: |
echo "KEY=value" >> $GITHUB_OUTPUT
- name: "Check single line output"
run: |
if [[ "${{ steps.write-single-output.outputs.KEY }}" != "value" ]]; then
echo "${{ steps.write-single-output.outputs.KEY }} doesn't == 'value'"
exit 1
fi
- name: "Write multiline output to $GITHUB_OUTPUT"
id: write-multi-output
run: |
echo 'KEY2<<EOF' >> $GITHUB_OUTPUT
echo value2 >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
- name: "Check multiline output"
run: |
if [[ "${{ steps.write-multi-output.outputs.KEY2 }}" != "value2" ]]; then
echo "${{ steps.write-multi-output.outputs.KEY2 }} doesn't == 'value2'"
exit 1
fi
33 pkg/runner/testdata/environment-variables/push.yml vendored Normal file
@@ -0,0 +1,33 @@
name: environment variables
on: push

jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Test on job level
run: |
echo \$UPPER=$UPPER
echo \$upper=$upper
echo \$LOWER=$LOWER
echo \$lower=$lower
[[ "$UPPER" = "UPPER" ]] || exit 1
[[ "$upper" = "" ]] || exit 1
[[ "$LOWER" = "" ]] || exit 1
[[ "$lower" = "lower" ]] || exit 1
- name: Test on step level
run: |
echo \$UPPER=$UPPER
echo \$upper=$upper
echo \$LOWER=$LOWER
echo \$lower=$lower
[[ "$UPPER" = "upper" ]] || exit 1
[[ "$upper" = "" ]] || exit 1
[[ "$LOWER" = "" ]] || exit 1
[[ "$lower" = "LOWER" ]] || exit 1
env:
UPPER: upper
lower: LOWER
env:
UPPER: UPPER
lower: lower
21 pkg/runner/testdata/input-from-cli/input.yml vendored Normal file
@@ -0,0 +1,21 @@
on:
workflow_dispatch:
inputs:
NAME:
description: "A random input name for the workflow"
type: string
required: true
SOME_VALUE:
description: "Some other input to pass"
type: string
required: true

jobs:
test:
name: Test
runs-on: ubuntu-latest

steps:
- name: Test with inputs
run: |
[ -z "${{ github.event.inputs.SOME_INPUT }}" ] && exit 1 || exit 0
8 pkg/runner/testdata/inputs-via-env-context/action.yml vendored Normal file
@@ -0,0 +1,8 @@
inputs:
test-env-input: {}
runs:
using: composite
steps:
- run: |
exit ${{ inputs.test-env-input == env.test-env-input && '0' || '1'}}
shell: bash
15 pkg/runner/testdata/inputs-via-env-context/push.yml vendored Normal file
@@ -0,0 +1,15 @@
on: push
jobs:
test-inputs-via-env-context:
runs-on: self-hosted
steps:
- uses: actions/checkout@v3
- uses: ./inputs-via-env-context
with:
test-env-input: ${{ env.test-env-input }}
env:
test-env-input: ${{ github.event_name }}/${{ github.run_id }}
- run: |
exit ${{ env.test-env-input == format('{0}/{1}', github.event_name, github.run_id) && '0' || '1' }}
env:
test-env-input: ${{ github.event_name }}/${{ github.run_id }}
16 pkg/runner/testdata/issue-1595/missing.yml vendored Normal file
@@ -0,0 +1,16 @@
name: missing
on: push

jobs:
second:
runs-on: ubuntu-latest
needs: first
steps:
- run: echo How did you get here?
shell: bash

standalone:
runs-on: ubuntu-latest
steps:
- run: echo Hello world
shell: bash
8 pkg/runner/testdata/issue-1595/no-event.yml vendored Normal file
@@ -0,0 +1,8 @@
name: no event

jobs:
stuck:
runs-on: ubuntu-latest
steps:
- run: echo How did you get here?
shell: bash
10 pkg/runner/testdata/issue-1595/no-first.yml vendored Normal file
@@ -0,0 +1,10 @@
name: no first
on: push

jobs:
second:
runs-on: ubuntu-latest
needs: first
steps:
- run: echo How did you get here?
shell: bash
15 pkg/runner/testdata/job-needs-context-contains-result/push.yml vendored Normal file
@@ -0,0 +1,15 @@
on:
push:
jobs:
test:
runs-on: ubuntu-latest
steps:
- run: exit 0
assert:
needs: test
if: |
( always() && !cancelled() ) && (
( needs.test.result != 'success' || !success() ) )
runs-on: ubuntu-latest
steps:
- run: exit 1
16 pkg/runner/testdata/matrix-exitcode/push.yml vendored Normal file
@@ -0,0 +1,16 @@
name: test

on: push

jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
val: ["success", "failure"]
fail-fast: false
steps:
- name: test
run: |
echo "Expected job result: ${{ matrix.val }}"
[[ "${{ matrix.val }}" = "success" ]] || exit 1
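As a side note, the matrix above expands to two independent job runs (val=success and val=failure), and fail-fast: false keeps the failing one from cancelling the other. A generic, hypothetical sketch of that expansion (a plain cartesian product, not act's matrix code):

package runner

// expandMatrix produces one configuration per combination of matrix values;
// {"val": ["success", "failure"]} yields two configurations.
func expandMatrix(matrix map[string][]string) []map[string]string {
	combos := []map[string]string{{}}
	for key, values := range matrix {
		var next []map[string]string
		for _, combo := range combos {
			for _, value := range values {
				c := map[string]string{}
				for k, v := range combo {
					c[k] = v
				}
				c[key] = value
				next = append(next, c)
			}
		}
		combos = next
	}
	return combos
}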
29 pkg/runner/testdata/no-panic-on-invalid-composite-action/push.yml vendored Normal file
@@ -0,0 +1,29 @@
on: push
jobs:
local-invalid-step:
runs-on: ubuntu-latest
steps:
- run: |
runs:
using: composite
steps:
- name: Foo
- uses: Foo/Bar
shell: cp {0} action.yml
- uses: ./
local-missing-steps:
runs-on: ubuntu-latest
steps:
- run: |
runs:
using: composite
shell: cp {0} action.yml
- uses: ./
remote-invalid-step:
runs-on: ubuntu-latest
steps:
- uses: nektos/act-test-actions/invalid-composite-action/invalid-step@main
remote-missing-steps:
runs-on: ubuntu-latest
steps:
- uses: nektos/act-test-actions/invalid-composite-action/missing-steps@main
21 pkg/runner/testdata/path-handling/action.yml vendored Normal file
@@ -0,0 +1,21 @@
name: output action
description: output action

inputs:
input:
description: some input
required: false

outputs:
job-output:
description: some output
value: ${{ steps.gen-out.outputs.step-output }}

runs:
using: composite
steps:
- name: run step
id: gen-out
run: |
echo "::set-output name=step-output::"
shell: bash
39 pkg/runner/testdata/path-handling/push.yml vendored Normal file
@@ -0,0 +1,39 @@
name: path tests
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: "Append to $GITHUB_PATH"
run: |
echo "/opt/hostedtoolcache/node/18.99/x64/bin" >> $GITHUB_PATH

- name: test path (after setup)
run: |
if ! echo "$PATH" |grep "/opt/hostedtoolcache/node/18.*/\(x64\|arm64\)/bin" ; then
echo "Node binaries not in path: $PATH"
exit 1
fi

- id: action-with-output
uses: ./path-handling/

- name: test path (after local action)
run: |
if ! echo "$PATH" |grep "/opt/hostedtoolcache/node/18.*/\(x64\|arm64\)/bin" ; then
echo "Node binaries not in path: $PATH"
exit 1
fi

- uses: nektos/act-test-actions/composite@main
with:
input: some input

- name: test path (after remote action)
run: |
if ! echo "$PATH" |grep "/opt/hostedtoolcache/node/18.*/\(x64\|arm64\)/bin" ; then
echo "Node binaries not in path: $PATH"
exit 1
fi
30 pkg/runner/testdata/remote-action-js-node-user/push.yml vendored Normal file
@@ -0,0 +1,30 @@
name: remote-action-js
on: push

jobs:
test:
runs-on: ubuntu-latest
container:
image: node:16-buster-slim
options: --user node
steps:
- name: check permissions of env files
id: test
run: |
echo "USER: $(id -un) expected: node"
[[ "$(id -un)" = "node" ]]
echo "TEST=Value" >> $GITHUB_OUTPUT
shell: bash

- name: check if file command worked
if: steps.test.outputs.test != 'Value'
run: |
echo "steps.test.outputs.test=${{ steps.test.outputs.test || 'missing value!' }}"
exit 1
shell: bash

- uses: actions/hello-world-javascript-action@v1
with:
who-to-greet: 'Mona the Octocat'

- uses: cloudposse/actions/github/slash-command-dispatch@0.14.0
15 pkg/runner/testdata/set-env-new-env-file-per-step/push.yml vendored Normal file
@@ -0,0 +1,15 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
env:
MY_ENV: test
steps:
- run: exit 1
if: env.MY_ENV != 'test'
- run: echo "MY_ENV=test2" > $GITHUB_ENV
- run: exit 1
if: env.MY_ENV != 'test2'
- run: echo "MY_ENV=returnedenv" > $GITHUB_ENV
- run: exit 1
if: env.MY_ENV != 'returnedenv'
24 pkg/runner/testdata/set-env-step-env-override/push.yml vendored Normal file
@@ -0,0 +1,24 @@
on: push
jobs:
_:
runs-on: ubuntu-latest
env:
MY_ENV: test
steps:
- run: exit 1
if: env.MY_ENV != 'test'
- run: |
runs:
using: composite
steps:
- run: exit 1
shell: bash
if: env.MY_ENV != 'val'
- run: echo "MY_ENV=returnedenv" > $GITHUB_ENV
shell: bash
shell: cp {0} action.yml
- uses: ./
env:
MY_ENV: val
- run: exit 1
if: env.MY_ENV != 'returnedenv'
@@ -9,23 +9,22 @@ inputs:
runs:
using: "composite"
steps:
# The output of actions/setup-node@v2 seems to fail the workflow
# - uses: actions/setup-node@v2
# with:
# node-version: '16'
# - run: |
# console.log(process.version);
# console.log("Hi from node");
# console.log("${{ inputs.test_input_optional }}");
# if("${{ inputs.test_input_optional }}" !== "Test") {
# console.log("Invalid input test_input_optional expected \"Test\" as value");
# process.exit(1);
# }
# if(!process.version.startsWith('v16')) {
# console.log("Expected node v16, but got " + process.version);
# process.exit(1);
# }
# shell: node {0}
- uses: actions/setup-node@v3
with:
node-version: '16'
- run: |
console.log(process.version);
console.log("Hi from node");
console.log("${{ inputs.test_input_optional }}");
if("${{ inputs.test_input_optional }}" !== "Test") {
console.log("Invalid input test_input_optional expected \"Test\" as value");
process.exit(1);
}
if(!process.version.startsWith('v16')) {
console.log("Expected node v16, but got " + process.version);
process.exit(1);
}
shell: node {0}
- uses: ./uses-composite/composite_action
id: composite
with:
36 pkg/runner/testdata/uses-workflow/local-workflow.yml vendored Normal file
@@ -0,0 +1,36 @@
name: local-reusable-workflows
on: pull_request

jobs:
reusable-workflow:
uses: ./.github/workflows/local-reusable-workflow.yml
with:
string_required: string
bool_required: ${{ true }}
number_required: 1
secrets:
secret: keep_it_private

reusable-workflow-with-inherited-secrets:
uses: ./.github/workflows/local-reusable-workflow.yml
with:
string_required: string
bool_required: ${{ true }}
number_required: 1
secrets: inherit

output-test:
runs-on: ubuntu-latest
needs:
- reusable-workflow
- reusable-workflow-with-inherited-secrets
steps:
- name: output with secrets map
run: |
echo reusable-workflow.output=${{ needs.reusable-workflow.outputs.output }}
[[ "${{ needs.reusable-workflow.outputs.output == 'string' }}" = "true" ]] || exit 1

- name: output with inherited secrets
run: |
echo reusable-workflow-with-inherited-secrets.output=${{ needs.reusable-workflow-with-inherited-secrets.outputs.output }}
[[ "${{ needs.reusable-workflow-with-inherited-secrets.outputs.output == 'string' }}" = "true" ]] || exit 1
32 pkg/runner/testdata/uses-workflow/push.yml vendored
@@ -2,8 +2,34 @@ on: push

jobs:
reusable-workflow:
uses: nektos/act-tests/.github/workflows/reusable-workflow.yml@master
uses: nektos/act-test-actions/.github/workflows/reusable-workflow.yml@main
with:
username: mona
string_required: string
bool_required: ${{ true }}
number_required: 1
secrets:
envPATH: ${{ secrets.envPAT }}
secret: keep_it_private

reusable-workflow-with-inherited-secrets:
uses: nektos/act-test-actions/.github/workflows/reusable-workflow.yml@main
with:
string_required: string
bool_required: ${{ true }}
number_required: 1
secrets: inherit

output-test:
runs-on: ubuntu-latest
needs:
- reusable-workflow
- reusable-workflow-with-inherited-secrets
steps:
- name: output with secrets map
run: |
echo reusable-workflow.output=${{ needs.reusable-workflow.outputs.output }}
[[ "${{ needs.reusable-workflow.outputs.output == 'string' }}" = "true" ]] || exit 1

- name: output with inherited secrets
run: |
echo reusable-workflow-with-inherited-secrets.output=${{ needs.reusable-workflow-with-inherited-secrets.outputs.output }}
[[ "${{ needs.reusable-workflow-with-inherited-secrets.outputs.output == 'string' }}" = "true" ]] || exit 1