From db684e73f8fea8f35176b8f030c3c0bfb5bfbf5b Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Sun, 21 Jul 2024 12:15:55 +0200 Subject: [PATCH 1/9] feat: add initial helper methods --- action.go | 261 ++++++++++++++++++++++++++++ action_test.go | 443 ++++++++++++++++++++++++++++++++++++++++++++++++ cmd/main.go | 9 + command.go | 83 +++++++++ command_test.go | 45 +++++ context.go | 189 +++++++++++++++++++++ context_test.go | 144 ++++++++++++++++ 7 files changed, 1174 insertions(+) create mode 100644 action.go create mode 100644 action_test.go create mode 100644 cmd/main.go create mode 100644 command.go create mode 100644 command_test.go create mode 100644 context.go create mode 100644 context_test.go diff --git a/action.go b/action.go new file mode 100644 index 0000000..1710a78 --- /dev/null +++ b/action.go @@ -0,0 +1,261 @@ +package sdk + +import ( + "fmt" + "io" + "os" + "strings" +) + +const ( + addMaskCmd = "add-mask" + + envCmd = "env" + outputCmd = "output" + pathCmd = "path" + stateCmd = "state" + + // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + multiLineFileDelim = "234baa68-d26f-4bf9-996d-45ec3520cb95" + multilineFileCmd = "%s<<" + multiLineFileDelim + "\n%s\n" + multiLineFileDelim // ${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter} + + addMatcherCmd = "add-matcher" + removeMatcherCmd = "remove-matcher" + + groupCmd = "group" + endGroupCmd = "endgroup" + + stepSummaryCmd = "step-summary" + + debugCmd = "debug" + + noticeCmd = "notice" + warningCmd = "warning" + errorCmd = "error" + + errFileCmdFmt = "unable to write command to the environment file: %s" +) + +type Action struct { + w io.Writer + env func(string) string + fields CommandProperties +} + +func New() *Action { + return &Action{w: os.Stdout, env: os.Getenv} +} + +// WithFieldsSlice includes the provided fields in log output. "f" must be a +// slice of k=v pairs. The given slice will be sorted. 
It panics if any of the +// string in the given slice does not construct a valid 'key=value' pair. +func (c *Action) WithFieldsSlice(f ...string) *Action { + m := make(CommandProperties) + for _, s := range f { + pair := strings.SplitN(s, "=", 2) + if len(pair) < 2 { + panic(fmt.Sprintf("%q is not a proper k=v pair!", s)) + } + + m[pair[0]] = pair[1] + } + + return c.WithFieldsMap(m) +} + +// WithFieldsMap includes the provided fields in log output. The fields in "m" +// are automatically converted to k=v pairs and sorted. +func (c *Action) WithFieldsMap(m map[string]string) *Action { + return &Action{ + w: c.w, + fields: m, + } +} + +// GetInput gets the input by the given name. It returns the empty string if the +// input is not defined. +func (c *Action) GetInput(i string) string { + e := strings.ReplaceAll(i, " ", "_") + e = strings.ToUpper(e) + e = "INPUT_" + e + return strings.TrimSpace(c.env(e)) +} + +// IssueCommand issues a new GitHub actions Command. +// It panics if it cannot write to the output stream. +func (c *Action) IssueCommand(cmd *Command) { + if _, err := fmt.Fprintln(c.w, cmd.String()); err != nil { + panic(fmt.Errorf("failed to issue command: %w", err)) + } +} + +// IssueFileCommand issues a new GitHub actions Command using environment files. +// It panics if writing to the file fails. +func (c *Action) IssueFileCommand(cmd *Command) { + e := strings.ReplaceAll(cmd.Name, "-", "_") + e = strings.ToUpper(e) + e = "GITHUB_" + e + + filepath := c.env(e) + msg := []byte(cmd.Message) + f, err := os.OpenFile(filepath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + panic(fmt.Errorf(errFileCmdFmt, err)) + } + + defer func() { + if err := f.Close(); err != nil { + panic(err) + } + }() + + if _, err := f.Write(msg); err != nil { + panic(fmt.Errorf(errFileCmdFmt, err)) + } +} + +// AddMask adds a new field mask for the given string "p". After called, future +// attempts to log "p" will be replaced with "***" in log output. 
It panics if +// it cannot write to the output stream. +func (c *Action) AddMask(p string) { + // ::add-mask::

+ c.IssueCommand(&Command{ + Name: addMaskCmd, + Message: p, + }) +} + +// AddMatcher adds a new matcher with the given file path. It panics if it +// cannot write to the output stream. +func (c *Action) AddMatcher(p string) { + // ::add-matcher::

+ c.IssueCommand(&Command{ + Name: addMatcherCmd, + Message: p, + }) +} + +// RemoveMatcher removes a matcher with the given owner name. It panics if it +// cannot write to the output stream. +func (c *Action) RemoveMatcher(o string) { + // ::remove-matcher owner=:: + c.IssueCommand(&Command{ + Name: removeMatcherCmd, + Properties: CommandProperties{ + "owner": o, + }, + }) +} + +// Group starts a new collapsable region up to the next ungroup invocation. It +// panics if it cannot write to the output stream. +func (c *Action) Group(t string) { + // ::group:: + c.IssueCommand(&Command{ + Name: groupCmd, + Message: t, + }) +} + +// EndGroup ends the current group. It panics if it cannot write to the output +// stream. +func (c *Action) EndGroup() { + // ::endgroup:: + c.IssueCommand(&Command{ + Name: endGroupCmd, + }) +} + +// Debugf prints a debug-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Debugf(msg string, args ...any) { + // ::debug :: + c.IssueCommand(&Command{ + Name: debugCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Noticef prints a notice-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Noticef(msg string, args ...any) { + // ::notice :: + c.IssueCommand(&Command{ + Name: noticeCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Warningf prints a warning-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. 
+func (c *Action) Warningf(msg string, args ...any) { + // ::warning :: + c.IssueCommand(&Command{ + Name: warningCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Errorf prints a error-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Errorf(msg string, args ...any) { + // ::error :: + c.IssueCommand(&Command{ + Name: errorCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// AddPath adds the string "p" to the path for the invocation. +// It panics if it cannot write to the output file. +func (c *Action) AddPath(p string) { + c.IssueFileCommand(&Command{ + Name: pathCmd, + Message: p, + }) +} + +// SaveState saves state to be used in the "finally" post job entry point. +// It panics if it cannot write to the output stream. +func (c *Action) SaveState(k, v string) { + c.IssueFileCommand(&Command{ + Name: stateCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} + +// AddStepSummary writes the given markdown to the job summary. If a job summary +// already exists, this value is appended. +// It panics if it cannot write to the output file. +func (c *Action) AddStepSummary(markdown string) { + c.IssueFileCommand(&Command{ + Name: stepSummaryCmd, + Message: markdown, + }) +} + +// SetEnv sets an environment variable. +// It panics if it cannot write to the output file. +func (c *Action) SetEnv(k, v string) { + c.IssueFileCommand(&Command{ + Name: envCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} + +// SetOutput sets an output parameter. +// It panics if it cannot write to the output file. 
+func (c *Action) SetOutput(k, v string) { + c.IssueFileCommand(&Command{ + Name: outputCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} diff --git a/action_test.go b/action_test.go new file mode 100644 index 0000000..df835ef --- /dev/null +++ b/action_test.go @@ -0,0 +1,443 @@ +package sdk + +import ( + "bytes" + "fmt" + "io" + "os" + "testing" +) + +func newFakeGetenvFunc(t *testing.T, wantKey, v string) func(string) string { + return func(gotKey string) string { + if gotKey != wantKey { + t.Errorf("expected call GetenvFunc(%q) to be GetenvFunc(%q)", gotKey, wantKey) + } + + return v + } +} + +func TestAction_WithFieldsSlice(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsSlice("line=100", "file=app.js") + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug file=app.js,line=100::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_WithFieldsSlice_Panic(t *testing.T) { + t.Parallel() + + defer func() { + want := `"no-equals" is not a proper k=v pair!` + if got := recover(); got != want { + t.Errorf("expected %q to be %q", got, want) + } + }() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsSlice("no-equals") + a.Debugf("fail: %s", "thing") +} + +func TestAction_WithFieldsMap(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsMap(map[string]string{"line": "100", "file": "app.js"}) + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug file=app.js,line=100::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_GetInput(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "INPUT_FOO", "bar") + + if got, want := a.GetInput("foo"), "bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func 
TestAction_IssueCommand(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.IssueCommand(&Command{ + Name: "foo", + Message: "bar", + }) + + if got, want := b.String(), fmt.Sprintln("::foo::bar"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_IssueFileCommand(t *testing.T) { + t.Parallel() + + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_FOO", file.Name()) + + a.IssueFileCommand(&Command{ + Name: "foo", + Message: "bar", + }) + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the message to be written to the env file + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + if got, want := string(data), "bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddMask(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.AddMask("foobar") + + if got, want := b.String(), fmt.Sprintln("::add-mask::foobar"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddMatcher(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.AddMatcher("foobar.json") + + if got, want := b.String(), fmt.Sprintln("::add-matcher::foobar.json"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_RemoveMatcher(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.RemoveMatcher("foobar") + + if got, want := b.String(), fmt.Sprintln("::remove-matcher owner=foobar::"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Group(t *testing.T) { + t.Parallel() + + var b 
bytes.Buffer + a := New() + a.w = &b + + a.Group("mygroup") + + if got, want := b.String(), fmt.Sprintln("::group::mygroup"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_EndGroup(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.EndGroup() + + if got, want := b.String(), fmt.Sprintln("::endgroup::"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Debugf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Noticef(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Noticef("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::notice::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Warningf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Warningf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::warning::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Errorf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Errorf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::error::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddPath(t *testing.T) { + t.Parallel() + + // expect a file command to be issued when env file is set. 
+ file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_PATH", file.Name()) + + a.AddPath("/custom/bin") + + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the message to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + if got, want := string(data), "/custom/bin"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SaveState(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_STATE", file.Name()) + + a.SaveState("key", "value") + a.SaveState("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. 
+ data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddStepSummary(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_STEP_SUMMARY", file.Name()) + + a.AddStepSummary(` +## This is + +some markdown +`) + a.AddStepSummary(` +- content +`) + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp summary file: %s", err) + } + + want := "\n## This is\n\nsome markdown\n\n- content\n" + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SetEnv(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_ENV", file.Name()) + + a.SetEnv("key", "value") + a.SetEnv("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. 
+ data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SetOutput(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_OUTPUT", file.Name()) + + a.SetOutput("key", "value") + a.SetOutput("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} diff --git a/cmd/main.go b/cmd/main.go new file mode 100644 index 0000000..2a9c54b --- /dev/null +++ b/cmd/main.go @@ -0,0 +1,9 @@ +package main + +import "git.geekeey.de/actions/sdk" + +func main() { + a := sdk.New() + a.AddMask("hello") + a.WithFieldsSlice("foo=bar", "biz=baz").Debugf("hello world") +} diff --git a/command.go b/command.go new file mode 100644 index 0000000..2c0f38f --- /dev/null +++ b/command.go @@ -0,0 +1,83 @@ +package sdk + +import ( + "fmt" + "sort" + "strings" +) + +const ( + cmdSeparator = "::" + cmdPropertiesPrefix = " " +) + +// CommandProperties is a named "map[string]string" type to hold key-value pairs +// 
passed to an actions command. +type CommandProperties map[string]string + +// String encodes the CommandProperties to a string as comma separated +// 'key=value' pairs. The pairs are joined in a chronological order. +func (props *CommandProperties) String() string { + l := make([]string, 0, len(*props)) + for k, v := range *props { + l = append(l, fmt.Sprintf("%s=%s", k, escapeProperty(v))) + } + + sort.Strings(l) + return strings.Join(l, ",") +} + +// Command can be issued by a GitHub action by writing to `stdout` with +// following format. +// +// ::name key=value,key=value::message +// +// Examples: +// ::warning::This is the message +// ::set-env name=MY_VAR::some value +type Command struct { + Name string + Message string + Properties CommandProperties +} + +// String encodes the Command to a string in the following format: +// +// ::name key=value,key=value::message +func (cmd *Command) String() string { + if cmd.Name == "" { + cmd.Name = "missing.command" + } + + var builder strings.Builder + builder.WriteString(cmdSeparator) + builder.WriteString(cmd.Name) + if len(cmd.Properties) > 0 { + builder.WriteString(cmdPropertiesPrefix) + builder.WriteString(cmd.Properties.String()) + } + + builder.WriteString(cmdSeparator) + builder.WriteString(escapeData(cmd.Message)) + return builder.String() +} + +// escapeData escapes string values for presentation in the output of a command. +// This is a not-so-well-documented requirement of commands that define a message. +func escapeData(v string) string { + v = strings.ReplaceAll(v, "%", "%25") + v = strings.ReplaceAll(v, "\r", "%0D") + v = strings.ReplaceAll(v, "\n", "%0A") + return v +} + +// escapeData escapes command property values for presentation in the output of +// a command. 
// escapeProperty escapes command property values for presentation in the
// output of a command. Property values need every escape escapeData applies,
// plus ':' and ',' which would otherwise be parsed as command delimiters.
// (Doc fix: the original comment was copy-pasted and named escapeData.)
func escapeProperty(v string) string {
	// '%' must be escaped first so the '%' introduced by later escapes
	// is not double-encoded.
	v = strings.ReplaceAll(v, "%", "%25")
	v = strings.ReplaceAll(v, "\r", "%0D")
	v = strings.ReplaceAll(v, "\n", "%0A")
	v = strings.ReplaceAll(v, ":", "%3A")
	v = strings.ReplaceAll(v, ",", "%2C")
	return v
}
+// See: https://docs.github.com/en/actions/learn-github-actions/environment-variables +type GitHubContext struct { + Action string `env:"GITHUB_ACTION"` + ActionPath string `env:"GITHUB_ACTION_PATH"` + ActionRepository string `env:"GITHUB_ACTION_REPOSITORY"` + Actions bool `env:"GITHUB_ACTIONS"` + Actor string `env:"GITHUB_ACTOR"` + APIURL string `env:"GITHUB_API_URL,default=https://api.github.com"` + BaseRef string `env:"GITHUB_BASE_REF"` + Env string `env:"GITHUB_ENV"` + EventName string `env:"GITHUB_EVENT_NAME"` + EventPath string `env:"GITHUB_EVENT_PATH"` + GraphqlURL string `env:"GITHUB_GRAPHQL_URL,default=https://api.github.com/graphql"` + HeadRef string `env:"GITHUB_HEAD_REF"` + Job string `env:"GITHUB_JOB"` + Path string `env:"GITHUB_PATH"` + Ref string `env:"GITHUB_REF"` + RefName string `env:"GITHUB_REF_NAME"` + RefProtected bool `env:"GITHUB_REF_PROTECTED"` + RefType string `env:"GITHUB_REF_TYPE"` + + Repository string `env:"GITHUB_REPOSITORY"` + RepositoryOwner string `env:"GITHUB_REPOSITORY_OWNER"` + + RetentionDays int64 `env:"GITHUB_RETENTION_DAYS"` + RunAttempt int64 `env:"GITHUB_RUN_ATTEMPT"` + RunID int64 `env:"GITHUB_RUN_ID"` + RunNumber int64 `env:"GITHUB_RUN_NUMBER"` + ServerURL string `env:"GITHUB_SERVER_URL,default=https://github.com"` + SHA string `env:"GITHUB_SHA"` + StepSummary string `env:"GITHUB_STEP_SUMMARY"` + Workflow string `env:"GITHUB_WORKFLOW"` + Workspace string `env:"GITHUB_WORKSPACE"` + + // Event is populated by parsing the file at EventPath, if it exists. 
+ Event map[string]any +} + +// Context returns the context of current action with the payload object +// that triggered the workflow +func (c *Action) Context() (*GitHubContext, error) { + var merr error + context := &GitHubContext{ + APIURL: "https://api.github.com", + GraphqlURL: "https://api.github.com/graphql", + ServerURL: "https://github.com", + } + + if v := c.env("GITHUB_ACTION"); v != "" { + context.Action = v + } + if v := c.env("GITHUB_ACTION_PATH"); v != "" { + context.ActionPath = v + } + if v := c.env("GITHUB_ACTION_REPOSITORY"); v != "" { + context.ActionRepository = v + } + if v, err := parseBool(c.env("GITHUB_ACTIONS")); err == nil { + context.Actions = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_ACTOR"); v != "" { + context.Actor = v + } + if v := c.env("GITHUB_API_URL"); v != "" { + context.APIURL = v + } + if v := c.env("GITHUB_BASE_REF"); v != "" { + context.BaseRef = v + } + if v := c.env("GITHUB_ENV"); v != "" { + context.Env = v + } + if v := c.env("GITHUB_EVENT_NAME"); v != "" { + context.EventName = v + } + if v := c.env("GITHUB_EVENT_PATH"); v != "" { + context.EventPath = v + } + if v := c.env("GITHUB_GRAPHQL_URL"); v != "" { + context.GraphqlURL = v + } + if v := c.env("GITHUB_HEAD_REF"); v != "" { + context.HeadRef = v + } + if v := c.env("GITHUB_JOB"); v != "" { + context.Job = v + } + if v := c.env("GITHUB_PATH"); v != "" { + context.Path = v + } + if v := c.env("GITHUB_REF"); v != "" { + context.Ref = v + } + if v := c.env("GITHUB_REF_NAME"); v != "" { + context.RefName = v + } + if v, err := parseBool(c.env("GITHUB_REF_PROTECTED")); err == nil { + context.RefProtected = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_REF_TYPE"); v != "" { + context.RefType = v + } + + if v := c.env("GITHUB_REPOSITORY"); v != "" { + context.Repository = v + } + if v := c.env("GITHUB_REPOSITORY_OWNER"); v != "" { + context.RepositoryOwner = v + } + + if v, err := 
parseInt(c.env("GITHUB_RETENTION_DAYS")); err == nil { + context.RetentionDays = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_ATTEMPT")); err == nil { + context.RunAttempt = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_ID")); err == nil { + context.RunID = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_NUMBER")); err == nil { + context.RunNumber = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_SERVER_URL"); v != "" { + context.ServerURL = v + } + if v := c.env("GITHUB_SHA"); v != "" { + context.SHA = v + } + if v := c.env("GITHUB_STEP_SUMMARY"); v != "" { + context.StepSummary = v + } + if v := c.env("GITHUB_WORKFLOW"); v != "" { + context.Workflow = v + } + if v := c.env("GITHUB_WORKSPACE"); v != "" { + context.Workspace = v + } + + if context.EventPath != "" { + eventData, err := os.ReadFile(context.EventPath) + if err != nil && !os.IsNotExist(err) { + return nil, fmt.Errorf("could not read event file: %w", err) + } + if eventData != nil { + if err := json.Unmarshal(eventData, &context.Event); err != nil { + return nil, fmt.Errorf("failed to unmarshal event payload: %w", err) + } + } + } + + return context, merr +} + +func parseBool(v string) (bool, error) { + if v == "" { + return false, nil + } + return strconv.ParseBool(v) +} + +func parseInt(v string) (int64, error) { + if v == "" { + return 0, nil + } + return strconv.ParseInt(v, 10, 64) +} diff --git a/context_test.go b/context_test.go new file mode 100644 index 0000000..f261d42 --- /dev/null +++ b/context_test.go @@ -0,0 +1,144 @@ +package sdk + +import ( + "os" + "reflect" + "testing" +) + +func TestAction_Context(t *testing.T) { + t.Parallel() + + f, err := os.CreateTemp("", "") + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Remove(f.Name()) }) + + if _, err := f.Write([]byte(`{"foo": "bar"}`)); err != nil { + t.Fatal(err) + } + if 
err := f.Close(); err != nil { + t.Fatal(err) + } + + eventPayloadPath := f.Name() + + cases := []struct { + name string + env map[string]string + exp *GitHubContext + }{ + { + name: "empty", + env: nil, + exp: &GitHubContext{ + // Defaults + APIURL: "https://api.github.com", + ServerURL: "https://github.com", + GraphqlURL: "https://api.github.com/graphql", + }, + }, + { + name: "no_payload", + env: map[string]string{ + "GITHUB_ACTION": "__repo-owner_name-of-action-repo", + "GITHUB_ACTION_PATH": "/path/to/action", + "GITHUB_ACTION_REPOSITORY": "repo-owner/name-of-action-repo", + "GITHUB_ACTIONS": "true", + "GITHUB_ACTOR": "sethvargo", + "GITHUB_API_URL": "https://foo.com", + "GITHUB_BASE_REF": "main", + "GITHUB_ENV": "/path/to/env", + "GITHUB_EVENT_NAME": "event_name", + "GITHUB_HEAD_REF": "headbranch", + "GITHUB_GRAPHQL_URL": "https://baz.com", + "GITHUB_JOB": "12", + "GITHUB_PATH": "/path/to/path", + "GITHUB_REF": "refs/tags/v1.0", + "GITHUB_REF_NAME": "v1.0", + "GITHUB_REF_PROTECTED": "true", + "GITHUB_REF_TYPE": "tag", + "GITHUB_REPOSITORY": "sethvargo/baz", + "GITHUB_REPOSITORY_OWNER": "sethvargo", + "GITHUB_RETENTION_DAYS": "90", + "GITHUB_RUN_ATTEMPT": "6", + "GITHUB_RUN_ID": "56", + "GITHUB_RUN_NUMBER": "34", + "GITHUB_SERVER_URL": "https://bar.com", + "GITHUB_SHA": "abcd1234", + "GITHUB_STEP_SUMMARY": "/path/to/summary", + "GITHUB_WORKFLOW": "test", + "GITHUB_WORKSPACE": "/path/to/workspace", + }, + exp: &GitHubContext{ + Action: "__repo-owner_name-of-action-repo", + ActionPath: "/path/to/action", + ActionRepository: "repo-owner/name-of-action-repo", + Actions: true, + Actor: "sethvargo", + APIURL: "https://foo.com", + BaseRef: "main", + Env: "/path/to/env", + EventName: "event_name", + // NOTE: No EventPath + GraphqlURL: "https://baz.com", + Job: "12", + HeadRef: "headbranch", + Path: "/path/to/path", + Ref: "refs/tags/v1.0", + RefName: "v1.0", + RefProtected: true, + RefType: "tag", + Repository: "sethvargo/baz", + RepositoryOwner: "sethvargo", + 
RetentionDays: 90, + RunAttempt: 6, + RunID: 56, + RunNumber: 34, + ServerURL: "https://bar.com", + SHA: "abcd1234", + StepSummary: "/path/to/summary", + Workflow: "test", + Workspace: "/path/to/workspace", + }, + }, + { + name: "payload", + env: map[string]string{ + "GITHUB_EVENT_PATH": eventPayloadPath, + }, + exp: &GitHubContext{ + EventPath: eventPayloadPath, + + // Defaults + APIURL: "https://api.github.com", + ServerURL: "https://github.com", + GraphqlURL: "https://api.github.com/graphql", + + Event: map[string]any{ + "foo": "bar", + }, + }, + }, + } + + for _, tc := range cases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + a := New() + a.env = func(s string) string { return tc.env[s] } + got, err := a.Context() + if err != nil { + t.Fatal(err) + } + + if !reflect.DeepEqual(got, tc.exp) { + t.Errorf("expected\n\n%#v\n\nto be\n\n%#v\n", got, tc.exp) + } + }) + } +} From 3d43ff47587dddbf74a1877b1e9cdbac88083427 Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Mon, 22 Jul 2024 21:13:54 +0200 Subject: [PATCH 2/9] feat: add cache helpers --- cache.go | 9 ++ cache/blob.go | 56 +++++++++ cache/cache.go | 329 ++++++++++++++++++++++++++++++++++++++++++++++++ cache/reader.go | 89 +++++++++++++ cache/retry.go | 42 +++++++ cmd/main.go | 4 +- go.mod | 2 + go.sum | 2 + 8 files changed, 532 insertions(+), 1 deletion(-) create mode 100644 cache.go create mode 100644 cache/blob.go create mode 100644 cache/cache.go create mode 100644 cache/reader.go create mode 100644 cache/retry.go create mode 100644 go.sum diff --git a/cache.go b/cache.go new file mode 100644 index 0000000..6fca1f5 --- /dev/null +++ b/cache.go @@ -0,0 +1,9 @@ +package sdk + +import "git.geekeey.de/actions/sdk/cache" + +func (c *Action) Cache() *cache.Client { + c.env("ACTIONS_CACHE_URL") + c.env("ACTIONS_RUNTIME_TOKEN") + return cache.New("", "") +} diff --git a/cache/blob.go b/cache/blob.go new file mode 100644 index 0000000..d2cbca3 --- /dev/null +++ b/cache/blob.go @@ -0,0 
+1,56 @@ +package cache + +import ( + "bytes" + "io" + "os" +) + +type Blob interface { + io.ReaderAt + io.Closer + Size() int64 +} + +type byteBlob struct { + buf *bytes.Reader +} + +func NewByteBlob(b []byte) Blob { + return &byteBlob{buf: bytes.NewReader(b)} +} + +func (blob *byteBlob) ReadAt(p []byte, off int64) (n int, err error) { + return blob.buf.ReadAt(p, off) +} + +func (blob *byteBlob) Size() int64 { + return blob.buf.Size() +} + +func (blob *byteBlob) Close() error { + return nil +} + +type fileBlob struct { + buf *os.File +} + +func NewFileBlob(f *os.File) Blob { + return &fileBlob{buf: f} +} + +func (blob *fileBlob) ReadAt(p []byte, off int64) (n int, err error) { + return blob.buf.ReadAt(p, off) +} + +func (blob *fileBlob) Size() int64 { + if i, err := blob.buf.Stat(); err != nil { + return i.Size() + } + return 0 +} + +func (blob *fileBlob) Close() error { + return nil +} diff --git a/cache/cache.go b/cache/cache.go new file mode 100644 index 0000000..bc7ad6b --- /dev/null +++ b/cache/cache.go @@ -0,0 +1,329 @@ +package cache + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path" + "strings" + "sync" + + "golang.org/x/sync/errgroup" +) + +var UploadConcurrency = 4 +var UploadChunkSize = 32 * 1024 * 1024 + +type Client struct { + base string + http *http.Client +} + +type auth struct { + transport http.RoundTripper + token string +} + +func (t *auth) RoundTrip(req *http.Request) (*http.Response, error) { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", t.token)) + return t.transport.RoundTrip(req) +} + +func New(token, url string) *Client { + t := &auth{transport: &retry{transport: &http.Transport{}}, token: token} + return &Client{ + base: url, + http: &http.Client{Transport: t}, + } +} + +func (c *Client) url(p string) string { + return path.Join(c.base, "_apis/artifactcache", p) +} + +func (c *Client) version(k string) string { + h := sha256.New() + 
h.Write([]byte(k + "|go-actionscache-1.0")) + return hex.EncodeToString(h.Sum(nil)) +} + +type ApiError struct { + Message string `json:"message"` + TypeName string `json:"typeName"` + TypeKey string `json:"typeKey"` + ErrorCode int `json:"errorCode"` +} + +func (e ApiError) Error() string { + return e.Message +} + +func (e ApiError) Is(err error) bool { + if err == os.ErrExist { + if strings.Contains(e.TypeKey, "AlreadyExists") { + return true + } + } + return false +} + +func checkApiError(res *http.Response) error { + if res.StatusCode >= 200 && res.StatusCode < 300 { + return nil + } + dec := json.NewDecoder(io.LimitReader(res.Body, 32*1024)) + + var details ApiError + if err := dec.Decode(&details); err != nil { + return err + } + + if details.Message != "" { + return details + } else { + return fmt.Errorf("unknown error %s", res.Status) + } +} + +func (c *Client) Load(ctx context.Context, keys ...string) (*Entry, error) { + u, err := url.Parse(c.url("cache")) + if err != nil { + return nil, err + } + q := u.Query() + q.Set("keys", strings.Join(keys, ",")) + q.Set("version", c.version(keys[0])) + u.RawQuery = q.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, err + } + req.Header.Add("Accept", "application/json;api-version=6.0-preview.1") + + res, err := c.http.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + err = checkApiError(res) + if err != nil { + return nil, err + } + + dec := json.NewDecoder(io.LimitReader(res.Body, 32*1024)) + + var ce Entry + if err = dec.Decode(&ce); err != nil { + return nil, err + } + + ce.http = c.http + return &ce, nil +} + +func (c *Client) Save(ctx context.Context, key string, b Blob) error { + id, err := c.reserve(ctx, key) + if err != nil { + return err + } + err = c.upload(ctx, id, b) + if err != nil { + return err + } + return c.commit(ctx, id, b.Size()) +} + +type ReserveCacheReq struct { + Key string `json:"key"` + Version 
string `json:"version"` +} + +type ReserveCacheRes struct { + CacheID int `json:"cacheID"` +} + +func (c *Client) reserve(ctx context.Context, key string) (int, error) { + payload := ReserveCacheReq{Key: key, Version: c.version(key)} + + buf := new(bytes.Buffer) + if err := json.NewEncoder(buf).Encode(payload); err != nil { + return 0, err + } + + url := c.url("caches") + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, buf) + if err != nil { + return 0, err + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.http.Do(req) + if err != nil { + return 0, err + } + defer res.Body.Close() + + err = checkApiError(res) + if err != nil { + return 0, err + } + + dec := json.NewDecoder(io.LimitReader(res.Body, 32*1024)) + + var cr ReserveCacheRes + if err = dec.Decode(&cr); err != nil { + return 0, err + } + + if cr.CacheID == 0 { + return 0, fmt.Errorf("invalid response (cache id is 0)") + } + return cr.CacheID, nil +} + +type CommitCacheReq struct { + Size int64 `json:"size"` +} + +func (c *Client) commit(ctx context.Context, id int, size int64) error { + payload := CommitCacheReq{Size: size} + + buf := new(bytes.Buffer) + if err := json.NewEncoder(buf).Encode(payload); err != nil { + return err + } + + url := c.url(fmt.Sprintf("caches/%d", id)) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, buf) + if err != nil { + return err + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.http.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + err = checkApiError(res) + if err != nil { + return err + } + + return nil +} + +func (c *Client) upload(ctx context.Context, id int, b Blob) error { + var mu sync.Mutex + grp, ctx := errgroup.WithContext(ctx) + offset := int64(0) + for i := 0; i < UploadConcurrency; i++ { + grp.Go(func() error { + for { + mu.Lock() + start := offset + if start >= b.Size() { + mu.Unlock() + return nil + } + end := start + int64(UploadChunkSize) + if end > 
b.Size() { + end = b.Size() + } + offset = end + mu.Unlock() + + if err := c.create(ctx, id, b, start, end-start); err != nil { + return err + } + } + }) + } + return grp.Wait() +} + +func (c *Client) create(ctx context.Context, id int, ra io.ReaderAt, off, n int64) error { + url := c.url(fmt.Sprintf("caches/%d", id)) + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, url, io.NewSectionReader(ra, off, n)) + if err != nil { + return err + } + req.Header.Add("Content-Type", "application/octet-stream") + req.Header.Add("Content-Range", fmt.Sprintf("bytes %d-%d/*", off, off+n-1)) + + res, err := c.http.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + err = checkApiError(res) + if err != nil { + return err + } + + return nil +} + +type Entry struct { + Key string `json:"cacheKey"` + Scope string `json:"scope"` + URL string `json:"archiveLocation"` + + http *http.Client +} + +// Download returns a ReaderAtCloser for pulling the data. Concurrent reads are not allowed +func (ce *Entry) Download(ctx context.Context) ReaderAtCloser { + return NewReaderAtCloser(func(offset int64) (io.ReadCloser, error) { + req, err := http.NewRequestWithContext(ctx, "GET", ce.URL, nil) + if err != nil { + return nil, err + } + if offset != 0 { + req.Header.Set("Range", fmt.Sprintf("bytes=%d-", offset)) + } + client := ce.http + if client == nil { + client = http.DefaultClient + } + + res, err := client.Do(req) + if err != nil { + return nil, err + } + + if res.StatusCode < 200 || res.StatusCode >= 300 { + if res.StatusCode == http.StatusRequestedRangeNotSatisfiable { + return nil, fmt.Errorf("invalid status response %v for %s, range: %v", res.Status, ce.URL, req.Header.Get("Range")) + } + return nil, fmt.Errorf("invalid status response %v for %s", res.Status, ce.URL) + } + if offset != 0 { + cr := res.Header.Get("content-range") + if !strings.HasPrefix(cr, fmt.Sprintf("bytes %d-", offset)) { + res.Body.Close() + return nil, fmt.Errorf("unhandled content 
range in response: %v", cr) + } + } + return res.Body, nil + }) +} + +func (ce *Entry) WriteTo(ctx context.Context, w io.Writer) error { + rac := ce.Download(ctx) + if _, err := io.Copy(w, &rc{ReaderAt: rac}); err != nil { + return err + } + return rac.Close() +} diff --git a/cache/reader.go b/cache/reader.go new file mode 100644 index 0000000..9225a76 --- /dev/null +++ b/cache/reader.go @@ -0,0 +1,89 @@ +package cache + +import ( + "io" +) + +type ReaderAtCloser interface { + io.ReaderAt + io.Closer +} + +type readerAtCloser struct { + offset int64 + rc io.ReadCloser + ra io.ReaderAt + open func(offset int64) (io.ReadCloser, error) + closed bool +} + +func NewReaderAtCloser(open func(offset int64) (io.ReadCloser, error)) ReaderAtCloser { + return &readerAtCloser{ + open: open, + } +} + +func (hrs *readerAtCloser) ReadAt(p []byte, off int64) (n int, err error) { + if hrs.closed { + return 0, io.EOF + } + + if hrs.ra != nil { + return hrs.ra.ReadAt(p, off) + } + + if hrs.rc == nil || off != hrs.offset { + if hrs.rc != nil { + hrs.rc.Close() + hrs.rc = nil + } + rc, err := hrs.open(off) + if err != nil { + return 0, err + } + hrs.rc = rc + } + if ra, ok := hrs.rc.(io.ReaderAt); ok { + hrs.ra = ra + n, err = ra.ReadAt(p, off) + } else { + for { + var nn int + nn, err = hrs.rc.Read(p) + n += nn + p = p[nn:] + if len(p) == 0 || err != nil { + break + } + } + } + + hrs.offset = off + int64(n) + return +} + +func (hrs *readerAtCloser) Close() error { + if hrs.closed { + return nil + } + hrs.closed = true + if hrs.rc != nil { + return hrs.rc.Close() + } + + return nil +} + +type rc struct { + io.ReaderAt + offset int +} + +func (r *rc) Read(b []byte) (int, error) { + n, err := r.ReadAt(b, int64(r.offset)) + r.offset += n + if n > 0 && err == io.EOF { + err = nil + } + return n, err +} diff --git a/cache/retry.go b/cache/retry.go new file mode 100644 index 0000000..ce93305 --- /dev/null +++ b/cache/retry.go @@ -0,0 +1,42 @@ +package cache + +import ( + "bytes" + "fmt" + "io" + 
"net/http" +) + +type retry struct { + transport http.RoundTripper + retry int +} + +func (t *retry) RoundTrip(req *http.Request) (*http.Response, error) { + var body []byte + if req.Body != nil { + body, _ = io.ReadAll(req.Body) + } + + for count := 0; count < t.retry; count++ { + req.Body = io.NopCloser(bytes.NewBuffer(body)) + res, err := t.transport.RoundTrip(req) + if err != nil { + return nil, err + } + if t.check(res) { + if res.Body != nil { + io.Copy(io.Discard, res.Body) + res.Body.Close() + } + continue + } + return res, err + } + + return nil, fmt.Errorf("too many retries") +} + +func (t *retry) check(res *http.Response) bool { + return res.StatusCode > 399 +} diff --git a/cmd/main.go b/cmd/main.go index 2a9c54b..ebb54e0 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -1,6 +1,8 @@ package main -import "git.geekeey.de/actions/sdk" +import ( + "git.geekeey.de/actions/sdk" +) func main() { a := sdk.New() diff --git a/go.mod b/go.mod index 5f045b8..1348d41 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,5 @@ module git.geekeey.de/actions/sdk go 1.22.5 + +require golang.org/x/sync v0.7.0 // indirect diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..e8ef4a3 --- /dev/null +++ b/go.sum @@ -0,0 +1,2 @@ +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= From 09ef963ca3a2776b4de20ab90977b4cc29a98ee1 Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Sun, 20 Oct 2024 14:55:16 +0200 Subject: [PATCH 3/9] wip: needs tests --- cache.go | 6 +-- cache/tar.go | 115 +++++++++++++++++++++++++++++++++++++++++++++++++++ cmd/main.go | 18 ++++++++ 3 files changed, 136 insertions(+), 3 deletions(-) create mode 100644 cache/tar.go diff --git a/cache.go b/cache.go index 6fca1f5..f3a17da 100644 --- a/cache.go +++ b/cache.go @@ -3,7 +3,7 @@ package sdk import "git.geekeey.de/actions/sdk/cache" func (c *Action) Cache() *cache.Client { - c.env("ACTIONS_CACHE_URL") - 
c.env("ACTIONS_RUNTIME_TOKEN") - return cache.New("", "") + token := c.env("ACTIONS_RUNTIME_TOKEN") + url := c.env("ACTIONS_CACHE_URL") + return cache.New(token, url) } diff --git a/cache/tar.go b/cache/tar.go new file mode 100644 index 0000000..8899812 --- /dev/null +++ b/cache/tar.go @@ -0,0 +1,115 @@ +package cache + +import ( + "archive/tar" + "compress/gzip" + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +// Tar takes a source and variable writers and walks 'source' writing each file +// found to the tar writer; the purpose for accepting multiple writers is to allow +// for multiple outputs (for example a file, or md5 hash) +func Tar(src string, writers ...io.Writer) error { + if _, err := os.Stat(src); err != nil { + return fmt.Errorf("unable to tar files - %v", err.Error()) + } + + mw := io.MultiWriter(writers...) + + gzw := gzip.NewWriter(mw) + defer gzw.Close() + + tw := tar.NewWriter(gzw) + defer tw.Close() + + // walk path + return filepath.Walk(src, func(file string, fi os.FileInfo, err error) error { + if err != nil { + return err + } + + if !fi.Mode().IsRegular() { + return nil + } + + header, err := tar.FileInfoHeader(fi, fi.Name()) + if err != nil { + return err + } + + // update the name to correctly reflect the desired destination when untaring + header.Name = strings.TrimPrefix(strings.Replace(file, src, "", -1), string(filepath.Separator)) + + if err := tw.WriteHeader(header); err != nil { + return err + } + + f, err := os.Open(file) + if err != nil { + return err + } + + if _, err := io.Copy(tw, f); err != nil { + f.Close() + return err + } + + f.Close() + + return nil + }) +} + +// Untar takes a destination path and a reader; a tar reader loops over the tarfile +// creating the file structure at 'dst' along the way, and writing any files +func Untar(dst string, r io.Reader) error { + + gzr, err := gzip.NewReader(r) + if err != nil { + return err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + + for { + header, err 
:= tr.Next() + + if errors.Is(err, io.EOF) || header == nil { + break + } + if err != nil { + return err + } + + target := filepath.Join(dst, header.Name) + + switch header.Typeflag { + + // if its a dir and it doesn't exist create it + case tar.TypeDir: + if _, err := os.Stat(target); err != nil { + if err := os.MkdirAll(target, 0755); err != nil { + return err + } + } + + // if it's a file create it + case tar.TypeReg: + f, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + if err != nil { + return err + } + if _, err := io.Copy(f, tr); err != nil { + return err + } + f.Close() + } + } + return nil +} diff --git a/cmd/main.go b/cmd/main.go index ebb54e0..69ceabc 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -1,11 +1,29 @@ package main import ( + "context" + "os" + "git.geekeey.de/actions/sdk" + "git.geekeey.de/actions/sdk/cache" ) func main() { a := sdk.New() a.AddMask("hello") a.WithFieldsSlice("foo=bar", "biz=baz").Debugf("hello world") + blob, err := a.Cache().Load(context.Background(), "example") + if err != nil { + panic(err) + } + cache.Tar("./foo") + f, err := os.Open("") + if err != nil { + panic(err) + } + a.Cache().Save(context.Background(), "", cache.NewFileBlob(f)) + entry := blob.Download(context.Background()) + if entry == nil { + return + } } From 76a93c0d8cec5bee7687fff7c968570c05b70e7b Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Sun, 21 Jul 2024 12:15:55 +0200 Subject: [PATCH 4/9] feat: add initial helper methods --- action.go | 261 ++++++++++++++++++++++++++++ action_test.go | 443 ++++++++++++++++++++++++++++++++++++++++++++++++ command.go | 83 +++++++++ command_test.go | 45 +++++ context.go | 189 +++++++++++++++++++++ context_test.go | 144 ++++++++++++++++ 6 files changed, 1165 insertions(+) create mode 100644 action.go create mode 100644 action_test.go create mode 100644 command.go create mode 100644 command_test.go create mode 100644 context.go create mode 100644 context_test.go diff --git a/action.go 
b/action.go new file mode 100644 index 0000000..1710a78 --- /dev/null +++ b/action.go @@ -0,0 +1,261 @@ +package sdk + +import ( + "fmt" + "io" + "os" + "strings" +) + +const ( + addMaskCmd = "add-mask" + + envCmd = "env" + outputCmd = "output" + pathCmd = "path" + stateCmd = "state" + + // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + multiLineFileDelim = "234baa68-d26f-4bf9-996d-45ec3520cb95" + multilineFileCmd = "%s<<" + multiLineFileDelim + "\n%s\n" + multiLineFileDelim // ${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter} + + addMatcherCmd = "add-matcher" + removeMatcherCmd = "remove-matcher" + + groupCmd = "group" + endGroupCmd = "endgroup" + + stepSummaryCmd = "step-summary" + + debugCmd = "debug" + + noticeCmd = "notice" + warningCmd = "warning" + errorCmd = "error" + + errFileCmdFmt = "unable to write command to the environment file: %s" +) + +type Action struct { + w io.Writer + env func(string) string + fields CommandProperties +} + +func New() *Action { + return &Action{w: os.Stdout, env: os.Getenv} +} + +// WithFieldsSlice includes the provided fields in log output. "f" must be a +// slice of k=v pairs. The given slice will be sorted. It panics if any of the +// string in the given slice does not construct a valid 'key=value' pair. +func (c *Action) WithFieldsSlice(f ...string) *Action { + m := make(CommandProperties) + for _, s := range f { + pair := strings.SplitN(s, "=", 2) + if len(pair) < 2 { + panic(fmt.Sprintf("%q is not a proper k=v pair!", s)) + } + + m[pair[0]] = pair[1] + } + + return c.WithFieldsMap(m) +} + +// WithFieldsMap includes the provided fields in log output. The fields in "m" +// are automatically converted to k=v pairs and sorted. +func (c *Action) WithFieldsMap(m map[string]string) *Action { + return &Action{ + w: c.w, + fields: m, + } +} + +// GetInput gets the input by the given name. It returns the empty string if the +// input is not defined. 
+func (c *Action) GetInput(i string) string { + e := strings.ReplaceAll(i, " ", "_") + e = strings.ToUpper(e) + e = "INPUT_" + e + return strings.TrimSpace(c.env(e)) +} + +// IssueCommand issues a new GitHub actions Command. +// It panics if it cannot write to the output stream. +func (c *Action) IssueCommand(cmd *Command) { + if _, err := fmt.Fprintln(c.w, cmd.String()); err != nil { + panic(fmt.Errorf("failed to issue command: %w", err)) + } +} + +// IssueFileCommand issues a new GitHub actions Command using environment files. +// It panics if writing to the file fails. +func (c *Action) IssueFileCommand(cmd *Command) { + e := strings.ReplaceAll(cmd.Name, "-", "_") + e = strings.ToUpper(e) + e = "GITHUB_" + e + + filepath := c.env(e) + msg := []byte(cmd.Message) + f, err := os.OpenFile(filepath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + panic(fmt.Errorf(errFileCmdFmt, err)) + } + + defer func() { + if err := f.Close(); err != nil { + panic(err) + } + }() + + if _, err := f.Write(msg); err != nil { + panic(fmt.Errorf(errFileCmdFmt, err)) + } +} + +// AddMask adds a new field mask for the given string "p". After called, future +// attempts to log "p" will be replaced with "***" in log output. It panics if +// it cannot write to the output stream. +func (c *Action) AddMask(p string) { + // ::add-mask::

+ c.IssueCommand(&Command{ + Name: addMaskCmd, + Message: p, + }) +} + +// AddMatcher adds a new matcher with the given file path. It panics if it +// cannot write to the output stream. +func (c *Action) AddMatcher(p string) { + // ::add-matcher::

+ c.IssueCommand(&Command{ + Name: addMatcherCmd, + Message: p, + }) +} + +// RemoveMatcher removes a matcher with the given owner name. It panics if it +// cannot write to the output stream. +func (c *Action) RemoveMatcher(o string) { + // ::remove-matcher owner=:: + c.IssueCommand(&Command{ + Name: removeMatcherCmd, + Properties: CommandProperties{ + "owner": o, + }, + }) +} + +// Group starts a new collapsable region up to the next ungroup invocation. It +// panics if it cannot write to the output stream. +func (c *Action) Group(t string) { + // ::group:: + c.IssueCommand(&Command{ + Name: groupCmd, + Message: t, + }) +} + +// EndGroup ends the current group. It panics if it cannot write to the output +// stream. +func (c *Action) EndGroup() { + // ::endgroup:: + c.IssueCommand(&Command{ + Name: endGroupCmd, + }) +} + +// Debugf prints a debug-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Debugf(msg string, args ...any) { + // ::debug :: + c.IssueCommand(&Command{ + Name: debugCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Noticef prints a notice-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Noticef(msg string, args ...any) { + // ::notice :: + c.IssueCommand(&Command{ + Name: noticeCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Warningf prints a warning-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. 
+func (c *Action) Warningf(msg string, args ...any) { + // ::warning :: + c.IssueCommand(&Command{ + Name: warningCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// Errorf prints a error-level message. It follows the standard fmt.Printf +// arguments, appending an OS-specific line break to the end of the message. +// It panics if it cannot write to the output stream. +func (c *Action) Errorf(msg string, args ...any) { + // ::error :: + c.IssueCommand(&Command{ + Name: errorCmd, + Message: fmt.Sprintf(msg, args...), + Properties: c.fields, + }) +} + +// AddPath adds the string "p" to the path for the invocation. +// It panics if it cannot write to the output file. +func (c *Action) AddPath(p string) { + c.IssueFileCommand(&Command{ + Name: pathCmd, + Message: p, + }) +} + +// SaveState saves state to be used in the "finally" post job entry point. +// It panics if it cannot write to the output stream. +func (c *Action) SaveState(k, v string) { + c.IssueFileCommand(&Command{ + Name: stateCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} + +// AddStepSummary writes the given markdown to the job summary. If a job summary +// already exists, this value is appended. +// It panics if it cannot write to the output file. +func (c *Action) AddStepSummary(markdown string) { + c.IssueFileCommand(&Command{ + Name: stepSummaryCmd, + Message: markdown, + }) +} + +// SetEnv sets an environment variable. +// It panics if it cannot write to the output file. +func (c *Action) SetEnv(k, v string) { + c.IssueFileCommand(&Command{ + Name: envCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} + +// SetOutput sets an output parameter. +// It panics if it cannot write to the output file. 
+func (c *Action) SetOutput(k, v string) { + c.IssueFileCommand(&Command{ + Name: outputCmd, + Message: fmt.Sprintf(multilineFileCmd, k, v), + }) +} diff --git a/action_test.go b/action_test.go new file mode 100644 index 0000000..df835ef --- /dev/null +++ b/action_test.go @@ -0,0 +1,443 @@ +package sdk + +import ( + "bytes" + "fmt" + "io" + "os" + "testing" +) + +func newFakeGetenvFunc(t *testing.T, wantKey, v string) func(string) string { + return func(gotKey string) string { + if gotKey != wantKey { + t.Errorf("expected call GetenvFunc(%q) to be GetenvFunc(%q)", gotKey, wantKey) + } + + return v + } +} + +func TestAction_WithFieldsSlice(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsSlice("line=100", "file=app.js") + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug file=app.js,line=100::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_WithFieldsSlice_Panic(t *testing.T) { + t.Parallel() + + defer func() { + want := `"no-equals" is not a proper k=v pair!` + if got := recover(); got != want { + t.Errorf("expected %q to be %q", got, want) + } + }() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsSlice("no-equals") + a.Debugf("fail: %s", "thing") +} + +func TestAction_WithFieldsMap(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a = a.WithFieldsMap(map[string]string{"line": "100", "file": "app.js"}) + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug file=app.js,line=100::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_GetInput(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "INPUT_FOO", "bar") + + if got, want := a.GetInput("foo"), "bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func 
TestAction_IssueCommand(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.IssueCommand(&Command{ + Name: "foo", + Message: "bar", + }) + + if got, want := b.String(), fmt.Sprintln("::foo::bar"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_IssueFileCommand(t *testing.T) { + t.Parallel() + + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_FOO", file.Name()) + + a.IssueFileCommand(&Command{ + Name: "foo", + Message: "bar", + }) + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the message to be written to the env file + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + if got, want := string(data), "bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddMask(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.AddMask("foobar") + + if got, want := b.String(), fmt.Sprintln("::add-mask::foobar"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddMatcher(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.AddMatcher("foobar.json") + + if got, want := b.String(), fmt.Sprintln("::add-matcher::foobar.json"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_RemoveMatcher(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.RemoveMatcher("foobar") + + if got, want := b.String(), fmt.Sprintln("::remove-matcher owner=foobar::"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Group(t *testing.T) { + t.Parallel() + + var b 
bytes.Buffer + a := New() + a.w = &b + + a.Group("mygroup") + + if got, want := b.String(), fmt.Sprintln("::group::mygroup"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_EndGroup(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.EndGroup() + + if got, want := b.String(), fmt.Sprintln("::endgroup::"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Debugf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Debugf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::debug::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Noticef(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Noticef("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::notice::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Warningf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Warningf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::warning::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_Errorf(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + a := New() + a.w = &b + + a.Errorf("fail: %s", "thing") + + if got, want := b.String(), fmt.Sprintln("::error::fail: thing"); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddPath(t *testing.T) { + t.Parallel() + + // expect a file command to be issued when env file is set. 
+ file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + var b bytes.Buffer + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_PATH", file.Name()) + + a.AddPath("/custom/bin") + + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the message to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + if got, want := string(data), "/custom/bin"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SaveState(t *testing.T) { + t.Parallel() + + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_STATE", file.Name()) + + a.SaveState("key", "value") + a.SaveState("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. 
+ data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_AddStepSummary(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_STEP_SUMMARY", file.Name()) + + a.AddStepSummary(` +## This is + +some markdown +`) + a.AddStepSummary(` +- content +`) + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp summary file: %s", err) + } + + want := "\n## This is\n\nsome markdown\n\n- content\n" + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SetEnv(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_ENV", file.Name()) + + a.SetEnv("key", "value") + a.SetEnv("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. 
+ data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestAction_SetOutput(t *testing.T) { + t.Parallel() + + // expectations for env file env commands + var b bytes.Buffer + file, err := os.CreateTemp("", "") + if err != nil { + t.Fatalf("unable to create a temp env file: %s", err) + } + defer os.Remove(file.Name()) + + a := New() + a.w = &b + a.env = newFakeGetenvFunc(t, "GITHUB_OUTPUT", file.Name()) + + a.SetOutput("key", "value") + a.SetOutput("key2", "value2") + + // expect an empty stdout buffer + if got, want := b.String(), ""; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + // expect the command to be written to the file. + data, err := io.ReadAll(file) + if err != nil { + t.Errorf("unable to read temp env file: %s", err) + } + + want := fmt.Sprintf("key<<%s\nvalue\n%s", multiLineFileDelim, multiLineFileDelim) + want += fmt.Sprintf("key2<<%s\nvalue2\n%s", multiLineFileDelim, multiLineFileDelim) + if got := string(data); got != want { + t.Errorf("expected %q to be %q", got, want) + } +} diff --git a/command.go b/command.go new file mode 100644 index 0000000..2c0f38f --- /dev/null +++ b/command.go @@ -0,0 +1,83 @@ +package sdk + +import ( + "fmt" + "sort" + "strings" +) + +const ( + cmdSeparator = "::" + cmdPropertiesPrefix = " " +) + +// CommandProperties is a named "map[string]string" type to hold key-value pairs +// passed to an actions command. +type CommandProperties map[string]string + +// String encodes the CommandProperties to a string as comma separated +// 'key=value' pairs. The pairs are joined in a chronological order. 
+func (props *CommandProperties) String() string { + l := make([]string, 0, len(*props)) + for k, v := range *props { + l = append(l, fmt.Sprintf("%s=%s", k, escapeProperty(v))) + } + + sort.Strings(l) + return strings.Join(l, ",") +} + +// Command can be issued by a GitHub action by writing to `stdout` with +// following format. +// +// ::name key=value,key=value::message +// +// Examples: +// ::warning::This is the message +// ::set-env name=MY_VAR::some value +type Command struct { + Name string + Message string + Properties CommandProperties +} + +// String encodes the Command to a string in the following format: +// +// ::name key=value,key=value::message +func (cmd *Command) String() string { + if cmd.Name == "" { + cmd.Name = "missing.command" + } + + var builder strings.Builder + builder.WriteString(cmdSeparator) + builder.WriteString(cmd.Name) + if len(cmd.Properties) > 0 { + builder.WriteString(cmdPropertiesPrefix) + builder.WriteString(cmd.Properties.String()) + } + + builder.WriteString(cmdSeparator) + builder.WriteString(escapeData(cmd.Message)) + return builder.String() +} + +// escapeData escapes string values for presentation in the output of a command. +// This is a not-so-well-documented requirement of commands that define a message. +func escapeData(v string) string { + v = strings.ReplaceAll(v, "%", "%25") + v = strings.ReplaceAll(v, "\r", "%0D") + v = strings.ReplaceAll(v, "\n", "%0A") + return v +} + +// escapeData escapes command property values for presentation in the output of +// a command. 
+func escapeProperty(v string) string { + v = strings.ReplaceAll(v, "%", "%25") + v = strings.ReplaceAll(v, "\r", "%0D") + v = strings.ReplaceAll(v, "\n", "%0A") + v = strings.ReplaceAll(v, ":", "%3A") + v = strings.ReplaceAll(v, ",", "%2C") + return v +} diff --git a/command_test.go b/command_test.go new file mode 100644 index 0000000..4df7217 --- /dev/null +++ b/command_test.go @@ -0,0 +1,45 @@ +package sdk + +import "testing" + +func TestCommandProperties_String(t *testing.T) { + t.Parallel() + + props := CommandProperties{"hello": "world"} + if got, want := props.String(), "hello=world"; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + props["foo"] = "bar" + if got, want := props.String(), "foo=bar,hello=world"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} + +func TestCommand_String(t *testing.T) { + t.Parallel() + + cmd := Command{Name: "foo"} + if got, want := cmd.String(), "::foo::"; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + cmd = Command{Name: "foo", Message: "bar"} + if got, want := cmd.String(), "::foo::bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + cmd = Command{ + Name: "foo", + Message: "bar", + Properties: CommandProperties{"bar": "foo"}, + } + if got, want := cmd.String(), "::foo bar=foo::bar"; got != want { + t.Errorf("expected %q to be %q", got, want) + } + + cmd = Command{Message: "quux"} + if got, want := cmd.String(), "::missing.command::quux"; got != want { + t.Errorf("expected %q to be %q", got, want) + } +} diff --git a/context.go b/context.go new file mode 100644 index 0000000..76477d2 --- /dev/null +++ b/context.go @@ -0,0 +1,189 @@ +package sdk + +import ( + "encoding/json" + "errors" + "fmt" + "os" + "strconv" +) + +// GitHubContext of current workflow. 
+// See: https://docs.github.com/en/actions/learn-github-actions/environment-variables +type GitHubContext struct { + Action string `env:"GITHUB_ACTION"` + ActionPath string `env:"GITHUB_ACTION_PATH"` + ActionRepository string `env:"GITHUB_ACTION_REPOSITORY"` + Actions bool `env:"GITHUB_ACTIONS"` + Actor string `env:"GITHUB_ACTOR"` + APIURL string `env:"GITHUB_API_URL,default=https://api.github.com"` + BaseRef string `env:"GITHUB_BASE_REF"` + Env string `env:"GITHUB_ENV"` + EventName string `env:"GITHUB_EVENT_NAME"` + EventPath string `env:"GITHUB_EVENT_PATH"` + GraphqlURL string `env:"GITHUB_GRAPHQL_URL,default=https://api.github.com/graphql"` + HeadRef string `env:"GITHUB_HEAD_REF"` + Job string `env:"GITHUB_JOB"` + Path string `env:"GITHUB_PATH"` + Ref string `env:"GITHUB_REF"` + RefName string `env:"GITHUB_REF_NAME"` + RefProtected bool `env:"GITHUB_REF_PROTECTED"` + RefType string `env:"GITHUB_REF_TYPE"` + + Repository string `env:"GITHUB_REPOSITORY"` + RepositoryOwner string `env:"GITHUB_REPOSITORY_OWNER"` + + RetentionDays int64 `env:"GITHUB_RETENTION_DAYS"` + RunAttempt int64 `env:"GITHUB_RUN_ATTEMPT"` + RunID int64 `env:"GITHUB_RUN_ID"` + RunNumber int64 `env:"GITHUB_RUN_NUMBER"` + ServerURL string `env:"GITHUB_SERVER_URL,default=https://github.com"` + SHA string `env:"GITHUB_SHA"` + StepSummary string `env:"GITHUB_STEP_SUMMARY"` + Workflow string `env:"GITHUB_WORKFLOW"` + Workspace string `env:"GITHUB_WORKSPACE"` + + // Event is populated by parsing the file at EventPath, if it exists. 
+ Event map[string]any +} + +// Context returns the context of current action with the payload object +// that triggered the workflow +func (c *Action) Context() (*GitHubContext, error) { + var merr error + context := &GitHubContext{ + APIURL: "https://api.github.com", + GraphqlURL: "https://api.github.com/graphql", + ServerURL: "https://github.com", + } + + if v := c.env("GITHUB_ACTION"); v != "" { + context.Action = v + } + if v := c.env("GITHUB_ACTION_PATH"); v != "" { + context.ActionPath = v + } + if v := c.env("GITHUB_ACTION_REPOSITORY"); v != "" { + context.ActionRepository = v + } + if v, err := parseBool(c.env("GITHUB_ACTIONS")); err == nil { + context.Actions = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_ACTOR"); v != "" { + context.Actor = v + } + if v := c.env("GITHUB_API_URL"); v != "" { + context.APIURL = v + } + if v := c.env("GITHUB_BASE_REF"); v != "" { + context.BaseRef = v + } + if v := c.env("GITHUB_ENV"); v != "" { + context.Env = v + } + if v := c.env("GITHUB_EVENT_NAME"); v != "" { + context.EventName = v + } + if v := c.env("GITHUB_EVENT_PATH"); v != "" { + context.EventPath = v + } + if v := c.env("GITHUB_GRAPHQL_URL"); v != "" { + context.GraphqlURL = v + } + if v := c.env("GITHUB_HEAD_REF"); v != "" { + context.HeadRef = v + } + if v := c.env("GITHUB_JOB"); v != "" { + context.Job = v + } + if v := c.env("GITHUB_PATH"); v != "" { + context.Path = v + } + if v := c.env("GITHUB_REF"); v != "" { + context.Ref = v + } + if v := c.env("GITHUB_REF_NAME"); v != "" { + context.RefName = v + } + if v, err := parseBool(c.env("GITHUB_REF_PROTECTED")); err == nil { + context.RefProtected = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_REF_TYPE"); v != "" { + context.RefType = v + } + + if v := c.env("GITHUB_REPOSITORY"); v != "" { + context.Repository = v + } + if v := c.env("GITHUB_REPOSITORY_OWNER"); v != "" { + context.RepositoryOwner = v + } + + if v, err := 
parseInt(c.env("GITHUB_RETENTION_DAYS")); err == nil { + context.RetentionDays = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_ATTEMPT")); err == nil { + context.RunAttempt = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_ID")); err == nil { + context.RunID = v + } else { + merr = errors.Join(merr, err) + } + if v, err := parseInt(c.env("GITHUB_RUN_NUMBER")); err == nil { + context.RunNumber = v + } else { + merr = errors.Join(merr, err) + } + if v := c.env("GITHUB_SERVER_URL"); v != "" { + context.ServerURL = v + } + if v := c.env("GITHUB_SHA"); v != "" { + context.SHA = v + } + if v := c.env("GITHUB_STEP_SUMMARY"); v != "" { + context.StepSummary = v + } + if v := c.env("GITHUB_WORKFLOW"); v != "" { + context.Workflow = v + } + if v := c.env("GITHUB_WORKSPACE"); v != "" { + context.Workspace = v + } + + if context.EventPath != "" { + eventData, err := os.ReadFile(context.EventPath) + if err != nil && !os.IsNotExist(err) { + return nil, fmt.Errorf("could not read event file: %w", err) + } + if eventData != nil { + if err := json.Unmarshal(eventData, &context.Event); err != nil { + return nil, fmt.Errorf("failed to unmarshal event payload: %w", err) + } + } + } + + return context, merr +} + +func parseBool(v string) (bool, error) { + if v == "" { + return false, nil + } + return strconv.ParseBool(v) +} + +func parseInt(v string) (int64, error) { + if v == "" { + return 0, nil + } + return strconv.ParseInt(v, 10, 64) +} diff --git a/context_test.go b/context_test.go new file mode 100644 index 0000000..f261d42 --- /dev/null +++ b/context_test.go @@ -0,0 +1,144 @@ +package sdk + +import ( + "os" + "reflect" + "testing" +) + +func TestAction_Context(t *testing.T) { + t.Parallel() + + f, err := os.CreateTemp("", "") + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Remove(f.Name()) }) + + if _, err := f.Write([]byte(`{"foo": "bar"}`)); err != nil { + t.Fatal(err) + } + if 
err := f.Close(); err != nil { + t.Fatal(err) + } + + eventPayloadPath := f.Name() + + cases := []struct { + name string + env map[string]string + exp *GitHubContext + }{ + { + name: "empty", + env: nil, + exp: &GitHubContext{ + // Defaults + APIURL: "https://api.github.com", + ServerURL: "https://github.com", + GraphqlURL: "https://api.github.com/graphql", + }, + }, + { + name: "no_payload", + env: map[string]string{ + "GITHUB_ACTION": "__repo-owner_name-of-action-repo", + "GITHUB_ACTION_PATH": "/path/to/action", + "GITHUB_ACTION_REPOSITORY": "repo-owner/name-of-action-repo", + "GITHUB_ACTIONS": "true", + "GITHUB_ACTOR": "sethvargo", + "GITHUB_API_URL": "https://foo.com", + "GITHUB_BASE_REF": "main", + "GITHUB_ENV": "/path/to/env", + "GITHUB_EVENT_NAME": "event_name", + "GITHUB_HEAD_REF": "headbranch", + "GITHUB_GRAPHQL_URL": "https://baz.com", + "GITHUB_JOB": "12", + "GITHUB_PATH": "/path/to/path", + "GITHUB_REF": "refs/tags/v1.0", + "GITHUB_REF_NAME": "v1.0", + "GITHUB_REF_PROTECTED": "true", + "GITHUB_REF_TYPE": "tag", + "GITHUB_REPOSITORY": "sethvargo/baz", + "GITHUB_REPOSITORY_OWNER": "sethvargo", + "GITHUB_RETENTION_DAYS": "90", + "GITHUB_RUN_ATTEMPT": "6", + "GITHUB_RUN_ID": "56", + "GITHUB_RUN_NUMBER": "34", + "GITHUB_SERVER_URL": "https://bar.com", + "GITHUB_SHA": "abcd1234", + "GITHUB_STEP_SUMMARY": "/path/to/summary", + "GITHUB_WORKFLOW": "test", + "GITHUB_WORKSPACE": "/path/to/workspace", + }, + exp: &GitHubContext{ + Action: "__repo-owner_name-of-action-repo", + ActionPath: "/path/to/action", + ActionRepository: "repo-owner/name-of-action-repo", + Actions: true, + Actor: "sethvargo", + APIURL: "https://foo.com", + BaseRef: "main", + Env: "/path/to/env", + EventName: "event_name", + // NOTE: No EventPath + GraphqlURL: "https://baz.com", + Job: "12", + HeadRef: "headbranch", + Path: "/path/to/path", + Ref: "refs/tags/v1.0", + RefName: "v1.0", + RefProtected: true, + RefType: "tag", + Repository: "sethvargo/baz", + RepositoryOwner: "sethvargo", + 
RetentionDays: 90, + RunAttempt: 6, + RunID: 56, + RunNumber: 34, + ServerURL: "https://bar.com", + SHA: "abcd1234", + StepSummary: "/path/to/summary", + Workflow: "test", + Workspace: "/path/to/workspace", + }, + }, + { + name: "payload", + env: map[string]string{ + "GITHUB_EVENT_PATH": eventPayloadPath, + }, + exp: &GitHubContext{ + EventPath: eventPayloadPath, + + // Defaults + APIURL: "https://api.github.com", + ServerURL: "https://github.com", + GraphqlURL: "https://api.github.com/graphql", + + Event: map[string]any{ + "foo": "bar", + }, + }, + }, + } + + for _, tc := range cases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + a := New() + a.env = func(s string) string { return tc.env[s] } + got, err := a.Context() + if err != nil { + t.Fatal(err) + } + + if !reflect.DeepEqual(got, tc.exp) { + t.Errorf("expected\n\n%#v\n\nto be\n\n%#v\n", got, tc.exp) + } + }) + } +} From 7d728dcbc2dc99c5a6f0cbdd21a161bc7cce824b Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Wed, 23 Oct 2024 22:18:58 +0200 Subject: [PATCH 5/9] feat: add `http.Client` provider with automatic token --- client.go | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 client.go diff --git a/client.go b/client.go new file mode 100644 index 0000000..2682a63 --- /dev/null +++ b/client.go @@ -0,0 +1,32 @@ +package sdk + +import ( + "fmt" + "net/http" + "net/url" +) + +func (a *Action) Client() *Client { + c := &Client{Client: &http.Client{}} + c.base = a.env("GITHUB_API_URL") + c.token = fmt.Sprintf("Bearer %s", a.env("GITHUB_TOKEN")) + return c +} + +type Client struct { + *http.Client + base string + token string +} + +func (c *Client) Do(req *http.Request) (*http.Response, error) { + req.Header.Set("Authorization", c.token) + if !req.URL.IsAbs() { + u, err := url.Parse(fmt.Sprintf("%s%s", c.base, req.URL)) + if err != nil { + return nil, err + } + req.URL = u + } + return c.Client.Do(req) +} From 6a3a41374e00b0c3bd132ef8e8323e51d753c0fc 
Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Fri, 1 Nov 2024 23:38:54 +0100 Subject: [PATCH 6/9] feat: change github context parsing --- client.go | 5 ++-- context.go | 78 ++++++++++++++++++------------------------------- context_test.go | 21 ++++++++----- 3 files changed, 44 insertions(+), 60 deletions(-) diff --git a/client.go b/client.go index 2682a63..9898adb 100644 --- a/client.go +++ b/client.go @@ -8,8 +8,9 @@ import ( func (a *Action) Client() *Client { c := &Client{Client: &http.Client{}} - c.base = a.env("GITHUB_API_URL") - c.token = fmt.Sprintf("Bearer %s", a.env("GITHUB_TOKEN")) + context := a.Context() + c.base = context.APIURL + c.token = fmt.Sprintf("Bearer %s", context.Token) return c } diff --git a/context.go b/context.go index 76477d2..5f676c9 100644 --- a/context.go +++ b/context.go @@ -2,10 +2,8 @@ package sdk import ( "encoding/json" - "errors" "fmt" "os" - "strconv" ) // GitHubContext of current workflow. @@ -14,7 +12,7 @@ type GitHubContext struct { Action string `env:"GITHUB_ACTION"` ActionPath string `env:"GITHUB_ACTION_PATH"` ActionRepository string `env:"GITHUB_ACTION_REPOSITORY"` - Actions bool `env:"GITHUB_ACTIONS"` + Actions string `env:"GITHUB_ACTIONS"` Actor string `env:"GITHUB_ACTOR"` APIURL string `env:"GITHUB_API_URL,default=https://api.github.com"` BaseRef string `env:"GITHUB_BASE_REF"` @@ -27,34 +25,36 @@ type GitHubContext struct { Path string `env:"GITHUB_PATH"` Ref string `env:"GITHUB_REF"` RefName string `env:"GITHUB_REF_NAME"` - RefProtected bool `env:"GITHUB_REF_PROTECTED"` + RefProtected string `env:"GITHUB_REF_PROTECTED"` RefType string `env:"GITHUB_REF_TYPE"` Repository string `env:"GITHUB_REPOSITORY"` RepositoryOwner string `env:"GITHUB_REPOSITORY_OWNER"` - RetentionDays int64 `env:"GITHUB_RETENTION_DAYS"` - RunAttempt int64 `env:"GITHUB_RUN_ATTEMPT"` - RunID int64 `env:"GITHUB_RUN_ID"` - RunNumber int64 `env:"GITHUB_RUN_NUMBER"` + RetentionDays string `env:"GITHUB_RETENTION_DAYS"` + RunAttempt string 
`env:"GITHUB_RUN_ATTEMPT"` + RunID string `env:"GITHUB_RUN_ID"` + RunNumber string `env:"GITHUB_RUN_NUMBER"` ServerURL string `env:"GITHUB_SERVER_URL,default=https://github.com"` SHA string `env:"GITHUB_SHA"` StepSummary string `env:"GITHUB_STEP_SUMMARY"` Workflow string `env:"GITHUB_WORKFLOW"` Workspace string `env:"GITHUB_WORKSPACE"` + Token string `env:"GITHUB_TOKEN"` + // Event is populated by parsing the file at EventPath, if it exists. - Event map[string]any + event map[string]any } // Context returns the context of current action with the payload object // that triggered the workflow -func (c *Action) Context() (*GitHubContext, error) { - var merr error +func (c *Action) Context() *GitHubContext { context := &GitHubContext{ APIURL: "https://api.github.com", GraphqlURL: "https://api.github.com/graphql", ServerURL: "https://github.com", + event: map[string]any{}, } if v := c.env("GITHUB_ACTION"); v != "" { @@ -66,10 +66,8 @@ func (c *Action) Context() (*GitHubContext, error) { if v := c.env("GITHUB_ACTION_REPOSITORY"); v != "" { context.ActionRepository = v } - if v, err := parseBool(c.env("GITHUB_ACTIONS")); err == nil { + if v := c.env("GITHUB_ACTIONS"); v != "" { context.Actions = v - } else { - merr = errors.Join(merr, err) } if v := c.env("GITHUB_ACTOR"); v != "" { context.Actor = v @@ -107,41 +105,29 @@ func (c *Action) Context() (*GitHubContext, error) { if v := c.env("GITHUB_REF_NAME"); v != "" { context.RefName = v } - if v, err := parseBool(c.env("GITHUB_REF_PROTECTED")); err == nil { + if v := c.env("GITHUB_REF_PROTECTED"); v != "" { context.RefProtected = v - } else { - merr = errors.Join(merr, err) } if v := c.env("GITHUB_REF_TYPE"); v != "" { context.RefType = v } - if v := c.env("GITHUB_REPOSITORY"); v != "" { context.Repository = v } if v := c.env("GITHUB_REPOSITORY_OWNER"); v != "" { context.RepositoryOwner = v } - - if v, err := parseInt(c.env("GITHUB_RETENTION_DAYS")); err == nil { + if v := c.env("GITHUB_RETENTION_DAYS"); v != "" { 
context.RetentionDays = v - } else { - merr = errors.Join(merr, err) } - if v, err := parseInt(c.env("GITHUB_RUN_ATTEMPT")); err == nil { + if v := c.env("GITHUB_RUN_ATTEMPT"); v != "" { context.RunAttempt = v - } else { - merr = errors.Join(merr, err) } - if v, err := parseInt(c.env("GITHUB_RUN_ID")); err == nil { + if v := c.env("GITHUB_RUN_ID"); v != "" { context.RunID = v - } else { - merr = errors.Join(merr, err) } - if v, err := parseInt(c.env("GITHUB_RUN_NUMBER")); err == nil { + if v := c.env("GITHUB_RUN_NUMBER"); v != "" { context.RunNumber = v - } else { - merr = errors.Join(merr, err) } if v := c.env("GITHUB_SERVER_URL"); v != "" { context.ServerURL = v @@ -158,32 +144,24 @@ func (c *Action) Context() (*GitHubContext, error) { if v := c.env("GITHUB_WORKSPACE"); v != "" { context.Workspace = v } + if v := c.env("GITHUB_TOKEN"); v != "" { + context.Token = v + } - if context.EventPath != "" { - eventData, err := os.ReadFile(context.EventPath) + return context +} + +func (c *GitHubContext) Event() (map[string]any, error) { + if c.EventPath != "" { + eventData, err := os.ReadFile(c.EventPath) if err != nil && !os.IsNotExist(err) { return nil, fmt.Errorf("could not read event file: %w", err) } if eventData != nil { - if err := json.Unmarshal(eventData, &context.Event); err != nil { + if err := json.Unmarshal(eventData, &c.event); err != nil { return nil, fmt.Errorf("failed to unmarshal event payload: %w", err) } } } - - return context, merr -} - -func parseBool(v string) (bool, error) { - if v == "" { - return false, nil - } - return strconv.ParseBool(v) -} - -func parseInt(v string) (int64, error) { - if v == "" { - return 0, nil - } - return strconv.ParseInt(v, 10, 64) + return c.event, nil } diff --git a/context_test.go b/context_test.go index f261d42..89f3ab2 100644 --- a/context_test.go +++ b/context_test.go @@ -37,6 +37,7 @@ func TestAction_Context(t *testing.T) { APIURL: "https://api.github.com", ServerURL: "https://github.com", GraphqlURL: 
"https://api.github.com/graphql", + event: map[string]any{}, }, }, { @@ -70,12 +71,13 @@ func TestAction_Context(t *testing.T) { "GITHUB_STEP_SUMMARY": "/path/to/summary", "GITHUB_WORKFLOW": "test", "GITHUB_WORKSPACE": "/path/to/workspace", + "GITHUB_TOKEN": "somerandomtoken", }, exp: &GitHubContext{ Action: "__repo-owner_name-of-action-repo", ActionPath: "/path/to/action", ActionRepository: "repo-owner/name-of-action-repo", - Actions: true, + Actions: "true", Actor: "sethvargo", APIURL: "https://foo.com", BaseRef: "main", @@ -88,19 +90,21 @@ func TestAction_Context(t *testing.T) { Path: "/path/to/path", Ref: "refs/tags/v1.0", RefName: "v1.0", - RefProtected: true, + RefProtected: "true", RefType: "tag", Repository: "sethvargo/baz", RepositoryOwner: "sethvargo", - RetentionDays: 90, - RunAttempt: 6, - RunID: 56, - RunNumber: 34, + RetentionDays: "90", + RunAttempt: "6", + RunID: "56", + RunNumber: "34", ServerURL: "https://bar.com", SHA: "abcd1234", StepSummary: "/path/to/summary", Workflow: "test", Workspace: "/path/to/workspace", + Token: "somerandomtoken", + event: map[string]any{}, }, }, { @@ -116,7 +120,7 @@ func TestAction_Context(t *testing.T) { ServerURL: "https://github.com", GraphqlURL: "https://api.github.com/graphql", - Event: map[string]any{ + event: map[string]any{ "foo": "bar", }, }, @@ -131,7 +135,8 @@ func TestAction_Context(t *testing.T) { a := New() a.env = func(s string) string { return tc.env[s] } - got, err := a.Context() + got := a.Context() + _, err := got.Event() if err != nil { t.Fatal(err) } From 285a7e8be509395ef42fe5553ac128700499224a Mon Sep 17 00:00:00 2001 From: Louis Seubert Date: Tue, 30 Sep 2025 21:23:51 +0200 Subject: [PATCH 7/9] chore: update domain to code.geekeey.de --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 5f045b8..f788a0e 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,3 @@ -module git.geekeey.de/actions/sdk +module code.geekeey.de/actions/sdk go 1.22.5 From 
3daeb3fc050c00c90b55dc5e054091833c782723 Mon Sep 17 00:00:00 2001
From: Louis Seubert
Date: Sun, 5 Oct 2025 20:37:14 +0200
Subject: [PATCH 8/9] feat: add globfs

Add a fs.FS impl to allow filtering an existing fs.FS by path patterns.

---
 iox/globfs.go      |  99 +++++++++++++++++++
 iox/globfs_test.go | 234 +++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 333 insertions(+)
 create mode 100644 iox/globfs.go
 create mode 100644 iox/globfs_test.go

diff --git a/iox/globfs.go b/iox/globfs.go
new file mode 100644
index 0000000..91240cb
--- /dev/null
+++ b/iox/globfs.go
@@ -0,0 +1,99 @@
+package iox
+
+import (
+	"io/fs"
+	"os"
+	"path"
+)
+
+type GlobFS struct {
+	base     fs.FS
+	patterns []string
+}
+
+// NewGlobFS creates a new GlobFS that exposes only files matching any of the given glob patterns.
+func NewGlobFS(base fs.FS, patterns ...string) *GlobFS {
+	return &GlobFS{base: base, patterns: patterns}
+}
+
+// match reports whether the given path matches any of the configured patterns.
+func (g *GlobFS) match(name string) bool {
+	for _, pat := range g.patterns {
+		if matched, _ := path.Match(pat, name); matched {
+			return true
+		}
+	}
+	return false
+}
+
+func (g *GlobFS) contains(dir string) bool {
+	queue := []string{dir}
+	visited := make(map[string]struct{})
+	for len(queue) > 0 {
+		current := queue[0]
+		queue = queue[1:] // dequeue
+
+		// Prevent visiting same dir multiple times
+		if _, seen := visited[current]; seen {
+			continue
+		}
+		visited[current] = struct{}{}
+
+		entries, err := fs.ReadDir(g.base, current)
+		if err != nil {
+			continue
+		}
+		for _, entry := range entries {
+			rel := path.Join(current, entry.Name())
+			if g.match(rel) {
+				return true
+			}
+			if entry.IsDir() {
+				queue = append(queue, rel)
+			}
+		}
+	}
+	return false
+}
+
+func (g *GlobFS) Open(name string) (fs.File, error) {
+	if g.match(name) {
+		return g.base.Open(name)
+	}
+
+	fi, err := fs.Stat(g.base, name)
+	if err != nil || !fi.IsDir() {
+		return nil, fs.ErrNotExist
+	}
+	if 
g.contains(name) { + return g.base.Open(name) + } + return nil, fs.ErrNotExist +} + +func (g *GlobFS) ReadDir(name string) ([]fs.DirEntry, error) { + if g.match(name) { + return fs.ReadDir(g.base, name) + } + + entries, err := fs.ReadDir(g.base, name) + if err != nil { + return nil, err + } + var children []fs.DirEntry + for _, entry := range entries { + rel := path.Join(name, entry.Name()) + if g.match(rel) { + children = append(children, entry) + } + if entry.IsDir() && g.contains(rel) { + children = append(children, entry) + } + } + + if len(children) == 0 { + return nil, os.ErrNotExist + } + + return children, nil +} diff --git a/iox/globfs_test.go b/iox/globfs_test.go new file mode 100644 index 0000000..e548a2a --- /dev/null +++ b/iox/globfs_test.go @@ -0,0 +1,234 @@ +package iox + +import ( + "io/fs" + "reflect" + "sort" + "testing" + "testing/fstest" +) + +func setupFS() fs.ReadDirFS { + // Create an in-memory FS with a mix of files and directories + return fstest.MapFS{ + "main.go": &fstest.MapFile{Data: []byte("package main")}, + "main_test.go": &fstest.MapFile{Data: []byte("package main_test")}, + "README.md": &fstest.MapFile{Data: []byte("# readme")}, + "LICENSE": &fstest.MapFile{Data: []byte("MIT")}, + "docs/guide.md": &fstest.MapFile{Data: []byte("Docs")}, + "docs/other.txt": &fstest.MapFile{Data: []byte("Other")}, + "docs/hidden/.keep": &fstest.MapFile{Data: []byte("")}, + "assets/img.png": &fstest.MapFile{Data: []byte("PNG")}, + "assets/style.css": &fstest.MapFile{Data: []byte("CSS")}, + ".gitignore": &fstest.MapFile{Data: []byte("*.log")}, + ".hiddenfile": &fstest.MapFile{Data: []byte("")}, + "emptydir/": &fstest.MapFile{Mode: fs.ModeDir}, + } +} + +// helper to get base names for easier comparison +func basenames(entries []fs.DirEntry) []string { + names := []string{} + for _, e := range entries { + names = append(names, e.Name()) + } + sort.Strings(names) + return names +} + +func TestGlobFSMultiplePatterns(t *testing.T) { + memfs := setupFS() + 
gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") + + tests := []struct { + path string + want []string + wantErr bool + }{ + {path: ".", want: []string{"README.md", "assets", "docs", "main.go", "main_test.go", ".gitignore"}}, + {path: "assets", want: []string{"img.png", "style.css"}}, + {path: "docs", want: []string{"guide.md"}}, + {path: "docs/hidden", want: []string{}, wantErr: true}, + {path: "emptydir", want: []string{}, wantErr: true}, + } + + for _, tc := range tests { + tc := tc // capture range variable + t.Run(tc.path, func(t *testing.T) { + entries, err := fs.ReadDir(gfs, tc.path) + if tc.wantErr && err == nil { + t.Errorf("expected error, got nil") + return + } + if !tc.wantErr && err != nil { + t.Errorf("unexpected error: %v", err) + return + } + got := basenames(entries) + sort.Strings(tc.want) + if !reflect.DeepEqual(got, tc.want) { + t.Errorf("got %v; want %v", got, tc.want) + } + }) + } +} + +func TestGlobFSOpen(t *testing.T) { + memfs := setupFS() + gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") + + type test struct { + path string + wantErr bool + } + tests := []test{ + {path: "main.go"}, + {path: "README.md"}, + {path: "LICENSE", wantErr: true}, + {path: "assets/img.png"}, + {path: "assets/style.css"}, + {path: "assets/nonexistent.png", wantErr: true}, + {path: "docs/guide.md"}, + {path: "docs/other.txt", wantErr: true}, + {path: ".gitignore"}, + {path: ".hiddenfile", wantErr: true}, + {path: "docs/hidden/.keep", wantErr: true}, + {path: "emptydir", wantErr: true}, + {path: "docs"}, // allowed because it contains matching file(s) + {path: "assets"}, // allowed because it contains matching file(s) + } + for _, tc := range tests { + tc := tc + t.Run(tc.path, func(t *testing.T) { + f, err := gfs.Open(tc.path) + if tc.wantErr && err == nil { + t.Errorf("expected error, got file") + if f != nil { + f.Close() + } + } else if !tc.wantErr && err != nil { + t.Errorf("unexpected error: 
%v", err) + } else if !tc.wantErr && err == nil { + info, _ := f.Stat() + if info.IsDir() { + _, derr := fs.ReadDir(gfs, tc.path) + if derr != nil && !tc.wantErr { + t.Errorf("unexpected error: %v", derr) + } + } + f.Close() + } + }) + } +} + +func TestGlobFSReadFile(t *testing.T) { + memfs := setupFS() + gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", ".gitignore") + + tests := []struct { + name string + want []byte + wantErr bool + }{ + {name: "main.go", want: []byte("package main")}, + {name: "main_test.go", want: []byte("package main_test")}, + {name: "README.md", want: []byte("# readme")}, + {name: "assets/img.png", want: []byte("PNG")}, + {name: "assets/style.css", want: []byte("CSS")}, + {name: ".gitignore", want: []byte("*.log")}, + {name: "LICENSE", wantErr: true}, // not allowed by filter + {name: "docs/guide.md", wantErr: true}, // not allowed by filter + {name: "docs/hidden/.keep", wantErr: true}, // not allowed by filter + {name: "doesnotexist.txt", wantErr: true}, // does not exist + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + got, err := fs.ReadFile(gfs, tc.name) + if tc.wantErr { + if err == nil { + t.Errorf("expected error, got nil (got=%q)", got) + } + } else { + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if string(got) != string(tc.want) { + t.Errorf("got %q; want %q", got, tc.want) + } + } + }) + } +} + +func TestGlobFSRelativePaths(t *testing.T) { + memfs := setupFS() + gfs := NewGlobFS(memfs, "docs/*.md") + entries, err := fs.ReadDir(gfs, "docs") + if err != nil { + t.Fatal(err) + } + got := basenames(entries) + want := []string{"guide.md"} + if !reflect.DeepEqual(got, want) { + t.Errorf("docs/*.md: got %v, want %v", got, want) + } +} + +func TestGlobFSNoMatchesOpen(t *testing.T) { + gfs := NewGlobFS(setupFS(), "*.xyz") + _, err := gfs.Open("main.go") + if err == nil { + t.Fatal("expected error when opening file with no matches") + } +} + +func TestGlobFSNoMatchesStat(t *testing.T) 
{
+	gfs := NewGlobFS(setupFS(), "*.xyz")
+	_, err := fs.Stat(gfs, "main.go")
+	if err == nil {
+		t.Fatal("expected error with no matches: stat")
+	}
+}
+
+func TestGlobFSNoMatchesReadDir(t *testing.T) {
+	gfs := NewGlobFS(setupFS(), "*.xyz")
+	_, err := fs.ReadDir(gfs, "main.go")
+	if err == nil {
+		t.Fatal("expected error with no matches: readdir")
+	}
+}
+
+func TestGlobFSNoMatchesReadFile(t *testing.T) {
+	gfs := NewGlobFS(setupFS(), "*.xyz")
+	_, err := fs.ReadFile(gfs, "main.go")
+	if err == nil {
+		t.Fatal("expected error with no matches: readfile")
+	}
+}
+
+func TestGlobFS_IntegrationWithStdlib(t *testing.T) {
+	memfs := setupFS()
+	gfs := NewGlobFS(memfs, "*.go", "docs/guide.md")
+	// Use fs.WalkDir with our filtered FS
+	var walked []string
+	err := fs.WalkDir(gfs, ".", func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		walked = append(walked, path)
+		return nil
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	// Only files and dirs matching or containing matches should appear
+	for _, p := range walked {
+		if p == "." || p == "main.go" || p == "main_test.go" || p == "docs" || p == "docs/guide.md" {
+			continue
+		}
+		t.Errorf("WalkDir: unexpected path %q", p)
+	}
+}

From 24ad00274d9646ee0da1a25f7df3cc624b7a8085 Mon Sep 17 00:00:00 2001
From: Louis Seubert
Date: Sun, 25 Jan 2026 17:22:16 +0100
Subject: [PATCH 9/9] feat: rewrote glob to have a full doublestar impl

This implements a full double star glob implementation with its own
filesystem implementation.
--- .forgejo/workflows/default.yml | 17 ++ glob/fs.go | 127 +++++++++ glob/fs_test.go | 505 +++++++++++++++++++++++++++++++++ glob/glob.go | 485 +++++++++++++++++++++++++++++++ glob/glob_test.go | 220 ++++++++++++++ iox/globfs.go | 99 ------- iox/globfs_test.go | 234 --------------- 7 files changed, 1354 insertions(+), 333 deletions(-) create mode 100644 .forgejo/workflows/default.yml create mode 100644 glob/fs.go create mode 100644 glob/fs_test.go create mode 100644 glob/glob.go create mode 100644 glob/glob_test.go delete mode 100644 iox/globfs.go delete mode 100644 iox/globfs_test.go diff --git a/.forgejo/workflows/default.yml b/.forgejo/workflows/default.yml new file mode 100644 index 0000000..a6f7bb2 --- /dev/null +++ b/.forgejo/workflows/default.yml @@ -0,0 +1,17 @@ +name: default + +on: + push: + branches: [main] + +jobs: + default: + name: ensure tests work + runs-on: debian-latest + container: docker.io/golang:1.24-alpine + steps: + - name: checkout + uses: https://code.geekeey.de/actions/checkout@1 + - name: go run + run: | + go test diff --git a/glob/fs.go b/glob/fs.go new file mode 100644 index 0000000..e6ab35f --- /dev/null +++ b/glob/fs.go @@ -0,0 +1,127 @@ +package glob + +import ( + "io/fs" + "path/filepath" + "slices" +) + +type GlobFS struct { + base fs.FS + patterns []Pattern +} + +// NewGlobFS creates a new GlobFS that exposes only files matching any of the given glob patterns. 
+func NewGlobFS(base fs.FS, patterns ...string) (*GlobFS, error) { + fs := &GlobFS{base: base, patterns: []Pattern{}} + + for _, value := range patterns { + pattern, err := New(value) + if err != nil { + return nil, err + } + fs.patterns = append(fs.patterns, *pattern) + } + + return fs, nil +} + +func (g *GlobFS) match(name string, prefix bool) bool { + var f func(Pattern) bool + if prefix { + f = func(p Pattern) bool { return p.MatchPrefix(name) } + } else { + f = func(p Pattern) bool { return p.Match(name) } + } + return slices.ContainsFunc(g.patterns, f) +} + +func (g *GlobFS) contains(name string) (bool, error) { + stat, err := fs.Stat(g.base, name) + if err != nil { + return false, err + } + if stat.IsDir() { + contains := false + err := fs.WalkDir(g.base, name, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if d.IsDir() && !g.match(name+string(filepath.Separator), true) { + return fs.SkipDir + } + + if g.match(path, false) { + contains = true + return fs.SkipAll + } + + return nil + }) + return contains, err + } else { + return g.match(name, false), nil + } +} + +func (g *GlobFS) Open(name string) (fs.File, error) { + root := name == "." + + // fast path some of the pattern matches + if root || g.match(name, false) { + return g.base.Open(name) + } + + ok, err := g.contains(name) + if err != nil { + return nil, err + } + + if ok { + return g.base.Open(name) + } else { + return nil, fs.ErrNotExist + } +} + +func (g *GlobFS) ReadDir(name string) ([]fs.DirEntry, error) { + root := name == "." 
+ path := name + string(filepath.Separator) + + // fast path no pattern matches (prefix check) + // root dir ('.') must be handled to get initial entries + if !root && !g.match(path, true) { + return nil, fs.ErrNotExist + } + + entries, err := fs.ReadDir(g.base, name) + if err != nil { + return nil, err + } + + // if we do not have any child entries, we need to check if the directory + // itself matched some of the defined patterns, if so we should be able to + // read it, otherwise we can not read it. + if !root && len(entries) == 0 { + if !g.match(path, false) { + return nil, fs.ErrNotExist + } + } + + children := []fs.DirEntry{} + for _, entry := range entries { + ok, err := g.contains(filepath.Join(name, entry.Name())) + if err != nil { + return nil, err + } + + if ok { + children = append(children, entry) + } else { + continue + } + } + + return children, nil +} diff --git a/glob/fs_test.go b/glob/fs_test.go new file mode 100644 index 0000000..44b08c9 --- /dev/null +++ b/glob/fs_test.go @@ -0,0 +1,505 @@ +package glob + +import ( + "archive/zip" + "bytes" + "io" + "io/fs" + "reflect" + "sort" + "testing" + "testing/fstest" +) + +func setupFS() fs.ReadDirFS { + // Create an in-memory FS with a mix of files and directories + return fstest.MapFS{ + "main.go": &fstest.MapFile{Data: []byte("package main")}, + "main_test.go": &fstest.MapFile{Data: []byte("package main_test")}, + "README.md": &fstest.MapFile{Data: []byte("# readme")}, + "LICENSE": &fstest.MapFile{Data: []byte("MIT")}, + "docs/guide.md": &fstest.MapFile{Data: []byte("Docs")}, + "docs/other.txt": &fstest.MapFile{Data: []byte("Other")}, + "docs/hidden/.keep": &fstest.MapFile{Data: []byte("")}, + "assets/img.png": &fstest.MapFile{Data: []byte("PNG")}, + "assets/style.css": &fstest.MapFile{Data: []byte("CSS")}, + ".gitignore": &fstest.MapFile{Data: []byte("*.log")}, + ".hiddenfile": &fstest.MapFile{Data: []byte("")}, + "emptydir": &fstest.MapFile{Mode: fs.ModeDir | 0o755}, + } +} + +// helper to get base 
names for easier comparison +func basenames(entries []fs.DirEntry) []string { + names := []string{} + for _, e := range entries { + names = append(names, e.Name()) + } + sort.Strings(names) + return names +} + +func TestGlobFS_MultiplePatterns(t *testing.T) { + memfs := setupFS() + gfs, err := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") + + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + + tests := []struct { + path string + want []string + wantErr bool + }{ + {path: ".", want: []string{"README.md", "assets", "docs", "main.go", "main_test.go", ".gitignore"}}, + {path: "assets", want: []string{"img.png", "style.css"}}, + {path: "docs", want: []string{"guide.md"}}, + {path: "docs/hidden", want: []string{}, wantErr: true}, + {path: "emptydir", want: []string{}, wantErr: true}, + } + + for _, tc := range tests { + tc := tc // capture range variable + t.Run(escape(tc.path), func(t *testing.T) { + entries, err := fs.ReadDir(gfs, tc.path) + if tc.wantErr && err == nil { + t.Errorf("expected error, got nil") + return + } + if !tc.wantErr && err != nil { + t.Errorf("unexpected error: %v", err) + return + } + got := basenames(entries) + sort.Strings(tc.want) + if !reflect.DeepEqual(got, tc.want) { + t.Errorf("got %v; want %v", got, tc.want) + } + }) + } +} + +func TestGlobFS_Open(t *testing.T) { + memfs := setupFS() + gfs, err := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") + + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + + type test struct { + path string + wantErr bool + } + tests := []test{ + {path: "main.go"}, + {path: "README.md"}, + {path: "LICENSE", wantErr: true}, + {path: "assets/img.png"}, + {path: "assets/style.css"}, + {path: "assets/nonexistent.png", wantErr: true}, + {path: "docs/guide.md"}, + {path: "docs/other.txt", wantErr: true}, + {path: ".gitignore"}, + {path: ".hiddenfile", wantErr: true}, + {path: 
"docs/hidden/.keep", wantErr: true}, + {path: "emptydir", wantErr: true}, + {path: "docs"}, // allowed because it contains matching file(s) + {path: "assets"}, // allowed because it contains matching file(s) + } + for _, tc := range tests { + tc := tc + t.Run(escape(tc.path), func(t *testing.T) { + f, err := gfs.Open(tc.path) + if tc.wantErr && err == nil { + t.Errorf("expected error, got file") + if f != nil { + f.Close() + } + } else if !tc.wantErr && err != nil { + t.Errorf("unexpected error: %v", err) + } else if !tc.wantErr && err == nil { + info, _ := f.Stat() + if info.IsDir() { + _, derr := fs.ReadDir(gfs, tc.path) + if derr != nil && !tc.wantErr { + t.Errorf("unexpected error: %v", derr) + } + } + f.Close() + } + }) + } +} + +func TestGlobFS_ReadFile(t *testing.T) { + memfs := setupFS() + gfs, err := NewGlobFS(memfs, "*.go", "*.md", "assets/*", ".gitignore") + + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + + tests := []struct { + name string + want []byte + wantErr bool + }{ + {name: "main.go", want: []byte("package main")}, + {name: "main_test.go", want: []byte("package main_test")}, + {name: "README.md", want: []byte("# readme")}, + {name: "assets/img.png", want: []byte("PNG")}, + {name: "assets/style.css", want: []byte("CSS")}, + {name: ".gitignore", want: []byte("*.log")}, + {name: "LICENSE", wantErr: true}, // not allowed by filter + {name: "docs/guide.md", wantErr: true}, // not allowed by filter + {name: "docs/hidden/.keep", wantErr: true}, // not allowed by filter + {name: "doesnotexist.txt", wantErr: true}, // does not exist + } + + for _, tc := range tests { + tc := tc + t.Run(escape(tc.name), func(t *testing.T) { + got, err := fs.ReadFile(gfs, tc.name) + if tc.wantErr { + if err == nil { + t.Errorf("expected error, got nil (got=%q)", got) + } + } else { + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if string(got) != string(tc.want) { + t.Errorf("got %q; want %q", got, tc.want) + } + } + 
}) + } +} + +func TestGlobFS_RelativePaths(t *testing.T) { + memfs := setupFS() + gfs, err := NewGlobFS(memfs, "docs/*.md") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + entries, err := fs.ReadDir(gfs, "docs") + if err != nil { + t.Fatal(err) + } + got := basenames(entries) + want := []string{"guide.md"} + if !reflect.DeepEqual(got, want) { + t.Errorf("docs/*.md: got %v, want %v", got, want) + } +} + +func TestGlobFS_NoMatchesOpen(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), "*.xyz") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = gfs.Open("main.go") + if err == nil { + t.Fatal("expected error when opening file with no matches") + } +} + +func TestGlobFS_NoMatchesStat(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), "*.xyz") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.Stat(gfs, "main.go") + if err == nil { + t.Fatal("expected error with no matches: stat") + } +} + +func TestGlobFS_NoMatchesReadDir(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), "*.xyz") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.ReadDir(gfs, "main.go") + if err == nil { + t.Fatal("expected error with no matches: readdir") + } +} + +func TestGlobFS_NoMatchesReadFile(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), "*.xyz") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.ReadFile(gfs, "main.go") + if err == nil { + t.Fatal("expected error with no matches: readfile") + } +} + +func TestGlobFS_MatchEmptyDirExact(t *testing.T) { + // the trailing slash indicates that the directory should be included + gfs, err := NewGlobFS(setupFS(), "emptydir/") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.ReadDir(gfs, "emptydir") + if err != nil { + t.Errorf("unexpected error: %v", err) + } +} + +func 
TestGlobFS_MatchEmptyDirExact2(t *testing.T) { + // the trailing slash indicates that the directory should be included + gfs, err := NewGlobFS(setupFS(), "emptydir/*") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.ReadDir(gfs, "emptydir") + if err != nil { + t.Errorf("unexpected error: %v", err) + } +} + +func TestGlobFS_NoMatchEmptyDirExact(t *testing.T) { + // no traling slash indicates that the directory must be a file to be included + gfs, err := NewGlobFS(setupFS(), "emptydir") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + _, err = fs.ReadDir(gfs, "emptydir") + if err == nil { + t.Fatal("expected error with no matches: readfile") + } +} + +func TestGlobFS_IntegrationWithStdlibWalkDir(t *testing.T) { + memfs := setupFS() + gfs, err := NewGlobFS(memfs, "*.go", "docs/guide.md") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + // Use fs.WalkDir with our filtered FS + var walked []string + err = fs.WalkDir(gfs, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + t.Fatalf("the %q caused: %v", path, err) + return err + } + walked = append(walked, path) + return nil + }) + if err != nil { + t.Fatal(err) + } + // Only files and dirs matching or containing matches should appear + for _, p := range walked { + if p == "." 
|| p == "main.go" || p == "main_test.go" || p == "docs" || p == "docs/guide.md" { + continue + } + t.Errorf("WalkDir: unexpected path %q", p) + } +} + +func TestGlobFS_InvalidPattern(t *testing.T) { + _, err := NewGlobFS(setupFS(), "[invalid") + if err == nil { + t.Fatal("expected error for invalid pattern, got nil") + } +} + +func TestGlobFS_WildcardInDirSegment(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), "docs/*/*.md") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, "docs/hidden") + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if len(entries) != 0 { + t.Errorf("expected no entries, got %v", basenames(entries)) + } +} + +func TestGlobFS_DeeplyNestedMatch(t *testing.T) { + memfs := fstest.MapFS{ + "a/b/c/d.txt": &fstest.MapFile{Data: []byte("deep")}, + } + gfs, err := NewGlobFS(memfs, "a/b/c/*.txt") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + data, err := fs.ReadFile(gfs, "a/b/c/d.txt") + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if string(data) != "deep" { + t.Errorf("got %q, want %q", data, "deep") + } +} + +func TestGlobFS_HiddenFilesOnly(t *testing.T) { + gfs, err := NewGlobFS(setupFS(), ".*") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, ".") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got := basenames(entries) + want := []string{".gitignore", ".hiddenfile"} + if !reflect.DeepEqual(got, want) { + t.Errorf("got %v, want %v", got, want) + } +} + +// Test directory pattern matching with various directory globs +func TestGlobFS_DirectoryPatterns(t *testing.T) { + memfs := fstest.MapFS{ + "foo/bar/baz.txt": &fstest.MapFile{Data: []byte("baz")}, + "foo/bar/qux.txt": &fstest.MapFile{Data: []byte("qux")}, + "foo/readme.md": &fstest.MapFile{Data: []byte("readme")}, + "foo/empty/.keep": &fstest.MapFile{Data: []byte("")}, // represent empty dir by a file inside + "top.txt": 
&fstest.MapFile{Data: []byte("top")}, + } + + t.Run("single dir segment wildcard", func(t *testing.T) { + gfs, err := NewGlobFS(memfs, "foo/bar/*") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, "foo/bar") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got := basenames(entries) + want := []string{"baz.txt", "qux.txt"} + if !reflect.DeepEqual(got, want) { + t.Errorf("got %v, want %v", got, want) + } + }) + + t.Run("recursive dir wildcard", func(t *testing.T) { + gfs, err := NewGlobFS(memfs, "foo/bar/*") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, "foo/bar") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got := basenames(entries) + want := []string{"baz.txt", "qux.txt"} + if !reflect.DeepEqual(got, want) { + t.Errorf("got %v, want %v", got, want) + } + entries, err = fs.ReadDir(gfs, "foo") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got = basenames(entries) + want = []string{"bar"} + if !reflect.DeepEqual(got, want) { + t.Errorf("got %v, want %v", got, want) + } + }) + + t.Run("match empty directory", func(t *testing.T) { + gfs, err := NewGlobFS(memfs, "foo/empty/") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, "foo/empty") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(entries) != 0 { + t.Errorf("expected empty, got %v", basenames(entries)) + } + }) + + t.Run("top-level dir wildcard", func(t *testing.T) { + gfs, err := NewGlobFS(memfs, "*/bar/*") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + entries, err := fs.ReadDir(gfs, "foo/bar") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got := basenames(entries) + want := []string{"baz.txt", "qux.txt"} + if !reflect.DeepEqual(got, want) { + t.Errorf("got %v, want %v", got, want) + } + }) +} + +func TestGlobFS_IntegrationWithStdlibZipWriter(t *testing.T) { + gfs, 
err := NewGlobFS(setupFS(), "*") + if err != nil { + t.Errorf("unexpected error while creating glob fs: %v", err) + } + + want := map[string]string{ + "main.go": "package main", + "main_test.go": "package main_test", + "README.md": "# readme", + "LICENSE": "MIT", + ".gitignore": "*.log", + ".hiddenfile": "", + } + + buf := new(bytes.Buffer) + wr := zip.NewWriter(buf) + err = wr.AddFS(gfs) + if err != nil { + t.Fatalf("adding fs to zip writer: %v", err) + } + err = wr.Close() + if err != nil { + t.Fatalf("close zip writer: %v", err) + } + + rd, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(len(buf.Bytes()))) + if err != nil { + t.Fatalf("invalid zip archive: %v", err) + } + + got := make(map[string]string) + for _, f := range rd.File { + rc, err := f.Open() + if err != nil { + t.Fatalf("cannot open file %s: %v", f.Name, err) + } + content, err := io.ReadAll(rc) + defer rc.Close() + if err != nil { + t.Fatalf("cannot read file %s: %v", f.Name, err) + } + got[f.Name] = string(content) + } + + // Compare expected vs actual. + for name, exp := range want { + act, ok := got[name] + if !ok { + t.Errorf("expected file %q not found in zip", name) + continue + } + if act != exp { + t.Errorf("content mismatch for %q:\nexpected: %q\nactual: %q", name, exp, act) + } + } + + // Check for unexpected extra files. + for name := range got { + if _, ok := want[name]; !ok { + t.Errorf("unexpected file %q found in zip", name) + } + } +} diff --git a/glob/glob.go b/glob/glob.go new file mode 100644 index 0000000..b788de7 --- /dev/null +++ b/glob/glob.go @@ -0,0 +1,485 @@ +// Package glob implements a language for specifying glob patterns for path +// names starting at some root. The language does not follow the specs from +// filepath.Match but provides a superset which allows for directory +// wildcards. +// +// Patterns consist of normal characters, non-separator wildcards '*' and '?', +// separators '/' and directory wildcards '**'. 
+
+//
+// A somewhat formal grammar can be given as:
+//
+//	pattern = term, { '/', term };
+//	term = '**' | name;
+//	name = { charSpecial | group | escapedChar | '*' | '?' };
+//	charSpecial = (* any unicode rune except '/', '*', '?', '[' and '\' *);
+//	char = (* any unicode rune *);
+//	escapedChar = '\\', char;
+//	group = '[', [ '^' ] { escapedChar | groupChar | range } ']';
+//	groupChar = (* any unicode rune except '-' and ']' *);
+//	range = ( groupChar | escapedChar ), '-', (groupChar | escapedChar);
+//
+// The format operators have the following meaning:
+//
+//   - any character (rune) matches exactly this rune - with the following
+//     exceptions
+//   - `/` works as a directory separator. It matches directory boundaries of the
+//     underlying system independently of the separator char used by the OS.
+//   - `?` matches exactly one non-separator char
+//   - `*` matches any number of non-separator chars - including zero
+//   - `\` escapes a character's special meaning allowing `*` and `?` to be used
+//     as regular characters.
+//   - `**` matches any number of nested directories. If anything is matched it
+//     always extends until a separator or the end of the name.
+//   - Groups can be defined using the `[` and `]` characters. Inside a group the
+//     special meaning of the characters mentioned before is disabled but the
+//     following rules apply
+//   - any character used as part of the group acts as a choice to pick from
+//   - if the group's first character is a `^` the whole group is negated
+//   - a range can be defined using `-` matching any rune between low and high
+//     inclusive
+//   - Multiple ranges can be given. Ranges can be combined with choices.
+//   - The meaning of `-` and `]` can be escaped using `\`
+package glob
+
+import (
+	"errors"
+	"fmt"
+	"io/fs"
+	"strings"
+	"unicode/utf8"
+)
+
+const (
+	// Separator defines the path separator to use in patterns. 
This is always
+	// a forward slash independently of the underlying OS's separator.
+	Separator = '/'
+	// SingleWildcard defines the single non-separator character wildcard
+	// operator.
+	SingleWildcard = '?'
+	// AnyWildcard defines the any-number-of-non-separator-characters
+	// wildcard operator.
+	AnyWildcard = '*'
+	// Backslash escapes the next character's special meaning.
+	Backslash = '\\'
+	// GroupStart starts a group.
+	GroupStart = '['
+	// GroupEnd ends a group.
+	GroupEnd = ']'
+	// GroupNegate when used as the first character of a group negates the group.
+	GroupNegate = '^'
+	// Range defines the range operator.
+	Range = '-'
+)
+
+var (
+	// ErrBadPattern is returned when an invalid pattern is found. Make
+	// sure you use errors.Is to compare errors to this sentinel value.
+	ErrBadPattern = errors.New("bad pattern")
+)
+
+// Pattern defines a glob pattern prepared ahead of time which can be used to
+// match filenames. Pattern is safe to use concurrently.
+type Pattern struct {
+	// tokens holds the parsed pattern as a flat token list, consumed in
+	// order by the matcher.
+	tokens []token
+}
+
+// New creates a new pattern from pat and returns it. It returns an error
+// indicating any invalid pattern. 
+func New(pat string) (*Pattern, error) { + var tokens []token + + p := pat + for { + if len(p) == 0 { + return &Pattern{tokens: tokens}, nil + } + + r, l := utf8.DecodeRuneInString(p) + + var t token + switch r { + case Separator: + if len(tokens) > 0 && tokens[len(tokens)-1].r == Separator { + return nil, fmt.Errorf("%w: unexpected //", ErrBadPattern) + } + t = token{tokenTypeLiteral, Separator, runeGroup{}} + + case SingleWildcard: + if len(tokens) > 0 && (tokens[len(tokens)-1].t == tokenTypeAnyRunes || tokens[len(tokens)-1].t == tokenTypeAnyDirectories) { + return nil, fmt.Errorf("%w: unexpected ?", ErrBadPattern) + } + t = token{tokenTypeSingleRune, 0, runeGroup{}} + + case AnyWildcard: + if len(tokens) > 0 && (tokens[len(tokens)-1].t == tokenTypeSingleRune || tokens[len(tokens)-1].t == tokenTypeAnyDirectories) { + return nil, fmt.Errorf("%w: unexpected ?", ErrBadPattern) + } + + t = token{tokenTypeAnyRunes, 0, runeGroup{}} + + if len(p[l:]) > 0 { + n, nl := utf8.DecodeRuneInString(p[l:]) + if n == AnyWildcard { + d, _ := utf8.DecodeRuneInString(p[l+nl:]) + if d == utf8.RuneError { + return nil, fmt.Errorf("%w: unexpected end of patterm after **", ErrBadPattern) + } + if d != Separator { + return nil, fmt.Errorf("%w: unexpected %c after **", ErrBadPattern, d) + } + + t.t = tokenTypeAnyDirectories + l += nl + } + } + + case Backslash: + if len(p[l:]) == 0 { + return nil, fmt.Errorf("%w: no character given after \\", ErrBadPattern) + } + + p = p[l:] + r, l = utf8.DecodeRuneInString(p) + + t = token{tokenTypeLiteral, r, runeGroup{}} + + case GroupStart: + var err error + t, l, err = parseGroup(p) + if err != nil { + return nil, err + } + + case GroupEnd: + return nil, fmt.Errorf("%w: using ] w/o [", ErrBadPattern) + + default: + t = token{tokenTypeLiteral, r, runeGroup{}} + } + + tokens = append(tokens, t) + p = p[l:] + } +} + +// String reconstructs the glob pattern from the tokens. 
+func (pat *Pattern) String() string { + var b strings.Builder + for _, t := range pat.tokens { + switch t.t { + case tokenTypeLiteral: + switch t.r { + case GroupStart, GroupEnd, GroupNegate, AnyWildcard, SingleWildcard, Range: + b.WriteRune(Backslash) + fallthrough + default: + b.WriteRune(t.r) + } + case tokenTypeSingleRune: + b.WriteRune(SingleWildcard) + case tokenTypeAnyRunes: + b.WriteRune(AnyWildcard) + case tokenTypeAnyDirectories: + b.WriteString("**") + case tokenTypeGroup: + b.WriteRune(GroupStart) + if t.g.neg { + b.WriteRune(GroupNegate) + } + for _, r := range t.g.runes { + b.WriteRune(r) + } + for _, rg := range t.g.ranges { + b.WriteRune(rg.lo) + b.WriteRune(Range) + b.WriteRune(rg.hi) + } + b.WriteRune(GroupEnd) + } + } + return b.String() +} + +func (pat *Pattern) GoString() string { + return pat.String() +} + +// Match matches a file's path name f to the compiled pattern and returns +// whether the path matches the pattern or not. +func (pat *Pattern) Match(f string) bool { + return match(f, pat.tokens, false) +} + +func (pat *Pattern) MatchPrefix(f string) bool { + return match(f, pat.tokens, true) +} + +// GlobFS applies pat to all files found in fsys under root and returns the +// matching path names as a string slice. It uses fs.WalkDir internally and all +// constraints given for that function apply to GlobFS. +func (pat *Pattern) GlobFS(fsys fs.FS, root string) ([]string, error) { + results := make([]string, 0) + err := fs.WalkDir(fsys, root, func(p string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if p == "." { + return nil + } + + if root != "." && root != "" { + p = strings.Replace(p, root, "", 1) + } + + if d.IsDir() { + if !pat.MatchPrefix(p) { + return fs.SkipDir + } + + return nil + } + + if pat.Match(p) { + results = append(results, p) + } + + return nil + }) + + return results, err +} + +func parseGroup(p string) (token, int, error) { + // re-read the [. 
No need to assert the rune here as it has been + // done in the main parsing loop. + _, le := utf8.DecodeRuneInString(p) + t := token{ + t: tokenTypeGroup, + g: runeGroup{}, + } + + initialLen := le + var start rune + + for { + if len(p[le:]) == 0 { + return t, le, fmt.Errorf("%w: missing %c", ErrBadPattern, GroupEnd) + } + + r, l := utf8.DecodeRuneInString(p[le:]) + le += l + + if initialLen == le-l && r == GroupNegate { + t.g.neg = true + continue + } + + switch r { + case GroupEnd: + if start != 0 { + t.g.runes = append(t.g.runes, start) + } + + return t, le, nil + + case Range: + if start == 0 { + return t, le, fmt.Errorf("%w: missing start for character range", ErrBadPattern) + } + + if len(p[le:]) == 0 { + return t, le, fmt.Errorf("%w: missing range end", ErrBadPattern) + } + + r, l = utf8.DecodeRuneInString(p[le:]) + le += l + + switch r { + case GroupEnd: + return t, le, fmt.Errorf("%w: unterminated range", ErrBadPattern) + + case Backslash: + if len(p[le:]) == 0 { + return t, le, fmt.Errorf("%w: missing character after \\", ErrBadPattern) + } + r, l = utf8.DecodeRuneInString(p[le:]) + le += l + fallthrough + + default: + t.g.ranges = append(t.g.ranges, runeRange{start, r}) + start = 0 + } + + case Backslash: + if len(p[le:]) == 0 { + return t, le, fmt.Errorf("%w: missing character after \\", ErrBadPattern) + } + + r, l = utf8.DecodeRuneInString(p[le:]) + le += l + fallthrough + + default: + if start != 0 { + t.g.runes = append(t.g.runes, start) + } + start = r + } + } +} + +// match is used internally to implement a simple recursive backtracking +// algorithmn using the token list t to match against file path f. If matchPrefix +// is set to true, match returns true if f is completely matched by any prefix +// of t. Otherwise, match returns true if f is matched by _all_ tokens in t. 
+func match(f string, t []token, matchPrefix bool) bool {
+	for {
+		// Input exhausted: in prefix mode any remainder of the pattern is
+		// acceptable; otherwise only an empty tail or a single trailing
+		// '*' (which may match zero runes) counts as a match.
+		if len(f) == 0 {
+			if matchPrefix {
+				return true
+			}
+
+			if len(t) == 0 {
+				return true
+			}
+
+			if len(t) == 1 && t[0].t == tokenTypeAnyRunes {
+				return true
+			}
+
+			return false
+		}
+
+		// Pattern exhausted but input remains.
+		if len(t) == 0 {
+			return false
+		}
+
+		r, le := utf8.DecodeRuneInString(f)
+
+		switch t[0].t {
+		case tokenTypeLiteral:
+			if t[0].r != r {
+				return false
+			}
+
+		case tokenTypeGroup:
+			if !t[0].g.match(r) {
+				return false
+			}
+
+		case tokenTypeSingleRune:
+			// '?' matches any rune except the separator.
+			if r == Separator {
+				return false
+			}
+
+		case tokenTypeAnyRunes:
+			// '*' never crosses a separator; hand the separator to the
+			// rest of the pattern.
+			if r == Separator {
+				return match(f, t[1:], matchPrefix)
+			}
+
+			// Branch 1: '*' consumes this rune and stays active.
+			if match(f[le:], t, matchPrefix) {
+				return true
+			}
+
+			// Branch 2: '*' matches zero runes here.
+			if match(f, t[1:], matchPrefix) {
+				return true
+			}
+
+		case tokenTypeAnyDirectories:
+			// '**' is always followed by a separator token (enforced by
+			// New), hence t[2:] skips both.
+			// Branch 1: '**' matches zero directories.
+			if match(f, t[2:], matchPrefix) {
+				return true
+			}
+
+			// Advance f past the next separator, i.e. consume exactly one
+			// directory segment.
+			var l2 int
+			for {
+				if len(f[le+l2:]) == 0 {
+					return false
+				}
+
+				n, nl := utf8.DecodeRuneInString(f[le+l2:])
+				l2 += nl
+
+				if n == Separator {
+					break
+				}
+			}
+
+			// Branch 2: '**' ends after this segment.
+			if match(f[le+l2:], t[2:], matchPrefix) {
+				return true
+			}
+
+			// Branch 3: '**' keeps consuming further segments.
+			return match(f[le+l2:], t, matchPrefix)
+		}
+
+		// Current token matched the current rune; advance both.
+		t = t[1:]
+		f = f[le:]
+	}
+}
+
+// tokenType enumerates the different types of tokens.
+type tokenType int
+
+const (
+	// a rune literal
+	tokenTypeLiteral tokenType = iota + 1
+	// any single non-separator rune
+	tokenTypeSingleRune
+	// any number of non-separator runes (incl. zero)
+	tokenTypeAnyRunes
+	// any number of runes including separators. Matches whole directories.
+	tokenTypeAnyDirectories
+	// a group of runes consisting of named runes and/or ranges. Might be negated.
+	tokenTypeGroup
+)
+
+// token implements a single token in the pattern.
+type token struct {
+	// the token's type
+	t tokenType
+	// a literal rune to match. Literal runes are stored separate from groups
+	// to improve matching performance.
+	r rune
+	// A rune group to match.
+	g runeGroup
+}
+
+// A group of runes. 
Groups can contain any number of enumerated runes and rune +// ranges. In addition a whole group can be negated. +type runeGroup struct { + // Whether the group is negated + neg bool + // Enumerated runes contained in this group + runes []rune + // All ranges contained in this group + ranges []runeRange +} + +// match matches r with g. It returns true if r is matched. +func (g runeGroup) match(r rune) bool { + for _, ru := range g.runes { + if ru == r { + return !g.neg + } + } + + for _, rang := range g.ranges { + if rang.match(r) { + return !g.neg + } + } + + return g.neg +} + +// A closed range of runes consisting of all runes between lo and hi both +// inclusive. +type runeRange struct { + lo, hi rune +} + +// match returns whether r is in rg. +func (rg runeRange) match(r rune) bool { + return rg.lo <= r && r <= rg.hi +} diff --git a/glob/glob_test.go b/glob/glob_test.go new file mode 100644 index 0000000..eb3786d --- /dev/null +++ b/glob/glob_test.go @@ -0,0 +1,220 @@ +package glob + +import ( + "errors" + "fmt" + "reflect" + "strings" + "testing" + "testing/fstest" +) + +type test struct { + pattern, f string + match bool + err error +} + +func escape(name string) string { + // use a math division slash for correct visual + return strings.ReplaceAll(name, "/", "∕") +} + +func TestPattern_Match(t *testing.T) { + tests := []test{ + // Test cases not covered by path.Match + {"main.go", "main.go", true, nil}, + {"main_test.go", "main_test.go", true, nil}, + {"foo/foo_test.go", "foo/foo_test.go", true, nil}, + {"?.go", "m.go", true, nil}, + {"*.go", "main.go", true, nil}, + {"**/*.go", "main.go", true, nil}, + {"*.go", "*.go", true, nil}, + + {"//", "", false, ErrBadPattern}, + {"foo//", "", false, ErrBadPattern}, + {"*?.go", "", false, ErrBadPattern}, + {"?*.go", "", false, ErrBadPattern}, + {"**?.go", "", false, ErrBadPattern}, + {"**f", "", false, ErrBadPattern}, + {"[a-", "", false, ErrBadPattern}, + {"[a-\\", "", false, ErrBadPattern}, + {"[\\", "", false, 
ErrBadPattern}, + + {"**/m.go", "foo.go", false, nil}, + {"**/m.go", "foo/a.go", false, nil}, + {"**/m.go", "m.go", true, nil}, + {"**/m.go", "foo/m.go", true, nil}, + {"**/m.go", "bar/m.go", true, nil}, + {"**/m.go", "foo/bar/m.go", true, nil}, + + {"ab[cde]", "abc", true, nil}, + {"ab[cde]", "abd", true, nil}, + {"ab[cde]", "abe", true, nil}, + {"ab[+-\\-]", "ab-", true, nil}, + {"ab[\\--a]", "ab-", true, nil}, + + {"[a-fA-F]", "a", true, nil}, + {"[a-fA-F]", "f", true, nil}, + {"[a-fA-F]", "A", true, nil}, + {"[a-fA-F]", "F", true, nil}, + + // The following test cases are taken from + // https://github.com/golang/go/blob/master/src/path/match_test.go and are + // provided here to test compatebility of the match implementation with the + // test cases from the golang standard lib. + {"abc", "abc", true, nil}, + {"*", "abc", true, nil}, + {"*c", "abc", true, nil}, + {"a*", "a", true, nil}, + {"a*", "abc", true, nil}, + {"a*", "ab/c", false, nil}, + {"a*/b", "abc/b", true, nil}, + {"a*/b", "a/c/b", false, nil}, + {"a*b*c*d*e*/f", "axbxcxdxe/f", true, nil}, + {"a*b*c*d*e*/f", "axbxcxdxexxx/f", true, nil}, + {"a*b*c*d*e*/f", "axbxcxdxe/xxx/f", false, nil}, + {"a*b*c*d*e*/f", "axbxcxdxexxx/fff", false, nil}, + {"a*b?c*x", "abxbbxdbxebxczzx", true, nil}, + {"a*b?c*x", "abxbbxdbxebxczzy", false, nil}, + {"ab[c]", "abc", true, nil}, + {"ab[b-d]", "abc", true, nil}, + {"ab[e-g]", "abc", false, nil}, + {"ab[^c]", "abc", false, nil}, + {"ab[^b-d]", "abc", false, nil}, + {"ab[^e-g]", "abc", true, nil}, + {"a\\*b", "a*b", true, nil}, + {"a\\*b", "ab", false, nil}, + {"a?b", "a☺b", true, nil}, + {"a[^a]b", "a☺b", true, nil}, + {"a???b", "a☺b", false, nil}, + {"a[^a][^a][^a]b", "a☺b", false, nil}, + {"[a-ζ]*", "α", true, nil}, + {"*[a-ζ]", "A", false, nil}, + {"a?b", "a/b", false, nil}, + {"a*b", "a/b", false, nil}, + {"[\\]a]", "]", true, nil}, + {"[\\-]", "-", true, nil}, + {"[x\\-]", "x", true, nil}, + {"[x\\-]", "-", true, nil}, + {"[x\\-]", "z", false, nil}, + {"[\\-x]", 
"x", true, nil}, + {"[\\-x]", "-", true, nil}, + {"[\\-x]", "a", false, nil}, + {"[]a]", "]", false, ErrBadPattern}, + {"[-]", "-", false, ErrBadPattern}, + {"[x-]", "x", false, ErrBadPattern}, + {"[x-]", "-", false, ErrBadPattern}, + {"[x-]", "z", false, ErrBadPattern}, + {"[-x]", "x", false, ErrBadPattern}, + {"[-x]", "-", false, ErrBadPattern}, + {"[-x]", "a", false, ErrBadPattern}, + {"\\", "a", false, ErrBadPattern}, + {"[a-b-c]", "a", false, ErrBadPattern}, + {"[", "a", false, ErrBadPattern}, + {"[^", "a", false, ErrBadPattern}, + {"[^bc", "a", false, ErrBadPattern}, + {"a[", "a", false, ErrBadPattern}, + {"a[", "ab", false, ErrBadPattern}, + {"a[", "x", false, ErrBadPattern}, + {"a/b[", "x", false, ErrBadPattern}, + {"*x", "xxx", true, nil}, + } + + for _, tt := range tests { + pat, err := New(tt.pattern) + if err != tt.err && !errors.Is(err, tt.err) { + t.Errorf("New(%#q): wanted error %v but got %v", tt.pattern, tt.err, err) + } + + if pat != nil { + match := pat.Match(tt.f) + if match != tt.match { + t.Errorf("New(%#q).Match(%#q): wanted match %v but got %v", tt.pattern, tt.f, tt.match, match) + } + } + } +} + +func TestPattern_MatchPrefix(t *testing.T) { + tests := []test{ + {"**/*.go", "foo/", true, nil}, + {"**/*.go", "foo", true, nil}, + {"**/*.go", "foo/bar/", true, nil}, + {"**/*.go", "foo/bar", true, nil}, + {"*/*.go", "foo", true, nil}, + } + + for _, tc := range tests { + tc := tc // capture range variable + t.Run(fmt.Sprintf("%s (%s)", escape(tc.pattern), escape(tc.f)), func(t *testing.T) { + pat, err := New(tc.pattern) + if err != nil { + t.Errorf("unexpected error: %v", err) + return + } + got := pat.MatchPrefix(tc.f) + + if got != tc.match { + t.Errorf("got %v; want %v", got, tc.match) + } + }) + } +} + +func TestPattern_GlobFS(t *testing.T) { + fsys := fstest.MapFS{ + "go.mod": &fstest.MapFile{Mode: 0644}, + "go.sum": &fstest.MapFile{Mode: 0644}, + "cmd/main.go": &fstest.MapFile{Mode: 0644}, + "cmd/main_test.go": &fstest.MapFile{Mode: 0644}, 
+ "internal/tool/tool.go": &fstest.MapFile{Mode: 0644}, + "internal/tool/tool_test.go": &fstest.MapFile{Mode: 0644}, + "internal/cli/cli.go": &fstest.MapFile{Mode: 0644}, + "internal/cli/cli_test.go": &fstest.MapFile{Mode: 0644}, + } + + pat, err := New("**/*_test.go") + if err != nil { + t.Fatal(err) + } + + files, err := pat.GlobFS(fsys, ".") + if err != nil { + t.Fatal(err) + } + expect := []string{ + "cmd/main_test.go", + "internal/cli/cli_test.go", + "internal/tool/tool_test.go", + } + if !reflect.DeepEqual(expect, files) { + t.Errorf("got %v; want %v", files, expect) + } +} + +func TestPattern_String(t *testing.T) { + tests := []string{ + "main.go", + "*.go", + "**/*.go", + "foo/bar/*", + "foo/?ar.go", + "foo/[abc].go", + "foo/[a-c].go", + "foo/**/", + "foo/*/bar.go", + "foo/\\*bar.go", + } + for _, patstr := range tests { + t.Run(escape(patstr), func(t *testing.T) { + pat, err := New(patstr) + if err != nil { + t.Fatalf("New(%q) failed: %v", patstr, err) + } + if pat.String() != patstr { + t.Fatalf("Pattern.String() = %q, want %q", pat.String(), patstr) + } + }) + } +} diff --git a/iox/globfs.go b/iox/globfs.go deleted file mode 100644 index 91240cb..0000000 --- a/iox/globfs.go +++ /dev/null @@ -1,99 +0,0 @@ -package iox - -import ( - "io/fs" - "os" - "path" -) - -type GlobFS struct { - base fs.FS - patterns []string -} - -// NewGlobFS creates a new GlobFS that exposes only files matching any of the given glob patterns. -func NewGlobFS(base fs.FS, patterns ...string) *GlobFS { - return &GlobFS{base: base, patterns: patterns} -} - -// match reports whether the given path matches any of the configured patterns. 
-func (g *GlobFS) match(name string) bool { - for _, pat := range g.patterns { - if matched, _ := path.Match(pat, name); matched { - return true - } - } - return false -} - -func (g *GlobFS) contains(dir string) bool { - queue := []string{dir} - visited := make(map[string]struct{}) - for len(queue) > 0 { - current := queue[0] - queue = queue[1:] // dequeue - - // Prevent visiting same dir multiple times - if _, seen := visited[current]; seen { - continue - } - visited[current] = struct{}{} - - entries, err := fs.ReadDir(g.base, current) - if err != nil { - continue - } - for _, entry := range entries { - rel := path.Join(current, entry.Name()) - if g.match(rel) { - return true - } - if entry.IsDir() { - queue = append(queue, rel) - } - } - } - return false -} - -func (g *GlobFS) Open(name string) (fs.File, error) { - if g.match(name) { - return g.base.Open(name) - } - - fi, err := fs.Stat(g.base, name) - if err != nil || !fi.IsDir() { - return nil, fs.ErrNotExist - } - if g.contains(name) { - return g.base.Open(name) - } - return nil, fs.ErrNotExist -} - -func (g *GlobFS) ReadDir(name string) ([]fs.DirEntry, error) { - if g.match(name) { - return fs.ReadDir(g.base, name) - } - - entries, err := fs.ReadDir(g.base, name) - if err != nil { - return nil, err - } - var children []fs.DirEntry - for _, entry := range entries { - rel := path.Join(name, entry.Name()) - if g.match(rel) { - children = append(children, entry) - } - if entry.IsDir() && g.contains(rel) { - children = append(children, entry) - } - } - - if len(children) == 0 { - return nil, os.ErrNotExist - } - - return children, nil -} diff --git a/iox/globfs_test.go b/iox/globfs_test.go deleted file mode 100644 index e548a2a..0000000 --- a/iox/globfs_test.go +++ /dev/null @@ -1,234 +0,0 @@ -package iox - -import ( - "io/fs" - "reflect" - "sort" - "testing" - "testing/fstest" -) - -func setupFS() fs.ReadDirFS { - // Create an in-memory FS with a mix of files and directories - return fstest.MapFS{ - "main.go": 
&fstest.MapFile{Data: []byte("package main")}, - "main_test.go": &fstest.MapFile{Data: []byte("package main_test")}, - "README.md": &fstest.MapFile{Data: []byte("# readme")}, - "LICENSE": &fstest.MapFile{Data: []byte("MIT")}, - "docs/guide.md": &fstest.MapFile{Data: []byte("Docs")}, - "docs/other.txt": &fstest.MapFile{Data: []byte("Other")}, - "docs/hidden/.keep": &fstest.MapFile{Data: []byte("")}, - "assets/img.png": &fstest.MapFile{Data: []byte("PNG")}, - "assets/style.css": &fstest.MapFile{Data: []byte("CSS")}, - ".gitignore": &fstest.MapFile{Data: []byte("*.log")}, - ".hiddenfile": &fstest.MapFile{Data: []byte("")}, - "emptydir/": &fstest.MapFile{Mode: fs.ModeDir}, - } -} - -// helper to get base names for easier comparison -func basenames(entries []fs.DirEntry) []string { - names := []string{} - for _, e := range entries { - names = append(names, e.Name()) - } - sort.Strings(names) - return names -} - -func TestGlobFSMultiplePatterns(t *testing.T) { - memfs := setupFS() - gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") - - tests := []struct { - path string - want []string - wantErr bool - }{ - {path: ".", want: []string{"README.md", "assets", "docs", "main.go", "main_test.go", ".gitignore"}}, - {path: "assets", want: []string{"img.png", "style.css"}}, - {path: "docs", want: []string{"guide.md"}}, - {path: "docs/hidden", want: []string{}, wantErr: true}, - {path: "emptydir", want: []string{}, wantErr: true}, - } - - for _, tc := range tests { - tc := tc // capture range variable - t.Run(tc.path, func(t *testing.T) { - entries, err := fs.ReadDir(gfs, tc.path) - if tc.wantErr && err == nil { - t.Errorf("expected error, got nil") - return - } - if !tc.wantErr && err != nil { - t.Errorf("unexpected error: %v", err) - return - } - got := basenames(entries) - sort.Strings(tc.want) - if !reflect.DeepEqual(got, tc.want) { - t.Errorf("got %v; want %v", got, tc.want) - } - }) - } -} - -func TestGlobFSOpen(t *testing.T) { - memfs := 
setupFS() - gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", "docs/guide.md", ".gitignore") - - type test struct { - path string - wantErr bool - } - tests := []test{ - {path: "main.go"}, - {path: "README.md"}, - {path: "LICENSE", wantErr: true}, - {path: "assets/img.png"}, - {path: "assets/style.css"}, - {path: "assets/nonexistent.png", wantErr: true}, - {path: "docs/guide.md"}, - {path: "docs/other.txt", wantErr: true}, - {path: ".gitignore"}, - {path: ".hiddenfile", wantErr: true}, - {path: "docs/hidden/.keep", wantErr: true}, - {path: "emptydir", wantErr: true}, - {path: "docs"}, // allowed because it contains matching file(s) - {path: "assets"}, // allowed because it contains matching file(s) - } - for _, tc := range tests { - tc := tc - t.Run(tc.path, func(t *testing.T) { - f, err := gfs.Open(tc.path) - if tc.wantErr && err == nil { - t.Errorf("expected error, got file") - if f != nil { - f.Close() - } - } else if !tc.wantErr && err != nil { - t.Errorf("unexpected error: %v", err) - } else if !tc.wantErr && err == nil { - info, _ := f.Stat() - if info.IsDir() { - _, derr := fs.ReadDir(gfs, tc.path) - if derr != nil && !tc.wantErr { - t.Errorf("unexpected error: %v", derr) - } - } - f.Close() - } - }) - } -} - -func TestGlobFSReadFile(t *testing.T) { - memfs := setupFS() - gfs := NewGlobFS(memfs, "*.go", "*.md", "assets/*", ".gitignore") - - tests := []struct { - name string - want []byte - wantErr bool - }{ - {name: "main.go", want: []byte("package main")}, - {name: "main_test.go", want: []byte("package main_test")}, - {name: "README.md", want: []byte("# readme")}, - {name: "assets/img.png", want: []byte("PNG")}, - {name: "assets/style.css", want: []byte("CSS")}, - {name: ".gitignore", want: []byte("*.log")}, - {name: "LICENSE", wantErr: true}, // not allowed by filter - {name: "docs/guide.md", wantErr: true}, // not allowed by filter - {name: "docs/hidden/.keep", wantErr: true}, // not allowed by filter - {name: "doesnotexist.txt", wantErr: true}, // does 
not exist - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - got, err := fs.ReadFile(gfs, tc.name) - if tc.wantErr { - if err == nil { - t.Errorf("expected error, got nil (got=%q)", got) - } - } else { - if err != nil { - t.Errorf("unexpected error: %v", err) - } - if string(got) != string(tc.want) { - t.Errorf("got %q; want %q", got, tc.want) - } - } - }) - } -} - -func TestGlobFSRelativePaths(t *testing.T) { - memfs := setupFS() - gfs := NewGlobFS(memfs, "docs/*.md") - entries, err := fs.ReadDir(gfs, "docs") - if err != nil { - t.Fatal(err) - } - got := basenames(entries) - want := []string{"guide.md"} - if !reflect.DeepEqual(got, want) { - t.Errorf("docs/*.md: got %v, want %v", got, want) - } -} - -func TestGlobFSNoMatchesOpen(t *testing.T) { - gfs := NewGlobFS(setupFS(), "*.xyz") - _, err := gfs.Open("main.go") - if err == nil { - t.Fatal("expected error when opening file with no matches") - } -} - -func TestGlobFSNoMatchesStat(t *testing.T) { - gfs := NewGlobFS(setupFS(), "*.xyz") - _, err := fs.Stat(gfs, "main.go") - if err == nil { - t.Fatal("expected error with no matches: stat") - } -} - -func TestGlobFSNoMatchesReadDir(t *testing.T) { - gfs := NewGlobFS(setupFS(), "*.xyz") - _, err := fs.ReadDir(gfs, "main.go") - if err == nil { - t.Fatal("expected error with no matches: readdir") - } -} - -func TestGlobFSNoMatchesReadFile(t *testing.T) { - gfs := NewGlobFS(setupFS(), "*.xyz") - _, err := fs.ReadFile(gfs, "main.go") - if err == nil { - t.Fatal("expected error with no matches: readfile") - } -} - -func TestGlobFS_IntegrationWithStdlib(t *testing.T) { - memfs := setupFS() - gfs := NewGlobFS(memfs, "*.go", "docs/guide.md") - // Use fs.WalkDir with our filtered FS - var walked []string - err := fs.WalkDir(gfs, ".", func(path string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - walked = append(walked, path) - return nil - }) - if err != nil { - t.Fatal(err) - } - // Only files and dirs matching or 
containing matches should appear - for _, p := range walked { - if p == "." || p == "main.go" || p == "main_test.go" || p == "docs" || p == "docs/guide.md" { - continue - } - t.Errorf("WalkDir: unexpected path %q", p) - } -}