mirror of
https://github.com/aykhans/sarin.git
synced 2026-01-14 04:21:21 +00:00
v1.0.0: here we go again
This commit is contained in:
285
internal/config/cli.go
Normal file
285
internal/config/cli.go
Normal file
@@ -0,0 +1,285 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
versionpkg "go.aykhans.me/sarin/internal/version"
|
||||
"go.aykhans.me/utils/common"
|
||||
)
|
||||
|
||||
// cliUsageText is the help text printed by PrintHelp. It is a fmt.Printf
// format string: the %v/%d/%s verbs in the "(default ...)" notes are
// filled with the corresponding Defaults values (see PrintHelp for the
// exact argument order).
const cliUsageText = `Usage:
  sarin [flags]

Simple usage:
  sarin -U https://example.com -d 1m

Usage with all flags:
  sarin -s -q -z -o json -f ./config.yaml -c 50 -r 100_000 -d 2m30s \
    -U https://example.com \
    -M POST \
    -V "sharedUUID={{ fakeit_UUID }}" \
    -B '{"product": "car"}' \
    -P "id={{ .Values.sharedUUID }}" \
    -H "User-Agent: {{ fakeit_UserAgent }}" -H "Accept: */*" \
    -C "token={{ .Values.sharedUUID }}" \
    -X "http://proxy.example.com" \
    -T 3s \
    -I

Flags:
  General Config:
    -h, -help                     Help for sarin
    -v, -version                  Version for sarin
    -s, -show-config  bool        Show the final config after parsing all sources (default %v)
    -f, -config-file  string      Path to the config file (local file / http URL)
    -c, -concurrency  uint        Number of concurrent requests (default %d)
    -r, -requests     uint        Number of total requests
    -d, -duration     time        Maximum duration for the test (e.g. 30s, 1m, 5h)
    -q, -quiet        bool        Hide the progress bar and runtime logs (default %v)
    -o, -output       string      Output format (possible values: table, json, yaml, none) (default '%v')
    -z, -dry-run      bool        Run without sending requests (default %v)

  Request Config:
    -U, -url          string      Target URL for the request
    -M, -method       []string    HTTP method for the request (default %s)
    -B, -body         []string    Body for the request (e.g. "body text")
    -P, -param        []string    URL parameter for the request (e.g. "key1=value1")
    -H, -header       []string    Header for the request (e.g. "key1: value1")
    -C, -cookie       []string    Cookie for the request (e.g. "key1=value1")
    -X, -proxy        []string    Proxy for the request (e.g. "http://proxy.example.com:8080")
    -V, -values       []string    List of values for templating (e.g. "key1=value1")
    -T, -timeout      time        Timeout for the request (e.g. 400ms, 3s, 1m10s) (default %v)
    -I, -insecure     bool        Skip SSL/TLS certificate verification (default %v)`
|
||||
|
||||
// Compile-time check that ConfigCLIParser satisfies the IParser interface.
var _ IParser = ConfigCLIParser{}

// ConfigCLIParser parses configuration from a command-line argument vector.
type ConfigCLIParser struct {
	// args is the raw argument vector; Parse skips the first entry, so
	// callers are expected to pass os.Args-style input where index 0 is
	// the program name.
	args []string
}

// NewConfigCLIParser returns a parser over the given argument vector
// (typically os.Args). A nil slice is normalized to an empty slice.
func NewConfigCLIParser(args []string) *ConfigCLIParser {
	if args == nil {
		args = []string{}
	}
	return &ConfigCLIParser{args: args}
}
|
||||
|
||||
// stringSliceArg implements flag.Value for a repeatable CLI flag: every
// occurrence of the flag appends one more string to the slice.
type stringSliceArg []string

// String renders the collected values as a comma-separated list, as
// required by the flag.Value interface.
func (arg *stringSliceArg) String() string {
	var joined strings.Builder
	for i, item := range *arg {
		if i > 0 {
			joined.WriteByte(',')
		}
		joined.WriteString(item)
	}
	return joined.String()
}

// Set records one more occurrence of the flag. It never fails, so the
// returned error is always nil.
func (arg *stringSliceArg) Set(value string) error {
	updated := append(*arg, value)
	*arg = updated
	return nil
}
|
||||
|
||||
// Parse parses command-line arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.ErrCLINoArgs
|
||||
// - types.CLIUnexpectedArgsError
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigCLIParser) Parse() (*Config, error) {
|
||||
flagSet := flag.NewFlagSet("sarin", flag.ExitOnError)
|
||||
|
||||
flagSet.Usage = func() { parser.PrintHelp() }
|
||||
|
||||
var (
|
||||
config = &Config{}
|
||||
|
||||
// General config
|
||||
version bool
|
||||
showConfig bool
|
||||
configFiles = stringSliceArg{}
|
||||
concurrency uint
|
||||
requestCount uint64
|
||||
duration time.Duration
|
||||
quiet bool
|
||||
output string
|
||||
dryRun bool
|
||||
|
||||
// Request config
|
||||
urlInput string
|
||||
methods = stringSliceArg{}
|
||||
bodies = stringSliceArg{}
|
||||
params = stringSliceArg{}
|
||||
headers = stringSliceArg{}
|
||||
cookies = stringSliceArg{}
|
||||
proxies = stringSliceArg{}
|
||||
values = stringSliceArg{}
|
||||
timeout time.Duration
|
||||
insecure bool
|
||||
)
|
||||
|
||||
{
|
||||
// General config
|
||||
flagSet.BoolVar(&version, "version", false, "Version for sarin")
|
||||
flagSet.BoolVar(&version, "v", false, "Version for sarin")
|
||||
|
||||
flagSet.BoolVar(&showConfig, "show-config", false, "Show the final config after parsing all sources")
|
||||
flagSet.BoolVar(&showConfig, "s", false, "Show the final config after parsing all sources")
|
||||
|
||||
flagSet.Var(&configFiles, "config-file", "Path to the config file")
|
||||
flagSet.Var(&configFiles, "f", "Path to the config file")
|
||||
|
||||
flagSet.UintVar(&concurrency, "concurrency", 0, "Number of concurrent requests")
|
||||
flagSet.UintVar(&concurrency, "c", 0, "Number of concurrent requests")
|
||||
|
||||
flagSet.Uint64Var(&requestCount, "requests", 0, "Number of total requests")
|
||||
flagSet.Uint64Var(&requestCount, "r", 0, "Number of total requests")
|
||||
|
||||
flagSet.DurationVar(&duration, "duration", 0, "Maximum duration for the test")
|
||||
flagSet.DurationVar(&duration, "d", 0, "Maximum duration for the test")
|
||||
|
||||
flagSet.BoolVar(&quiet, "quiet", false, "Hide the progress bar and runtime logs")
|
||||
flagSet.BoolVar(&quiet, "q", false, "Hide the progress bar and runtime logs")
|
||||
|
||||
flagSet.StringVar(&output, "output", "", "Output format (possible values: table, json, yaml, none)")
|
||||
flagSet.StringVar(&output, "o", "", "Output format (possible values: table, json, yaml, none)")
|
||||
|
||||
flagSet.BoolVar(&dryRun, "dry-run", false, "Run without sending requests")
|
||||
flagSet.BoolVar(&dryRun, "z", false, "Run without sending requests")
|
||||
|
||||
// Request config
|
||||
flagSet.StringVar(&urlInput, "url", "", "Target URL for the request")
|
||||
flagSet.StringVar(&urlInput, "U", "", "Target URL for the request")
|
||||
|
||||
flagSet.Var(&methods, "method", "HTTP method for the request")
|
||||
flagSet.Var(&methods, "M", "HTTP method for the request")
|
||||
|
||||
flagSet.Var(&bodies, "body", "Body for the request")
|
||||
flagSet.Var(&bodies, "B", "Body for the request")
|
||||
|
||||
flagSet.Var(¶ms, "param", "URL parameter for the request")
|
||||
flagSet.Var(¶ms, "P", "URL parameter for the request")
|
||||
|
||||
flagSet.Var(&headers, "header", "Header for the request")
|
||||
flagSet.Var(&headers, "H", "Header for the request")
|
||||
|
||||
flagSet.Var(&cookies, "cookie", "Cookie for the request")
|
||||
flagSet.Var(&cookies, "C", "Cookie for the request")
|
||||
|
||||
flagSet.Var(&proxies, "proxy", "Proxy for the request")
|
||||
flagSet.Var(&proxies, "X", "Proxy for the request")
|
||||
|
||||
flagSet.Var(&values, "values", "List of values for templating")
|
||||
flagSet.Var(&values, "V", "List of values for templating")
|
||||
|
||||
flagSet.DurationVar(&timeout, "timeout", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||
flagSet.DurationVar(&timeout, "T", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||
|
||||
flagSet.BoolVar(&insecure, "insecure", false, "Skip SSL/TLS certificate verification")
|
||||
flagSet.BoolVar(&insecure, "I", false, "Skip SSL/TLS certificate verification")
|
||||
}
|
||||
|
||||
// Parse the specific arguments provided to the parser, skipping the program name.
|
||||
if err := flagSet.Parse(parser.args[1:]); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Check if no flags were set and no non-flag arguments were provided.
|
||||
// This covers cases where `sarin` is run without any meaningful arguments.
|
||||
if flagSet.NFlag() == 0 && len(flagSet.Args()) == 0 {
|
||||
return nil, types.ErrCLINoArgs
|
||||
}
|
||||
|
||||
// Check for any unexpected non-flag arguments remaining after parsing.
|
||||
if args := flagSet.Args(); len(args) > 0 {
|
||||
return nil, types.NewCLIUnexpectedArgsError(args)
|
||||
}
|
||||
|
||||
if version {
|
||||
fmt.Printf("Version: %s\nGit Commit: %s\nBuild Date: %s\nGo Version: %s\n",
|
||||
versionpkg.Version, versionpkg.GitCommit, versionpkg.BuildDate, versionpkg.GoVersion)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
var fieldParseErrors []types.FieldParseError
|
||||
// Iterate over flags that were explicitly set on the command line.
|
||||
flagSet.Visit(func(flagVar *flag.Flag) {
|
||||
switch flagVar.Name {
|
||||
// General config
|
||||
case "show-config", "s":
|
||||
config.ShowConfig = common.ToPtr(showConfig)
|
||||
case "config-file", "f":
|
||||
for _, configFile := range configFiles {
|
||||
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||
}
|
||||
case "concurrency", "c":
|
||||
config.Concurrency = common.ToPtr(concurrency)
|
||||
case "requests", "r":
|
||||
config.Requests = common.ToPtr(requestCount)
|
||||
case "duration", "d":
|
||||
config.Duration = common.ToPtr(duration)
|
||||
case "quiet", "q":
|
||||
config.Quiet = common.ToPtr(quiet)
|
||||
case "output", "o":
|
||||
config.Output = common.ToPtr(ConfigOutputType(output))
|
||||
case "dry-run", "z":
|
||||
config.DryRun = common.ToPtr(dryRun)
|
||||
|
||||
// Request config
|
||||
case "url", "U":
|
||||
urlParsed, err := url.Parse(urlInput)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", urlInput, err))
|
||||
} else {
|
||||
config.URL = urlParsed
|
||||
}
|
||||
case "method", "M":
|
||||
config.Methods = append(config.Methods, methods...)
|
||||
case "body", "B":
|
||||
config.Bodies = append(config.Bodies, bodies...)
|
||||
case "param", "P":
|
||||
config.Params.Parse(params...)
|
||||
case "header", "H":
|
||||
config.Headers.Parse(headers...)
|
||||
case "cookie", "C":
|
||||
config.Cookies.Parse(cookies...)
|
||||
case "proxy", "X":
|
||||
for i, proxy := range proxies {
|
||||
err := config.Proxies.Parse(proxy)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
|
||||
)
|
||||
}
|
||||
}
|
||||
case "values", "V":
|
||||
config.Values = append(config.Values, values...)
|
||||
case "timeout", "T":
|
||||
config.Timeout = common.ToPtr(timeout)
|
||||
case "insecure", "I":
|
||||
config.Insecure = common.ToPtr(insecure)
|
||||
}
|
||||
})
|
||||
|
||||
if len(fieldParseErrors) > 0 {
|
||||
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (parser ConfigCLIParser) PrintHelp() {
|
||||
fmt.Printf(
|
||||
cliUsageText+"\n",
|
||||
Defaults.ShowConfig,
|
||||
Defaults.Concurrency,
|
||||
Defaults.Quiet,
|
||||
Defaults.Output,
|
||||
Defaults.DryRun,
|
||||
|
||||
Defaults.Method,
|
||||
Defaults.RequestTimeout,
|
||||
Defaults.Insecure,
|
||||
)
|
||||
}
|
||||
757
internal/config/config.go
Normal file
757
internal/config/config.go
Normal file
@@ -0,0 +1,757 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/bubbles/viewport"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/glamour"
|
||||
"github.com/charmbracelet/glamour/styles"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/x/term"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
"go.aykhans.me/sarin/internal/version"
|
||||
"go.aykhans.me/utils/common"
|
||||
utilsErr "go.aykhans.me/utils/errors"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
// Defaults holds the fallback values applied by Config.SetDefaults when a
// field was not provided by any configuration source; they are also shown
// as the "(default ...)" notes in the CLI help text.
var Defaults = struct {
	UserAgent      string
	Method         string
	RequestTimeout time.Duration
	Concurrency    uint
	ShowConfig     bool
	Quiet          bool
	Insecure       bool
	Output         ConfigOutputType
	DryRun         bool
}{
	// UserAgent identifies this tool and its build version.
	UserAgent:      "Sarin/" + version.Version,
	Method:         "GET",
	RequestTimeout: time.Second * 10,
	Concurrency:    1,
	ShowConfig:     false,
	Quiet:          false,
	Insecure:       false,
	Output:         ConfigOutputTypeTable,
	DryRun:         false,
}

// Allowed URL schemes; enforced by Config.Validate for proxies and the
// target URL respectively.
var (
	ValidProxySchemes      = []string{"http", "https", "socks5", "socks5h"}
	ValidRequestURLSchemes = []string{"http", "https"}
)

// Shared terminal styles used for warning (yellow) and error (red) output.
var (
	StyleYellow = lipgloss.NewStyle().Foreground(lipgloss.Color("220"))
	StyleRed    = lipgloss.NewStyle().Foreground(lipgloss.Color("196"))
)
|
||||
|
||||
// IParser is implemented by every configuration source (CLI, ENV, config
// file). Parse returns the partial Config read from that source.
type IParser interface {
	Parse() (*Config, error)
}
|
||||
|
||||
// ConfigOutputType enumerates the supported formats for result output.
type ConfigOutputType string

// Supported output formats. Declared as constants rather than vars since
// the values are fixed and never reassigned.
const (
	ConfigOutputTypeTable ConfigOutputType = "table"
	ConfigOutputTypeJSON  ConfigOutputType = "json"
	ConfigOutputTypeYAML  ConfigOutputType = "yaml"
	ConfigOutputTypeNone  ConfigOutputType = "none"
)
|
||||
|
||||
// Config holds the full load-test configuration merged from all sources
// (ENV, config files, CLI). Pointer fields distinguish "not set" (nil)
// from an explicit zero value, which is what makes Merge's overlay
// semantics work; slice fields accumulate across sources.
type Config struct {
	ShowConfig  *bool              `yaml:"showConfig,omitempty"`
	Files       []types.ConfigFile `yaml:"files,omitempty"`
	Methods     []string           `yaml:"methods,omitempty"`
	URL         *url.URL           `yaml:"url,omitempty"`
	Timeout     *time.Duration     `yaml:"timeout,omitempty"`
	Concurrency *uint              `yaml:"concurrency,omitempty"`
	Requests    *uint64            `yaml:"requests,omitempty"`
	Duration    *time.Duration     `yaml:"duration,omitempty"`
	Quiet       *bool              `yaml:"quiet,omitempty"`
	Output      *ConfigOutputType  `yaml:"output,omitempty"`
	Insecure    *bool              `yaml:"insecure,omitempty"`
	DryRun      *bool              `yaml:"dryRun,omitempty"`
	Params      types.Params       `yaml:"params,omitempty"`
	Headers     types.Headers      `yaml:"headers,omitempty"`
	Cookies     types.Cookies      `yaml:"cookies,omitempty"`
	Bodies      []string           `yaml:"bodies,omitempty"`
	Proxies     types.Proxies      `yaml:"proxies,omitempty"`
	Values      []string           `yaml:"values,omitempty"`
}

// NewConfig returns an empty Config with no fields set.
func NewConfig() *Config {
	return &Config{}
}
|
||||
|
||||
// MarshalYAML implements yaml.Marshaler. The document is built as an
// explicit yaml.Node tree so that unset fields are omitted, single-element
// slices collapse to plain scalars, and multi-value fields carry a line
// comment explaining the value-cycling behavior.
func (config Config) MarshalYAML() (any, error) {
	const randomValueComment = "Cycles through all values, with a new random start each round"

	// toNode encodes an arbitrary value into a yaml.Node. The Encode error
	// is deliberately ignored: inputs are plain strings/numbers/bools.
	toNode := func(v any) *yaml.Node {
		node := &yaml.Node{}
		_ = node.Encode(v)
		return node
	}

	// addField appends a key/value pair to a mapping's content, skipping
	// empty values (zero node, empty scalar, or empty sequence) so they
	// never appear in the output.
	addField := func(content *[]*yaml.Node, key string, value *yaml.Node, comment string) {
		if value.Kind == 0 || (value.Kind == yaml.ScalarNode && value.Value == "") ||
			(value.Kind == yaml.SequenceNode && len(value.Content) == 0) {
			return
		}
		keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: key, LineComment: comment}
		*content = append(*content, keyNode, value)
	}

	// addStringSlice emits a []string field: one element becomes a scalar,
	// multiple become a sequence (optionally annotated with the cycling
	// comment when withComment is set).
	addStringSlice := func(content *[]*yaml.Node, key string, items []string, withComment bool) {
		comment := ""
		if withComment && len(items) > 1 {
			comment = randomValueComment
		}
		switch len(items) {
		case 1:
			addField(content, key, toNode(items[0]), "")
		default:
			addField(content, key, toNode(items), comment)
		}
	}

	// marshalKeyValues renders params/headers/cookies as a sequence of
	// single-entry mappings; one-value entries collapse to scalars and
	// multi-value entries get the cycling comment on the key.
	marshalKeyValues := func(items []types.KeyValue[string, []string]) *yaml.Node {
		seqNode := &yaml.Node{Kind: yaml.SequenceNode}
		for _, item := range items {
			keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: item.Key}
			var valueNode *yaml.Node

			switch len(item.Value) {
			case 1:
				valueNode = &yaml.Node{Kind: yaml.ScalarNode, Value: item.Value[0]}
			default:
				valueNode = &yaml.Node{Kind: yaml.SequenceNode}
				for _, v := range item.Value {
					valueNode.Content = append(valueNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: v})
				}
				if len(item.Value) > 1 {
					keyNode.LineComment = randomValueComment
				}
			}

			mapNode := &yaml.Node{Kind: yaml.MappingNode, Content: []*yaml.Node{keyNode, valueNode}}
			seqNode.Content = append(seqNode.Content, mapNode)
		}
		return seqNode
	}

	root := &yaml.Node{Kind: yaml.MappingNode}
	content := &root.Content

	// Scalar/pointer fields: only emitted when set.
	if config.ShowConfig != nil {
		addField(content, "showConfig", toNode(*config.ShowConfig), "")
	}

	addStringSlice(content, "method", config.Methods, true)

	if config.URL != nil {
		addField(content, "url", toNode(config.URL.String()), "")
	}
	if config.Timeout != nil {
		addField(content, "timeout", toNode(*config.Timeout), "")
	}
	if config.Concurrency != nil {
		addField(content, "concurrency", toNode(*config.Concurrency), "")
	}
	if config.Requests != nil {
		addField(content, "requests", toNode(*config.Requests), "")
	}
	if config.Duration != nil {
		addField(content, "duration", toNode(*config.Duration), "")
	}
	if config.Quiet != nil {
		addField(content, "quiet", toNode(*config.Quiet), "")
	}
	if config.Output != nil {
		addField(content, "output", toNode(string(*config.Output)), "")
	}
	if config.Insecure != nil {
		addField(content, "insecure", toNode(*config.Insecure), "")
	}
	if config.DryRun != nil {
		addField(content, "dryRun", toNode(*config.DryRun), "")
	}

	// Key/value collections are converted to the generic KeyValue form
	// before marshaling.
	if len(config.Params) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Params))
		for i, p := range config.Params {
			items[i] = types.KeyValue[string, []string](p)
		}
		addField(content, "params", marshalKeyValues(items), "")
	}
	if len(config.Headers) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Headers))
		for i, h := range config.Headers {
			items[i] = types.KeyValue[string, []string](h)
		}
		addField(content, "headers", marshalKeyValues(items), "")
	}
	if len(config.Cookies) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Cookies))
		for i, c := range config.Cookies {
			items[i] = types.KeyValue[string, []string](c)
		}
		addField(content, "cookies", marshalKeyValues(items), "")
	}

	addStringSlice(content, "body", config.Bodies, true)

	// Proxies are stringified first so they render as URLs.
	if len(config.Proxies) > 0 {
		proxyStrings := make([]string, len(config.Proxies))
		for i, p := range config.Proxies {
			proxyStrings[i] = p.String()
		}
		addStringSlice(content, "proxy", proxyStrings, true)
	}

	// Values never get the cycling comment.
	addStringSlice(content, "values", config.Values, false)

	return root, nil
}
|
||||
|
||||
// Print renders the config as YAML. When stdout is not a terminal the raw
// YAML is written and the process exits; otherwise an interactive
// bubbletea viewer is shown. The returned bool reports whether the user
// chose to start the test from the viewer.
//
// NOTE(review): this method exits the process on any rendering error
// rather than returning it.
func (config Config) Print() bool {
	configYAML, err := yaml.Marshal(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render("Error marshaling config to yaml: "+err.Error()))
		os.Exit(1)
	}

	// Pipe mode: output raw content directly
	if !term.IsTerminal(os.Stdout.Fd()) {
		fmt.Println(string(configYAML))
		os.Exit(0)
	}

	// Strip glamour's default margins so the YAML aligns with the viewport.
	style := styles.TokyoNightStyleConfig
	style.Document.Margin = common.ToPtr[uint](0)
	style.CodeBlock.Margin = common.ToPtr[uint](0)

	renderer, err := glamour.NewTermRenderer(
		glamour.WithStyles(style),
		glamour.WithWordWrap(0),
	)
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	// Render the YAML inside a markdown code fence to get syntax coloring.
	content, err := renderer.Render("```yaml\n" + string(configYAML) + "```")
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	p := tea.NewProgram(
		printConfigModel{content: strings.Trim(content, "\n"), rawContent: configYAML},
		tea.WithAltScreen(),
		tea.WithMouseCellMotion(),
	)

	m, err := p.Run()
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	return m.(printConfigModel).start //nolint:forcetypeassert // m is guaranteed to be of type printConfigModel as it was the only model passed to tea.NewProgram
}
|
||||
|
||||
func (config *Config) Merge(newConfig *Config) {
|
||||
config.Files = append(config.Files, newConfig.Files...)
|
||||
if len(newConfig.Methods) > 0 {
|
||||
config.Methods = append(config.Methods, newConfig.Methods...)
|
||||
}
|
||||
if newConfig.URL != nil {
|
||||
config.URL = newConfig.URL
|
||||
}
|
||||
if newConfig.Timeout != nil {
|
||||
config.Timeout = newConfig.Timeout
|
||||
}
|
||||
if newConfig.Concurrency != nil {
|
||||
config.Concurrency = newConfig.Concurrency
|
||||
}
|
||||
if newConfig.Requests != nil {
|
||||
config.Requests = newConfig.Requests
|
||||
}
|
||||
if newConfig.Duration != nil {
|
||||
config.Duration = newConfig.Duration
|
||||
}
|
||||
if newConfig.ShowConfig != nil {
|
||||
config.ShowConfig = newConfig.ShowConfig
|
||||
}
|
||||
if newConfig.Quiet != nil {
|
||||
config.Quiet = newConfig.Quiet
|
||||
}
|
||||
if newConfig.Output != nil {
|
||||
config.Output = newConfig.Output
|
||||
}
|
||||
if newConfig.Insecure != nil {
|
||||
config.Insecure = newConfig.Insecure
|
||||
}
|
||||
if newConfig.DryRun != nil {
|
||||
config.DryRun = newConfig.DryRun
|
||||
}
|
||||
if len(newConfig.Params) != 0 {
|
||||
config.Params = append(config.Params, newConfig.Params...)
|
||||
}
|
||||
if len(newConfig.Headers) != 0 {
|
||||
config.Headers = append(config.Headers, newConfig.Headers...)
|
||||
}
|
||||
if len(newConfig.Cookies) != 0 {
|
||||
config.Cookies = append(config.Cookies, newConfig.Cookies...)
|
||||
}
|
||||
if len(newConfig.Bodies) != 0 {
|
||||
config.Bodies = append(config.Bodies, newConfig.Bodies...)
|
||||
}
|
||||
if len(newConfig.Proxies) != 0 {
|
||||
config.Proxies.Append(newConfig.Proxies...)
|
||||
}
|
||||
if len(newConfig.Values) != 0 {
|
||||
config.Values = append(config.Values, newConfig.Values...)
|
||||
}
|
||||
}
|
||||
|
||||
// SetDefaults fills unset fields with the values from Defaults and
// normalizes the target URL by moving its query string into Params.
func (config *Config) SetDefaults() {
	// Query parameters embedded in the URL are extracted into Params so
	// each key=value pair is handled like an explicitly configured param.
	// They are placed BEFORE the configured params and removed from the
	// URL itself.
	if config.URL != nil && len(config.URL.Query()) > 0 {
		urlParams := types.Params{}
		for key, values := range config.URL.Query() {
			for _, value := range values {
				urlParams = append(urlParams, types.Param{
					Key:   key,
					Value: []string{value},
				})
			}
		}

		config.Params = append(urlParams, config.Params...)
		config.URL.RawQuery = ""
	}

	if len(config.Methods) == 0 {
		config.Methods = []string{Defaults.Method}
	}
	if config.Timeout == nil {
		config.Timeout = &Defaults.RequestTimeout
	}
	if config.Concurrency == nil {
		config.Concurrency = common.ToPtr(Defaults.Concurrency)
	}
	if config.ShowConfig == nil {
		config.ShowConfig = common.ToPtr(Defaults.ShowConfig)
	}
	if config.Quiet == nil {
		config.Quiet = common.ToPtr(Defaults.Quiet)
	}
	if config.Insecure == nil {
		config.Insecure = common.ToPtr(Defaults.Insecure)
	}
	if config.DryRun == nil {
		config.DryRun = common.ToPtr(Defaults.DryRun)
	}
	// A default User-Agent header is added only when none was configured.
	if !config.Headers.Has("User-Agent") {
		config.Headers = append(config.Headers, types.Header{Key: "User-Agent", Value: []string{Defaults.UserAgent}})
	}

	if config.Output == nil {
		config.Output = common.ToPtr(Defaults.Output)
	}
}
|
||||
|
||||
// Validate validates the config fields.
|
||||
// It can return the following errors:
|
||||
// - types.FieldValidationErrors
|
||||
func (config Config) Validate() error {
|
||||
validationErrors := make([]types.FieldValidationError, 0)
|
||||
|
||||
if len(config.Methods) == 0 {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Method", "", errors.New("method is required")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.URL == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", "", errors.New("URL is required")))
|
||||
case !slices.Contains(ValidRequestURLSchemes, config.URL.Scheme):
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), fmt.Errorf("URL scheme must be one of: %s", strings.Join(ValidRequestURLSchemes, ", "))))
|
||||
case config.URL.Host == "":
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), errors.New("URL must have a host")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.Concurrency == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "", errors.New("concurrency count is required")))
|
||||
case *config.Concurrency == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "0", errors.New("concurrency must be greater than 0")))
|
||||
case *config.Concurrency > 100_000_000:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", strconv.FormatUint(uint64(*config.Concurrency), 10), errors.New("concurrency must not exceed 100,000,000")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.Requests == nil && config.Duration == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "", errors.New("either request count or duration must be specified")))
|
||||
case (config.Requests != nil && config.Duration != nil) && (*config.Requests == 0 && *config.Duration == 0):
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "0", errors.New("both request count and duration cannot be zero")))
|
||||
case config.Requests != nil && config.Duration == nil && *config.Requests == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests", "0", errors.New("request count must be greater than 0")))
|
||||
case config.Requests == nil && config.Duration != nil && *config.Duration == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Duration", "0", errors.New("duration must be greater than 0")))
|
||||
}
|
||||
|
||||
if *config.Timeout < 1 {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Timeout", "0", errors.New("timeout must be greater than 0")))
|
||||
}
|
||||
|
||||
if config.ShowConfig == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("ShowConfig", "", errors.New("showConfig field is required")))
|
||||
}
|
||||
|
||||
if config.Quiet == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Quiet", "", errors.New("quiet field is required")))
|
||||
}
|
||||
|
||||
if config.Output == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||
} else {
|
||||
switch *config.Output {
|
||||
case "":
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||
case ConfigOutputTypeTable, ConfigOutputTypeJSON, ConfigOutputTypeYAML, ConfigOutputTypeNone:
|
||||
default:
|
||||
validOutputs := []string{string(ConfigOutputTypeTable), string(ConfigOutputTypeJSON), string(ConfigOutputTypeYAML), string(ConfigOutputTypeNone)}
|
||||
validationErrors = append(validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
"Output",
|
||||
string(*config.Output),
|
||||
fmt.Errorf(
|
||||
"output type must be one of: %s",
|
||||
strings.Join(validOutputs, ", "),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if config.Insecure == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Insecure", "", errors.New("insecure field is required")))
|
||||
}
|
||||
|
||||
if config.DryRun == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("DryRun", "", errors.New("dryRun field is required")))
|
||||
}
|
||||
|
||||
for i, proxy := range config.Proxies {
|
||||
if !slices.Contains(ValidProxySchemes, proxy.Scheme) {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Proxy[%d]", i),
|
||||
proxy.String(),
|
||||
fmt.Errorf("proxy scheme must be one of: %v", ValidProxySchemes),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
templateErrors := ValidateTemplates(&config)
|
||||
validationErrors = append(validationErrors, templateErrors...)
|
||||
|
||||
if len(validationErrors) > 0 {
|
||||
return types.NewFieldValidationErrors(validationErrors)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReadAllConfigs reads configuration from every source and merges them in
// ascending priority order: ENV first, then each referenced config file,
// then CLI flags last (so CLI values win). Any parse failure prints a
// diagnostic and exits the process.
func ReadAllConfigs() *Config {
	// 1. Environment variables (lowest priority).
	envParser := NewConfigENVParser("SARIN")
	envConfig, err := envParser.Parse()
	_ = utilsErr.MustHandle(err,
		utilsErr.OnType(func(err types.FieldParseErrors) error {
			printParseErrors("ENV", err.Errors...)
			fmt.Println()
			os.Exit(1)
			return nil
		}),
	)

	// 2. CLI flags (parsed now; merged last so they take precedence).
	cliParser := NewConfigCLIParser(os.Args)
	cliConf, err := cliParser.Parse()
	_ = utilsErr.MustHandle(err,
		utilsErr.OnSentinel(types.ErrCLINoArgs, func(err error) error {
			cliParser.PrintHelp()
			fmt.Fprintln(os.Stderr, StyleYellow.Render("\nNo arguments provided."))
			os.Exit(1)
			return nil
		}),
		utilsErr.OnType(func(err types.CLIUnexpectedArgsError) error {
			cliParser.PrintHelp()
			fmt.Fprintln(os.Stderr,
				StyleYellow.Render(
					"\nUnexpected CLI arguments provided: ",
				)+strings.Join(err.Args, ", "),
			)
			os.Exit(1)
			return nil
		}),
		utilsErr.OnType(func(err types.FieldParseErrors) error {
			cliParser.PrintHelp()
			fmt.Println()
			printParseErrors("CLI", err.Errors...)
			os.Exit(1)
			return nil
		}),
	)

	// 3. Config files referenced by ENV or CLI, each merged over the ENV
	// config in order; nested file references are resolved up to 10 levels
	// deep by parseConfigFile.
	for _, configFile := range append(envConfig.Files, cliConf.Files...) {
		fileConfig, err := parseConfigFile(configFile, 10)
		_ = utilsErr.MustHandle(err,
			utilsErr.OnType(func(err types.ConfigFileReadError) error {
				cliParser.PrintHelp()
				fmt.Fprintln(os.Stderr,
					StyleYellow.Render(
						fmt.Sprintf("\nFailed to read config file (%s): ", configFile.Path())+err.Error(),
					),
				)
				os.Exit(1)
				return nil
			}),
			utilsErr.OnType(func(err types.UnmarshalError) error {
				fmt.Fprintln(os.Stderr,
					StyleYellow.Render(
						fmt.Sprintf("\nFailed to parse config file (%s): ", configFile.Path())+err.Error(),
					),
				)
				os.Exit(1)
				return nil
			}),
			utilsErr.OnType(func(err types.FieldParseErrors) error {
				printParseErrors(fmt.Sprintf("CONFIG FILE '%s'", configFile.Path()), err.Errors...)
				os.Exit(1)
				return nil
			}),
		)

		envConfig.Merge(fileConfig)
	}

	// CLI flags override everything parsed so far.
	envConfig.Merge(cliConf)

	return envConfig
}
|
||||
|
||||
// parseConfigFile recursively parses a config file and its nested files up to maxDepth levels.
// Returns the merged configuration or an error if parsing fails.
// It can return the following errors:
//   - types.ConfigFileReadError
//   - types.UnmarshalError
//   - types.FieldParseErrors
func parseConfigFile(configFile types.ConfigFile, maxDepth int) (*Config, error) {
	configFileParser := NewConfigFileParser(configFile)
	fileConfig, err := configFileParser.Parse()
	if err != nil {
		return nil, err
	}

	// Depth exhausted: return this file's config without resolving any
	// files it references (prevents unbounded/cyclic recursion).
	if maxDepth <= 0 {
		return fileConfig, nil
	}

	for _, c := range fileConfig.Files {
		innerFileConfig, err := parseConfigFile(c, maxDepth-1)
		if err != nil {
			return nil, err
		}

		// The outer file's values are merged OVER the nested file's, so a
		// file always overrides the files it includes.
		innerFileConfig.Merge(fileConfig)
		fileConfig = innerFileConfig
	}

	return fileConfig, nil
}
|
||||
|
||||
func printParseErrors(parserName string, errors ...types.FieldParseError) {
|
||||
for _, fieldErr := range errors {
|
||||
if fieldErr.Value == "" {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s': ", parserName, fieldErr.Field))+fieldErr.Err.Error(),
|
||||
)
|
||||
} else {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s' (%s): ", parserName, fieldErr.Field, fieldErr.Value))+fieldErr.Err.Error(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Layout and timing constants for the interactive config preview UI.
const (
	scrollbarWidth = 1 // columns reserved for the scrollbar
	scrollbarBottomSpace = 1 // rows left blank below the scrollbar track
	statusDisplayTime = 3 * time.Second // how long a transient status message stays visible
)
|
||||
|
||||
var (
|
||||
printConfigBorderStyle = func() lipgloss.Border {
|
||||
b := lipgloss.RoundedBorder()
|
||||
return b
|
||||
}()
|
||||
|
||||
printConfigHelpStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1)
|
||||
printConfigSuccessStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("10"))
|
||||
printConfigErrorStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("9"))
|
||||
printConfigKeyStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("12")).Bold(true)
|
||||
printConfigDescStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("252"))
|
||||
)
|
||||
|
||||
// printConfigClearStatusMsg is a bubbletea message that clears the transient
// status line once statusDisplayTime has elapsed.
type printConfigClearStatusMsg struct{}

// printConfigModel is the bubbletea model backing the interactive
// "show config" preview screen.
type printConfigModel struct {
	viewport viewport.Model // scrollable view over the rendered config text
	content string // rendered config text shown (with line numbers) in the viewport
	rawContent []byte // unrendered config bytes, written verbatim on ctrl+s
	statusMsg string // transient (already styled) status line; empty when none
	ready bool // set once the first WindowSizeMsg has sized the viewport
	start bool // set when the user pressed ENTER to confirm and start
}
|
||||
|
||||
// Init implements tea.Model; no initial command is needed.
func (m printConfigModel) Init() tea.Cmd { return nil }
|
||||
|
||||
func (m printConfigModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var cmd tea.Cmd
|
||||
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
switch msg.String() {
|
||||
case "ctrl+c", "esc":
|
||||
return m, tea.Quit
|
||||
case "ctrl+s":
|
||||
return m.saveContent()
|
||||
case "enter":
|
||||
m.start = true
|
||||
return m, tea.Quit
|
||||
}
|
||||
|
||||
case printConfigClearStatusMsg:
|
||||
m.statusMsg = ""
|
||||
return m, nil
|
||||
|
||||
case tea.WindowSizeMsg:
|
||||
m.handleResize(msg)
|
||||
}
|
||||
|
||||
m.viewport, cmd = m.viewport.Update(msg)
|
||||
return m, cmd
|
||||
}
|
||||
|
||||
func (m printConfigModel) View() string {
|
||||
if !m.ready {
|
||||
return "\n Initializing..."
|
||||
}
|
||||
|
||||
content := lipgloss.JoinHorizontal(lipgloss.Top, m.viewport.View(), m.scrollbar())
|
||||
return fmt.Sprintf("%s\n%s\n%s", m.headerView(), content, m.footerView())
|
||||
}
|
||||
|
||||
func (m *printConfigModel) saveContent() (printConfigModel, tea.Cmd) {
|
||||
filename := fmt.Sprintf("sarin_config_%s.yaml", time.Now().Format("2006-01-02_15-04-05"))
|
||||
if err := os.WriteFile(filename, m.rawContent, 0600); err != nil {
|
||||
m.statusMsg = printConfigErrorStatusStyle.Render("✗ Error saving file: " + err.Error())
|
||||
} else {
|
||||
m.statusMsg = printConfigSuccessStatusStyle.Render("✓ Saved to " + filename)
|
||||
}
|
||||
return *m, tea.Tick(statusDisplayTime, func(time.Time) tea.Msg { return printConfigClearStatusMsg{} })
|
||||
}
|
||||
|
||||
func (m *printConfigModel) handleResize(msg tea.WindowSizeMsg) {
|
||||
headerHeight := lipgloss.Height(m.headerView())
|
||||
footerHeight := lipgloss.Height(m.footerView())
|
||||
height := msg.Height - headerHeight - footerHeight
|
||||
width := msg.Width - scrollbarWidth
|
||||
|
||||
if !m.ready {
|
||||
m.viewport = viewport.New(width, height)
|
||||
m.viewport.SetContent(m.contentWithLineNumbers())
|
||||
m.ready = true
|
||||
} else {
|
||||
m.viewport.Width = width
|
||||
m.viewport.Height = height
|
||||
}
|
||||
}
|
||||
|
||||
func (m printConfigModel) headerView() string {
|
||||
var title string
|
||||
if m.statusMsg != "" {
|
||||
title = ("" + m.statusMsg)
|
||||
} else {
|
||||
sep := printConfigDescStyle.Render(" / ")
|
||||
help := printConfigKeyStyle.Render("ENTER") + printConfigDescStyle.Render(" start") + sep +
|
||||
printConfigKeyStyle.Render("CTRL+S") + printConfigDescStyle.Render(" save") + sep +
|
||||
printConfigKeyStyle.Render("ESC") + printConfigDescStyle.Render(" exit")
|
||||
title = printConfigHelpStyle.Render(help)
|
||||
}
|
||||
line := strings.Repeat("─", max(0, m.viewport.Width+scrollbarWidth-lipgloss.Width(title)))
|
||||
return lipgloss.JoinHorizontal(lipgloss.Center, title, line)
|
||||
}
|
||||
|
||||
func (m printConfigModel) footerView() string {
|
||||
return strings.Repeat("─", m.viewport.Width+scrollbarWidth)
|
||||
}
|
||||
|
||||
func (m printConfigModel) contentWithLineNumbers() string {
|
||||
lines := strings.Split(m.content, "\n")
|
||||
width := len(strconv.Itoa(len(lines)))
|
||||
lineNumStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("246"))
|
||||
|
||||
var sb strings.Builder
|
||||
for i, line := range lines {
|
||||
lineNum := lineNumStyle.Render(fmt.Sprintf("%*d", width, i+1))
|
||||
sb.WriteString(lineNum)
|
||||
sb.WriteString(" ")
|
||||
sb.WriteString(line)
|
||||
if i < len(lines)-1 {
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (m printConfigModel) scrollbar() string {
|
||||
height := m.viewport.Height
|
||||
trackHeight := height - scrollbarBottomSpace
|
||||
totalLines := m.viewport.TotalLineCount()
|
||||
|
||||
if totalLines <= height {
|
||||
return strings.Repeat(" \n", trackHeight) + " "
|
||||
}
|
||||
|
||||
thumbSize := max(1, (height*trackHeight)/totalLines)
|
||||
thumbPos := int(m.viewport.ScrollPercent() * float64(trackHeight-thumbSize))
|
||||
|
||||
var sb strings.Builder
|
||||
for i := range trackHeight {
|
||||
if i >= thumbPos && i < thumbPos+thumbSize {
|
||||
sb.WriteByte('\xe2') // █ (U+2588)
|
||||
sb.WriteByte('\x96')
|
||||
sb.WriteByte('\x88')
|
||||
} else {
|
||||
sb.WriteByte('\xe2') // ░ (U+2591)
|
||||
sb.WriteByte('\x96')
|
||||
sb.WriteByte('\x91')
|
||||
}
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
sb.WriteByte(' ')
|
||||
return sb.String()
|
||||
}
|
||||
235
internal/config/env.go
Normal file
235
internal/config/env.go
Normal file
@@ -0,0 +1,235 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/url"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
"go.aykhans.me/utils/common"
|
||||
utilsParse "go.aykhans.me/utils/parser"
|
||||
)
|
||||
|
||||
// Compile-time check that ConfigENVParser satisfies IParser.
var _ IParser = ConfigENVParser{}

// ConfigENVParser reads configuration from environment variables, optionally
// namespaced with a prefix (e.g. prefix "SARIN" yields "SARIN_URL").
type ConfigENVParser struct {
	envPrefix string // prepended (with '_') to every variable name; may be empty
}

// NewConfigENVParser returns a parser that reads variables with the given prefix.
func NewConfigENVParser(envPrefix string) *ConfigENVParser {
	return &ConfigENVParser{envPrefix}
}
|
||||
|
||||
// Parse parses env arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigENVParser) Parse() (*Config, error) {
|
||||
var (
|
||||
config = &Config{}
|
||||
fieldParseErrors []types.FieldParseError
|
||||
)
|
||||
|
||||
if showConfig := parser.getEnv("SHOW_CONFIG"); showConfig != "" {
|
||||
showConfigParsed, err := utilsParse.ParseString[bool](showConfig)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("SHOW_CONFIG"),
|
||||
showConfig,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.ShowConfig = &showConfigParsed
|
||||
}
|
||||
}
|
||||
|
||||
if configFile := parser.getEnv("CONFIG_FILE"); configFile != "" {
|
||||
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||
}
|
||||
|
||||
if quiet := parser.getEnv("QUIET"); quiet != "" {
|
||||
quietParsed, err := utilsParse.ParseString[bool](quiet)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("QUIET"),
|
||||
quiet,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Quiet = &quietParsed
|
||||
}
|
||||
}
|
||||
|
||||
if output := parser.getEnv("OUTPUT"); output != "" {
|
||||
config.Output = common.ToPtr(ConfigOutputType(output))
|
||||
}
|
||||
|
||||
if insecure := parser.getEnv("INSECURE"); insecure != "" {
|
||||
insecureParsed, err := utilsParse.ParseString[bool](insecure)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("INSECURE"),
|
||||
insecure,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Insecure = &insecureParsed
|
||||
}
|
||||
}
|
||||
|
||||
if dryRun := parser.getEnv("DRY_RUN"); dryRun != "" {
|
||||
dryRunParsed, err := utilsParse.ParseString[bool](dryRun)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("DRY_RUN"),
|
||||
dryRun,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.DryRun = &dryRunParsed
|
||||
}
|
||||
}
|
||||
|
||||
if method := parser.getEnv("METHOD"); method != "" {
|
||||
config.Methods = []string{method}
|
||||
}
|
||||
|
||||
if urlEnv := parser.getEnv("URL"); urlEnv != "" {
|
||||
urlEnvParsed, err := url.Parse(urlEnv)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(parser.getFullEnvName("URL"), urlEnv, err),
|
||||
)
|
||||
} else {
|
||||
config.URL = urlEnvParsed
|
||||
}
|
||||
}
|
||||
|
||||
if concurrency := parser.getEnv("CONCURRENCY"); concurrency != "" {
|
||||
concurrencyParsed, err := utilsParse.ParseString[uint](concurrency)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("CONCURRENCY"),
|
||||
concurrency,
|
||||
errors.New("invalid value for unsigned integer"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Concurrency = &concurrencyParsed
|
||||
}
|
||||
}
|
||||
|
||||
if requests := parser.getEnv("REQUESTS"); requests != "" {
|
||||
requestsParsed, err := utilsParse.ParseString[uint64](requests)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("REQUESTS"),
|
||||
requests,
|
||||
errors.New("invalid value for unsigned integer"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Requests = &requestsParsed
|
||||
}
|
||||
}
|
||||
|
||||
if duration := parser.getEnv("DURATION"); duration != "" {
|
||||
durationParsed, err := utilsParse.ParseString[time.Duration](duration)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("DURATION"),
|
||||
duration,
|
||||
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Duration = &durationParsed
|
||||
}
|
||||
}
|
||||
|
||||
if timeout := parser.getEnv("TIMEOUT"); timeout != "" {
|
||||
timeoutParsed, err := utilsParse.ParseString[time.Duration](timeout)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("TIMEOUT"),
|
||||
timeout,
|
||||
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Timeout = &timeoutParsed
|
||||
}
|
||||
}
|
||||
|
||||
if param := parser.getEnv("PARAM"); param != "" {
|
||||
config.Params.Parse(param)
|
||||
}
|
||||
|
||||
if header := parser.getEnv("HEADER"); header != "" {
|
||||
config.Headers.Parse(header)
|
||||
}
|
||||
|
||||
if cookie := parser.getEnv("COOKIE"); cookie != "" {
|
||||
config.Cookies.Parse(cookie)
|
||||
}
|
||||
|
||||
if body := parser.getEnv("BODY"); body != "" {
|
||||
config.Bodies = []string{body}
|
||||
}
|
||||
|
||||
if proxy := parser.getEnv("PROXY"); proxy != "" {
|
||||
err := config.Proxies.Parse(proxy)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("PROXY"),
|
||||
proxy,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if values := parser.getEnv("VALUES"); values != "" {
|
||||
config.Values = []string{values}
|
||||
}
|
||||
|
||||
if len(fieldParseErrors) > 0 {
|
||||
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (parser ConfigENVParser) getFullEnvName(envName string) string {
|
||||
if parser.envPrefix == "" {
|
||||
return envName
|
||||
}
|
||||
return parser.envPrefix + "_" + envName
|
||||
}
|
||||
|
||||
// getEnv reads the current value of the prefix-qualified environment variable.
func (parser ConfigENVParser) getEnv(envName string) string {
	return os.Getenv(parser.getFullEnvName(envName))
}
|
||||
280
internal/config/file.go
Normal file
280
internal/config/file.go
Normal file
@@ -0,0 +1,280 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
"go.aykhans.me/utils/common"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
// Compile-time check that ConfigFileParser satisfies IParser.
var _ IParser = ConfigFileParser{}

// ConfigFileParser loads configuration from a local or remote (HTTP/HTTPS)
// config file.
type ConfigFileParser struct {
	configFile types.ConfigFile // location and declared format of the file to parse
}

// NewConfigFileParser returns a parser bound to the given config file.
func NewConfigFileParser(configFile types.ConfigFile) *ConfigFileParser {
	return &ConfigFileParser{configFile}
}
|
||||
|
||||
// Parse parses config file arguments into a Config object.
// It can return the following errors:
//   - types.ConfigFileReadError
//   - types.UnmarshalError
//   - types.FieldParseErrors
func (parser ConfigFileParser) Parse() (*Config, error) {
	// Bound the whole fetch (including a remote HTTP download) to 30 seconds.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second*30)
	defer cancel()

	configFileData, err := fetchFile(ctx, parser.configFile.Path())
	if err != nil {
		return nil, types.NewConfigFileReadError(err)
	}

	switch parser.configFile.Type() {
	case types.ConfigFileTypeYAML, types.ConfigFileTypeUnknown:
		// Files with an unknown extension are assumed to be YAML.
		return parser.ParseYAML(configFileData)
	default:
		// Unreachable unless a new ConfigFileType is added without a branch here.
		panic("unhandled config file type")
	}
}
|
||||
|
||||
// fetchFile retrieves file contents from a local path or HTTP/HTTPS URL.
|
||||
func fetchFile(ctx context.Context, src string) ([]byte, error) {
|
||||
if strings.HasPrefix(src, "http://") || strings.HasPrefix(src, "https://") {
|
||||
return fetchHTTP(ctx, src)
|
||||
}
|
||||
return fetchLocal(src)
|
||||
}
|
||||
|
||||
// fetchHTTP downloads file contents from an HTTP/HTTPS URL.
|
||||
func fetchHTTP(ctx context.Context, url string) ([]byte, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create request: %w", err)
|
||||
}
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch file: %w", err)
|
||||
}
|
||||
defer resp.Body.Close() //nolint:errcheck
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("failed to fetch file: HTTP %d %s", resp.StatusCode, resp.Status)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read response body: %w", err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// fetchLocal reads file contents from the local filesystem.
|
||||
// It resolves relative paths from the current working directory.
|
||||
func fetchLocal(src string) ([]byte, error) {
|
||||
path := src
|
||||
if !filepath.IsAbs(src) {
|
||||
pwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get working directory: %w", err)
|
||||
}
|
||||
path = filepath.Join(pwd, src)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(path) //nolint:gosec
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// stringOrSliceField accepts either a single YAML string or a YAML sequence
// of strings, normalizing both forms to a []string.
type stringOrSliceField []string

// UnmarshalYAML implements yaml.Unmarshaler for stringOrSliceField.
func (ss *stringOrSliceField) UnmarshalYAML(node *yaml.Node) error {
	switch node.Kind {
	case yaml.ScalarNode:
		// Handle single string value: wrap it in a one-element slice.
		*ss = []string{node.Value}
		return nil
	case yaml.SequenceNode:
		// Handle array of strings
		var slice []string
		if err := node.Decode(&slice); err != nil {
			return err //nolint:wrapcheck
		}
		*ss = slice
		return nil
	default:
		// Mappings and other node kinds are not valid for this field.
		return fmt.Errorf("expected a string or a sequence of strings, but got %v", node.Kind)
	}
}
|
||||
|
||||
// keyValuesField handles flexible YAML formats for key-value pairs.
// Supported formats:
//   - Sequence of maps: [{key1: value1}, {key2: [value2, value3]}]
//   - Single map: {key1: value1, key2: [value2, value3]}
//
// Values can be either a single string or an array of strings.
type keyValuesField []types.KeyValue[string, []string]

// UnmarshalYAML implements yaml.Unmarshaler for keyValuesField, accepting
// either a single mapping node or a sequence of mapping nodes.
func (kv *keyValuesField) UnmarshalYAML(node *yaml.Node) error {
	switch node.Kind {
	case yaml.MappingNode:
		// Handle single map: {key1: value1, key2: [value2]}
		return kv.unmarshalMapping(node)
	case yaml.SequenceNode:
		// Handle sequence of maps: [{key1: value1}, {key2: value2}]
		for _, item := range node.Content {
			if item.Kind != yaml.MappingNode {
				return fmt.Errorf("expected a mapping in sequence, but got %v", item.Kind)
			}
			if err := kv.unmarshalMapping(item); err != nil {
				return err
			}
		}
		return nil
	default:
		return fmt.Errorf("expected a mapping or sequence of mappings, but got %v", node.Kind)
	}
}
|
||||
|
||||
// unmarshalMapping appends every key/value pair of a YAML mapping node to kv,
// normalizing scalar values to single-element string slices.
func (kv *keyValuesField) unmarshalMapping(node *yaml.Node) error {
	// MappingNode content is [key1, value1, key2, value2, ...]
	for i := 0; i < len(node.Content); i += 2 {
		keyNode := node.Content[i]
		valueNode := node.Content[i+1]

		if keyNode.Kind != yaml.ScalarNode {
			return fmt.Errorf("expected a string key, but got %v", keyNode.Kind)
		}

		key := keyNode.Value
		var values []string

		switch valueNode.Kind {
		case yaml.ScalarNode:
			// Single value: wrap it in a one-element slice.
			values = []string{valueNode.Value}
		case yaml.SequenceNode:
			// Multiple values: every element must itself be a scalar.
			for _, v := range valueNode.Content {
				if v.Kind != yaml.ScalarNode {
					return fmt.Errorf("expected string values in array for key %q, but got %v", key, v.Kind)
				}
				values = append(values, v.Value)
			}
		default:
			return fmt.Errorf("expected a string or array of strings for key %q, but got %v", key, valueNode.Kind)
		}

		*kv = append(*kv, types.KeyValue[string, []string]{Key: key, Value: values})
	}
	return nil
}
|
||||
|
||||
// configYAML mirrors the YAML schema of a sarin config file. All fields are
// optional; pointer and slice types distinguish "absent" from zero values so
// that merging with other config sources only overrides what the file set.
type configYAML struct {
	ConfigFiles stringOrSliceField `yaml:"configFile"` // nested config files to load
	Method stringOrSliceField `yaml:"method"`
	URL *string `yaml:"url"`
	Timeout *time.Duration `yaml:"timeout"`
	Concurrency *uint `yaml:"concurrency"`
	RequestCount *uint64 `yaml:"requests"`
	Duration *time.Duration `yaml:"duration"`
	Quiet *bool `yaml:"quiet"`
	Output *string `yaml:"output"`
	Insecure *bool `yaml:"insecure"`
	ShowConfig *bool `yaml:"showConfig"`
	DryRun *bool `yaml:"dryRun"`
	Params keyValuesField `yaml:"params"`
	Headers keyValuesField `yaml:"headers"`
	Cookies keyValuesField `yaml:"cookies"`
	Bodies stringOrSliceField `yaml:"body"`
	Proxies stringOrSliceField `yaml:"proxy"`
	Values stringOrSliceField `yaml:"values"`
}
|
||||
|
||||
// ParseYAML parses YAML config file arguments into a Config object.
// It can return the following errors:
//   - types.UnmarshalError
//   - types.FieldParseErrors
func (parser ConfigFileParser) ParseYAML(data []byte) (*Config, error) {
	var (
		config = &Config{}
		parsedData = &configYAML{}
	)

	err := yaml.Unmarshal(data, &parsedData)
	if err != nil {
		return nil, types.NewUnmarshalError(err)
	}

	var fieldParseErrors []types.FieldParseError

	// Fields that need no further validation are copied over directly.
	config.Methods = append(config.Methods, parsedData.Method...)
	config.Timeout = parsedData.Timeout
	config.Concurrency = parsedData.Concurrency
	config.Requests = parsedData.RequestCount
	config.Duration = parsedData.Duration
	config.ShowConfig = parsedData.ShowConfig
	config.Quiet = parsedData.Quiet

	if parsedData.Output != nil {
		config.Output = common.ToPtr(ConfigOutputType(*parsedData.Output))
	}

	config.Insecure = parsedData.Insecure
	config.DryRun = parsedData.DryRun
	// Re-type the generic key/value pairs into their request-specific types.
	for _, kv := range parsedData.Params {
		config.Params = append(config.Params, types.Param(kv))
	}
	for _, kv := range parsedData.Headers {
		config.Headers = append(config.Headers, types.Header(kv))
	}
	for _, kv := range parsedData.Cookies {
		config.Cookies = append(config.Cookies, types.Cookie(kv))
	}
	config.Bodies = append(config.Bodies, parsedData.Bodies...)
	config.Values = append(config.Values, parsedData.Values...)

	if len(parsedData.ConfigFiles) > 0 {
		// Nested config files are only recorded here; resolving and merging
		// them is left to the caller.
		for _, configFile := range parsedData.ConfigFiles {
			config.Files = append(config.Files, *types.ParseConfigFile(configFile))
		}
	}

	if parsedData.URL != nil {
		urlParsed, err := url.Parse(*parsedData.URL)
		if err != nil {
			fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", *parsedData.URL, err))
		} else {
			config.URL = urlParsed
		}
	}

	for i, proxy := range parsedData.Proxies {
		err := config.Proxies.Parse(proxy)
		if err != nil {
			fieldParseErrors = append(
				fieldParseErrors,
				types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
			)
		}
	}

	if len(fieldParseErrors) > 0 {
		return nil, types.NewFieldParseErrors(fieldParseErrors)
	}

	return config, nil
}
|
||||
212
internal/config/template_validator.go
Normal file
212
internal/config/template_validator.go
Normal file
@@ -0,0 +1,212 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"text/template"
|
||||
|
||||
"go.aykhans.me/sarin/internal/sarin"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
func validateTemplateString(value string, funcMap template.FuncMap) error {
|
||||
if value == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := template.New("").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
return fmt.Errorf("template parse error: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateTemplateMethods(methods []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, method := range methods {
|
||||
if err := validateTemplateString(method, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Method[%d]", i),
|
||||
method,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateParams(params types.Params, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for paramIndex, param := range params {
|
||||
// Validate param key
|
||||
if err := validateTemplateString(param.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Param[%d].Key", paramIndex),
|
||||
param.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate param values
|
||||
for valueIndex, value := range param.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Param[%d].Value[%d]", paramIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateHeaders(headers types.Headers, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for headerIndex, header := range headers {
|
||||
// Validate header key
|
||||
if err := validateTemplateString(header.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Header[%d].Key", headerIndex),
|
||||
header.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate header values
|
||||
for valueIndex, value := range header.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Header[%d].Value[%d]", headerIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateCookies(cookies types.Cookies, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for cookieIndex, cookie := range cookies {
|
||||
// Validate cookie key
|
||||
if err := validateTemplateString(cookie.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Cookie[%d].Key", cookieIndex),
|
||||
cookie.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate cookie values
|
||||
for valueIndex, value := range cookie.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Cookie[%d].Value[%d]", cookieIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateBodies(bodies []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, body := range bodies {
|
||||
if err := validateTemplateString(body, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Body[%d]", i),
|
||||
body,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateValues(values []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, value := range values {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Values[%d]", i),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func ValidateTemplates(config *Config) []types.FieldValidationError {
|
||||
// Create template function map using the same functions as sarin package
|
||||
randSource := sarin.NewDefaultRandSource()
|
||||
funcMap := sarin.NewDefaultTemplateFuncMap(randSource)
|
||||
|
||||
bodyFuncMapData := &sarin.BodyTemplateFuncMapData{}
|
||||
bodyFuncMap := sarin.NewDefaultBodyTemplateFuncMap(randSource, bodyFuncMapData)
|
||||
|
||||
var allErrors []types.FieldValidationError
|
||||
|
||||
// Validate methods
|
||||
allErrors = append(allErrors, validateTemplateMethods(config.Methods, funcMap)...)
|
||||
|
||||
// Validate params
|
||||
allErrors = append(allErrors, validateTemplateParams(config.Params, funcMap)...)
|
||||
|
||||
// Validate headers
|
||||
allErrors = append(allErrors, validateTemplateHeaders(config.Headers, funcMap)...)
|
||||
|
||||
// Validate cookies
|
||||
allErrors = append(allErrors, validateTemplateCookies(config.Cookies, funcMap)...)
|
||||
|
||||
// Validate bodies
|
||||
allErrors = append(allErrors, validateTemplateBodies(config.Bodies, bodyFuncMap)...)
|
||||
|
||||
// Validate values
|
||||
allErrors = append(allErrors, validateTemplateValues(config.Values, funcMap)...)
|
||||
|
||||
return allErrors
|
||||
}
|
||||
310
internal/sarin/client.go
Normal file
310
internal/sarin/client.go
Normal file
@@ -0,0 +1,310 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"math"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/valyala/fasthttp"
|
||||
"github.com/valyala/fasthttp/fasthttpproxy"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
"golang.org/x/net/proxy"
|
||||
)
|
||||
|
||||
// HostClientGenerator is a factory function that returns a fasthttp.HostClient.
type HostClientGenerator func() *fasthttp.HostClient
|
||||
|
||||
func safeUintToInt(u uint) int {
|
||||
if u > math.MaxInt {
|
||||
return math.MaxInt
|
||||
}
|
||||
return int(u)
|
||||
}
|
||||
|
||||
// NewHostClients creates a list of fasthttp.HostClient instances for the given proxies.
|
||||
// If no proxies are provided, a single client without a proxy is returned.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func NewHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies []url.URL,
|
||||
maxConns uint,
|
||||
requestURL *url.URL,
|
||||
skipVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
isTLS := requestURL.Scheme == "https"
|
||||
|
||||
if proxiesLen := len(proxies); proxiesLen > 0 {
|
||||
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
||||
addr := requestURL.Host
|
||||
if isTLS && requestURL.Port() == "" {
|
||||
addr += ":443"
|
||||
}
|
||||
|
||||
for _, proxy := range proxies {
|
||||
dialFunc, err := NewProxyDialFunc(ctx, &proxy, timeout)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxy.String(), err)
|
||||
}
|
||||
|
||||
clients = append(clients, &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: addr,
|
||||
Dial: dialFunc,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
return clients, nil
|
||||
}
|
||||
|
||||
client := &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: requestURL.Host,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
}
|
||||
return []*fasthttp.HostClient{client}, nil
|
||||
}
|
||||
|
||||
func NewProxyDialFunc(ctx context.Context, proxyURL *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
||||
var (
|
||||
dialer fasthttp.DialFunc
|
||||
err error
|
||||
)
|
||||
|
||||
switch proxyURL.Scheme {
|
||||
case "socks5":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "socks5h":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "http":
|
||||
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxyURL.String(), timeout)
|
||||
case "https":
|
||||
dialer = fasthttpHTTPSDialerDualStackTimeout(proxyURL, timeout)
|
||||
default:
|
||||
return nil, errors.New("unsupported proxy scheme")
|
||||
}
|
||||
|
||||
if dialer == nil {
|
||||
return nil, errors.New("internal error: proxy dialer is nil")
|
||||
}
|
||||
|
||||
return dialer, nil
|
||||
}
|
||||
|
||||
// fasthttpSocksDialerDualStackTimeout builds a fasthttp.DialFunc that
// dials targets through a SOCKS5 proxy. When resolveLocally is true the
// target hostname is resolved by this process and the proxy receives an
// IP address; when false the hostname is forwarded to the proxy for
// resolution. The timeout is a single budget shared by the optional DNS
// lookup and the dial itself.
func fasthttpSocksDialerDualStackTimeout(ctx context.Context, proxyURL *url.URL, timeout time.Duration, resolveLocally bool) (fasthttp.DialFunc, error) {
	netDialer := &net.Dialer{}

	// Parse auth from proxy URL if present
	var auth *proxy.Auth
	if proxyURL.User != nil {
		auth = &proxy.Auth{
			User: proxyURL.User.Username(),
		}
		if password, ok := proxyURL.User.Password(); ok {
			auth.Password = password
		}
	}

	// Create SOCKS5 dialer with net.Dialer as forward dialer
	socksDialer, err := proxy.SOCKS5("tcp", proxyURL.Host, auth, netDialer)
	if err != nil {
		return nil, err
	}

	// Assert to ContextDialer for timeout support
	contextDialer, ok := socksDialer.(proxy.ContextDialer)
	if !ok {
		// Fallback without timeout (should not happen with net.Dialer)
		return func(addr string) (net.Conn, error) {
			return socksDialer.Dial("tcp", addr)
		}, nil
	}

	// Return dial function that uses context with timeout
	return func(addr string) (net.Conn, error) {
		// One deadline covers DNS resolution (if any) plus the dial.
		deadline := time.Now().Add(timeout)

		if resolveLocally {
			host, port, err := net.SplitHostPort(addr)
			if err != nil {
				return nil, err
			}

			// Bound DNS resolution by the full timeout; the dial below only
			// gets whatever time remains after the lookup returns.
			dnsCtx, dnsCancel := context.WithTimeout(ctx, timeout)
			ips, err := net.DefaultResolver.LookupIP(dnsCtx, "ip", host)
			dnsCancel()
			if err != nil {
				return nil, err
			}
			if len(ips) == 0 {
				return nil, errors.New("no IP addresses found for host: " + host)
			}

			// Use the first resolved IP
			addr = net.JoinHostPort(ips[0].String(), port)
		}

		// Use remaining time for dial
		remaining := time.Until(deadline)
		if remaining <= 0 {
			return nil, context.DeadlineExceeded
		}

		dialCtx, dialCancel := context.WithTimeout(ctx, remaining)
		defer dialCancel()

		return contextDialer.DialContext(dialCtx, "tcp", addr)
	}, nil
}
|
||||
|
||||
// fasthttpHTTPSDialerDualStackTimeout builds a fasthttp.DialFunc that
// tunnels connections through an HTTPS proxy: it opens a TLS session to
// the proxy itself, issues an HTTP CONNECT for the target address, and
// returns the tunneled connection. The timeout covers the TCP dial, the
// TLS handshake and the CONNECT exchange combined.
func fasthttpHTTPSDialerDualStackTimeout(proxyURL *url.URL, timeout time.Duration) fasthttp.DialFunc {
	proxyAddr := proxyURL.Host
	if proxyURL.Port() == "" {
		proxyAddr = net.JoinHostPort(proxyURL.Hostname(), "443")
	}

	// Build Proxy-Authorization header if auth is present
	var proxyAuth string
	if proxyURL.User != nil {
		username := proxyURL.User.Username()
		password, _ := proxyURL.User.Password()
		credentials := username + ":" + password
		proxyAuth = "Basic " + base64.StdEncoding.EncodeToString([]byte(credentials))
	}

	return func(addr string) (net.Conn, error) {
		// Establish TCP connection to proxy with timeout
		start := time.Now()
		conn, err := fasthttp.DialDualStackTimeout(proxyAddr, timeout)
		if err != nil {
			return nil, err
		}

		// Whatever time the TCP dial consumed is subtracted from the budget.
		remaining := timeout - time.Since(start)
		if remaining <= 0 {
			conn.Close() //nolint:errcheck,gosec
			return nil, context.DeadlineExceeded
		}

		// Set deadline for the TLS handshake and CONNECT request
		if err := conn.SetDeadline(time.Now().Add(remaining)); err != nil {
			conn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Upgrade to TLS
		tlsConn := tls.Client(conn, &tls.Config{ //nolint:gosec
			ServerName: proxyURL.Hostname(),
		})
		if err := tlsConn.Handshake(); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Build and send CONNECT request
		connectReq := &http.Request{
			Method: http.MethodConnect,
			URL:    &url.URL{Opaque: addr},
			Host:   addr,
			Header: make(http.Header),
		}
		if proxyAuth != "" {
			connectReq.Header.Set("Proxy-Authorization", proxyAuth)
		}

		if err := connectReq.Write(tlsConn); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Read response using buffered reader, but return wrapped connection
		// to preserve any buffered data
		bufReader := bufio.NewReader(tlsConn)
		resp, err := http.ReadResponse(bufReader, connectReq)
		if err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}
		resp.Body.Close() //nolint:errcheck,gosec

		if resp.StatusCode != http.StatusOK {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, errors.New("proxy CONNECT failed: " + resp.Status)
		}

		// Clear deadline for the tunneled connection
		if err := tlsConn.SetDeadline(time.Time{}); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Return wrapped connection that uses the buffered reader
		// to avoid losing any data that was read ahead
		return &bufferedConn{Conn: tlsConn, reader: bufReader}, nil
	}
}
|
||||
|
||||
// bufferedConn wraps a net.Conn with a buffered reader to preserve
|
||||
// any data that was read during HTTP response parsing.
|
||||
type bufferedConn struct {
|
||||
net.Conn
|
||||
|
||||
reader *bufio.Reader
|
||||
}
|
||||
|
||||
func (c *bufferedConn) Read(b []byte) (int, error) {
|
||||
return c.reader.Read(b)
|
||||
}
|
||||
|
||||
func NewHostClientGenerator(clients ...*fasthttp.HostClient) HostClientGenerator {
|
||||
switch len(clients) {
|
||||
case 0:
|
||||
hostClient := &fasthttp.HostClient{}
|
||||
return func() *fasthttp.HostClient {
|
||||
return hostClient
|
||||
}
|
||||
case 1:
|
||||
return func() *fasthttp.HostClient {
|
||||
return clients[0]
|
||||
}
|
||||
default:
|
||||
return utilsSlice.RandomCycle(nil, clients...)
|
||||
}
|
||||
}
|
||||
14
internal/sarin/helpers.go
Normal file
14
internal/sarin/helpers.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"math/rand/v2"
|
||||
"time"
|
||||
)
|
||||
|
||||
func NewDefaultRandSource() rand.Source {
|
||||
now := time.Now().UnixNano()
|
||||
return rand.NewPCG(
|
||||
uint64(now), //nolint:gosec // G115: Safe conversion; UnixNano timestamp used as random seed, bit pattern is intentional
|
||||
uint64(now>>32), //nolint:gosec // G115: Safe conversion; right-shifted timestamp for seed entropy, overflow is acceptable
|
||||
)
|
||||
}
|
||||
336
internal/sarin/request.go
Normal file
336
internal/sarin/request.go
Normal file
@@ -0,0 +1,336 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"maps"
|
||||
"math/rand/v2"
|
||||
"net/url"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
)
|
||||
|
||||
// RequestGenerator fills a fasthttp.Request in place, returning an error
// if any template-driven part of the request fails to render.
type RequestGenerator func(*fasthttp.Request) error

// RequestGeneratorWithData is like RequestGenerator but also receives
// the per-request template data (see valuesData).
type RequestGeneratorWithData func(*fasthttp.Request, any) error

// valuesData is the payload handed to request templates; its entries are
// referenced as {{ .Values.key }}.
type valuesData struct {
	Values map[string]string
}
|
||||
|
||||
// NewRequestGenerator creates a new RequestGenerator function that generates HTTP requests
// with the specified configuration. The returned RequestGenerator is NOT safe for concurrent
// use by multiple goroutines.
//
// The boolean result reports whether any request component is dynamic
// (templated or randomly cycled between multiple candidates), i.e.
// whether consecutive calls can produce different requests.
func NewRequestGenerator(
	methods []string,
	requestURL *url.URL,
	params types.Params,
	headers types.Headers,
	cookies types.Cookies,
	bodies []string,
	values []string,
) (RequestGenerator, bool) {
	randSource := NewDefaultRandSource()
	//nolint:gosec // G404: Using non-cryptographic rand for load testing, not security
	localRand := rand.New(randSource)
	templateFuncMap := NewDefaultTemplateFuncMap(randSource)

	methodGenerator, isMethodGeneratorDynamic := NewMethodGeneratorFunc(localRand, methods, templateFuncMap)
	paramsGenerator, isParamsGeneratorDynamic := NewParamsGeneratorFunc(localRand, params, templateFuncMap)
	headersGenerator, isHeadersGeneratorDynamic := NewHeadersGeneratorFunc(localRand, headers, templateFuncMap)
	cookiesGenerator, isCookiesGeneratorDynamic := NewCookiesGeneratorFunc(localRand, cookies, templateFuncMap)

	// Bodies get their own func map so body template helpers can report a
	// Content-Type (recorded on bodyTemplateFuncMapData while rendering
	// form data) back to the header step below.
	bodyTemplateFuncMapData := &BodyTemplateFuncMapData{}
	bodyTemplateFuncMap := NewDefaultBodyTemplateFuncMap(randSource, bodyTemplateFuncMapData)
	bodyGenerator, isBodyGeneratorDynamic := NewBodyGeneratorFunc(localRand, bodies, bodyTemplateFuncMap)

	valuesGenerator := NewValuesGeneratorFunc(values, templateFuncMap)

	return func(req *fasthttp.Request) error {
		req.SetRequestURI(requestURL.Path)
		req.Header.SetHost(requestURL.Host)

		// Render the shared values first; they are exposed to every other
		// template via {{ .Values.* }}.
		data, err := valuesGenerator()
		if err != nil {
			return err
		}

		if err := methodGenerator(req, data); err != nil {
			return err
		}

		// The body must be rendered before headers: rendering may record a
		// form-data Content-Type that is appended after the user headers.
		bodyTemplateFuncMapData.ClearFormDataContenType()
		if err := bodyGenerator(req, data); err != nil {
			return err
		}

		if err := headersGenerator(req, data); err != nil {
			return err
		}
		if bodyTemplateFuncMapData.GetFormDataContenType() != "" {
			req.Header.Add("Content-Type", bodyTemplateFuncMapData.GetFormDataContenType())
		}

		if err := paramsGenerator(req, data); err != nil {
			return err
		}
		if err := cookiesGenerator(req, data); err != nil {
			return err
		}

		// Scheme defaults to http in fasthttp; only https needs to be set.
		if requestURL.Scheme == "https" {
			req.URI().SetScheme("https")
		}
		return nil
	}, isMethodGeneratorDynamic ||
		isParamsGeneratorDynamic ||
		isHeadersGeneratorDynamic ||
		isCookiesGeneratorDynamic ||
		isBodyGeneratorDynamic
}
|
||||
|
||||
func NewMethodGeneratorFunc(localRand *rand.Rand, methods []string, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
methodGenerator, isDynamic := buildStringSliceGenerator(localRand, methods, templateFunctions)
|
||||
|
||||
var (
|
||||
method string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
method, err = methodGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.Header.SetMethod(method)
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewBodyGeneratorFunc(localRand *rand.Rand, bodies []string, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
bodyGenerator, isDynamic := buildStringSliceGenerator(localRand, bodies, templateFunctions)
|
||||
|
||||
var (
|
||||
body string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
body, err = bodyGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.SetBody([]byte(body))
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewParamsGeneratorFunc(localRand *rand.Rand, params types.Params, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, params, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.URI().QueryArgs().Add(key, value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewHeadersGeneratorFunc(localRand *rand.Rand, headers types.Headers, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, headers, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.Header.Add(key, value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewCookiesGeneratorFunc(localRand *rand.Rand, cookies types.Cookies, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, cookies, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
if len(generators) > 0 {
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
cookieStrings := make([]string, 0, len(generators))
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cookieStrings = append(cookieStrings, key+"="+value)
|
||||
}
|
||||
req.Header.Add("Cookie", strings.Join(cookieStrings, "; "))
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewValuesGeneratorFunc(values []string, templateFunctions template.FuncMap) func() (valuesData, error) {
|
||||
generators := make([]func(_ any) (string, error), len(values))
|
||||
|
||||
for i, v := range values {
|
||||
generators[i], _ = createTemplateFunc(v, templateFunctions)
|
||||
}
|
||||
|
||||
var (
|
||||
rendered string
|
||||
data map[string]string
|
||||
err error
|
||||
)
|
||||
return func() (valuesData, error) {
|
||||
result := make(map[string]string)
|
||||
for _, generator := range generators {
|
||||
rendered, err = generator(nil)
|
||||
if err != nil {
|
||||
return valuesData{}, fmt.Errorf("values rendering: %w", err)
|
||||
}
|
||||
|
||||
data, err = godotenv.Unmarshal(rendered)
|
||||
if err != nil {
|
||||
return valuesData{}, fmt.Errorf("values rendering: %w", err)
|
||||
}
|
||||
|
||||
maps.Copy(result, data)
|
||||
}
|
||||
|
||||
return valuesData{Values: result}, nil
|
||||
}
|
||||
}
|
||||
|
||||
func createTemplateFunc(value string, templateFunctions template.FuncMap) (func(data any) (string, error), bool) {
|
||||
tmpl, err := template.New("").Funcs(templateFunctions).Parse(value)
|
||||
if err == nil && hasTemplateActions(tmpl) {
|
||||
var err error
|
||||
return func(data any) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
if err = tmpl.Execute(&buf, data); err != nil {
|
||||
return "", fmt.Errorf("template rendering: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}, true
|
||||
}
|
||||
return func(_ any) (string, error) { return value, nil }, false
|
||||
}
|
||||
|
||||
// keyValueGenerator produces one key/value pair per request. Key renders
// the (possibly templated) key; Value, when called, yields a renderer
// for one of the configured candidate values.
type keyValueGenerator struct {
	Key   func(data any) (string, error)
	Value func() func(data any) (string, error)
}

// keyValueItem constrains the generic key/value builders to the request
// parameter, header and cookie types.
type keyValueItem interface {
	types.Param | types.Header | types.Cookie
}
|
||||
|
||||
func buildKeyValueGenerators[T keyValueItem](
|
||||
localRand *rand.Rand,
|
||||
items []T,
|
||||
templateFunctions template.FuncMap,
|
||||
) ([]keyValueGenerator, bool) {
|
||||
isDynamic := false
|
||||
generators := make([]keyValueGenerator, len(items))
|
||||
|
||||
for generatorIndex, item := range items {
|
||||
// Convert to KeyValue to access fields
|
||||
keyValue := types.KeyValue[string, []string](item)
|
||||
|
||||
// Generate key function
|
||||
keyFunc, keyIsDynamic := createTemplateFunc(keyValue.Key, templateFunctions)
|
||||
if keyIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
|
||||
// Generate value functions
|
||||
valueFuncs := make([]func(data any) (string, error), len(keyValue.Value))
|
||||
for j, v := range keyValue.Value {
|
||||
valueFunc, valueIsDynamic := createTemplateFunc(v, templateFunctions)
|
||||
if valueIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
valueFuncs[j] = valueFunc
|
||||
}
|
||||
|
||||
generators[generatorIndex] = keyValueGenerator{
|
||||
Key: keyFunc,
|
||||
Value: utilsSlice.RandomCycle(localRand, valueFuncs...),
|
||||
}
|
||||
|
||||
if len(keyValue.Value) > 1 {
|
||||
isDynamic = true
|
||||
}
|
||||
}
|
||||
|
||||
return generators, isDynamic
|
||||
}
|
||||
|
||||
func buildStringSliceGenerator(
|
||||
localRand *rand.Rand,
|
||||
values []string,
|
||||
templateFunctions template.FuncMap,
|
||||
) (func() func(data any) (string, error), bool) {
|
||||
// Return a function that returns an empty string generator if values is empty
|
||||
if len(values) == 0 {
|
||||
emptyFunc := func(_ any) (string, error) { return "", nil }
|
||||
return func() func(_ any) (string, error) { return emptyFunc }, false
|
||||
}
|
||||
|
||||
isDynamic := len(values) > 1
|
||||
valueFuncs := make([]func(data any) (string, error), len(values))
|
||||
|
||||
for i, value := range values {
|
||||
valueFunc, valueIsDynamic := createTemplateFunc(value, templateFunctions)
|
||||
if valueIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
valueFuncs[i] = valueFunc
|
||||
}
|
||||
|
||||
return utilsSlice.RandomCycle(localRand, valueFuncs...), isDynamic
|
||||
}
|
||||
348
internal/sarin/response.go
Normal file
348
internal/sarin/response.go
Normal file
@@ -0,0 +1,348 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/lipgloss/table"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
// DefaultResponseDurationAccuracy is the default bucket size (in
// nanoseconds) used when grouping response durations.
const DefaultResponseDurationAccuracy uint32 = 1

// DefaultResponseColumnMaxWidth caps the width of the "Response" column
// in the printed table; longer keys are wrapped onto multiple lines.
const DefaultResponseColumnMaxWidth = 50
|
||||
|
||||
// Duration wraps time.Duration to provide consistent JSON/YAML marshaling as human-readable strings.
|
||||
type Duration time.Duration
|
||||
|
||||
func (d Duration) MarshalJSON() ([]byte, error) {
|
||||
//nolint:wrapcheck
|
||||
return json.Marshal(time.Duration(d).String())
|
||||
}
|
||||
|
||||
func (d Duration) MarshalYAML() (any, error) {
|
||||
return time.Duration(d).String(), nil
|
||||
}
|
||||
|
||||
func (d Duration) String() string {
|
||||
dur := time.Duration(d)
|
||||
switch {
|
||||
case dur >= time.Second:
|
||||
return dur.Round(time.Millisecond).String()
|
||||
case dur >= time.Millisecond:
|
||||
return dur.Round(time.Microsecond).String()
|
||||
default:
|
||||
return dur.String()
|
||||
}
|
||||
}
|
||||
|
||||
// BigInt wraps big.Int to provide consistent JSON/YAML marshaling as numbers.
type BigInt struct {
	*big.Int
}

// MarshalJSON emits the value as a bare (unquoted) decimal number,
// regardless of its magnitude.
func (b BigInt) MarshalJSON() ([]byte, error) {
	return []byte(b.Int.String()), nil
}

// MarshalYAML emits the value as an explicit !!int scalar so YAML
// readers treat it as a number rather than a string.
func (b BigInt) MarshalYAML() (any, error) {
	return &yaml.Node{
		Kind:  yaml.ScalarNode,
		Tag:   "!!int",
		Value: b.Int.String(),
	}, nil
}

// String returns the decimal representation of the wrapped integer.
func (b BigInt) String() string {
	return b.Int.String()
}
|
||||
|
||||
// Response accumulates the latency distribution of one response key as a
// histogram: bucketed duration -> number of observations.
type Response struct {
	durations map[time.Duration]uint64
}

// SarinResponseData is a mutex-guarded collection of Response histograms
// keyed by response identifier.
type SarinResponseData struct {
	sync.Mutex

	Responses map[string]*Response

	// accuracy is the time bucket size in nanoseconds for storing response durations.
	// Larger values (e.g., 1000) save memory but reduce accuracy by grouping more durations together.
	// Smaller values (e.g., 10) improve accuracy but increase memory usage.
	// Minimum value is 1 (most accurate, highest memory usage).
	// Default value is 1.
	accuracy time.Duration
}
|
||||
|
||||
func NewSarinResponseData(accuracy uint32) *SarinResponseData {
|
||||
if accuracy == 0 {
|
||||
accuracy = DefaultResponseDurationAccuracy
|
||||
}
|
||||
|
||||
return &SarinResponseData{
|
||||
Responses: make(map[string]*Response),
|
||||
accuracy: time.Duration(accuracy),
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) Add(responseKey string, responseTime time.Duration) {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
response, ok := data.Responses[responseKey]
|
||||
if !ok {
|
||||
data.Responses[responseKey] = &Response{
|
||||
durations: map[time.Duration]uint64{
|
||||
responseTime / data.accuracy: 1,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
response.durations[responseTime/data.accuracy]++
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintTable() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
|
||||
headerStyle := lipgloss.NewStyle().
|
||||
Bold(true).
|
||||
Foreground(lipgloss.Color("246")).
|
||||
Padding(0, 1)
|
||||
|
||||
cellStyle := lipgloss.NewStyle().
|
||||
Padding(0, 1)
|
||||
|
||||
rows := make([][]string, 0, len(output.Responses)+1)
|
||||
for key, stats := range output.Responses {
|
||||
rows = append(rows, []string{
|
||||
wrapText(key, DefaultResponseColumnMaxWidth),
|
||||
stats.Count.String(),
|
||||
stats.Min.String(),
|
||||
stats.Max.String(),
|
||||
stats.Average.String(),
|
||||
stats.P90.String(),
|
||||
stats.P95.String(),
|
||||
stats.P99.String(),
|
||||
})
|
||||
}
|
||||
|
||||
rows = append(rows, []string{
|
||||
"Total",
|
||||
output.Total.Count.String(),
|
||||
output.Total.Min.String(),
|
||||
output.Total.Max.String(),
|
||||
output.Total.Average.String(),
|
||||
output.Total.P90.String(),
|
||||
output.Total.P95.String(),
|
||||
output.Total.P99.String(),
|
||||
})
|
||||
|
||||
tbl := table.New().
|
||||
Border(lipgloss.NormalBorder()).
|
||||
BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("240"))).
|
||||
BorderRow(true).
|
||||
Headers("Response", "Count", "Min", "Max", "Average", "P90", "P95", "P99").
|
||||
Rows(rows...).
|
||||
StyleFunc(func(row, col int) lipgloss.Style {
|
||||
if row == table.HeaderRow {
|
||||
return headerStyle
|
||||
}
|
||||
return cellStyle
|
||||
})
|
||||
|
||||
fmt.Println(tbl)
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintJSON() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
encoder := json.NewEncoder(os.Stdout)
|
||||
encoder.SetIndent("", " ")
|
||||
if err := encoder.Encode(output); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintYAML() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
encoder := yaml.NewEncoder(os.Stdout)
|
||||
encoder.SetIndent(2)
|
||||
if err := encoder.Encode(output); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// responseStat is one row of the final report: request count plus
// min/max/average and tail percentiles of the latency distribution.
type responseStat struct {
	Count   BigInt   `json:"count" yaml:"count"`
	Min     Duration `json:"min" yaml:"min"`
	Max     Duration `json:"max" yaml:"max"`
	Average Duration `json:"average" yaml:"average"`
	P90     Duration `json:"p90" yaml:"p90"`
	P95     Duration `json:"p95" yaml:"p95"`
	P99     Duration `json:"p99" yaml:"p99"`
}

// responseStats maps a response key to its computed statistics.
type responseStats map[string]responseStat

// outputData is the top-level report structure: per-key statistics plus
// an aggregate "total" row.
type outputData struct {
	Responses map[string]responseStat `json:"responses" yaml:"responses"`
	Total     responseStat            `json:"total" yaml:"total"`
}
|
||||
|
||||
// prepareOutputData converts the raw duration histograms into per-key
// and aggregate statistics ready for printing. The caller must already
// hold the lock.
func (data *SarinResponseData) prepareOutputData() outputData {
	switch len(data.Responses) {
	case 0:
		return outputData{
			Responses: make(map[string]responseStat),
			Total:     responseStat{},
		}
	case 1:
		// With a single key the total equals that key's stats, so compute
		// them once and reuse.
		var (
			responseKey string
			stats       responseStat
		)
		for key, response := range data.Responses {
			stats = calculateStats(response.durations, data.accuracy)
			responseKey = key
		}
		return outputData{
			Responses: responseStats{
				responseKey: stats,
			},
			Total: stats,
		}
	default:
		// Calculate stats for each response
		allStats := make(responseStats)
		var totalDurations = make(map[time.Duration]uint64)

		for key, response := range data.Responses {
			stats := calculateStats(response.durations, data.accuracy)
			allStats[key] = stats

			// Aggregate for total row
			for duration, count := range response.durations {
				totalDurations[duration] += count
			}
		}

		return outputData{
			Responses: allStats,
			Total:     calculateStats(totalDurations, data.accuracy),
		}
	}
}
|
||||
|
||||
// calculateStats computes count, min/max, mean and the P90/P95/P99
// percentiles from bucketed durations. Bucket keys are durations divided
// by accuracy, so every value is scaled back by accuracy before being
// reported. big.Int is used for the count and the sum so they cannot
// overflow no matter how long the run was.
func calculateStats(durations map[time.Duration]uint64, accuracy time.Duration) responseStat {
	if len(durations) == 0 {
		return responseStat{}
	}

	// Extract and sort unique durations
	sortedDurations := make([]time.Duration, 0, len(durations))
	for duration := range durations {
		sortedDurations = append(sortedDurations, duration)
	}
	slices.Sort(sortedDurations)

	sum := new(big.Int)
	totalCount := new(big.Int)
	minDuration := sortedDurations[0] * accuracy
	maxDuration := sortedDurations[len(sortedDurations)-1] * accuracy

	// Accumulate totalCount += count and sum += actualDuration * count
	// across every bucket.
	for _, duration := range sortedDurations {
		actualDuration := duration * accuracy
		count := durations[duration]

		totalCount.Add(
			totalCount,
			new(big.Int).SetUint64(count),
		)

		sum.Add(
			sum,
			new(big.Int).Mul(
				new(big.Int).SetInt64(int64(actualDuration)),
				new(big.Int).SetUint64(count),
			),
		)
	}

	// Calculate percentiles
	p90 := calculatePercentile(sortedDurations, durations, totalCount, 90, accuracy)
	p95 := calculatePercentile(sortedDurations, durations, totalCount, 95, accuracy)
	p99 := calculatePercentile(sortedDurations, durations, totalCount, 99, accuracy)

	// Average is sum/totalCount rounded to the nearest integer by div.
	return responseStat{
		Count:   BigInt{totalCount},
		Min:     Duration(minDuration),
		Max:     Duration(maxDuration),
		Average: Duration(div(sum, totalCount).Int64()),
		P90:     p90,
		P95:     p95,
		P99:     p99,
	}
}
|
||||
|
||||
func calculatePercentile(sortedDurations []time.Duration, durations map[time.Duration]uint64, totalCount *big.Int, percentile int, accuracy time.Duration) Duration {
|
||||
// Calculate the target position for the percentile
|
||||
// Using ceiling method: position = ceil(totalCount * percentile / 100)
|
||||
target := new(big.Int).Mul(totalCount, big.NewInt(int64(percentile)))
|
||||
target.Add(target, big.NewInt(99)) // Add 99 to achieve ceiling division by 100
|
||||
target.Div(target, big.NewInt(100))
|
||||
|
||||
// Accumulate counts until we reach the target position
|
||||
cumulative := new(big.Int)
|
||||
for _, duration := range sortedDurations {
|
||||
count := durations[duration]
|
||||
cumulative.Add(cumulative, new(big.Int).SetUint64(count))
|
||||
|
||||
if cumulative.Cmp(target) >= 0 {
|
||||
return Duration(duration * accuracy)
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to the last duration (shouldn't happen with valid data)
|
||||
return Duration(sortedDurations[len(sortedDurations)-1] * accuracy)
|
||||
}
|
||||
|
||||
// div performs division with rounding to the nearest integer.
|
||||
func div(x, y *big.Int) *big.Int {
|
||||
quotient, remainder := new(big.Int).DivMod(x, y, new(big.Int))
|
||||
if remainder.Mul(remainder, big.NewInt(2)).Cmp(y) >= 0 {
|
||||
quotient.Add(quotient, big.NewInt(1))
|
||||
}
|
||||
return quotient
|
||||
}
|
||||
|
||||
// wrapText wraps a string onto multiple lines so that no line exceeds
// maxWidth characters. Splitting happens on rune boundaries so that
// multi-byte UTF-8 characters are never cut in half (the original
// byte-based slicing could emit invalid UTF-8 mid-character).
// A maxWidth <= 0 returns the string unchanged instead of looping
// forever.
func wrapText(s string, maxWidth int) string {
	// Fast path: byte length bounds rune count, so a short byte string
	// always fits on one line.
	if maxWidth <= 0 || len(s) <= maxWidth {
		return s
	}

	runes := []rune(s)
	if len(runes) <= maxWidth {
		return s
	}

	var b strings.Builder
	b.Grow(len(s) + len(runes)/maxWidth)
	for start := 0; start < len(runes); start += maxWidth {
		if start > 0 {
			b.WriteByte('\n')
		}
		end := min(start+maxWidth, len(runes))
		b.WriteString(string(runes[start:end]))
	}
	return b.String()
}
|
||||
776
internal/sarin/sarin.go
Normal file
776
internal/sarin/sarin.go
Normal file
@@ -0,0 +1,776 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/bubbles/progress"
|
||||
"github.com/charmbracelet/bubbles/spinner"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// runtimeMessageLevel classifies runtime log messages shown alongside
// the progress UI.
type runtimeMessageLevel uint8

const (
	runtimeMessageLevelWarning runtimeMessageLevel = iota
	runtimeMessageLevelError
)

// runtimeMessage is a timestamped log line emitted by a worker while the
// test is running.
type runtimeMessage struct {
	timestamp time.Time
	level     runtimeMessageLevel
	text      string
}

// messageSender delivers one runtime message; the quiet implementation
// discards messages instead of forwarding them to the UI.
type messageSender func(level runtimeMessageLevel, text string)
|
||||
|
||||
// sarin holds the fully-resolved load-test configuration together with
// the prepared HTTP clients and (optionally) the response-statistics
// collector. Instances are created via NewSarin.
type sarin struct {
	workers        uint           // number of concurrent workers (>= 1, see NewSarin)
	requestURL     *url.URL       // target URL
	methods        []string       // candidate HTTP methods
	params         types.Params   // query parameters
	headers        types.Headers  // request headers
	cookies        types.Cookies  // request cookies
	bodies         []string       // candidate request bodies
	totalRequests  *uint64        // stop after this many requests (nil = no request limit)
	totalDuration  *time.Duration // stop after this much time (nil = no time limit)
	timeout        time.Duration  // per-request timeout
	quiet          bool           // hide the progress bar and runtime logs
	skipCertVerify bool           // skip TLS certificate verification
	values         []string       // shared template values
	collectStats   bool           // whether to record response statistics
	dryRun         bool           // run without sending requests

	hostClients []*fasthttp.HostClient // prepared clients (per proxy, or one direct client)
	responses   *SarinResponseData     // nil unless collectStats is true
}
|
||||
|
||||
// NewSarin creates a new sarin instance for load testing. A workers
// value of 0 is bumped to 1. When collectStats is true a
// response-statistics collector is attached as well.
//
// It can return the following errors:
//   - types.ProxyDialError
func NewSarin(
	ctx context.Context,
	methods []string,
	requestURL *url.URL,
	timeout time.Duration,
	workers uint,
	totalRequests *uint64,
	totalDuration *time.Duration,
	quiet bool,
	skipCertVerify bool,
	params types.Params,
	headers types.Headers,
	cookies types.Cookies,
	bodies []string,
	proxies types.Proxies,
	values []string,
	collectStats bool,
	dryRun bool,
) (*sarin, error) {
	if workers == 0 {
		workers = 1
	}

	// Build one client per proxy (or a single direct client); workers is
	// forwarded in the connection-limit position.
	hostClients, err := newHostClients(ctx, timeout, proxies, workers, requestURL, skipCertVerify)
	if err != nil {
		return nil, err
	}

	srn := &sarin{
		workers:        workers,
		requestURL:     requestURL,
		methods:        methods,
		params:         params,
		headers:        headers,
		cookies:        cookies,
		bodies:         bodies,
		totalRequests:  totalRequests,
		totalDuration:  totalDuration,
		timeout:        timeout,
		quiet:          quiet,
		skipCertVerify: skipCertVerify,
		values:         values,
		collectStats:   collectStats,
		dryRun:         dryRun,
		hostClients:    hostClients,
	}

	if collectStats {
		// 100ns histogram buckets: trades a little precision for far
		// fewer map entries than the 1ns default.
		srn.responses = NewSarinResponseData(uint32(100))
	}

	return srn, nil
}
|
||||
|
||||
// GetResponses returns the collected per-response statistics, or nil when
// stats collection was disabled at construction time.
func (q sarin) GetResponses() *SarinResponseData {
	return q.responses
}
|
||||
|
||||
// Start runs the load test: it spawns the worker pool, optionally starts the
// terminal progress UI, distributes jobs until the request/duration limit or
// the context ends, then shuts everything down in order (jobs channel ->
// workers -> message channel -> progress UI).
func (q sarin) Start(ctx context.Context) {
	// jobsCtx controls job distribution; jobsCancel is triggered by the
	// duration timeout, Ctrl+C in the UI, or the parent context.
	jobsCtx, jobsCancel := context.WithCancel(ctx)

	var workersWG sync.WaitGroup
	// Buffer one job slot per worker so distribution rarely blocks.
	jobsCh := make(chan struct{}, max(q.workers, 1))

	// counter tracks completed jobs across all workers.
	var counter atomic.Uint64

	totalRequests := uint64(0)
	if q.totalRequests != nil {
		totalRequests = *q.totalRequests
	}

	var streamCtx context.Context
	var streamCancel context.CancelFunc
	var streamCh chan struct{}
	var messageChannel chan runtimeMessage
	var sendMessage messageSender

	if q.quiet {
		// Quiet mode: runtime messages are silently discarded.
		sendMessage = func(level runtimeMessageLevel, text string) {}
	} else {
		// The stream context is deliberately detached from ctx so the UI can
		// keep draining data after the parent context is cancelled.
		streamCtx, streamCancel = context.WithCancel(context.Background())
		defer streamCancel()
		streamCh = make(chan struct{})
		messageChannel = make(chan runtimeMessage, max(q.workers, 1))
		sendMessage = func(level runtimeMessageLevel, text string) {
			messageChannel <- runtimeMessage{
				timestamp: time.Now(),
				level:     level,
				text:      text,
			}
		}
	}

	// Start workers
	q.startWorkers(&workersWG, jobsCh, q.hostClients, &counter, sendMessage)

	if !q.quiet {
		// Start streaming to terminal
		//nolint:contextcheck // streamCtx must remain active until all workers complete to ensure all collected data is streamed
		go q.streamProgress(streamCtx, jobsCancel, streamCh, totalRequests, &counter, messageChannel)
	}

	// Setup duration-based cancellation
	q.setupDurationTimeout(ctx, jobsCancel)
	// Distribute jobs to workers.
	// This blocks until all jobs are sent or the context is canceled.
	q.sendJobs(jobsCtx, jobsCh)

	// Close the jobs channel so workers stop after completing their current job
	close(jobsCh)
	// Wait until all workers stopped
	workersWG.Wait()
	// Workers are the only senders on messageChannel, so it is safe to close now.
	if messageChannel != nil {
		close(messageChannel)
	}

	if !q.quiet {
		// Stop the progress streaming
		streamCancel()
		// Wait until progress streaming has completely stopped
		<-streamCh
	}
}
|
||||
|
||||
// Worker is the per-goroutine request loop. It acquires a reusable
// request/response pair, builds the request generator once, and then
// dispatches to one of eight specialized loops selected by the
// (dryRun, collectStats, isDynamic) combination so the per-job hot path
// carries no branching for disabled features.
func (q sarin) Worker(
	jobs <-chan struct{},
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	// Pooled objects are reused for every job handled by this worker.
	req := fasthttp.AcquireRequest()
	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseRequest(req)
	defer fasthttp.ReleaseResponse(resp)

	// isDynamic reports whether the generator must run per request
	// (templated values) or only once (static request).
	requestGenerator, isDynamic := NewRequestGenerator(q.methods, q.requestURL, q.params, q.headers, q.cookies, q.bodies, q.values)

	if q.dryRun {
		switch {
		case q.collectStats && isDynamic:
			q.workerDryRunStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerDryRunStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerDryRunNoStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		default:
			q.workerDryRunNoStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		}
	} else {
		switch {
		case q.collectStats && isDynamic:
			q.workerStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerNoStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		default:
			q.workerNoStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		}
	}
}
|
||||
|
||||
func (q sarin) workerStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
resp.Reset()
|
||||
|
||||
if err := requestGenerator(req); err != nil {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
|
||||
startTime := time.Now()
|
||||
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
if err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
} else {
|
||||
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
// Static request generation failed - record all jobs as errors
|
||||
for range jobs {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
resp.Reset()
|
||||
|
||||
startTime := time.Now()
|
||||
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
if err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
} else {
|
||||
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
resp.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerNoStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
|
||||
// Static request generation failed - just count the jobs without sending
|
||||
for range jobs {
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
resp.Reset()
|
||||
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
// dryRunResponseKey is the response-stats key recorded for requests that were
// generated successfully but intentionally not sent (dry-run mode).
const dryRunResponseKey = "dry-run"
|
||||
|
||||
// statusCodeStrings contains pre-computed string representations for HTTP
// status codes 100-599, indexed by code-100. The range is dense and
// contiguous, so a slice gives a direct O(1) lookup without the hashing cost
// a map[int]string pays on every call of the hot response-recording path.
var statusCodeStrings = func() []string {
	s := make([]string, 500)
	for i := range s {
		s[i] = strconv.Itoa(i + 100)
	}
	return s
}()

// statusCodeToString returns the decimal string representation of an HTTP
// status code. Codes in [100, 600) use the pre-computed table (no
// allocation); anything outside that range falls back to strconv.Itoa.
func statusCodeToString(code int) string {
	if code >= 100 && code < 600 {
		return statusCodeStrings[code-100]
	}
	return strconv.Itoa(code)
}
|
||||
|
||||
func (q sarin) workerDryRunStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
startTime := time.Now()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
q.responses.Add(dryRunResponseKey, time.Since(startTime))
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
// Static request generation failed - record all jobs as errors
|
||||
for range jobs {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
q.responses.Add(dryRunResponseKey, 0)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
// newHostClients initializes HTTP clients for the given configuration.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func newHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies types.Proxies,
|
||||
workers uint,
|
||||
requestURL *url.URL,
|
||||
skipCertVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
proxiesRaw := make([]url.URL, len(proxies))
|
||||
for i, proxy := range proxies {
|
||||
proxiesRaw[i] = url.URL(proxy)
|
||||
}
|
||||
|
||||
maxConns := max(fasthttp.DefaultMaxConnsPerHost, workers)
|
||||
maxConns = ((maxConns * 50 / 100) + maxConns)
|
||||
return NewHostClients(
|
||||
ctx,
|
||||
timeout,
|
||||
proxiesRaw,
|
||||
maxConns,
|
||||
requestURL,
|
||||
skipCertVerify,
|
||||
)
|
||||
}
|
||||
|
||||
// startWorkers launches one worker goroutine per configured worker (at least
// one), each registered on wg so Start can wait for all of them to finish
// after the jobs channel is closed.
func (q sarin) startWorkers(wg *sync.WaitGroup, jobs <-chan struct{}, hostClients []*fasthttp.HostClient, counter *atomic.Uint64, sendMessage messageSender) {
	for range max(q.workers, 1) {
		wg.Go(func() {
			// Each worker gets its own host-client generator over the shared clients.
			q.Worker(jobs, NewHostClientGenerator(hostClients...), counter, sendMessage)
		})
	}
}
|
||||
|
||||
func (q sarin) setupDurationTimeout(ctx context.Context, cancel context.CancelFunc) {
|
||||
if q.totalDuration != nil {
|
||||
go func() {
|
||||
timer := time.NewTimer(*q.totalDuration)
|
||||
defer timer.Stop()
|
||||
select {
|
||||
case <-timer.C:
|
||||
cancel()
|
||||
case <-ctx.Done():
|
||||
// Context cancelled, cleanup
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) sendJobs(ctx context.Context, jobs chan<- struct{}) {
|
||||
if q.totalRequests != nil && *q.totalRequests > 0 {
|
||||
for range *q.totalRequests {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
} else {
|
||||
for ctx.Err() == nil {
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// tickMsg is the bubbletea message emitted by progressTickCmd to drive
// periodic redraws of the progress view.
type tickMsg time.Time

// Shared lipgloss styles for the terminal progress UIs.
var (
	helpStyle    = lipgloss.NewStyle().Foreground(lipgloss.Color("#d1d1d1"))
	errorStyle   = lipgloss.NewStyle().Foreground(lipgloss.Color("#FC5B5B")).Bold(true)
	warningStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#FFD93D")).Bold(true)
	// messageChannelStyle frames the scrolling runtime-message log with a
	// left border and muted foreground.
	messageChannelStyle = lipgloss.NewStyle().
				Border(lipgloss.ThickBorder(), false, false, false, true).
				BorderForeground(lipgloss.Color("#757575")).
				PaddingLeft(1).
				Margin(1, 0, 0, 0).
				Foreground(lipgloss.Color("#888888"))
)
|
||||
|
||||
// progressModel is the bubbletea model shown when the total request count is
// known: a gradient progress bar plus a fixed-size runtime-message log.
type progressModel struct {
	progress  progress.Model
	startTime time.Time
	messages  []string // fixed-length ring of recent runtime messages (oldest dropped on append)
	counter   *atomic.Uint64
	current   uint64 // last counter value read in View
	maxValue  uint64 // total number of requests (non-zero for this model)
	ctx       context.Context //nolint:containedctx
	cancel    context.CancelFunc // triggers shutdown on Ctrl+C
	cancelling bool // true once the user requested a stop
}
|
||||
|
||||
func (m progressModel) Init() tea.Cmd {
|
||||
return tea.Batch(progressTickCmd())
|
||||
}
|
||||
|
||||
// Update implements tea.Model. It handles Ctrl+C (requesting cancellation),
// terminal resizes, incoming runtime messages, and the periodic tick; every
// branch also quits the program once the stream context has been cancelled.
func (m progressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			// Flag the UI as stopping and cancel job distribution; the
			// program itself quits later when the stream context ends.
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case tea.WindowSizeMsg:
		// Keep the bar one column narrower than the terminal, minimum 10.
		m.progress.Width = max(10, msg.Width-1)
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case runtimeMessage:
		// Format as "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Fixed-size ring: drop the oldest entry, append the newest.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case tickMsg:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		// Schedule the next redraw tick.
		return m, progressTickCmd()

	default:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil
	}
}
|
||||
|
||||
// View implements tea.Model. It renders the recent-message log (if any),
// the "current/total - elapsed" line, the progress bar, and a help/stopping
// hint. maxValue is non-zero here: streamProgress only builds this model
// when a positive total is configured.
func (m progressModel) View() string {
	// Join non-empty ring entries; empty slots (not yet filled) are skipped.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	// Sample the shared counter once per render.
	m.current = m.counter.Load()
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(strconv.FormatUint(m.current, 10))
	finalBuilder.WriteString("/")
	finalBuilder.WriteString(strconv.FormatUint(m.maxValue, 10))
	finalBuilder.WriteString(" - ")
	// Elapsed time rounded to 0.1s for stable display.
	finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(m.progress.ViewAs(float64(m.current) / float64(m.maxValue)))
	finalBuilder.WriteString("\n\n ")
	if m.cancelling {
		finalBuilder.WriteString(helpStyle.Render("Stopping..."))
	} else {
		finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
	}
	return finalBuilder.String()
}
|
||||
|
||||
// progressTickCmd returns a bubbletea command that emits a tickMsg every
// 250ms, driving the periodic refresh of the determinate progress view.
func progressTickCmd() tea.Cmd {
	return tea.Tick(time.Millisecond*250, func(t time.Time) tea.Msg {
		return tickMsg(t)
	})
}
|
||||
|
||||
// infiniteProgressStyle colors the spinner used by the indeterminate view.
var infiniteProgressStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#00D4FF"))

// infiniteProgressModel is the bubbletea model shown when no total request
// count is known: a bouncing spinner with the running counter and elapsed time.
type infiniteProgressModel struct {
	spinner   spinner.Model
	startTime time.Time
	counter   *atomic.Uint64
	messages  []string // fixed-length ring of recent runtime messages (oldest dropped on append)
	ctx       context.Context //nolint:containedctx
	quit      bool // set when the stream context ends; View then renders the final static frame
	cancel    context.CancelFunc // triggers shutdown on Ctrl+C
	cancelling bool // true once the user requested a stop
}
|
||||
|
||||
// Init implements tea.Model by starting the spinner's tick loop.
func (m infiniteProgressModel) Init() tea.Cmd {
	return m.spinner.Tick
}
|
||||
|
||||
// Update implements tea.Model. It handles Ctrl+C (requesting cancellation)
// and incoming runtime messages; all other messages (including spinner
// ticks) fall through to the spinner. Each branch quits once the stream
// context has been cancelled, setting quit so View renders its final frame.
func (m infiniteProgressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			// Flag the UI as stopping and cancel job distribution; the
			// program itself quits later when the stream context ends.
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case runtimeMessage:
		// Format as "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Fixed-size ring: drop the oldest entry, append the newest.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		return m, nil

	default:
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		// Let the spinner advance on its own tick messages.
		var cmd tea.Cmd
		m.spinner, cmd = m.spinner.Update(msg)
		return m, cmd
	}
}
|
||||
|
||||
// View implements tea.Model. It renders the recent-message log (if any) and
// a "count spinner elapsed" line; after quit is set it renders a static
// final frame (dots instead of the animated spinner, no help hint).
func (m infiniteProgressModel) View() string {
	// Join non-empty ring entries; empty slots (not yet filled) are skipped.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	if m.quit {
		// Final static frame after shutdown.
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(infiniteProgressStyle.Render("∙∙∙∙∙"))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n")
	} else {
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(m.spinner.View())
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n ")
		if m.cancelling {
			finalBuilder.WriteString(helpStyle.Render("Stopping..."))
		} else {
			finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
		}
	}
	return finalBuilder.String()
}
|
||||
|
||||
// streamProgress runs the terminal progress UI on the calling goroutine
// until ctx is cancelled, then signals done. It picks the determinate bar
// when a positive total is known and the spinner otherwise, and forwards
// runtime messages from messageChannel into the bubbletea program.
//
// Parameters:
//   - ctx: stream lifetime; the models quit once it is cancelled
//   - cancel: invoked by the UI on Ctrl+C to stop job distribution
//   - done: receives one value after the UI has fully shut down
//   - total: configured request total; 0 selects the indeterminate view
//   - counter: shared completed-job counter sampled by the views
//   - messageChannel: runtime messages produced by workers; closed by Start
func (q sarin) streamProgress(
	ctx context.Context,
	cancel context.CancelFunc,
	done chan<- struct{},
	total uint64,
	counter *atomic.Uint64,
	messageChannel <-chan runtimeMessage,
) {
	var program *tea.Program
	if total > 0 {
		// Determinate view: gradient progress bar with an 8-entry message ring.
		model := progressModel{
			progress:  progress.New(progress.WithGradient("#151594", "#00D4FF")),
			startTime: time.Now(),
			messages:  make([]string, 8),
			counter:   counter,
			current:   0,
			maxValue:  total,
			ctx:       ctx,
			cancel:    cancel,
		}

		program = tea.NewProgram(model)
	} else {
		// Indeterminate view: bouncing-dot spinner at 8 frames per second.
		model := infiniteProgressModel{
			spinner: spinner.New(
				spinner.WithSpinner(
					spinner.Spinner{
						Frames: []string{
							"●∙∙∙∙",
							"∙●∙∙∙",
							"∙∙●∙∙",
							"∙∙∙●∙",
							"∙∙∙∙●",
							"∙∙∙●∙",
							"∙∙●∙∙",
							"∙●∙∙∙",
						},
						FPS: time.Second / 8, //nolint:mnd
					},
				),
				spinner.WithStyle(infiniteProgressStyle),
			),
			startTime: time.Now(),
			counter:   counter,
			messages:  make([]string, 8),
			ctx:       ctx,
			cancel:    cancel,
			quit:      false,
		}

		program = tea.NewProgram(model)
	}

	// Pump worker messages into the UI; exits when Start closes the channel.
	go func() {
		for msg := range messageChannel {
			program.Send(msg)
		}
	}()

	if _, err := program.Run(); err != nil {
		panic(err)
	}

	// Tell Start that the UI has completely stopped.
	done <- struct{}{}
}
|
||||
579
internal/sarin/template.go
Normal file
579
internal/sarin/template.go
Normal file
@@ -0,0 +1,579 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"math/rand/v2"
|
||||
"mime/multipart"
|
||||
"strings"
|
||||
"text/template"
|
||||
"text/template/parse"
|
||||
"time"
|
||||
|
||||
"github.com/brianvoe/gofakeit/v7"
|
||||
)
|
||||
|
||||
func NewDefaultTemplateFuncMap(randSource rand.Source) template.FuncMap {
|
||||
fakeit := gofakeit.NewFaker(randSource, false)
|
||||
|
||||
return template.FuncMap{
|
||||
// Strings
|
||||
"strings_ToUpper": strings.ToUpper,
|
||||
"strings_ToLower": strings.ToLower,
|
||||
"strings_RemoveSpaces": func(s string) string { return strings.ReplaceAll(s, " ", "") },
|
||||
"strings_Replace": strings.Replace,
|
||||
"strings_ToDate": func(dateString string) time.Time {
|
||||
date, err := time.Parse("2006-01-02", dateString)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return date
|
||||
},
|
||||
"strings_First": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n])
|
||||
},
|
||||
"strings_Last": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[len(runes)-n:])
|
||||
},
|
||||
"strings_Truncate": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return "..."
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n]) + "..."
|
||||
},
|
||||
"strings_TrimPrefix": strings.TrimPrefix,
|
||||
"strings_TrimSuffix": strings.TrimSuffix,
|
||||
"strings_Join": func(sep string, values ...string) string {
|
||||
return strings.Join(values, sep)
|
||||
},
|
||||
|
||||
// Dict
|
||||
"dict_Str": func(values ...string) map[string]string {
|
||||
dict := make(map[string]string)
|
||||
for i := 0; i < len(values); i += 2 {
|
||||
if i+1 < len(values) {
|
||||
key := values[i]
|
||||
value := values[i+1]
|
||||
dict[key] = value
|
||||
}
|
||||
}
|
||||
return dict
|
||||
},
|
||||
|
||||
// Slice
|
||||
"slice_Str": func(values ...string) []string { return values },
|
||||
"slice_Int": func(values ...int) []int { return values },
|
||||
"slice_Uint": func(values ...uint) []uint { return values },
|
||||
|
||||
// Fakeit / File
|
||||
// "fakeit_CSV": fakeit.CSV(nil),
|
||||
// "fakeit_JSON": fakeit.JSON(nil),
|
||||
// "fakeit_XML": fakeit.XML(nil),
|
||||
"fakeit_FileExtension": fakeit.FileExtension,
|
||||
"fakeit_FileMimeType": fakeit.FileMimeType,
|
||||
|
||||
// Fakeit / ID
|
||||
"fakeit_ID": fakeit.ID,
|
||||
"fakeit_UUID": fakeit.UUID,
|
||||
|
||||
// Fakeit / Template
|
||||
// "fakeit_Template": fakeit.Template(nil) (string, error),
|
||||
// "fakeit_Markdown": fakeit.Markdown(nil) (string, error),
|
||||
// "fakeit_EmailText": fakeit.EmailText(nil) (string, error),
|
||||
// "fakeit_FixedWidth": fakeit.FixedWidth(nil) (string, error),
|
||||
|
||||
// Fakeit / Product
|
||||
// "fakeit_Product": fakeit.Product() *ProductInfo,
|
||||
"fakeit_ProductName": fakeit.ProductName,
|
||||
"fakeit_ProductDescription": fakeit.ProductDescription,
|
||||
"fakeit_ProductCategory": fakeit.ProductCategory,
|
||||
"fakeit_ProductFeature": fakeit.ProductFeature,
|
||||
"fakeit_ProductMaterial": fakeit.ProductMaterial,
|
||||
"fakeit_ProductUPC": fakeit.ProductUPC,
|
||||
"fakeit_ProductAudience": fakeit.ProductAudience,
|
||||
"fakeit_ProductDimension": fakeit.ProductDimension,
|
||||
"fakeit_ProductUseCase": fakeit.ProductUseCase,
|
||||
"fakeit_ProductBenefit": fakeit.ProductBenefit,
|
||||
"fakeit_ProductSuffix": fakeit.ProductSuffix,
|
||||
"fakeit_ProductISBN": func() string { return fakeit.ProductISBN(nil) },
|
||||
|
||||
// Fakeit / Person
|
||||
// "fakeit_Person": fakeit.Person() *PersonInfo,
|
||||
"fakeit_Name": fakeit.Name,
|
||||
"fakeit_NamePrefix": fakeit.NamePrefix,
|
||||
"fakeit_NameSuffix": fakeit.NameSuffix,
|
||||
"fakeit_FirstName": fakeit.FirstName,
|
||||
"fakeit_MiddleName": fakeit.MiddleName,
|
||||
"fakeit_LastName": fakeit.LastName,
|
||||
"fakeit_Gender": fakeit.Gender,
|
||||
"fakeit_Age": fakeit.Age,
|
||||
"fakeit_Ethnicity": fakeit.Ethnicity,
|
||||
"fakeit_SSN": fakeit.SSN,
|
||||
"fakeit_EIN": fakeit.EIN,
|
||||
"fakeit_Hobby": fakeit.Hobby,
|
||||
// "fakeit_Contact": fakeit.Contact() *ContactInfo,
|
||||
"fakeit_Email": fakeit.Email,
|
||||
"fakeit_Phone": fakeit.Phone,
|
||||
"fakeit_PhoneFormatted": fakeit.PhoneFormatted,
|
||||
// "fakeit_Teams": fakeit.Teams(peopleArray []string, teamsArray []string) map[string][]string,
|
||||
|
||||
// Fakeit / Generate
|
||||
// "fakeit_Struct": fakeit.Struct(v any),
|
||||
// "fakeit_Slice": fakeit.Slice(v any),
|
||||
// "fakeit_Map": fakeit.Map() map[string]any,
|
||||
// "fakeit_Generate": fakeit.Generate(value string) string,
|
||||
"fakeit_Regex": fakeit.Regex,
|
||||
|
||||
// Fakeit / Auth
|
||||
"fakeit_Username": fakeit.Username,
|
||||
"fakeit_Password": fakeit.Password,
|
||||
|
||||
// Fakeit / Address
|
||||
// "fakeit_Address": fakeit.Address() *AddressInfo,
|
||||
"fakeit_City": fakeit.City,
|
||||
"fakeit_Country": fakeit.Country,
|
||||
"fakeit_CountryAbr": fakeit.CountryAbr,
|
||||
"fakeit_State": fakeit.State,
|
||||
"fakeit_StateAbr": fakeit.StateAbr,
|
||||
"fakeit_Street": fakeit.Street,
|
||||
"fakeit_StreetName": fakeit.StreetName,
|
||||
"fakeit_StreetNumber": fakeit.StreetNumber,
|
||||
"fakeit_StreetPrefix": fakeit.StreetPrefix,
|
||||
"fakeit_StreetSuffix": fakeit.StreetSuffix,
|
||||
"fakeit_Unit": fakeit.Unit,
|
||||
"fakeit_Zip": fakeit.Zip,
|
||||
"fakeit_Latitude": fakeit.Latitude,
|
||||
"fakeit_LatitudeInRange": func(minLatitude, maxLatitude float64) float64 {
|
||||
value, err := fakeit.LatitudeInRange(minLatitude, maxLatitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
"fakeit_Longitude": fakeit.Longitude,
|
||||
"fakeit_LongitudeInRange": func(minLongitude, maxLongitude float64) float64 {
|
||||
value, err := fakeit.LongitudeInRange(minLongitude, maxLongitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
|
||||
// Fakeit / Game
|
||||
"fakeit_Gamertag": fakeit.Gamertag,
|
||||
// "fakeit_Dice": fakeit.Dice(numDice uint, sides []uint) []uint,
|
||||
|
||||
// Fakeit / Beer
|
||||
"fakeit_BeerAlcohol": fakeit.BeerAlcohol,
|
||||
"fakeit_BeerBlg": fakeit.BeerBlg,
|
||||
"fakeit_BeerHop": fakeit.BeerHop,
|
||||
"fakeit_BeerIbu": fakeit.BeerIbu,
|
||||
"fakeit_BeerMalt": fakeit.BeerMalt,
|
||||
"fakeit_BeerName": fakeit.BeerName,
|
||||
"fakeit_BeerStyle": fakeit.BeerStyle,
|
||||
"fakeit_BeerYeast": fakeit.BeerYeast,
|
||||
|
||||
// Fakeit / Car
|
||||
// "fakeit_Car": fakeit.Car() *CarInfo,
|
||||
"fakeit_CarMaker": fakeit.CarMaker,
|
||||
"fakeit_CarModel": fakeit.CarModel,
|
||||
"fakeit_CarType": fakeit.CarType,
|
||||
"fakeit_CarFuelType": fakeit.CarFuelType,
|
||||
"fakeit_CarTransmissionType": fakeit.CarTransmissionType,
|
||||
|
||||
// Fakeit / Words
|
||||
// Nouns
|
||||
"fakeit_Noun": fakeit.Noun,
|
||||
"fakeit_NounCommon": fakeit.NounCommon,
|
||||
"fakeit_NounConcrete": fakeit.NounConcrete,
|
||||
"fakeit_NounAbstract": fakeit.NounAbstract,
|
||||
"fakeit_NounCollectivePeople": fakeit.NounCollectivePeople,
|
||||
"fakeit_NounCollectiveAnimal": fakeit.NounCollectiveAnimal,
|
||||
"fakeit_NounCollectiveThing": fakeit.NounCollectiveThing,
|
||||
"fakeit_NounCountable": fakeit.NounCountable,
|
||||
"fakeit_NounUncountable": fakeit.NounUncountable,
|
||||
|
||||
// Verbs
|
||||
"fakeit_Verb": fakeit.Verb,
|
||||
"fakeit_VerbAction": fakeit.VerbAction,
|
||||
"fakeit_VerbLinking": fakeit.VerbLinking,
|
||||
"fakeit_VerbHelping": fakeit.VerbHelping,
|
||||
|
||||
// Adverbs
|
||||
"fakeit_Adverb": fakeit.Adverb,
|
||||
"fakeit_AdverbManner": fakeit.AdverbManner,
|
||||
"fakeit_AdverbDegree": fakeit.AdverbDegree,
|
||||
"fakeit_AdverbPlace": fakeit.AdverbPlace,
|
||||
"fakeit_AdverbTimeDefinite": fakeit.AdverbTimeDefinite,
|
||||
"fakeit_AdverbTimeIndefinite": fakeit.AdverbTimeIndefinite,
|
||||
"fakeit_AdverbFrequencyDefinite": fakeit.AdverbFrequencyDefinite,
|
||||
"fakeit_AdverbFrequencyIndefinite": fakeit.AdverbFrequencyIndefinite,
|
||||
|
||||
// Propositions
|
||||
"fakeit_Preposition": fakeit.Preposition,
|
||||
"fakeit_PrepositionSimple": fakeit.PrepositionSimple,
|
||||
"fakeit_PrepositionDouble": fakeit.PrepositionDouble,
|
||||
"fakeit_PrepositionCompound": fakeit.PrepositionCompound,
|
||||
|
||||
// Adjectives
|
||||
"fakeit_Adjective": fakeit.Adjective,
|
||||
"fakeit_AdjectiveDescriptive": fakeit.AdjectiveDescriptive,
|
||||
"fakeit_AdjectiveQuantitative": fakeit.AdjectiveQuantitative,
|
||||
"fakeit_AdjectiveProper": fakeit.AdjectiveProper,
|
||||
"fakeit_AdjectiveDemonstrative": fakeit.AdjectiveDemonstrative,
|
||||
"fakeit_AdjectivePossessive": fakeit.AdjectivePossessive,
|
||||
"fakeit_AdjectiveInterrogative": fakeit.AdjectiveInterrogative,
|
||||
"fakeit_AdjectiveIndefinite": fakeit.AdjectiveIndefinite,
|
||||
|
||||
// Pronouns
|
||||
"fakeit_Pronoun": fakeit.Pronoun,
|
||||
"fakeit_PronounPersonal": fakeit.PronounPersonal,
|
||||
"fakeit_PronounObject": fakeit.PronounObject,
|
||||
"fakeit_PronounPossessive": fakeit.PronounPossessive,
|
||||
"fakeit_PronounReflective": fakeit.PronounReflective,
|
||||
"fakeit_PronounDemonstrative": fakeit.PronounDemonstrative,
|
||||
"fakeit_PronounInterrogative": fakeit.PronounInterrogative,
|
||||
"fakeit_PronounRelative": fakeit.PronounRelative,
|
||||
|
||||
// Connectives
|
||||
"fakeit_Connective": fakeit.Connective,
|
||||
"fakeit_ConnectiveTime": fakeit.ConnectiveTime,
|
||||
"fakeit_ConnectiveComparative": fakeit.ConnectiveComparative,
|
||||
"fakeit_ConnectiveComplaint": fakeit.ConnectiveComplaint,
|
||||
"fakeit_ConnectiveListing": fakeit.ConnectiveListing,
|
||||
"fakeit_ConnectiveCasual": fakeit.ConnectiveCasual,
|
||||
"fakeit_ConnectiveExamplify": fakeit.ConnectiveExamplify,
|
||||
|
||||
// Words
|
||||
"fakeit_Word": fakeit.Word,
|
||||
|
||||
// Text
|
||||
"fakeit_Sentence": fakeit.Sentence,
|
||||
"fakeit_Paragraph": fakeit.Paragraph,
|
||||
"fakeit_LoremIpsumWord": fakeit.LoremIpsumWord,
|
||||
"fakeit_LoremIpsumSentence": fakeit.LoremIpsumSentence,
|
||||
"fakeit_LoremIpsumParagraph": fakeit.LoremIpsumParagraph,
|
||||
"fakeit_Question": fakeit.Question,
|
||||
"fakeit_Quote": fakeit.Quote,
|
||||
"fakeit_Phrase": fakeit.Phrase,
|
||||
|
||||
// Fakeit / Foods
|
||||
"fakeit_Fruit": fakeit.Fruit,
|
||||
"fakeit_Vegetable": fakeit.Vegetable,
|
||||
"fakeit_Breakfast": fakeit.Breakfast,
|
||||
"fakeit_Lunch": fakeit.Lunch,
|
||||
"fakeit_Dinner": fakeit.Dinner,
|
||||
"fakeit_Snack": fakeit.Snack,
|
||||
"fakeit_Dessert": fakeit.Dessert,
|
||||
|
||||
// Fakeit / Misc
|
||||
"fakeit_Bool": fakeit.Bool,
|
||||
// "fakeit_Weighted": fakeit.Weighted(options []any, weights []float32) (any, error),
|
||||
"fakeit_FlipACoin": fakeit.FlipACoin,
|
||||
// "fakeit_RandomMapKey": fakeit.RandomMapKey(mapI any) any,
|
||||
// "fakeit_ShuffleAnySlice": fakeit.ShuffleAnySlice(v any),
|
||||
|
||||
// Fakeit / Colors
|
||||
"fakeit_Color": fakeit.Color,
|
||||
"fakeit_HexColor": fakeit.HexColor,
|
||||
"fakeit_RGBColor": fakeit.RGBColor,
|
||||
"fakeit_SafeColor": fakeit.SafeColor,
|
||||
"fakeit_NiceColors": fakeit.NiceColors,
|
||||
|
||||
// Fakeit / Images
|
||||
// "fakeit_Image": fakeit.Image(width int, height int) *img.RGBA,
|
||||
"fakeit_ImageJpeg": fakeit.ImageJpeg,
|
||||
"fakeit_ImagePng": fakeit.ImagePng,
|
||||
|
||||
// Fakeit / Internet
|
||||
"fakeit_URL": fakeit.URL,
|
||||
"fakeit_UrlSlug": fakeit.UrlSlug,
|
||||
"fakeit_DomainName": fakeit.DomainName,
|
||||
"fakeit_DomainSuffix": fakeit.DomainSuffix,
|
||||
"fakeit_IPv4Address": fakeit.IPv4Address,
|
||||
"fakeit_IPv6Address": fakeit.IPv6Address,
|
||||
"fakeit_MacAddress": fakeit.MacAddress,
|
||||
"fakeit_HTTPStatusCode": fakeit.HTTPStatusCode,
|
||||
"fakeit_HTTPStatusCodeSimple": fakeit.HTTPStatusCodeSimple,
|
||||
"fakeit_LogLevel": fakeit.LogLevel,
|
||||
"fakeit_HTTPMethod": fakeit.HTTPMethod,
|
||||
"fakeit_HTTPVersion": fakeit.HTTPVersion,
|
||||
"fakeit_UserAgent": fakeit.UserAgent,
|
||||
"fakeit_ChromeUserAgent": fakeit.ChromeUserAgent,
|
||||
"fakeit_FirefoxUserAgent": fakeit.FirefoxUserAgent,
|
||||
"fakeit_OperaUserAgent": fakeit.OperaUserAgent,
|
||||
"fakeit_SafariUserAgent": fakeit.SafariUserAgent,
|
||||
"fakeit_APIUserAgent": fakeit.APIUserAgent,
|
||||
|
||||
// Fakeit / HTML
|
||||
"fakeit_InputName": fakeit.InputName,
|
||||
"fakeit_Svg": func() string { return fakeit.Svg(nil) },
|
||||
|
||||
// Fakeit / Date/Time
|
||||
"fakeit_Date": fakeit.Date,
|
||||
"fakeit_PastDate": fakeit.PastDate,
|
||||
"fakeit_FutureDate": fakeit.FutureDate,
|
||||
"fakeit_DateRange": fakeit.DateRange,
|
||||
"fakeit_NanoSecond": fakeit.NanoSecond,
|
||||
"fakeit_Second": fakeit.Second,
|
||||
"fakeit_Minute": fakeit.Minute,
|
||||
"fakeit_Hour": fakeit.Hour,
|
||||
"fakeit_Month": fakeit.Month,
|
||||
"fakeit_MonthString": fakeit.MonthString,
|
||||
"fakeit_Day": fakeit.Day,
|
||||
"fakeit_WeekDay": fakeit.WeekDay,
|
||||
"fakeit_Year": fakeit.Year,
|
||||
"fakeit_TimeZone": fakeit.TimeZone,
|
||||
"fakeit_TimeZoneAbv": fakeit.TimeZoneAbv,
|
||||
"fakeit_TimeZoneFull": fakeit.TimeZoneFull,
|
||||
"fakeit_TimeZoneOffset": fakeit.TimeZoneOffset,
|
||||
"fakeit_TimeZoneRegion": fakeit.TimeZoneRegion,
|
||||
|
||||
// Fakeit / Payment
|
||||
"fakeit_Price": fakeit.Price,
|
||||
// "fakeit_CreditCard": fakeit.CreditCard() *CreditCardInfo,
|
||||
"fakeit_CreditCardCvv": fakeit.CreditCardCvv,
|
||||
"fakeit_CreditCardExp": fakeit.CreditCardExp,
|
||||
"fakeit_CreditCardNumber": func(gaps bool) string {
|
||||
return fakeit.CreditCardNumber(&gofakeit.CreditCardOptions{Gaps: gaps})
|
||||
},
|
||||
"fakeit_CreditCardType": fakeit.CreditCardType,
|
||||
// "fakeit_Currency": fakeit.Currency() *CurrencyInfo,
|
||||
"fakeit_CurrencyLong": fakeit.CurrencyLong,
|
||||
"fakeit_CurrencyShort": fakeit.CurrencyShort,
|
||||
"fakeit_AchRouting": fakeit.AchRouting,
|
||||
"fakeit_AchAccount": fakeit.AchAccount,
|
||||
"fakeit_BitcoinAddress": fakeit.BitcoinAddress,
|
||||
"fakeit_BitcoinPrivateKey": fakeit.BitcoinPrivateKey,
|
||||
"fakeit_BankName": fakeit.BankName,
|
||||
"fakeit_BankType": fakeit.BankType,
|
||||
|
||||
// Fakeit / Finance
|
||||
"fakeit_Cusip": fakeit.Cusip,
|
||||
"fakeit_Isin": fakeit.Isin,
|
||||
|
||||
// Fakeit / Company
|
||||
"fakeit_BS": fakeit.BS,
|
||||
"fakeit_Blurb": fakeit.Blurb,
|
||||
"fakeit_BuzzWord": fakeit.BuzzWord,
|
||||
"fakeit_Company": fakeit.Company,
|
||||
"fakeit_CompanySuffix": fakeit.CompanySuffix,
|
||||
// "fakeit_Job": fakeit.Job() *JobInfo,
|
||||
"fakeit_JobDescriptor": fakeit.JobDescriptor,
|
||||
"fakeit_JobLevel": fakeit.JobLevel,
|
||||
"fakeit_JobTitle": fakeit.JobTitle,
|
||||
"fakeit_Slogan": fakeit.Slogan,
|
||||
|
||||
// Fakeit / Hacker
|
||||
"fakeit_HackerAbbreviation": fakeit.HackerAbbreviation,
|
||||
"fakeit_HackerAdjective": fakeit.HackerAdjective,
|
||||
"fakeit_HackeringVerb": fakeit.HackeringVerb,
|
||||
"fakeit_HackerNoun": fakeit.HackerNoun,
|
||||
"fakeit_HackerPhrase": fakeit.HackerPhrase,
|
||||
"fakeit_HackerVerb": fakeit.HackerVerb,
|
||||
|
||||
// Fakeit / Hipster
|
||||
"fakeit_HipsterWord": fakeit.HipsterWord,
|
||||
"fakeit_HipsterSentence": fakeit.HipsterSentence,
|
||||
"fakeit_HipsterParagraph": fakeit.HipsterParagraph,
|
||||
|
||||
// Fakeit / App
|
||||
"fakeit_AppName": fakeit.AppName,
|
||||
"fakeit_AppVersion": fakeit.AppVersion,
|
||||
"fakeit_AppAuthor": fakeit.AppAuthor,
|
||||
|
||||
// Fakeit / Animal
|
||||
"fakeit_PetName": fakeit.PetName,
|
||||
"fakeit_Animal": fakeit.Animal,
|
||||
"fakeit_AnimalType": fakeit.AnimalType,
|
||||
"fakeit_FarmAnimal": fakeit.FarmAnimal,
|
||||
"fakeit_Cat": fakeit.Cat,
|
||||
"fakeit_Dog": fakeit.Dog,
|
||||
"fakeit_Bird": fakeit.Bird,
|
||||
|
||||
// Fakeit / Emoji
|
||||
"fakeit_Emoji": fakeit.Emoji,
|
||||
"fakeit_EmojiCategory": fakeit.EmojiCategory,
|
||||
"fakeit_EmojiAlias": fakeit.EmojiAlias,
|
||||
"fakeit_EmojiTag": fakeit.EmojiTag,
|
||||
"fakeit_EmojiFlag": fakeit.EmojiFlag,
|
||||
"fakeit_EmojiAnimal": fakeit.EmojiAnimal,
|
||||
"fakeit_EmojiFood": fakeit.EmojiFood,
|
||||
"fakeit_EmojiPlant": fakeit.EmojiPlant,
|
||||
"fakeit_EmojiMusic": fakeit.EmojiMusic,
|
||||
"fakeit_EmojiVehicle": fakeit.EmojiVehicle,
|
||||
"fakeit_EmojiSport": fakeit.EmojiSport,
|
||||
"fakeit_EmojiFace": fakeit.EmojiFace,
|
||||
"fakeit_EmojiHand": fakeit.EmojiHand,
|
||||
"fakeit_EmojiClothing": fakeit.EmojiClothing,
|
||||
"fakeit_EmojiLandmark": fakeit.EmojiLandmark,
|
||||
"fakeit_EmojiElectronics": fakeit.EmojiElectronics,
|
||||
"fakeit_EmojiGame": fakeit.EmojiGame,
|
||||
"fakeit_EmojiTools": fakeit.EmojiTools,
|
||||
"fakeit_EmojiWeather": fakeit.EmojiWeather,
|
||||
"fakeit_EmojiJob": fakeit.EmojiJob,
|
||||
"fakeit_EmojiPerson": fakeit.EmojiPerson,
|
||||
"fakeit_EmojiGesture": fakeit.EmojiGesture,
|
||||
"fakeit_EmojiCostume": fakeit.EmojiCostume,
|
||||
"fakeit_EmojiSentence": fakeit.EmojiSentence,
|
||||
|
||||
// Fakeit / Language
|
||||
"fakeit_Language": fakeit.Language,
|
||||
"fakeit_LanguageAbbreviation": fakeit.LanguageAbbreviation,
|
||||
"fakeit_ProgrammingLanguage": fakeit.ProgrammingLanguage,
|
||||
|
||||
// Fakeit / Number
|
||||
"fakeit_Number": fakeit.Number,
|
||||
"fakeit_Int": fakeit.Int,
|
||||
"fakeit_IntN": fakeit.IntN,
|
||||
"fakeit_Int8": fakeit.Int8,
|
||||
"fakeit_Int16": fakeit.Int16,
|
||||
"fakeit_Int32": fakeit.Int32,
|
||||
"fakeit_Int64": fakeit.Int64,
|
||||
"fakeit_Uint": fakeit.Uint,
|
||||
"fakeit_UintN": fakeit.UintN,
|
||||
"fakeit_Uint8": fakeit.Uint8,
|
||||
"fakeit_Uint16": fakeit.Uint16,
|
||||
"fakeit_Uint32": fakeit.Uint32,
|
||||
"fakeit_Uint64": fakeit.Uint64,
|
||||
"fakeit_Float32": fakeit.Float32,
|
||||
"fakeit_Float32Range": fakeit.Float32Range,
|
||||
"fakeit_Float64": fakeit.Float64,
|
||||
"fakeit_Float64Range": fakeit.Float64Range,
|
||||
// "fakeit_ShuffleInts": fakeit.ShuffleInts,
|
||||
"fakeit_RandomInt": fakeit.RandomInt,
|
||||
"fakeit_HexUint": fakeit.HexUint,
|
||||
|
||||
// Fakeit / String
|
||||
"fakeit_Digit": fakeit.Digit,
|
||||
"fakeit_DigitN": fakeit.DigitN,
|
||||
"fakeit_Letter": fakeit.Letter,
|
||||
"fakeit_LetterN": fakeit.LetterN,
|
||||
"fakeit_Lexify": fakeit.Lexify,
|
||||
"fakeit_Numerify": fakeit.Numerify,
|
||||
// "fakeit_ShuffleStrings": fakeit.ShuffleStrings,
|
||||
"fakeit_RandomString": fakeit.RandomString,
|
||||
|
||||
// Fakeit / Celebrity
|
||||
"fakeit_CelebrityActor": fakeit.CelebrityActor,
|
||||
"fakeit_CelebrityBusiness": fakeit.CelebrityBusiness,
|
||||
"fakeit_CelebritySport": fakeit.CelebritySport,
|
||||
|
||||
// Fakeit / Minecraft
|
||||
"fakeit_MinecraftOre": fakeit.MinecraftOre,
|
||||
"fakeit_MinecraftWood": fakeit.MinecraftWood,
|
||||
"fakeit_MinecraftArmorTier": fakeit.MinecraftArmorTier,
|
||||
"fakeit_MinecraftArmorPart": fakeit.MinecraftArmorPart,
|
||||
"fakeit_MinecraftWeapon": fakeit.MinecraftWeapon,
|
||||
"fakeit_MinecraftTool": fakeit.MinecraftTool,
|
||||
"fakeit_MinecraftDye": fakeit.MinecraftDye,
|
||||
"fakeit_MinecraftFood": fakeit.MinecraftFood,
|
||||
"fakeit_MinecraftAnimal": fakeit.MinecraftAnimal,
|
||||
"fakeit_MinecraftVillagerJob": fakeit.MinecraftVillagerJob,
|
||||
"fakeit_MinecraftVillagerStation": fakeit.MinecraftVillagerStation,
|
||||
"fakeit_MinecraftVillagerLevel": fakeit.MinecraftVillagerLevel,
|
||||
"fakeit_MinecraftMobPassive": fakeit.MinecraftMobPassive,
|
||||
"fakeit_MinecraftMobNeutral": fakeit.MinecraftMobNeutral,
|
||||
"fakeit_MinecraftMobHostile": fakeit.MinecraftMobHostile,
|
||||
"fakeit_MinecraftMobBoss": fakeit.MinecraftMobBoss,
|
||||
"fakeit_MinecraftBiome": fakeit.MinecraftBiome,
|
||||
"fakeit_MinecraftWeather": fakeit.MinecraftWeather,
|
||||
|
||||
// Fakeit / Book
|
||||
// "fakeit_Book": fakeit.Book() *BookInfo,
|
||||
"fakeit_BookTitle": fakeit.BookTitle,
|
||||
"fakeit_BookAuthor": fakeit.BookAuthor,
|
||||
"fakeit_BookGenre": fakeit.BookGenre,
|
||||
|
||||
// Fakeit / Movie
|
||||
// "fakeit_Movie": fakeit.Movie() *MovieInfo,
|
||||
"fakeit_MovieName": fakeit.MovieName,
|
||||
"fakeit_MovieGenre": fakeit.MovieGenre,
|
||||
|
||||
// Fakeit / Error
|
||||
"fakeit_Error": func() string { return fakeit.Error().Error() },
|
||||
"fakeit_ErrorDatabase": func() string { return fakeit.ErrorDatabase().Error() },
|
||||
"fakeit_ErrorGRPC": func() string { return fakeit.ErrorGRPC().Error() },
|
||||
"fakeit_ErrorHTTP": func() string { return fakeit.ErrorHTTP().Error() },
|
||||
"fakeit_ErrorHTTPClient": func() string { return fakeit.ErrorHTTPClient().Error() },
|
||||
"fakeit_ErrorHTTPServer": func() string { return fakeit.ErrorHTTPServer().Error() },
|
||||
// "fakeit_ErrorInput": func() string { return fakeit.ErrorInput().Error() },
|
||||
"fakeit_ErrorRuntime": func() string { return fakeit.ErrorRuntime().Error() },
|
||||
|
||||
// Fakeit / School
|
||||
"fakeit_School": fakeit.School,
|
||||
|
||||
// Fakeit / Song
|
||||
// "fakeit_Song": fakeit.Song() *SongInfo,
|
||||
"fakeit_SongName": fakeit.SongName,
|
||||
"fakeit_SongArtist": fakeit.SongArtist,
|
||||
"fakeit_SongGenre": fakeit.SongGenre,
|
||||
}
|
||||
}
|
||||
|
||||
// BodyTemplateFuncMapData carries per-render state produced by body template
// functions — currently only the multipart Content-Type generated by the
// "body_FormData" helper registered in NewDefaultBodyTemplateFuncMap.
// NOTE(review): "ContenType" is a typo for "ContentType" in the field and in
// both exported method names; renaming the methods would break callers, so
// the typo is only flagged here.
type BodyTemplateFuncMapData struct {
	// Content-Type (including the generated multipart boundary) recorded by
	// the most recent body_FormData call; empty until then or after a clear.
	formDataContenType string
}

// GetFormDataContenType returns the Content-Type recorded by the last
// body_FormData template call (empty if none has run since the last clear).
func (data BodyTemplateFuncMapData) GetFormDataContenType() string {
	return data.formDataContenType
}

// ClearFormDataContenType resets the recorded Content-Type so a stale value
// is not reused for a render whose body did not call body_FormData.
func (data *BodyTemplateFuncMapData) ClearFormDataContenType() {
	data.formDataContenType = ""
}
|
||||
|
||||
// NewDefaultBodyTemplateFuncMap returns the default template FuncMap extended
// with body-specific helpers. When data is non-nil it registers
// "body_FormData", which renders a multipart/form-data payload from the given
// key/value pairs and records the matching Content-Type (with boundary) in
// data so the caller can set the request header after rendering.
func NewDefaultBodyTemplateFuncMap(randSource rand.Source, data *BodyTemplateFuncMapData) template.FuncMap {
	funcMap := NewDefaultTemplateFuncMap(randSource)

	if data != nil {
		funcMap["body_FormData"] = func(kv map[string]string) string {
			var multipartData bytes.Buffer
			writer := multipart.NewWriter(&multipartData)
			// Record the Content-Type first; it embeds the boundary that the
			// writer will use for every field below.
			data.formDataContenType = writer.FormDataContentType()

			// NOTE(review): map iteration order is random, so the field order
			// in the rendered body differs between runs — confirm no consumer
			// depends on a stable order.
			for k, v := range kv {
				// Best-effort rendering: write errors are deliberately ignored
				// (the destination is an in-memory buffer).
				_ = writer.WriteField(k, v)
			}

			_ = writer.Close()
			return multipartData.String()
		}
	}

	return funcMap
}
|
||||
|
||||
func hasTemplateActions(tmpl *template.Template) bool {
|
||||
if tmpl.Tree == nil || tmpl.Root == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, node := range tmpl.Root.Nodes {
|
||||
switch node.Type() {
|
||||
case parse.NodeAction, parse.NodeIf, parse.NodeRange,
|
||||
parse.NodeWith, parse.NodeTemplate:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
46
internal/types/config_file.go
Normal file
46
internal/types/config_file.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ConfigFileType identifies the on-disk format of a config file.
type ConfigFileType string

const (
	// ConfigFileTypeUnknown marks files whose format could not be detected.
	ConfigFileTypeUnknown ConfigFileType = "unknown"
	// ConfigFileTypeYAML marks files with a .yaml or .yml extension.
	ConfigFileTypeYAML ConfigFileType = "yaml/yml"
)

// ConfigFile pairs a config file location with its detected format.
type ConfigFile struct {
	path  string
	_type ConfigFileType
}

// Path returns the file path (or URL) this ConfigFile was created from.
func (cf ConfigFile) Path() string {
	return cf.path
}

// Type returns the detected format of the config file.
func (cf ConfigFile) Type() ConfigFileType {
	return cf._type
}

// ParseConfigFile builds a ConfigFile from a raw path, detecting the format
// from the (case-insensitive) file extension.
// TODO: Improve file type detection
// (e.g., use magic bytes or content inspection instead of relying solely on file extension)
func ParseConfigFile(configFileRaw string) *ConfigFile {
	fileType := ConfigFileTypeUnknown

	ext := strings.TrimPrefix(filepath.Ext(configFileRaw), ".")
	if strings.EqualFold(ext, "yml") || strings.EqualFold(ext, "yaml") {
		fileType = ConfigFileTypeYAML
	}

	return &ConfigFile{path: configFileRaw, _type: fileType}
}
|
||||
40
internal/types/cookie.go
Normal file
40
internal/types/cookie.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Cookie KeyValue[string, []string]
|
||||
|
||||
type Cookies []Cookie
|
||||
|
||||
func (cookies Cookies) GetValue(key string) *[]string {
|
||||
for i := range cookies {
|
||||
if cookies[i].Key == key {
|
||||
return &cookies[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cookies *Cookies) Append(cookie ...Cookie) {
|
||||
for _, c := range cookie {
|
||||
if item := cookies.GetValue(c.Key); item != nil {
|
||||
*item = append(*item, c.Value...)
|
||||
} else {
|
||||
*cookies = append(*cookies, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (cookies *Cookies) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
cookies.Append(*ParseCookie(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseCookie(rawValue string) *Cookie {
|
||||
parts := strings.SplitN(rawValue, "=", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Cookie{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Cookie{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
189
internal/types/errors.go
Normal file
189
internal/types/errors.go
Normal file
@@ -0,0 +1,189 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
	// General

	// ErrNoError is an internal placeholder substituted when an error-wrapper
	// constructor receives a nil cause, so wrapper types never hold nil.
	ErrNoError = errors.New("no error (internal)")

	// CLI

	// ErrCLINoArgs signals that the CLI was invoked without any arguments.
	ErrCLINoArgs = errors.New("CLI expects arguments but received none")
)
|
||||
|
||||
// ======================================== General ========================================
|
||||
|
||||
type FieldParseError struct {
|
||||
Field string
|
||||
Value string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewFieldParseError(field string, value string, err error) FieldParseError {
|
||||
if err == nil {
|
||||
err = ErrNoError
|
||||
}
|
||||
return FieldParseError{field, value, err}
|
||||
}
|
||||
|
||||
func (e FieldParseError) Error() string {
|
||||
return fmt.Sprintf("Field '%s' parse failed: %v", e.Field, e.Err)
|
||||
}
|
||||
|
||||
func (e FieldParseError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type FieldParseErrors struct {
|
||||
Errors []FieldParseError
|
||||
}
|
||||
|
||||
func NewFieldParseErrors(fieldParseErrors []FieldParseError) FieldParseErrors {
|
||||
return FieldParseErrors{fieldParseErrors}
|
||||
}
|
||||
|
||||
func (e FieldParseErrors) Error() string {
|
||||
if len(e.Errors) == 0 {
|
||||
return "No field parse errors"
|
||||
}
|
||||
if len(e.Errors) == 1 {
|
||||
return e.Errors[0].Error()
|
||||
}
|
||||
|
||||
var builder strings.Builder
|
||||
for i, err := range e.Errors {
|
||||
if i > 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
builder.WriteString(err.Error())
|
||||
}
|
||||
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
type FieldValidationError struct {
|
||||
Field string
|
||||
Value string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewFieldValidationError(field string, value string, err error) FieldValidationError {
|
||||
if err == nil {
|
||||
err = ErrNoError
|
||||
}
|
||||
return FieldValidationError{field, value, err}
|
||||
}
|
||||
|
||||
func (e FieldValidationError) Error() string {
|
||||
return fmt.Sprintf("Field '%s' validation failed: %v", e.Field, e.Err)
|
||||
}
|
||||
|
||||
func (e FieldValidationError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type FieldValidationErrors struct {
|
||||
Errors []FieldValidationError
|
||||
}
|
||||
|
||||
func NewFieldValidationErrors(fieldValidationErrors []FieldValidationError) FieldValidationErrors {
|
||||
return FieldValidationErrors{fieldValidationErrors}
|
||||
}
|
||||
|
||||
func (e FieldValidationErrors) Error() string {
|
||||
if len(e.Errors) == 0 {
|
||||
return "No field validation errors"
|
||||
}
|
||||
if len(e.Errors) == 1 {
|
||||
return e.Errors[0].Error()
|
||||
}
|
||||
|
||||
var builder strings.Builder
|
||||
for i, err := range e.Errors {
|
||||
if i > 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
builder.WriteString(err.Error())
|
||||
}
|
||||
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
type UnmarshalError struct {
|
||||
error error
|
||||
}
|
||||
|
||||
func NewUnmarshalError(err error) UnmarshalError {
|
||||
if err == nil {
|
||||
err = ErrNoError
|
||||
}
|
||||
return UnmarshalError{err}
|
||||
}
|
||||
|
||||
func (e UnmarshalError) Error() string {
|
||||
return "Unmarshal error: " + e.error.Error()
|
||||
}
|
||||
|
||||
func (e UnmarshalError) Unwrap() error {
|
||||
return e.error
|
||||
}
|
||||
|
||||
// ======================================== CLI ========================================
|
||||
|
||||
// CLIUnexpectedArgsError reports positional arguments that the CLI does not
// accept.
type CLIUnexpectedArgsError struct {
	Args []string
}

// NewCLIUnexpectedArgsError wraps the offending arguments in an error value.
func NewCLIUnexpectedArgsError(args []string) CLIUnexpectedArgsError {
	return CLIUnexpectedArgsError{Args: args}
}

// Error implements the error interface, listing the arguments
// comma-separated.
func (e CLIUnexpectedArgsError) Error() string {
	return "CLI received unexpected arguments: " + strings.Join(e.Args, ",")
}
|
||||
|
||||
// ======================================== Config File ========================================
|
||||
|
||||
type ConfigFileReadError struct {
|
||||
error error
|
||||
}
|
||||
|
||||
func NewConfigFileReadError(err error) ConfigFileReadError {
|
||||
if err == nil {
|
||||
err = ErrNoError
|
||||
}
|
||||
return ConfigFileReadError{err}
|
||||
}
|
||||
|
||||
func (e ConfigFileReadError) Error() string {
|
||||
return "Config file read error: " + e.error.Error()
|
||||
}
|
||||
|
||||
func (e ConfigFileReadError) Unwrap() error {
|
||||
return e.error
|
||||
}
|
||||
|
||||
// ======================================== Proxy ========================================
|
||||
|
||||
type ProxyDialError struct {
|
||||
Proxy string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewProxyDialError(proxy string, err error) ProxyDialError {
|
||||
if err == nil {
|
||||
err = ErrNoError
|
||||
}
|
||||
return ProxyDialError{proxy, err}
|
||||
}
|
||||
|
||||
func (e ProxyDialError) Error() string {
|
||||
return "proxy \"" + e.Proxy + "\": " + e.Err.Error()
|
||||
}
|
||||
|
||||
func (e ProxyDialError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
49
internal/types/header.go
Normal file
49
internal/types/header.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Header KeyValue[string, []string]
|
||||
|
||||
type Headers []Header
|
||||
|
||||
func (headers Headers) Has(key string) bool {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (headers Headers) GetValue(key string) *[]string {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return &headers[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers *Headers) Append(header ...Header) {
|
||||
for _, h := range header {
|
||||
if item := headers.GetValue(h.Key); item != nil {
|
||||
*item = append(*item, h.Value...)
|
||||
} else {
|
||||
*headers = append(*headers, h)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (headers *Headers) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
headers.Append(*ParseHeader(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseHeader(rawValue string) *Header {
|
||||
parts := strings.SplitN(rawValue, ": ", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Header{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Header{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
6
internal/types/key_value.go
Normal file
6
internal/types/key_value.go
Normal file
@@ -0,0 +1,6 @@
|
||||
package types
|
||||
|
||||
// KeyValue is a generic key/value pair used as the building block for the
// Header, Cookie, and Param types in this package.
type KeyValue[K, V any] struct {
	Key   K
	Value V
}
|
||||
40
internal/types/param.go
Normal file
40
internal/types/param.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Param KeyValue[string, []string]
|
||||
|
||||
type Params []Param
|
||||
|
||||
func (params Params) GetValue(key string) *[]string {
|
||||
for i := range params {
|
||||
if params[i].Key == key {
|
||||
return ¶ms[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (params *Params) Append(param ...Param) {
|
||||
for _, p := range param {
|
||||
if item := params.GetValue(p.Key); item != nil {
|
||||
*item = append(*item, p.Value...)
|
||||
} else {
|
||||
*params = append(*params, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (params *Params) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
params.Append(*ParseParam(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseParam(rawValue string) *Param {
|
||||
parts := strings.SplitN(rawValue, "=", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Param{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Param{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
38
internal/types/proxy.go
Normal file
38
internal/types/proxy.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// Proxy is a parsed proxy URL.
type Proxy url.URL

// String renders the proxy back to its URL form.
func (proxy Proxy) String() string {
	return (*url.URL)(&proxy).String()
}

// Proxies is a collection of proxy URLs.
type Proxies []Proxy

// Append adds the given proxies to the collection.
func (proxies *Proxies) Append(proxy ...Proxy) {
	*proxies = append(*proxies, proxy...)
}

// Parse parses each raw URL and appends the results to the collection. It
// stops at and returns the first parse error; values parsed before the
// failure remain appended.
//
// Improvement: made variadic for consistency with Cookies.Parse,
// Headers.Parse, and Params.Parse; existing single-argument calls compile
// unchanged.
func (proxies *Proxies) Parse(rawValues ...string) error {
	for _, rawValue := range rawValues {
		parsedProxy, err := ParseProxy(rawValue)
		if err != nil {
			return err
		}
		proxies.Append(*parsedProxy)
	}
	return nil
}

// ParseProxy parses a raw URL string into a Proxy, wrapping any url.Parse
// failure with context.
func ParseProxy(rawValue string) (*Proxy, error) {
	urlParsed, err := url.Parse(rawValue)
	if err != nil {
		return nil, fmt.Errorf("failed to parse proxy URL: %w", err)
	}

	proxyParsed := Proxy(*urlParsed)
	return &proxyParsed, nil
}
|
||||
8
internal/version/version.go
Normal file
8
internal/version/version.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package version
|
||||
|
||||
// Build metadata injected at link time via -ldflags
// (e.g. go build -ldflags "-X .../internal/version.Version=v1.0.0");
// each value stays "unknown" when built without the flags.
var (
	Version   = "unknown" // Set via ldflags
	GitCommit = "unknown" // Set via ldflags
	BuildDate = "unknown" // Set via ldflags
	GoVersion = "unknown" // Set via ldflags
)
|
||||
Reference in New Issue
Block a user