mirror of
https://github.com/aykhans/sarin.git
synced 2026-02-28 06:49:13 +00:00
Compare commits
63 Commits
v0.7.1
...
feat/e2e-t
| Author | SHA1 | Date | |
|---|---|---|---|
| 7c246102ff | |||
| 4b3230bb27 | |||
| d197e90103 | |||
| ae054bb3d6 | |||
| 61af28a3d3 | |||
| 665be5d98a | |||
| d346067e8a | |||
| a3e20cd3d3 | |||
| 6d921cf8e3 | |||
|
|
d8b0a1e6a3 | ||
| b21d97192c | |||
| f0606a0f82 | |||
| 3be8ff218c | |||
| 7cb49195f8 | |||
|
|
a154215495 | ||
| c1584eb47b | |||
| 6a713ef241 | |||
| 6dafc082ed | |||
| e83eacf380 | |||
| c2ba1844ab | |||
|
|
054e5fd253 | ||
| 533ced4b54 | |||
| c3ea3a34ad | |||
|
|
c02a079d2a | ||
|
|
f78942bfb6 | ||
| 1369cb9f09 | |||
| 18662e6a64 | |||
| 81f08edc8d | |||
| a9738c0a11 | |||
| 76225884e6 | |||
| a512f3605d | |||
|
|
635c33008b | ||
| 3f2147ec6c | |||
| 92d0c5e003 | |||
| 27bc8f2e96 | |||
| 46c6fa9912 | |||
| a3d311009f | |||
| 710f4c6cb5 | |||
| 2d7ba34cb8 | |||
| 25d4762a3c | |||
| 361d423651 | |||
| ffa724fae7 | |||
| 7930be490d | |||
| e6c54e9cb2 | |||
| b32f567de7 | |||
| b6e85d9443 | |||
| 827e3535cd | |||
| 7ecf534d87 | |||
|
|
17ad5fadb9 | ||
| 7fb59a7989 | |||
| 527909c882 | |||
| 4459675efa | |||
|
|
604af355e6 | ||
| 7d4267c4c2 | |||
|
|
845ab7296c | ||
| 49d004ff06 | |||
| 045deb6120 | |||
| 075ef26203 | |||
|
|
946afbb2c3 | ||
| aacb33cfa5 | |||
| 4a7db48351 | |||
| b73087dce5 | |||
|
|
20a46feab8 |
@@ -1,11 +0,0 @@
|
||||
.github
|
||||
assets
|
||||
binaries
|
||||
dodo
|
||||
.git
|
||||
.gitignore
|
||||
.golangci.yml
|
||||
README.md
|
||||
LICENSE
|
||||
config.json
|
||||
build.sh
|
||||
2
.github/FUNDING.yml
vendored
Normal file
2
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
buy_me_a_coffee: aykhan
|
||||
custom: https://commerce.coinbase.com/checkout/0f33d2fb-54a6-44f5-8783-006ebf70d1a0
|
||||
24
.github/workflows/e2e.yaml
vendored
Normal file
24
.github/workflows/e2e.yaml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
name: e2e-tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
e2e:
|
||||
name: e2e
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.26.0
|
||||
cache: true
|
||||
- name: run e2e tests
|
||||
run: go test ./e2e/... -v -count=1
|
||||
25
.github/workflows/golangci-lint.yml
vendored
25
.github/workflows/golangci-lint.yml
vendored
@@ -1,25 +0,0 @@
|
||||
name: golangci-lint
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
golangci:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: stable
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v7
|
||||
with:
|
||||
version: v2.0.2
|
||||
args: --timeout=10m --config=.golangci.yml
|
||||
27
.github/workflows/lint.yaml
vendored
Normal file
27
.github/workflows/lint.yaml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
name: golangci-lint
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
golangci:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.26.0
|
||||
- name: go fix
|
||||
run: |
|
||||
go fix ./...
|
||||
git diff --exit-code
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v9
|
||||
with:
|
||||
version: v2.9.0
|
||||
86
.github/workflows/publish-docker-image.yml
vendored
86
.github/workflows/publish-docker-image.yml
vendored
@@ -1,86 +0,0 @@
|
||||
name: publish-docker-image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
# Match stable and pre versions, such as 'v1.0.0', 'v0.23.0-a', 'v0.23.0-a.2', 'v0.23.0-b', 'v0.23.0-b.3'
|
||||
- "v*.*.*"
|
||||
- "v*.*.*-a"
|
||||
- "v*.*.*-a.*"
|
||||
- "v*.*.*-b"
|
||||
- "v*.*.*-b.*"
|
||||
|
||||
jobs:
|
||||
build-and-push-stable-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Extract build args
|
||||
# Extract version number and check if it's an pre version
|
||||
run: |
|
||||
if [[ "${GITHUB_REF_NAME}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "PRE_RELEASE=false" >> $GITHUB_ENV
|
||||
else
|
||||
echo "PRE_RELEASE=true" >> $GITHUB_ENV
|
||||
fi
|
||||
echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: aykhans
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
version: v0.9.1
|
||||
|
||||
# Metadata for stable versions
|
||||
- name: Docker meta for stable
|
||||
id: meta-stable
|
||||
if: env.PRE_RELEASE == 'false'
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
aykhans/dodo
|
||||
tags: |
|
||||
type=semver,pattern={{version}},value=${{ env.VERSION }}
|
||||
type=raw,value=stable
|
||||
flavor: |
|
||||
latest=true
|
||||
labels: |
|
||||
org.opencontainers.image.version=${{ env.VERSION }}
|
||||
|
||||
# Metadata for pre versions
|
||||
- name: Docker meta for pre
|
||||
id: meta-pre
|
||||
if: env.PRE_RELEASE == 'true'
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
aykhans/dodo
|
||||
tags: |
|
||||
type=raw,value=${{ env.VERSION }}
|
||||
labels: |
|
||||
org.opencontainers.image.version=${{ env.VERSION }}
|
||||
|
||||
- name: Build and Push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./
|
||||
file: ./Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta-stable.outputs.tags || steps.meta-pre.outputs.tags }}
|
||||
labels: ${{ steps.meta-stable.outputs.labels || steps.meta-pre.outputs.labels }}
|
||||
98
.github/workflows/release.yaml
vendored
Normal file
98
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,98 @@
|
||||
name: Build and Release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [created]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "Release tag (e.g., v1.0.0)"
|
||||
required: true
|
||||
build_binaries:
|
||||
description: "Build and upload binaries"
|
||||
type: boolean
|
||||
default: true
|
||||
build_docker:
|
||||
description: "Build and push Docker image"
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build binaries
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.tag || github.ref }}
|
||||
|
||||
- name: Set build metadata
|
||||
run: |
|
||||
echo "VERSION=$(git describe --tags --always)" >> $GITHUB_ENV
|
||||
echo "GIT_COMMIT=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
echo "GO_VERSION=1.26.0" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Go
|
||||
if: github.event_name == 'release' || inputs.build_binaries
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache: true
|
||||
|
||||
- name: Build binaries
|
||||
if: github.event_name == 'release' || inputs.build_binaries
|
||||
run: |
|
||||
LDFLAGS="-X 'go.aykhans.me/sarin/internal/version.Version=${{ env.VERSION }}' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GitCommit=${{ env.GIT_COMMIT }}' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||
-s -w"
|
||||
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-linux-amd64 ./cmd/cli/main.go
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-linux-arm64 ./cmd/cli/main.go
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-darwin-amd64 ./cmd/cli/main.go
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-darwin-arm64 ./cmd/cli/main.go
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-windows-amd64.exe ./cmd/cli/main.go
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-windows-arm64.exe ./cmd/cli/main.go
|
||||
|
||||
- name: Upload Release Assets
|
||||
if: github.event_name == 'release' || inputs.build_binaries
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: ${{ inputs.tag || github.ref_name }}
|
||||
files: ./sarin-*
|
||||
|
||||
- name: Set up QEMU
|
||||
if: github.event_name == 'release' || inputs.build_docker
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: github.event_name == 'release' || inputs.build_docker
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: github.event_name == 'release' || inputs.build_docker
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
if: github.event_name == 'release' || inputs.build_docker
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
build-args: |
|
||||
VERSION=${{ env.VERSION }}
|
||||
GIT_COMMIT=${{ env.GIT_COMMIT }}
|
||||
GO_VERSION=${{ env.GO_VERSION }}
|
||||
tags: |
|
||||
${{ secrets.DOCKERHUB_USERNAME }}/sarin:${{ env.VERSION }}
|
||||
${{ secrets.DOCKERHUB_USERNAME }}/sarin:latest
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,2 +1 @@
|
||||
dodo
|
||||
binaries/
|
||||
bin/*
|
||||
|
||||
100
.golangci.yaml
Normal file
100
.golangci.yaml
Normal file
@@ -0,0 +1,100 @@
|
||||
version: "2"
|
||||
|
||||
run:
|
||||
go: "1.26"
|
||||
concurrency: 12
|
||||
|
||||
linters:
|
||||
default: none
|
||||
enable:
|
||||
- asciicheck
|
||||
- errcheck
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nakedret
|
||||
- nolintlint
|
||||
- prealloc
|
||||
- reassign
|
||||
- staticcheck
|
||||
- unconvert
|
||||
- unused
|
||||
- whitespace
|
||||
- bidichk
|
||||
- bodyclose
|
||||
- containedctx
|
||||
- contextcheck
|
||||
- copyloopvar
|
||||
- embeddedstructfieldcheck
|
||||
- errorlint
|
||||
- exptostd
|
||||
- fatcontext
|
||||
- funcorder
|
||||
- gocheckcompilerdirectives
|
||||
- gocritic
|
||||
- gomoddirectives
|
||||
- gosec
|
||||
- gosmopolitan
|
||||
- grouper
|
||||
- importas
|
||||
- inamedparam
|
||||
- intrange
|
||||
- loggercheck
|
||||
- mirror
|
||||
- musttag
|
||||
- perfsprint
|
||||
- predeclared
|
||||
- tagalign
|
||||
- tagliatelle
|
||||
- testifylint
|
||||
- thelper
|
||||
- tparallel
|
||||
- unparam
|
||||
- usestdlibvars
|
||||
- usetesting
|
||||
- wastedassign
|
||||
|
||||
settings:
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
- "-S1002"
|
||||
- "-ST1000"
|
||||
varnamelen:
|
||||
ignore-decls:
|
||||
- w http.ResponseWriter
|
||||
- wg sync.WaitGroup
|
||||
- wg *sync.WaitGroup
|
||||
|
||||
exclusions:
|
||||
rules:
|
||||
- path: _test\.go$
|
||||
linters:
|
||||
- errorlint
|
||||
- forcetypeassert
|
||||
- perfsprint
|
||||
- errcheck
|
||||
- gosec
|
||||
|
||||
- path: _test\.go$
|
||||
linters:
|
||||
- staticcheck
|
||||
text: "SA5011"
|
||||
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
|
||||
settings:
|
||||
gofmt:
|
||||
# Simplify code: gofmt with `-s` option.
|
||||
# Default: true
|
||||
simplify: false
|
||||
# Apply the rewrite rules to the source before reformatting.
|
||||
# https://pkg.go.dev/cmd/gofmt
|
||||
# Default: []
|
||||
rewrite-rules:
|
||||
- pattern: "interface{}"
|
||||
replacement: "any"
|
||||
- pattern: "a[b:len(a)]"
|
||||
replacement: "a[b:]"
|
||||
@@ -1,33 +0,0 @@
|
||||
version: "2"
|
||||
|
||||
run:
|
||||
go: "1.24"
|
||||
concurrency: 8
|
||||
timeout: 10m
|
||||
|
||||
linters:
|
||||
default: none
|
||||
enable:
|
||||
- asasalint
|
||||
- asciicheck
|
||||
- errcheck
|
||||
- gomodguard
|
||||
- goprintffuncname
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nakedret
|
||||
- nolintlint
|
||||
- prealloc
|
||||
- reassign
|
||||
- staticcheck
|
||||
- unconvert
|
||||
- unused
|
||||
- whitespace
|
||||
|
||||
settings:
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
- "-S1002"
|
||||
- "-ST1000"
|
||||
29
Dockerfile
29
Dockerfile
@@ -1,17 +1,32 @@
|
||||
FROM golang:1.24-alpine AS builder
|
||||
ARG GO_VERSION=1.26.0
|
||||
|
||||
FROM docker.io/library/golang:${GO_VERSION}-alpine AS builder
|
||||
|
||||
ARG VERSION=unknown
|
||||
ARG GIT_COMMIT=unknown
|
||||
|
||||
WORKDIR /src
|
||||
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
COPY . .
|
||||
RUN --mount=type=bind,source=./go.mod,target=./go.mod \
|
||||
--mount=type=bind,source=./go.sum,target=./go.sum \
|
||||
go mod download
|
||||
|
||||
RUN CGO_ENABLED=0 go build -ldflags "-s -w" -o dodo
|
||||
RUN --mount=type=bind,source=./,target=./ \
|
||||
CGO_ENABLED=0 go build \
|
||||
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=${VERSION}' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GitCommit=${GIT_COMMIT}' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||
-s -w" \
|
||||
-o /sarin ./cmd/cli/main.go
|
||||
|
||||
FROM gcr.io/distroless/static-debian12:latest
|
||||
|
||||
ENV TERM=xterm-256color
|
||||
ENV COLORTERM=truecolor
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY --from=builder /src/dodo /dodo
|
||||
COPY --from=builder /sarin /sarin
|
||||
|
||||
ENTRYPOINT ["./dodo"]
|
||||
ENTRYPOINT ["./sarin"]
|
||||
|
||||
934
EXAMPLES.md
934
EXAMPLES.md
@@ -1,934 +0,0 @@
|
||||
# Dodo Usage Examples
|
||||
|
||||
This document provides comprehensive examples of using Dodo with various configuration combinations. Each example includes three methods: CLI usage, YAML configuration, and JSON configuration.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Basic HTTP Stress Testing](#1-basic-http-stress-testing)
|
||||
2. [POST Request with Form Data](#2-post-request-with-form-data)
|
||||
3. [API Testing with Authentication](#3-api-testing-with-authentication)
|
||||
4. [Testing with Custom Headers and Cookies](#4-testing-with-custom-headers-and-cookies)
|
||||
5. [Load Testing with Proxy Rotation](#5-load-testing-with-proxy-rotation)
|
||||
6. [JSON API Testing with Dynamic Data](#6-json-api-testing-with-dynamic-data)
|
||||
7. [File Upload Testing](#7-file-upload-testing)
|
||||
8. [E-commerce Cart Testing](#8-e-commerce-cart-testing)
|
||||
9. [GraphQL API Testing](#9-graphql-api-testing)
|
||||
10. [WebSocket-style HTTP Testing](#10-websocket-style-http-testing)
|
||||
11. [Multi-tenant Application Testing](#11-multi-tenant-application-testing)
|
||||
12. [Rate Limiting Testing](#12-rate-limiting-testing)
|
||||
|
||||
---
|
||||
|
||||
## 1. Basic HTTP Stress Testing
|
||||
|
||||
Test a simple website with basic GET requests to measure performance under load.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/get \
|
||||
-m GET \
|
||||
-d 5 \
|
||||
-r 100 \
|
||||
-t 5s \
|
||||
-o 30s \
|
||||
--skip-verify=false \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://httpbin.org/get"
|
||||
yes: true
|
||||
timeout: "5s"
|
||||
dodos: 5
|
||||
requests: 100
|
||||
duration: "30s"
|
||||
skip_verify: false
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/get",
|
||||
"yes": true,
|
||||
"timeout": "5s",
|
||||
"dodos": 5,
|
||||
"requests": 100,
|
||||
"duration": "30s",
|
||||
"skip_verify": false
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. POST Request with Form Data
|
||||
|
||||
Test form submission endpoints with randomized form data.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/post \
|
||||
-m POST \
|
||||
-d 3 \
|
||||
-r 50 \
|
||||
-t 10s \
|
||||
--skip-verify=false \
|
||||
-H "Content-Type:application/x-www-form-urlencoded" \
|
||||
-b "username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 12 }}&email={{ fakeit_Email }}" \
|
||||
-b "username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 8 }}&email={{ fakeit_Email }}" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://httpbin.org/post"
|
||||
yes: true
|
||||
timeout: "10s"
|
||||
dodos: 3
|
||||
requests: 50
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Content-Type: "application/x-www-form-urlencoded"
|
||||
|
||||
body:
|
||||
- "username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 12 }}&email={{ fakeit_Email }}"
|
||||
- "username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 8 }}&email={{ fakeit_Email }}"
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/post",
|
||||
"yes": true,
|
||||
"timeout": "10s",
|
||||
"dodos": 3,
|
||||
"requests": 50,
|
||||
"skip_verify": false,
|
||||
"headers": [{ "Content-Type": "application/x-www-form-urlencoded" }],
|
||||
"body": [
|
||||
"username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 12 }}&email={{ fakeit_Email }}",
|
||||
"username={{ fakeit_Username }}&password={{ fakeit_Password true true true true true 8 }}&email={{ fakeit_Email }}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. API Testing with Authentication
|
||||
|
||||
Test protected API endpoints with various authentication methods.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/bearer \
|
||||
-m GET \
|
||||
-d 4 \
|
||||
-r 200 \
|
||||
-t 8s \
|
||||
--skip-verify=false \
|
||||
-H "Authorization:Bearer {{ fakeit_LetterN 32 }}" \
|
||||
-H "User-Agent:{{ fakeit_UserAgent }}" \
|
||||
-H "X-Request-ID:{{ fakeit_Int }}" \
|
||||
-H "Accept:application/json" \
|
||||
-p "api_version=v1" \
|
||||
-p "format=json" \
|
||||
-p "client_id=mobile" -p "client_id=web" -p "client_id=desktop" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://httpbin.org/bearer"
|
||||
yes: true
|
||||
timeout: "8s"
|
||||
dodos: 4
|
||||
requests: 200
|
||||
skip_verify: false
|
||||
|
||||
params:
|
||||
- api_version: "v1"
|
||||
- format: "json"
|
||||
- client_id: ["mobile", "web", "desktop"]
|
||||
|
||||
headers:
|
||||
- Authorization: "Bearer {{ fakeit_LetterN 32 }}"
|
||||
- User-Agent: "{{ fakeit_UserAgent }}"
|
||||
- X-Request-ID: "{{ fakeit_Int }}"
|
||||
- Accept: "application/json"
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/bearer",
|
||||
"yes": true,
|
||||
"timeout": "8s",
|
||||
"dodos": 4,
|
||||
"requests": 200,
|
||||
"skip_verify": false,
|
||||
"params": [
|
||||
{ "api_version": "v1" },
|
||||
{ "format": "json" },
|
||||
{ "client_id": ["mobile", "web", "desktop"] }
|
||||
],
|
||||
"headers": [
|
||||
{ "Authorization": "Bearer {{ fakeit_LetterN 32 }}" },
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" },
|
||||
{ "X-Request-ID": "{{ fakeit_Int }}" },
|
||||
{ "Accept": "application/json" }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. Testing with Custom Headers and Cookies
|
||||
|
||||
Test applications that require specific headers and session cookies.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/cookies \
|
||||
-m GET \
|
||||
-d 6 \
|
||||
-r 75 \
|
||||
-t 5s \
|
||||
--skip-verify=false \
|
||||
-H 'Accept-Language:{{ strings_Join "," (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) }}' \
|
||||
-H "X-Forwarded-For:{{ fakeit_IPv4Address }}" \
|
||||
-H "X-Real-IP:{{ fakeit_IPv4Address }}" \
|
||||
-H "Accept-Encoding:gzip" -H "Accept-Encoding:deflate" -H "Accept-Encoding:br" \
|
||||
-c "session_id={{ fakeit_UUID }}" \
|
||||
-c 'user_pref={{ fakeit_RandomString "a1" "b2" "c3" }}' \
|
||||
-c "theme=dark" -c "theme=light" -c "theme=auto" \
|
||||
-c "lang=en" -c "lang=es" -c "lang=fr" -c "lang=de" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://httpbin.org/cookies"
|
||||
yes: true
|
||||
timeout: "5s"
|
||||
dodos: 6
|
||||
requests: 75
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Accept-Language: '{{ strings_Join "," (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) }}'
|
||||
- X-Forwarded-For: "{{ fakeit_IPv4Address }}"
|
||||
- X-Real-IP: "{{ fakeit_IPv4Address }}"
|
||||
- Accept-Encoding: ["gzip", "deflate", "br"]
|
||||
|
||||
cookies:
|
||||
- session_id: "{{ fakeit_UUID }}"
|
||||
- user_pref: '{{ fakeit_RandomString "a1" "b2" "c3" }}'
|
||||
- theme: ["dark", "light", "auto"]
|
||||
- lang: ["en", "es", "fr", "de"]
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/cookies",
|
||||
"yes": true,
|
||||
"timeout": "5s",
|
||||
"dodos": 6,
|
||||
"requests": 75,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{
|
||||
"Accept-Language": "{{ strings_Join \",\" (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) (fakeit_LanguageAbbreviation) }}"
|
||||
},
|
||||
{ "X-Forwarded-For": "{{ fakeit_IPv4Address }}" },
|
||||
{ "X-Real-IP": "{{ fakeit_IPv4Address }}" },
|
||||
{ "Accept-Encoding": ["gzip", "deflate", "br"] }
|
||||
],
|
||||
"cookies": [
|
||||
{ "session_id": "{{ fakeit_UUID }}" },
|
||||
{ "user_pref": "{{ fakeit_RandomString \"a1\" \"b2\" \"c3\" }}" },
|
||||
{ "theme": ["dark", "light", "auto"] },
|
||||
{ "lang": ["en", "es", "fr", "de"] }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Load Testing with Proxy Rotation
|
||||
|
||||
Test through multiple proxies for distributed load testing.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/ip \
|
||||
-m GET \
|
||||
-d 8 \
|
||||
-r 300 \
|
||||
-t 15s \
|
||||
--skip-verify=false \
|
||||
-x "http://proxy1.example.com:8080" \
|
||||
-x "http://proxy2.example.com:8080" \
|
||||
-x "socks5://proxy3.example.com:1080" \
|
||||
-x "http://username:password@proxy4.example.com:8080" \
|
||||
-H "User-Agent:{{ fakeit_UserAgent }}" \
|
||||
-H "Accept:application/json" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://httpbin.org/ip"
|
||||
yes: true
|
||||
timeout: "15s"
|
||||
dodos: 8
|
||||
requests: 300
|
||||
skip_verify: false
|
||||
|
||||
proxy:
|
||||
- "http://proxy1.example.com:8080"
|
||||
- "http://proxy2.example.com:8080"
|
||||
- "socks5://proxy3.example.com:1080"
|
||||
- "http://username:password@proxy4.example.com:8080"
|
||||
|
||||
headers:
|
||||
- User-Agent: "{{ fakeit_UserAgent }}"
|
||||
- Accept: "application/json"
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/ip",
|
||||
"yes": true,
|
||||
"timeout": "15s",
|
||||
"dodos": 8,
|
||||
"requests": 300,
|
||||
"skip_verify": false,
|
||||
"proxy": [
|
||||
"http://proxy1.example.com:8080",
|
||||
"http://proxy2.example.com:8080",
|
||||
"socks5://proxy3.example.com:1080",
|
||||
"http://username:password@proxy4.example.com:8080"
|
||||
],
|
||||
"headers": [
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" },
|
||||
{ "Accept": "application/json" }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. JSON API Testing with Dynamic Data
|
||||
|
||||
Test REST APIs with realistic JSON payloads.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/post \
|
||||
-m POST \
|
||||
-d 5 \
|
||||
-r 150 \
|
||||
-t 12s \
|
||||
--skip-verify=false \
|
||||
-H "Content-Type:application/json" \
|
||||
-H "Accept:application/json" \
|
||||
-H "X-API-Version:2023-10-01" \
|
||||
-b '{"user_id":{{ fakeit_Uint }},"name":"{{ fakeit_Name }}","email":"{{ fakeit_Email }}","created_at":"{{ fakeit_Date }}"}' \
|
||||
-b '{"product_id":{{ fakeit_Uint }},"name":"{{ fakeit_ProductName }}","price":{{ fakeit_Price 10 1000 }},"category":"{{ fakeit_ProductCategory }}"}' \
|
||||
-b '{"order_id":"{{ fakeit_UUID }}","items":[{"id":{{ fakeit_Uint }},"quantity":{{ fakeit_IntRange 1 10 }}}],"total":{{ fakeit_Price 50 500 }}}' \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://httpbin.org/post"
|
||||
yes: true
|
||||
timeout: "12s"
|
||||
dodos: 5
|
||||
requests: 150
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Content-Type: "application/json"
|
||||
- Accept: "application/json"
|
||||
- X-API-Version: "2023-10-01"
|
||||
|
||||
body:
|
||||
- '{"user_id":{{ fakeit_Uint }},"name":"{{ fakeit_Name }}","email":"{{ fakeit_Email }}","created_at":"{{ fakeit_Date }}"}'
|
||||
- '{"product_id":{{ fakeit_Uint }},"name":"{{ fakeit_ProductName }}","price":{{ fakeit_Price 10 1000 }},"category":"{{ fakeit_ProductCategory }}"}'
|
||||
- '{"order_id":"{{ fakeit_UUID }}","items":[{"id":{{ fakeit_Uint }},"quantity":{{ fakeit_IntRange 1 10 }}}],"total":{{ fakeit_Price 50 500 }}}'
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/post",
|
||||
"yes": true,
|
||||
"timeout": "12s",
|
||||
"dodos": 5,
|
||||
"requests": 150,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "Content-Type": "application/json" },
|
||||
{ "Accept": "application/json" },
|
||||
{ "X-API-Version": "2023-10-01" }
|
||||
],
|
||||
"body": [
|
||||
"{\"user_id\":{{ fakeit_Uint }},\"name\":\"{{ fakeit_Name }}\",\"email\":\"{{ fakeit_Email }}\",\"created_at\":\"{{ fakeit_Date }}\"}",
|
||||
"{\"product_id\":{{ fakeit_Uint }},\"name\":\"{{ fakeit_ProductName }}\",\"price\":{{ fakeit_Price 10 1000 }},\"category\":\"{{ fakeit_ProductCategory }}\"}",
|
||||
"{\"order_id\":\"{{ fakeit_UUID }}\",\"items\":[{\"id\":{{ fakeit_Uint }},\"quantity\":{{ fakeit_IntRange 1 10 }}}],\"total\":{{ fakeit_Price 50 500 }}}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. File Upload Testing
|
||||
|
||||
Test file upload endpoints with multipart form data.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://httpbin.org/post \
|
||||
-m POST \
|
||||
-d 3 \
|
||||
-r 25 \
|
||||
-t 30s \
|
||||
--skip-verify=false \
|
||||
-H "X-Upload-Source:dodo-test" \
|
||||
-H "User-Agent:{{ fakeit_UserAgent }}" \
|
||||
-b '{{ body_FormData (dict_Str "filename" (fakeit_UUID) "content" (fakeit_Paragraph 3 5 10 " ")) }}' \
|
||||
-b '{{ body_FormData (dict_Str "file" (fakeit_UUID) "description" (fakeit_Sentence 10) "category" "image") }}' \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://httpbin.org/post"
|
||||
yes: true
|
||||
timeout: "30s"
|
||||
dodos: 3
|
||||
requests: 25
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- X-Upload-Source: "dodo-test"
|
||||
- User-Agent: "{{ fakeit_UserAgent }}"
|
||||
|
||||
body:
|
||||
- '{{ body_FormData (dict_Str "filename" (fakeit_UUID) "content" (fakeit_Paragraph 3 5 10 " ")) }}'
|
||||
- '{{ body_FormData (dict_Str "file" (fakeit_UUID) "description" (fakeit_Sentence 10) "category" "image") }}'
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/post",
|
||||
"yes": true,
|
||||
"timeout": "30s",
|
||||
"dodos": 3,
|
||||
"requests": 25,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "X-Upload-Source": "dodo-test" },
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" }
|
||||
],
|
||||
"body": [
|
||||
"{{ body_FormData (dict_Str \"filename\" (fakeit_UUID) \"content\" (fakeit_Paragraph 3 5 10 \" \")) }}",
|
||||
"{{ body_FormData (dict_Str \"file\" (fakeit_UUID) \"description\" (fakeit_Sentence 10) \"category\" \"image\") }}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. E-commerce Cart Testing
|
||||
|
||||
Test shopping cart operations with realistic product data.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://api.example-shop.com/cart \
|
||||
-m POST \
|
||||
-d 10 \
|
||||
-r 500 \
|
||||
-t 8s \
|
||||
--skip-verify=false \
|
||||
-H "Content-Type:application/json" \
|
||||
-H "Authorization:Bearer {{ fakeit_LetterN 32 }}" \
|
||||
-H "X-Client-Version:1.2.3" \
|
||||
-H "User-Agent:{{ fakeit_UserAgent }}" \
|
||||
-c "cart_session={{ fakeit_UUID }}" \
|
||||
-c "user_pref=guest" -c "user_pref=member" -c "user_pref=premium" \
|
||||
-c "region=US" -c "region=EU" -c "region=ASIA" \
|
||||
-p "currency=USD" -p "currency=EUR" -p "currency=GBP" \
|
||||
-p "locale=en-US" -p "locale=en-GB" -p "locale=de-DE" -p "locale=fr-FR" \
|
||||
-b '{"action":"add","product_id":"{{ fakeit_UUID }}","quantity":{{ fakeit_IntRange 1 5 }},"user_id":"{{ fakeit_UUID }}"}' \
|
||||
-b '{"action":"remove","product_id":"{{ fakeit_UUID }}","user_id":"{{ fakeit_UUID }}"}' \
|
||||
-b '{"action":"update","product_id":"{{ fakeit_UUID }}","quantity":{{ fakeit_IntRange 1 10 }},"user_id":"{{ fakeit_UUID }}"}' \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://api.example-shop.com/cart"
|
||||
yes: true
|
||||
timeout: "8s"
|
||||
dodos: 10
|
||||
requests: 500
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Content-Type: "application/json"
|
||||
- Authorization: "Bearer {{ fakeit_LetterN 32 }}"
|
||||
- X-Client-Version: "1.2.3"
|
||||
- User-Agent: "{{ fakeit_UserAgent }}"
|
||||
|
||||
cookies:
|
||||
- cart_session: "{{ fakeit_UUID }}"
|
||||
- user_pref: ["guest", "member", "premium"]
|
||||
- region: ["US", "EU", "ASIA"]
|
||||
|
||||
params:
|
||||
- currency: ["USD", "EUR", "GBP"]
|
||||
- locale: ["en-US", "en-GB", "de-DE", "fr-FR"]
|
||||
|
||||
body:
|
||||
- '{"action":"add","product_id":"{{ fakeit_UUID }}","quantity":{{ fakeit_IntRange 1 5 }},"user_id":"{{ fakeit_UUID }}"}'
|
||||
- '{"action":"remove","product_id":"{{ fakeit_UUID }}","user_id":"{{ fakeit_UUID }}"}'
|
||||
- '{"action":"update","product_id":"{{ fakeit_UUID }}","quantity":{{ fakeit_IntRange 1 10 }},"user_id":"{{ fakeit_UUID }}"}'
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://api.example-shop.com/cart",
|
||||
"yes": true,
|
||||
"timeout": "8s",
|
||||
"dodos": 10,
|
||||
"requests": 500,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "Content-Type": "application/json" },
|
||||
{ "Authorization": "Bearer {{ fakeit_LetterN 32 }}" },
|
||||
{ "X-Client-Version": "1.2.3" },
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" }
|
||||
],
|
||||
"cookies": [
|
||||
{ "cart_session": "{{ fakeit_UUID }}" },
|
||||
{ "user_pref": ["guest", "member", "premium"] },
|
||||
{ "region": ["US", "EU", "ASIA"] }
|
||||
],
|
||||
"params": [
|
||||
{ "currency": ["USD", "EUR", "GBP"] },
|
||||
{ "locale": ["en-US", "en-GB", "de-DE", "fr-FR"] }
|
||||
],
|
||||
"body": [
|
||||
"{\"action\":\"add\",\"product_id\":\"{{ fakeit_UUID }}\",\"quantity\":{{ fakeit_IntRange 1 5 }},\"user_id\":\"{{ fakeit_UUID }}\"}",
|
||||
"{\"action\":\"remove\",\"product_id\":\"{{ fakeit_UUID }}\",\"user_id\":\"{{ fakeit_UUID }}\"}",
|
||||
"{\"action\":\"update\",\"product_id\":\"{{ fakeit_UUID }}\",\"quantity\":{{ fakeit_IntRange 1 10 }},\"user_id\":\"{{ fakeit_UUID }}\"}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. GraphQL API Testing
|
||||
|
||||
Test GraphQL endpoints with various queries and mutations.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://api.example.com/graphql \
|
||||
-m POST \
|
||||
-d 4 \
|
||||
-r 100 \
|
||||
-t 10s \
|
||||
--skip-verify=false \
|
||||
-H "Content-Type:application/json" \
|
||||
-H "Authorization:Bearer {{ fakeit_UUID }}" \
|
||||
-H "X-GraphQL-Client:dodo-test" \
|
||||
-b '{"query":"query GetUser($id: ID!) { user(id: $id) { id name email } }","variables":{"id":"{{ fakeit_UUID }}"}}' \
|
||||
-b '{"query":"query GetPosts($limit: Int) { posts(limit: $limit) { id title content } }","variables":{"limit":{{ fakeit_IntRange 5 20 }}}}' \
|
||||
-b '{"query":"mutation CreatePost($input: PostInput!) { createPost(input: $input) { id title } }","variables":{"input":{"title":"{{ fakeit_Sentence 5 }}","content":"{{ fakeit_Paragraph 2 3 5 " "}}","authorId":"{{ fakeit_UUID }}"}}}' \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://api.example.com/graphql"
|
||||
yes: true
|
||||
timeout: "10s"
|
||||
dodos: 4
|
||||
requests: 100
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Content-Type: "application/json"
|
||||
- Authorization: "Bearer {{ fakeit_UUID }}"
|
||||
- X-GraphQL-Client: "dodo-test"
|
||||
|
||||
body:
|
||||
- '{"query":"query GetUser($id: ID!) { user(id: $id) { id name email } }","variables":{"id":"{{ fakeit_UUID }}"}}'
|
||||
- '{"query":"query GetPosts($limit: Int) { posts(limit: $limit) { id title content } }","variables":{"limit":{{ fakeit_IntRange 5 20 }}}}'
|
||||
- '{"query":"mutation CreatePost($input: PostInput!) { createPost(input: $input) { id title } }","variables":{"input":{"title":"{{ fakeit_Sentence 5 }}","content":"{{ fakeit_Paragraph 2 3 5 " "}}","authorId":"{{ fakeit_UUID }}"}}}'
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://api.example.com/graphql",
|
||||
"yes": true,
|
||||
"timeout": "10s",
|
||||
"dodos": 4,
|
||||
"requests": 100,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "Content-Type": "application/json" },
|
||||
{ "Authorization": "Bearer {{ fakeit_UUID }}" },
|
||||
{ "X-GraphQL-Client": "dodo-test" }
|
||||
],
|
||||
"body": [
|
||||
"{\"query\":\"query GetUser($id: ID!) { user(id: $id) { id name email } }\",\"variables\":{\"id\":\"{{ fakeit_UUID }}\"}}",
|
||||
"{\"query\":\"query GetPosts($limit: Int) { posts(limit: $limit) { id title content } }\",\"variables\":{\"limit\":{{ fakeit_IntRange 5 20 }}}}",
|
||||
"{\"query\":\"mutation CreatePost($input: PostInput!) { createPost(input: $input) { id title } }\",\"variables\":{\"input\":{\"title\":\"{{ fakeit_Sentence 5 }}\",\"content\":\"{{ fakeit_Paragraph 2 3 5 \\\" \\\"}}\",\"authorId\":\"{{ fakeit_UUID }}\"}}}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. WebSocket-style HTTP Testing
|
||||
|
||||
Test real-time applications with WebSocket-like HTTP endpoints.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://api.realtime-app.com/events \
|
||||
-m POST \
|
||||
-d 15 \
|
||||
-r 1000 \
|
||||
-t 5s \
|
||||
-o 60s \
|
||||
--skip-verify=false \
|
||||
-H "Content-Type:application/json" \
|
||||
-H "X-Event-Type:{{ fakeit_LetterNN 4 12 }}" \
|
||||
-H "Connection:keep-alive" \
|
||||
-H "Cache-Control:no-cache" \
|
||||
-c "connection_id={{ fakeit_UUID }}" \
|
||||
-c "session_token={{ fakeit_UUID }}" \
|
||||
-p "channel=general" -p "channel=notifications" -p "channel=alerts" -p "channel=updates" \
|
||||
-p "version=v1" -p "version=v2" \
|
||||
-b '{"event":"{{ fakeit_Word }}","data":{"timestamp":"{{ fakeit_Date }}","user_id":"{{ fakeit_UUID }}","message":"{{ fakeit_Sentence 8 }}"}}' \
|
||||
-b '{"event":"ping","data":{"timestamp":"{{ fakeit_Date }}","client_id":"{{ fakeit_UUID }}"}}' \
|
||||
-b '{"event":"status_update","data":{"status":"{{ fakeit_Word }}","user_id":"{{ fakeit_UUID }}","timestamp":"{{ fakeit_Date }}"}}' \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "POST"
|
||||
url: "https://api.realtime-app.com/events"
|
||||
yes: true
|
||||
timeout: "5s"
|
||||
dodos: 15
|
||||
requests: 1000
|
||||
duration: "60s"
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- Content-Type: "application/json"
|
||||
- X-Event-Type: "{{ fakeit_LetterNN 4 12 }}"
|
||||
- Connection: "keep-alive"
|
||||
- Cache-Control: "no-cache"
|
||||
|
||||
cookies:
|
||||
- connection_id: "{{ fakeit_UUID }}"
|
||||
- session_token: "{{ fakeit_UUID }}"
|
||||
|
||||
params:
|
||||
- channel: ["general", "notifications", "alerts", "updates"]
|
||||
- version: ["v1", "v2"]
|
||||
|
||||
body:
|
||||
- '{"event":"{{ fakeit_Word }}","data":{"timestamp":"{{ fakeit_Date }}","user_id":"{{ fakeit_UUID }}","message":"{{ fakeit_Sentence 8 }}"}}'
|
||||
- '{"event":"ping","data":{"timestamp":"{{ fakeit_Date }}","client_id":"{{ fakeit_UUID }}"}}'
|
||||
- '{"event":"status_update","data":{"status":"{{ fakeit_Word }}","user_id":"{{ fakeit_UUID }}","timestamp":"{{ fakeit_Date }}"}}'
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "POST",
|
||||
"url": "https://api.realtime-app.com/events",
|
||||
"yes": true,
|
||||
"timeout": "5s",
|
||||
"dodos": 15,
|
||||
"requests": 1000,
|
||||
"duration": "60s",
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "Content-Type": "application/json" },
|
||||
{ "X-Event-Type": "{{ fakeit_LetterNN 4 12 }}" },
|
||||
{ "Connection": "keep-alive" },
|
||||
{ "Cache-Control": "no-cache" }
|
||||
],
|
||||
"cookies": [
|
||||
{ "connection_id": "{{ fakeit_UUID }}" },
|
||||
{ "session_token": "{{ fakeit_UUID }}" }
|
||||
],
|
||||
"params": [
|
||||
{ "channel": ["general", "notifications", "alerts", "updates"] },
|
||||
{ "version": ["v1", "v2"] }
|
||||
],
|
||||
"body": [
|
||||
"{\"event\":\"{{ fakeit_Word }}\",\"data\":{\"timestamp\":\"{{ fakeit_Date }}\",\"user_id\":\"{{ fakeit_UUID }}\",\"message\":\"{{ fakeit_Sentence 8 }}\"}}",
|
||||
"{\"event\":\"ping\",\"data\":{\"timestamp\":\"{{ fakeit_Date }}\",\"client_id\":\"{{ fakeit_UUID }}\"}}",
|
||||
"{\"event\":\"status_update\",\"data\":{\"status\":\"{{ fakeit_Word }}\",\"user_id\":\"{{ fakeit_UUID }}\",\"timestamp\":\"{{ fakeit_Date }}\"}}"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 11. Multi-tenant Application Testing
|
||||
|
||||
Test SaaS applications with tenant-specific configurations.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://app.saas-platform.com/api/data \
|
||||
-m GET \
|
||||
-d 12 \
|
||||
-r 600 \
|
||||
-t 15s \
|
||||
--skip-verify=false \
|
||||
-H "X-Tenant-ID:{{ fakeit_UUID }}" \
|
||||
-H "Authorization:Bearer {{ fakeit_LetterN 64 }}" \
|
||||
-H "X-Client-Type:web" -H "X-Client-Type:mobile" -H "X-Client-Type:api" \
|
||||
-H "Accept:application/json" \
|
||||
-c "tenant_session={{ fakeit_UUID }}" \
|
||||
-c "user_role=admin" -c "user_role=user" -c "user_role=viewer" \
|
||||
-c "subscription_tier=free" -c "subscription_tier=pro" -c "subscription_tier=enterprise" \
|
||||
-p "page={{ fakeit_IntRange 1 10 }}" \
|
||||
-p "limit={{ fakeit_IntRange 10 100 }}" \
|
||||
-p "sort=created_at" -p "sort=updated_at" -p "sort=name" \
|
||||
-p "order=asc" -p "order=desc" \
|
||||
-p "filter_by=active" -p "filter_by=inactive" -p "filter_by=pending" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://app.saas-platform.com/api/data"
|
||||
yes: true
|
||||
timeout: "15s"
|
||||
dodos: 12
|
||||
requests: 600
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- X-Tenant-ID: "{{ fakeit_UUID }}"
|
||||
- Authorization: "Bearer {{ fakeit_LetterN 64 }}"
|
||||
- X-Client-Type: ["web", "mobile", "api"]
|
||||
- Accept: "application/json"
|
||||
|
||||
cookies:
|
||||
- tenant_session: "{{ fakeit_UUID }}"
|
||||
- user_role: ["admin", "user", "viewer"]
|
||||
- subscription_tier: ["free", "pro", "enterprise"]
|
||||
|
||||
params:
|
||||
- page: "{{ fakeit_IntRange 1 10 }}"
|
||||
- limit: "{{ fakeit_IntRange 10 100 }}"
|
||||
- sort: ["created_at", "updated_at", "name"]
|
||||
- order: ["asc", "desc"]
|
||||
- filter_by: ["active", "inactive", "pending"]
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://app.saas-platform.com/api/data",
|
||||
"yes": true,
|
||||
"timeout": "15s",
|
||||
"dodos": 12,
|
||||
"requests": 600,
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "X-Tenant-ID": "{{ fakeit_UUID }}" },
|
||||
{ "Authorization": "Bearer {{ fakeit_LetterN 64 }}" },
|
||||
{ "X-Client-Type": ["web", "mobile", "api"] },
|
||||
{ "Accept": "application/json" }
|
||||
],
|
||||
"cookies": [
|
||||
{ "tenant_session": "{{ fakeit_UUID }}" },
|
||||
{ "user_role": ["admin", "user", "viewer"] },
|
||||
{ "subscription_tier": ["free", "pro", "enterprise"] }
|
||||
],
|
||||
"params": [
|
||||
{ "page": "{{ fakeit_IntRange 1 10 }}" },
|
||||
{ "limit": "{{ fakeit_IntRange 10 100 }}" },
|
||||
{ "sort": ["created_at", "updated_at", "name"] },
|
||||
{ "order": ["asc", "desc"] },
|
||||
{ "filter_by": ["active", "inactive", "pending"] }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 12. Rate Limiting Testing
|
||||
|
||||
Test API rate limits and throttling mechanisms.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
dodo -u https://api.rate-limited.com/endpoint \
|
||||
-m GET \
|
||||
-d 20 \
|
||||
-r 2000 \
|
||||
-t 3s \
|
||||
-o 120s \
|
||||
--skip-verify=false \
|
||||
-H "X-API-Key:{{ fakeit_UUID }}" \
|
||||
-H "X-Client-ID:{{ fakeit_UUID }}" \
|
||||
-H "X-Rate-Limit-Test:true" \
|
||||
-H "User-Agent:{{ fakeit_UserAgent }}" \
|
||||
-c "rate_limit_bucket={{ fakeit_UUID }}" \
|
||||
-c "client_tier=tier1" -c "client_tier=tier2" -c "client_tier=tier3" \
|
||||
-p "burst_test=true" \
|
||||
-p "client_type=premium" -p "client_type=standard" -p "client_type=free" \
|
||||
-p "request_id={{ fakeit_UUID }}" \
|
||||
-y
|
||||
```
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://api.rate-limited.com/endpoint"
|
||||
yes: true
|
||||
timeout: "3s"
|
||||
dodos: 20
|
||||
requests: 2000
|
||||
duration: "120s"
|
||||
skip_verify: false
|
||||
|
||||
headers:
|
||||
- X-API-Key: "{{ fakeit_UUID }}"
|
||||
- X-Client-ID: "{{ fakeit_UUID }}"
|
||||
- X-Rate-Limit-Test: "true"
|
||||
- User-Agent: "{{ fakeit_UserAgent }}"
|
||||
|
||||
params:
|
||||
- burst_test: "true"
|
||||
- client_type: ["premium", "standard", "free"]
|
||||
- request_id: "{{ fakeit_UUID }}"
|
||||
|
||||
cookies:
|
||||
- rate_limit_bucket: "{{ fakeit_UUID }}"
|
||||
- client_tier: ["tier1", "tier2", "tier3"]
|
||||
```
|
||||
|
||||
### JSON Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://api.rate-limited.com/endpoint",
|
||||
"yes": true,
|
||||
"timeout": "3s",
|
||||
"dodos": 20,
|
||||
"requests": 2000,
|
||||
"duration": "120s",
|
||||
"skip_verify": false,
|
||||
"headers": [
|
||||
{ "X-API-Key": "{{ fakeit_UUID }}" },
|
||||
{ "X-Client-ID": "{{ fakeit_UUID }}" },
|
||||
{ "X-Rate-Limit-Test": "true" },
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" }
|
||||
],
|
||||
"params": [
|
||||
{ "burst_test": "true" },
|
||||
{ "client_type": ["premium", "standard", "free"] },
|
||||
{ "request_id": "{{ fakeit_UUID }}" }
|
||||
],
|
||||
"cookies": [
|
||||
{ "rate_limit_bucket": "{{ fakeit_UUID }}" },
|
||||
{ "client_tier": ["tier1", "tier2", "tier3"] }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
|
||||
- All examples use template functions for dynamic data generation
|
||||
- Adjust `dodos`, `requests`, `duration`, and `timeout` values based on your testing requirements
|
||||
- Use `skip_verify: true` for testing with self-signed certificates
|
||||
- Set `yes: true` to skip confirmation prompts in automated testing
|
||||
- Template functions like `{{ fakeit_* }}` generate random realistic data for each request
|
||||
- Multiple values in arrays (e.g., `["value1", "value2"]`) will be randomly selected per request
|
||||
- Use the `body_FormData` function for multipart form uploads
|
||||
- Proxy configurations support HTTP, SOCKS5, and SOCKS5H protocols
|
||||
|
||||
For more template functions and advanced configuration options, refer to the main documentation and `utils/templates.go`.
|
||||
372
README.md
372
README.md
@@ -1,341 +1,123 @@
|
||||
<h1 align="center">Dodo - A Fast and Easy-to-Use HTTP Benchmarking Tool</h1>
|
||||
<p align="center">
|
||||
<img width="30%" height="30%" src="https://ftp.aykhans.me/web/client/pubshares/VzPtSHS7yPQT7ngoZzZSNU/browse?path=%2Fdodo.png">
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
<h4>
|
||||
<a href="./EXAMPLES.md">
|
||||
Examples
|
||||
</a>
|
||||
<span> | </span>
|
||||
<a href="#installation">
|
||||
Install
|
||||
</a>
|
||||
<span> | </span>
|
||||
<a href="https://hub.docker.com/r/aykhans/dodo">
|
||||
Docker
|
||||
</a>
|
||||
</h4>
|
||||
<br>
|
||||
<a href="https://coff.ee/aykhan">
|
||||
<img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 40px !important;width: 150px !important;">
|
||||
</a>
|
||||
|
||||
## Sarin is a high-performance HTTP load testing tool built with Go and fasthttp.
|
||||
|
||||
[](https://pkg.go.dev/go.aykhans.me/sarin)
|
||||
[](https://goreportcard.com/report/go.aykhans.me/sarin)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
|
||||
</div>
|
||||
|
||||
## Table of Contents
|
||||

|
||||
|
||||
- [Installation](#installation)
|
||||
- [Using Docker (Recommended)](#using-docker-recommended)
|
||||
- [Using Pre-built Binaries](#using-pre-built-binaries)
|
||||
- [Building from Source](#building-from-source)
|
||||
- [Usage](#usage)
|
||||
- [1. CLI Usage](#1-cli-usage)
|
||||
- [2. Config File Usage](#2-config-file-usage)
|
||||
- [2.1 YAML/YML Example](#21-yamlyml-example)
|
||||
- [2.2 JSON Example](#22-json-example)
|
||||
- [3. CLI & Config File Combination](#3-cli--config-file-combination)
|
||||
- [Config Parameters Reference](#config-parameters-reference)
|
||||
- [Template Functions](#template-functions)
|
||||
<p align="center">
|
||||
<a href="#installation">Install</a> •
|
||||
<a href="#quick-start">Quick Start</a> •
|
||||
<a href="docs/examples.md">Examples</a> •
|
||||
<a href="docs/configuration.md">Configuration</a> •
|
||||
<a href="docs/templating.md">Templating</a>
|
||||
</p>
|
||||
|
||||
## Overview
|
||||
|
||||
Sarin is designed for efficient HTTP load testing with minimal resource consumption. It prioritizes simplicity—features like templating add zero overhead when unused.
|
||||
|
||||
| ✅ Supported | ❌ Not Supported |
|
||||
| ---------------------------------------------------------- | ------------------------------- |
|
||||
| High-performance with low memory footprint | Detailed response body analysis |
|
||||
| Long-running duration/count based tests | Extensive response statistics |
|
||||
| Dynamic requests via 320+ template functions | Web UI or complex TUI |
|
||||
| Request scripting with Lua and JavaScript | Distributed load testing |
|
||||
| Multiple proxy protocols<br>(HTTP, HTTPS, SOCKS5, SOCKS5H) | HTTP/2, HTTP/3, WebSocket, gRPC |
|
||||
| Flexible config (CLI, ENV, YAML) | Plugins / extensions ecosystem |
|
||||
|
||||
## Installation
|
||||
|
||||
### Using Docker (Recommended)
|
||||
|
||||
Pull the latest Dodo image from Docker Hub:
|
||||
### Docker (Recommended)
|
||||
|
||||
```sh
|
||||
docker pull aykhans/dodo:latest
|
||||
docker pull aykhans/sarin:latest
|
||||
```
|
||||
|
||||
To use Dodo with Docker and a local config file, mount the config file as a volume and pass it as an argument:
|
||||
With a local config file:
|
||||
|
||||
```sh
|
||||
docker run -v /path/to/config.json:/config.json aykhans/dodo -f /config.json
|
||||
docker run --rm -it -v /path/to/config.yaml:/config.yaml aykhans/sarin -f /config.yaml
|
||||
```
|
||||
|
||||
If you're using a remote config file via URL, you don't need to mount a volume:
|
||||
With a remote config file:
|
||||
|
||||
```sh
|
||||
docker run aykhans/dodo -f https://raw.githubusercontent.com/aykhans/dodo/main/config.yaml
|
||||
docker run --rm -it aykhans/sarin -f https://example.com/config.yaml
|
||||
```
|
||||
|
||||
### Using Pre-built Binaries
|
||||
### Pre-built Binaries
|
||||
|
||||
Download the latest binaries from the [releases](https://github.com/aykhans/dodo/releases) section.
|
||||
Download the latest binaries from the [releases](https://github.com/aykhans/sarin/releases) page.
|
||||
|
||||
### Building from Source
|
||||
|
||||
To build Dodo from source, ensure you have [Go 1.24+](https://golang.org/dl/) installed.
|
||||
Requires [Go 1.26+](https://golang.org/dl/).
|
||||
|
||||
```sh
|
||||
go install -ldflags "-s -w" github.com/aykhans/dodo@latest
|
||||
git clone https://github.com/aykhans/sarin.git && cd sarin
|
||||
|
||||
CGO_ENABLED=0 go build \
|
||||
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=dev' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GitCommit=$(git rev-parse HEAD)' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||
-s -w" \
|
||||
-o sarin ./cmd/cli/main.go
|
||||
```
|
||||
|
||||
## Usage
|
||||
## Quick Start
|
||||
|
||||
Dodo supports CLI arguments, configuration files (JSON/YAML), or a combination of both. If both are used, CLI arguments take precedence.
|
||||
|
||||
### 1. CLI Usage
|
||||
|
||||
Send 1000 GET requests to https://example.com with 10 parallel dodos (threads), each with a timeout of 2 seconds, within a maximum duration of 1 minute:
|
||||
Send 10,000 GET requests with 50 concurrent connections and a random User-Agent for each request:
|
||||
|
||||
```sh
|
||||
dodo -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 2s
|
||||
sarin -U http://example.com -r 10_000 -c 50 -H "User-Agent: {{ fakeit_UserAgent }}"
|
||||
```
|
||||
|
||||
With Docker:
|
||||
Run a 5-minute duration-based test:
|
||||
|
||||
```sh
|
||||
docker run --rm -i aykhans/dodo -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 2s
|
||||
sarin -U http://example.com -d 5m -c 100
|
||||
```
|
||||
|
||||
### 2. Config File Usage
|
||||
|
||||
Send 1000 GET requests to https://example.com with 10 parallel dodos (threads), each with a timeout of 800 milliseconds, within a maximum duration of 250 seconds:
|
||||
|
||||
#### 2.1 YAML/YML Example
|
||||
|
||||
```yaml
|
||||
method: "GET"
|
||||
url: "https://example.com"
|
||||
yes: false
|
||||
timeout: "800ms"
|
||||
dodos: 10
|
||||
requests: 1000
|
||||
duration: "250s"
|
||||
skip_verify: false
|
||||
|
||||
params:
|
||||
# A random value will be selected from the list for first "key1" param on each request
|
||||
# And always "value" for second "key1" param on each request
|
||||
# e.g. "?key1=value2&key1=value"
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
|
||||
# A random value will be selected from the list for param "key2" on each request
|
||||
# e.g. "?key2=value2"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
headers:
|
||||
# A random value will be selected from the list for first "key1" header on each request
|
||||
# And always "value" for second "key1" header on each request
|
||||
# e.g. "key1: value3", "key1: value"
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
|
||||
# A random value will be selected from the list for header "key2" on each request
|
||||
# e.g. "key2: value2"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
cookies:
|
||||
# A random value will be selected from the list for first "key1" cookie on each request
|
||||
# And always "value" for second "key1" cookie on each request
|
||||
# e.g. "key1=value4; key1=value"
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
|
||||
# A random value will be selected from the list for cookie "key2" on each request
|
||||
# e.g. "key2=value1"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
body: "body-text"
|
||||
# OR
|
||||
# A random body value will be selected from the list for each request
|
||||
body:
|
||||
- "body-text1"
|
||||
- "body-text2"
|
||||
- "body-text3"
|
||||
|
||||
proxy: "http://example.com:8080"
|
||||
# OR
|
||||
# A random proxy will be selected from the list for each request
|
||||
proxy:
|
||||
- "http://example.com:8080"
|
||||
- "http://username:password@example.com:8080"
|
||||
- "socks5://example.com:8080"
|
||||
- "socks5h://example.com:8080"
|
||||
```
|
||||
Use a YAML config file:
|
||||
|
||||
```sh
|
||||
dodo -f /path/config.yaml
|
||||
# OR
|
||||
dodo -f https://example.com/config.yaml
|
||||
sarin -f config.yaml
|
||||
```
|
||||
|
||||
With Docker:
|
||||
For more usage examples, see the **[Examples Guide](docs/examples.md)**.
|
||||
|
||||
## Configuration
|
||||
|
||||
Sarin supports environment variables, CLI flags, and YAML files. When the same option is specified in multiple sources, the following priority order applies:
|
||||
|
||||
```
|
||||
CLI Flags (Highest) > YAML > Environment Variables (Lowest)
|
||||
```
|
||||
|
||||
For detailed documentation on all configuration options (URL, method, timeout, concurrency, headers, cookies, proxy, etc.), see the **[Configuration Guide](docs/configuration.md)**.
|
||||
|
||||
## Templating
|
||||
|
||||
Sarin supports Go templates in URL paths, methods, bodies, headers, params, cookies, and values. Use the 320+ built-in functions to generate dynamic data for each request.
|
||||
|
||||
**Example:**
|
||||
|
||||
```sh
|
||||
docker run --rm -i -v /path/to/config.yaml:/config.yaml aykhans/dodo -f /config.yaml
|
||||
# OR
|
||||
docker run --rm -i aykhans/dodo -f https://example.com/config.yaml
|
||||
sarin -U "http://example.com/users/{{ fakeit_UUID }}" -r 1000 -c 10 \
|
||||
-V "REQUEST_ID={{ fakeit_UUID }}" \
|
||||
-H "X-Request-ID: {{ .Values.REQUEST_ID }}" \
|
||||
-B '{"request_id": "{{ .Values.REQUEST_ID }}"}'
|
||||
```
|
||||
|
||||
#### 2.2 JSON Example
|
||||
For the complete templating guide and functions reference, see the **[Templating Guide](docs/templating.md)**.
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://example.com",
|
||||
"yes": false,
|
||||
"timeout": "800ms",
|
||||
"dodos": 10,
|
||||
"requests": 1000,
|
||||
"duration": "250s",
|
||||
"skip_verify": false,
|
||||
## License
|
||||
|
||||
"params": [
|
||||
// A random value will be selected from the list for first "key1" param on each request
|
||||
// And always "value" for second "key1" param on each request
|
||||
// e.g. "?key1=value2&key1=value"
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
|
||||
// A random value will be selected from the list for param "key2" on each request
|
||||
// e.g. "?key2=value2"
|
||||
{ "key2": ["value1", "value2"] },
|
||||
],
|
||||
|
||||
"headers": [
|
||||
// A random value will be selected from the list for first "key1" header on each request
|
||||
// And always "value" for second "key1" header on each request
|
||||
// e.g. "key1: value3", "key1: value"
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
|
||||
// A random value will be selected from the list for header "key2" on each request
|
||||
// e.g. "key2: value2"
|
||||
{ "key2": ["value1", "value2"] },
|
||||
],
|
||||
|
||||
"cookies": [
|
||||
// A random value will be selected from the list for first "key1" cookie on each request
|
||||
// And always "value" for second "key1" cookie on each request
|
||||
// e.g. "key1=value4; key1=value"
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
|
||||
// A random value will be selected from the list for cookie "key2" on each request
|
||||
// e.g. "key2=value1"
|
||||
{ "key2": ["value1", "value2"] },
|
||||
],
|
||||
|
||||
"body": "body-text",
|
||||
// OR
|
||||
// A random body value will be selected from the list for each request
|
||||
"body": ["body-text1", "body-text2", "body-text3"],
|
||||
|
||||
"proxy": "http://example.com:8080",
|
||||
// OR
|
||||
// A random proxy will be selected from the list for each request
|
||||
"proxy": [
|
||||
"http://example.com:8080",
|
||||
"http://username:password@example.com:8080",
|
||||
"socks5://example.com:8080",
|
||||
"socks5h://example.com:8080",
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
```sh
|
||||
dodo -f /path/config.json
|
||||
# OR
|
||||
dodo -f https://example.com/config.json
|
||||
```
|
||||
|
||||
With Docker:
|
||||
|
||||
```sh
|
||||
docker run --rm -i -v /path/to/config.json:/config.json aykhans/dodo
|
||||
# OR
|
||||
docker run --rm -i aykhans/dodo -f https://example.com/config.json
|
||||
```
|
||||
|
||||
### 3. CLI & Config File Combination
|
||||
|
||||
CLI arguments override config file values:
|
||||
|
||||
```sh
|
||||
dodo -f /path/to/config.yaml -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 5s
|
||||
```
|
||||
|
||||
With Docker:
|
||||
|
||||
```sh
|
||||
docker run --rm -i -v /path/to/config.json:/config.json aykhans/dodo -f /config.json -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 5s
|
||||
```
|
||||
|
||||
You can find more usage examples in the [EXAMPLES.md](./EXAMPLES.md) file.
|
||||
|
||||
## Config Parameters Reference
|
||||
|
||||
If `Headers`, `Params`, `Cookies`, `Body`, or `Proxy` fields have multiple values, each request will choose a random value from the list.
|
||||
|
||||
| Parameter | config file | CLI Flag | CLI Short Flag | Type | Description | Default |
|
||||
| --------------- | ----------- | ------------ | -------------- | ------------------------------ | ----------------------------------------------------------- | ------- |
|
||||
| Config file | | -config-file | -f | String | Path to local config file or http(s) URL of the config file | - |
|
||||
| Yes | yes | -yes | -y | Boolean | Answer yes to all questions | false |
|
||||
| URL | url | -url | -u | String | URL to send the request to | - |
|
||||
| Method | method | -method | -m | String | HTTP method | GET |
|
||||
| Dodos (Threads) | dodos | -dodos | -d | UnsignedInteger | Number of dodos (threads) to send requests in parallel | 1 |
|
||||
| Requests | requests | -requests | -r | UnsignedInteger | Total number of requests to send | - |
|
||||
| Duration | duration | -duration | -o | Time | Maximum duration for the test | - |
|
||||
| Timeout | timeout | -timeout | -t | Time | Timeout for canceling each request | 10s |
|
||||
| Params | params | -param | -p | [{String: String OR [String]}] | Request parameters | - |
|
||||
| Headers | headers | -header | -H | [{String: String OR [String]}] | Request headers | - |
|
||||
| Cookies | cookies | -cookie | -c | [{String: String OR [String]}] | Request cookies | - |
|
||||
| Body | body | -body | -b | String OR [String] | Request body or list of request bodies | - |
|
||||
| Proxy | proxies | -proxy | -x | String OR [String] | Proxy URL or list of proxy URLs | - |
|
||||
| Skip Verify | skip_verify | -skip-verify | | Boolean | Skip SSL/TLS certificate verification | false |
|
||||
|
||||
## Template Functions
|
||||
|
||||
Dodo supports template functions in `Headers`, `Params`, `Cookies`, and `Body` fields. These functions allow you to generate dynamic values for each request.
|
||||
|
||||
You can use Go template syntax to include dynamic values in your requests. Here's how to use template functions:
|
||||
|
||||
In CLI config:
|
||||
|
||||
```sh
|
||||
dodo -u https://example.com -r 1 \
|
||||
-header "User-Agent:{{ fakeit_UserAgent }}" \ # e.g. "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
|
||||
-param "username={{ strings_ToUpper fakeit_Username }}" \ # e.g. "username=JOHN BOB"
|
||||
-cookie "token={{ fakeit_Password true true true true true 10 }}" \ # e.g. token=1234567890abcdef1234567890abcdef
|
||||
-body '{"email":"{{ fakeit_Email }}", "password":"{{ fakeit_Password true true true true true 10 }}"}' # e.g. {"email":"john.doe@example.com", "password":"12rw4d-78d"}
|
||||
```
|
||||
|
||||
In YAML/YML config:
|
||||
|
||||
```yaml
|
||||
headers:
|
||||
- User-Agent: "{{ fakeit_UserAgent }}" # e.g. "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
|
||||
- "Random-Header-{{fakeit_FirstName}}": "static_value" # e.g. "Random-Header-John: static_value"
|
||||
|
||||
cookies:
|
||||
- token: "Bearer {{ fakeit_UUID }}" # e.g. "token=Bearer 1234567890abcdef1234567890abcdef"
|
||||
|
||||
params:
|
||||
- id: "{{ fakeit_Uint }}" # e.g. "id=1234567890"
|
||||
- username: "{{ fakeit_Username }}" # e.g. "username=John Doe"
|
||||
|
||||
body:
|
||||
- '{ "username": "{{ fakeit_Username }}", "password": "{{ fakeit_Password }}" }' # e.g. { "username": "john.doe", "password": "password123" }
|
||||
- '{ "email": "{{ fakeit_Email }}", "phone": "{{ fakeit_Phone }}" }' # e.g. { "email": "john.doe@example.com", "phone": "1234567890" }
|
||||
- '{{ body_FormData (dict_Str "username" fakeit_Username "password" "secret123") }}' # Creates multipart form data for form submissions, automatically sets the appropriate Content-Type header.
|
||||
```
|
||||
|
||||
In JSON config:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"headers": [
|
||||
{ "User-Agent": "{{ fakeit_UserAgent }}" }, // e.g. "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
|
||||
],
|
||||
"body": [
|
||||
"{ \"username\": \"{{ strings_RemoveSpaces fakeit_Username }}\", \"password\": \"{{ fakeit_Password }}\" }", // e.g. { "username": "johndoe", "password": "password123" }
|
||||
"{{ body_FormData (dict_Str \"username\" fakeit_Username \"password\" \"12345\") }}", // Creates multipart form data for form submissions, automatically sets the appropriate Content-Type header.
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
For the full list of template functions over 200 functions, refer to the `NewFuncMap` function in `utils/templates.go`.
|
||||
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
|
||||
|
||||
124
Taskfile.yaml
124
Taskfile.yaml
@@ -1,53 +1,97 @@
|
||||
# https://taskfile.dev
|
||||
|
||||
version: "3"
|
||||
|
||||
vars:
|
||||
PLATFORMS:
|
||||
- os: darwin
|
||||
archs: [amd64, arm64]
|
||||
- os: freebsd
|
||||
archs: [386, amd64, arm]
|
||||
- os: linux
|
||||
archs: [386, amd64, arm, arm64]
|
||||
- os: netbsd
|
||||
archs: [386, amd64, arm]
|
||||
- os: openbsd
|
||||
archs: [386, amd64, arm, arm64]
|
||||
- os: windows
|
||||
archs: [386, amd64, arm64]
|
||||
BIN_DIR: ./bin
|
||||
GOLANGCI_LINT_VERSION: v2.9.0
|
||||
GOLANGCI: "{{.BIN_DIR}}/golangci-lint-{{.GOLANGCI_LINT_VERSION}}"
|
||||
|
||||
tasks:
|
||||
run: go run main.go
|
||||
ftl:
|
||||
desc: Run fmt, tidy, and lint.
|
||||
cmds:
|
||||
- task: fmt
|
||||
- task: fix
|
||||
- task: tidy
|
||||
- task: lint
|
||||
|
||||
ftl:
|
||||
cmds:
|
||||
- task: fmt
|
||||
- task: tidy
|
||||
- task: lint
|
||||
fmt:
|
||||
desc: Run format
|
||||
deps:
|
||||
- install-golangci-lint
|
||||
cmds:
|
||||
- "{{.GOLANGCI}} fmt"
|
||||
|
||||
fmt: gofmt -w -d .
|
||||
fix:
|
||||
desc: Run go fix
|
||||
cmds:
|
||||
- go fix ./...
|
||||
|
||||
tidy: go mod tidy
|
||||
tidy:
|
||||
desc: Run go mod tidy.
|
||||
cmds:
|
||||
- go mod tidy {{.CLI_ARGS}}
|
||||
|
||||
lint: golangci-lint run
|
||||
lint:
|
||||
desc: Run linters
|
||||
deps:
|
||||
- install-golangci-lint
|
||||
cmds:
|
||||
- "{{.GOLANGCI}} run"
|
||||
|
||||
build: CGO_ENABLED=0 go build -ldflags "-s -w" -o "dodo"
|
||||
e2e:
|
||||
desc: Run e2e tests
|
||||
cmds:
|
||||
- go test ./e2e/... -v -count=1 {{.CLI_ARGS}}
|
||||
|
||||
build-all:
|
||||
silent: true
|
||||
cmds:
|
||||
- rm -rf binaries
|
||||
- |
|
||||
{{ $ext := "" }}
|
||||
{{- range $platform := .PLATFORMS }}
|
||||
{{- if eq $platform.os "windows" }}
|
||||
{{ $ext = ".exe" }}
|
||||
{{- end }}
|
||||
create-bin-dir:
|
||||
desc: Create bin directory.
|
||||
cmds:
|
||||
- mkdir -p {{.BIN_DIR}}
|
||||
|
||||
{{- range $arch := $platform.archs }}
|
||||
echo "Building for {{$platform.os}}/{{$arch}}"
|
||||
GOOS={{$platform.os}} GOARCH={{$arch}} go build -ldflags "-s -w" -o "./binaries/dodo-{{$platform.os}}-{{$arch}}{{$ext}}"
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
- echo -e "\033[32m*** Build completed ***\033[0m"
|
||||
build:
|
||||
desc: Build the application.
|
||||
deps:
|
||||
- create-bin-dir
|
||||
vars:
|
||||
OUTPUT: '{{.OUTPUT | default (printf "%s/sarin" .BIN_DIR)}}'
|
||||
cmds:
|
||||
- rm -f {{.OUTPUT}}
|
||||
- >-
|
||||
CGO_ENABLED=0 go build
|
||||
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=$(git describe --tags --always)'
|
||||
-X 'go.aykhans.me/sarin/internal/version.GitCommit=$(git rev-parse HEAD)'
|
||||
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)'
|
||||
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)'
|
||||
-s -w"
|
||||
-o {{.OUTPUT}} ./cmd/cli/main.go
|
||||
|
||||
install-golangci-lint:
|
||||
desc: Install golangci-lint
|
||||
deps:
|
||||
- create-bin-dir
|
||||
status:
|
||||
- test -f {{.GOLANGCI}}
|
||||
cmds:
|
||||
- rm -f {{.GOLANGCI}}
|
||||
- curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b {{.BIN_DIR}} {{.GOLANGCI_LINT_VERSION}}
|
||||
- mv {{.BIN_DIR}}/golangci-lint {{.GOLANGCI}}
|
||||
|
||||
docker-build:
|
||||
desc: Build the Docker image.
|
||||
vars:
|
||||
IMAGE_NAME: '{{.IMAGE_NAME | default "sarin"}}'
|
||||
TAG: '{{.TAG | default "latest"}}'
|
||||
GO_VERSION: '{{.GO_VERSION | default ""}}'
|
||||
VERSION:
|
||||
sh: git describe --tags --always
|
||||
GIT_COMMIT:
|
||||
sh: git rev-parse HEAD
|
||||
cmds:
|
||||
- >-
|
||||
docker build
|
||||
{{if .GO_VERSION}}--build-arg GO_VERSION={{.GO_VERSION}}{{end}}
|
||||
--build-arg VERSION={{.VERSION}}
|
||||
--build-arg GIT_COMMIT={{.GIT_COMMIT}}
|
||||
-t {{.IMAGE_NAME}}:{{.TAG}}
|
||||
.
|
||||
|
||||
65
benchmark.sh
Executable file
65
benchmark.sh
Executable file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
RUNS=20
|
||||
CMD="go run ./cmd/cli -U http://localhost:80 -r 1_000_000 -c 100"
|
||||
|
||||
declare -a times_default
|
||||
declare -a times_gogcoff
|
||||
|
||||
echo "===== Benchmark: default GC ====="
|
||||
for i in $(seq 1 $RUNS); do
|
||||
echo "Run $i/$RUNS ..."
|
||||
start=$(date +%s%N)
|
||||
$CMD
|
||||
end=$(date +%s%N)
|
||||
elapsed=$(( (end - start) / 1000000 )) # milliseconds
|
||||
times_default+=("$elapsed")
|
||||
echo " -> ${elapsed} ms"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "===== Benchmark: GOGC=off ====="
|
||||
for i in $(seq 1 $RUNS); do
|
||||
echo "Run $i/$RUNS ..."
|
||||
start=$(date +%s%N)
|
||||
GOGC=off $CMD
|
||||
end=$(date +%s%N)
|
||||
elapsed=$(( (end - start) / 1000000 ))
|
||||
times_gogcoff+=("$elapsed")
|
||||
echo " -> ${elapsed} ms"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "============================================"
|
||||
echo " RESULTS"
|
||||
echo "============================================"
|
||||
|
||||
echo ""
|
||||
echo "--- Default GC ---"
|
||||
sum=0
|
||||
for i in $(seq 0 $((RUNS - 1))); do
|
||||
echo " Run $((i + 1)): ${times_default[$i]} ms"
|
||||
sum=$((sum + times_default[$i]))
|
||||
done
|
||||
avg_default=$((sum / RUNS))
|
||||
echo " Average: ${avg_default} ms"
|
||||
|
||||
echo ""
|
||||
echo "--- GOGC=off ---"
|
||||
sum=0
|
||||
for i in $(seq 0 $((RUNS - 1))); do
|
||||
echo " Run $((i + 1)): ${times_gogcoff[$i]} ms"
|
||||
sum=$((sum + times_gogcoff[$i]))
|
||||
done
|
||||
avg_gogcoff=$((sum / RUNS))
|
||||
echo " Average: ${avg_gogcoff} ms"
|
||||
|
||||
echo ""
|
||||
echo "--- Comparison ---"
|
||||
if [ "$avg_default" -gt 0 ]; then
|
||||
diff=$((avg_default - avg_gogcoff))
|
||||
echo " Difference: ${diff} ms (positive = GOGC=off is faster)"
|
||||
fi
|
||||
echo "============================================"
|
||||
95
cmd/cli/main.go
Normal file
95
cmd/cli/main.go
Normal file
@@ -0,0 +1,95 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
|
||||
"go.aykhans.me/sarin/internal/config"
|
||||
"go.aykhans.me/sarin/internal/sarin"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsErr "go.aykhans.me/utils/errors"
|
||||
)
|
||||
|
||||
func main() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
go listenForTermination(func() { cancel() })
|
||||
|
||||
combinedConfig := config.ReadAllConfigs()
|
||||
|
||||
combinedConfig.SetDefaults()
|
||||
|
||||
if *combinedConfig.ShowConfig {
|
||||
if !combinedConfig.Print() {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
_ = utilsErr.MustHandle(combinedConfig.Validate(),
|
||||
utilsErr.OnType(func(err types.FieldValidationErrors) error {
|
||||
for _, fieldErr := range err.Errors {
|
||||
if fieldErr.Value == "" {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
config.StyleYellow.Render(fmt.Sprintf("[VALIDATION] Field '%s': ", fieldErr.Field))+fieldErr.Err.Error(),
|
||||
)
|
||||
} else {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
config.StyleYellow.Render(fmt.Sprintf("[VALIDATION] Field '%s' (%s): ", fieldErr.Field, fieldErr.Value))+fieldErr.Err.Error(),
|
||||
)
|
||||
}
|
||||
}
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
)
|
||||
|
||||
srn, err := sarin.NewSarin(
|
||||
ctx,
|
||||
combinedConfig.Methods, combinedConfig.URL, *combinedConfig.Timeout,
|
||||
*combinedConfig.Concurrency, combinedConfig.Requests, combinedConfig.Duration,
|
||||
*combinedConfig.Quiet, *combinedConfig.Insecure, combinedConfig.Params, combinedConfig.Headers,
|
||||
combinedConfig.Cookies, combinedConfig.Bodies, combinedConfig.Proxies, combinedConfig.Values,
|
||||
*combinedConfig.Output != config.ConfigOutputTypeNone,
|
||||
*combinedConfig.DryRun,
|
||||
combinedConfig.Lua, combinedConfig.Js,
|
||||
)
|
||||
_ = utilsErr.MustHandle(err,
|
||||
utilsErr.OnType(func(err types.ProxyDialError) error {
|
||||
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[PROXY] ")+err.Error())
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
utilsErr.OnSentinel(types.ErrScriptEmpty, func(err error) error {
|
||||
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[SCRIPT] ")+err.Error())
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
utilsErr.OnType(func(err types.ScriptLoadError) error {
|
||||
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[SCRIPT] ")+err.Error())
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
)
|
||||
|
||||
srn.Start(ctx)
|
||||
|
||||
switch *combinedConfig.Output {
|
||||
case config.ConfigOutputTypeNone:
|
||||
return
|
||||
case config.ConfigOutputTypeJSON:
|
||||
srn.GetResponses().PrintJSON()
|
||||
case config.ConfigOutputTypeYAML:
|
||||
srn.GetResponses().PrintYAML()
|
||||
default:
|
||||
srn.GetResponses().PrintTable()
|
||||
}
|
||||
}
|
||||
|
||||
func listenForTermination(do func()) {
|
||||
sigChan := make(chan os.Signal, 1)
|
||||
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-sigChan
|
||||
do()
|
||||
}
|
||||
37
config.json
37
config.json
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"method": "GET",
|
||||
"url": "https://example.com",
|
||||
"yes": false,
|
||||
"timeout": "5s",
|
||||
"dodos": 8,
|
||||
"requests": 1000,
|
||||
"duration": "10s",
|
||||
"skip_verify": false,
|
||||
|
||||
"params": [
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
{ "key2": ["value1", "value2"] }
|
||||
],
|
||||
|
||||
"headers": [
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
{ "key2": ["value1", "value2"] }
|
||||
],
|
||||
|
||||
"cookies": [
|
||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
||||
{ "key1": "value" },
|
||||
{ "key2": ["value1", "value2"] }
|
||||
],
|
||||
|
||||
"body": ["body-text1", "body-text2", "body-text3"],
|
||||
|
||||
"proxy": [
|
||||
"http://example.com:8080",
|
||||
"http://username:password@example.com:8080",
|
||||
"socks5://example.com:8080",
|
||||
"socks5h://example.com:8080"
|
||||
]
|
||||
}
|
||||
40
config.yaml
40
config.yaml
@@ -1,40 +0,0 @@
|
||||
method: "GET"
|
||||
url: "https://example.com"
|
||||
yes: false
|
||||
timeout: "5s"
|
||||
dodos: 8
|
||||
requests: 1000
|
||||
duration: "10s"
|
||||
skip_verify: false
|
||||
|
||||
params:
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
headers:
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
cookies:
|
||||
- key1: ["value1", "value2", "value3", "value4"]
|
||||
- key1: "value"
|
||||
- key2: ["value1", "value2"]
|
||||
|
||||
# body: "body-text"
|
||||
# OR
|
||||
# A random body value will be selected from the list for each request
|
||||
body:
|
||||
- "body-text1"
|
||||
- "body-text2"
|
||||
- "body-text3"
|
||||
|
||||
# proxy: "http://example.com:8080"
|
||||
# OR
|
||||
# A random proxy will be selected from the list for each request
|
||||
proxy:
|
||||
- "http://example.com:8080"
|
||||
- "http://username:password@example.com:8080"
|
||||
- "socks5://example.com:8080"
|
||||
- "socks5h://example.com:8080"
|
||||
188
config/cli.go
188
config/cli.go
@@ -1,188 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
)
|
||||
|
||||
const cliUsageText = `Usage:
|
||||
dodo [flags]
|
||||
|
||||
Examples:
|
||||
|
||||
Simple usage:
|
||||
dodo -u https://example.com -o 1m
|
||||
|
||||
Usage with config file:
|
||||
dodo -f /path/to/config/file/config.json
|
||||
|
||||
Usage with all flags:
|
||||
dodo -f /path/to/config/file/config.json \
|
||||
-u https://example.com -m POST \
|
||||
-d 10 -r 1000 -o 3m -t 3s \
|
||||
-b "body1" -body "body2" \
|
||||
-H "header1:value1" -header "header2:value2" \
|
||||
-p "param1=value1" -param "param2=value2" \
|
||||
-c "cookie1=value1" -cookie "cookie2=value2" \
|
||||
-x "http://proxy.example.com:8080" -proxy "socks5://proxy2.example.com:8080" \
|
||||
-skip-verify -y
|
||||
|
||||
Flags:
|
||||
-h, -help help for dodo
|
||||
-v, -version version for dodo
|
||||
-y, -yes bool Answer yes to all questions (default %v)
|
||||
-f, -config-file string Path to the local config file or http(s) URL of the config file
|
||||
-d, -dodos uint Number of dodos(threads) (default %d)
|
||||
-r, -requests uint Number of total requests
|
||||
-o, -duration Time Maximum duration for the test (e.g. 30s, 1m, 5h)
|
||||
-t, -timeout Time Timeout for each request (e.g. 400ms, 15s, 1m10s) (default %v)
|
||||
-u, -url string URL for stress testing
|
||||
-m, -method string HTTP Method for the request (default %s)
|
||||
-b, -body [string] Body for the request (e.g. "body text")
|
||||
-p, -param [string] Parameter for the request (e.g. "key1=value1")
|
||||
-H, -header [string] Header for the request (e.g. "key1:value1")
|
||||
-c, -cookie [string] Cookie for the request (e.g. "key1=value1")
|
||||
-x, -proxy [string] Proxy for the request (e.g. "http://proxy.example.com:8080")
|
||||
-skip-verify bool Skip SSL/TLS certificate verification (default %v)`
|
||||
|
||||
func (config *Config) ReadCLI() (types.ConfigFile, error) {
|
||||
flag.Usage = func() {
|
||||
fmt.Printf(
|
||||
cliUsageText+"\n",
|
||||
DefaultYes,
|
||||
DefaultDodosCount,
|
||||
DefaultTimeout,
|
||||
DefaultMethod,
|
||||
DefaultSkipVerify,
|
||||
)
|
||||
}
|
||||
|
||||
var (
|
||||
version = false
|
||||
configFile = ""
|
||||
yes = false
|
||||
skipVerify = false
|
||||
method = ""
|
||||
url types.RequestURL
|
||||
dodosCount = uint(0)
|
||||
requestCount = uint(0)
|
||||
timeout time.Duration
|
||||
duration time.Duration
|
||||
)
|
||||
|
||||
{
|
||||
flag.BoolVar(&version, "version", false, "Prints the version of the program")
|
||||
flag.BoolVar(&version, "v", false, "Prints the version of the program")
|
||||
|
||||
flag.StringVar(&configFile, "config-file", "", "Path to the configuration file")
|
||||
flag.StringVar(&configFile, "f", "", "Path to the configuration file")
|
||||
|
||||
flag.BoolVar(&yes, "yes", false, "Answer yes to all questions")
|
||||
flag.BoolVar(&yes, "y", false, "Answer yes to all questions")
|
||||
|
||||
flag.BoolVar(&skipVerify, "skip-verify", false, "Skip SSL/TLS certificate verification")
|
||||
|
||||
flag.StringVar(&method, "method", "", "HTTP Method")
|
||||
flag.StringVar(&method, "m", "", "HTTP Method")
|
||||
|
||||
flag.Var(&url, "url", "URL to send the request")
|
||||
flag.Var(&url, "u", "URL to send the request")
|
||||
|
||||
flag.UintVar(&dodosCount, "dodos", 0, "Number of dodos(threads)")
|
||||
flag.UintVar(&dodosCount, "d", 0, "Number of dodos(threads)")
|
||||
|
||||
flag.UintVar(&requestCount, "requests", 0, "Number of total requests")
|
||||
flag.UintVar(&requestCount, "r", 0, "Number of total requests")
|
||||
|
||||
flag.DurationVar(&duration, "duration", 0, "Maximum duration of the test")
|
||||
flag.DurationVar(&duration, "o", 0, "Maximum duration of the test")
|
||||
|
||||
flag.DurationVar(&timeout, "timeout", 0, "Timeout for each request (e.g. 400ms, 15s, 1m10s)")
|
||||
flag.DurationVar(&timeout, "t", 0, "Timeout for each request (e.g. 400ms, 15s, 1m10s)")
|
||||
|
||||
flag.Var(&config.Params, "param", "URL parameter to send with the request")
|
||||
flag.Var(&config.Params, "p", "URL parameter to send with the request")
|
||||
|
||||
flag.Var(&config.Headers, "header", "Header to send with the request")
|
||||
flag.Var(&config.Headers, "H", "Header to send with the request")
|
||||
|
||||
flag.Var(&config.Cookies, "cookie", "Cookie to send with the request")
|
||||
flag.Var(&config.Cookies, "c", "Cookie to send with the request")
|
||||
|
||||
flag.Var(&config.Body, "body", "Body to send with the request")
|
||||
flag.Var(&config.Body, "b", "Body to send with the request")
|
||||
|
||||
flag.Var(&config.Proxies, "proxy", "Proxy to use for the request")
|
||||
flag.Var(&config.Proxies, "x", "Proxy to use for the request")
|
||||
}
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if len(os.Args) <= 1 {
|
||||
flag.CommandLine.Usage()
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
if args := flag.Args(); len(args) > 0 {
|
||||
return types.ConfigFile(configFile), fmt.Errorf("unexpected arguments: %v", strings.Join(args, ", "))
|
||||
}
|
||||
|
||||
if version {
|
||||
fmt.Printf("dodo version %s\n", VERSION)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
flag.Visit(func(f *flag.Flag) {
|
||||
switch f.Name {
|
||||
case "method", "m":
|
||||
config.Method = utils.ToPtr(method)
|
||||
case "url", "u":
|
||||
config.URL = utils.ToPtr(url)
|
||||
case "dodos", "d":
|
||||
config.DodosCount = utils.ToPtr(dodosCount)
|
||||
case "requests", "r":
|
||||
config.RequestCount = utils.ToPtr(requestCount)
|
||||
case "duration", "o":
|
||||
config.Duration = &types.Duration{Duration: duration}
|
||||
case "timeout", "t":
|
||||
config.Timeout = &types.Timeout{Duration: timeout}
|
||||
case "yes", "y":
|
||||
config.Yes = utils.ToPtr(yes)
|
||||
case "skip-verify":
|
||||
config.SkipVerify = utils.ToPtr(skipVerify)
|
||||
}
|
||||
})
|
||||
|
||||
return types.ConfigFile(configFile), nil
|
||||
}
|
||||
|
||||
// CLIYesOrNoReader reads a yes or no answer from the command line.
|
||||
// It prompts the user with the given message and default value,
|
||||
// and returns true if the user answers "y" or "Y", and false otherwise.
|
||||
// If there is an error while reading the input, it returns false.
|
||||
// If the user simply presses enter without providing any input,
|
||||
// it returns the default value specified by the `dft` parameter.
|
||||
func CLIYesOrNoReader(message string, dft bool) bool {
|
||||
var answer string
|
||||
defaultMessage := "Y/n"
|
||||
if !dft {
|
||||
defaultMessage = "y/N"
|
||||
}
|
||||
fmt.Printf("%s [%s]: ", message, defaultMessage)
|
||||
if _, err := fmt.Scanln(&answer); err != nil {
|
||||
if err.Error() == "unexpected newline" {
|
||||
return dft
|
||||
}
|
||||
return false
|
||||
}
|
||||
if answer == "" {
|
||||
return dft
|
||||
}
|
||||
return answer == "y" || answer == "Y"
|
||||
}
|
||||
367
config/config.go
367
config/config.go
@@ -1,367 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/jedib0t/go-pretty/v6/table"
|
||||
)
|
||||
|
||||
const (
|
||||
VERSION string = "0.7.1"
|
||||
DefaultUserAgent string = "Dodo/" + VERSION
|
||||
DefaultMethod string = "GET"
|
||||
DefaultTimeout time.Duration = time.Second * 10
|
||||
DefaultDodosCount uint = 1
|
||||
DefaultRequestCount uint = 0
|
||||
DefaultDuration time.Duration = 0
|
||||
DefaultYes bool = false
|
||||
DefaultSkipVerify bool = false
|
||||
)
|
||||
|
||||
var SupportedProxySchemes []string = []string{"http", "socks5", "socks5h"}
|
||||
|
||||
type RequestConfig struct {
|
||||
Method string
|
||||
URL url.URL
|
||||
Timeout time.Duration
|
||||
DodosCount uint
|
||||
RequestCount uint
|
||||
Duration time.Duration
|
||||
Yes bool
|
||||
SkipVerify bool
|
||||
Params types.Params
|
||||
Headers types.Headers
|
||||
Cookies types.Cookies
|
||||
Body types.Body
|
||||
Proxies types.Proxies
|
||||
}
|
||||
|
||||
func NewRequestConfig(conf *Config) *RequestConfig {
|
||||
return &RequestConfig{
|
||||
Method: *conf.Method,
|
||||
URL: conf.URL.URL,
|
||||
Timeout: conf.Timeout.Duration,
|
||||
DodosCount: *conf.DodosCount,
|
||||
RequestCount: *conf.RequestCount,
|
||||
Duration: conf.Duration.Duration,
|
||||
Yes: *conf.Yes,
|
||||
SkipVerify: *conf.SkipVerify,
|
||||
Params: conf.Params,
|
||||
Headers: conf.Headers,
|
||||
Cookies: conf.Cookies,
|
||||
Body: conf.Body,
|
||||
Proxies: conf.Proxies,
|
||||
}
|
||||
}
|
||||
|
||||
func (rc *RequestConfig) GetValidDodosCountForRequests() uint {
|
||||
if rc.RequestCount == 0 {
|
||||
return rc.DodosCount
|
||||
}
|
||||
return min(rc.DodosCount, rc.RequestCount)
|
||||
}
|
||||
|
||||
func (rc *RequestConfig) GetMaxConns(minConns uint) uint {
|
||||
maxConns := max(
|
||||
minConns, rc.GetValidDodosCountForRequests(),
|
||||
)
|
||||
return ((maxConns * 50 / 100) + maxConns)
|
||||
}
|
||||
|
||||
func (rc *RequestConfig) Print() {
|
||||
t := table.NewWriter()
|
||||
t.SetOutputMirror(os.Stdout)
|
||||
t.SetStyle(table.StyleLight)
|
||||
t.SetColumnConfigs([]table.ColumnConfig{
|
||||
{
|
||||
Number: 2,
|
||||
WidthMaxEnforcer: func(col string, maxLen int) string {
|
||||
lines := strings.Split(col, "\n")
|
||||
for i, line := range lines {
|
||||
if len(line) > maxLen {
|
||||
lines[i] = line[:maxLen-3] + "..."
|
||||
}
|
||||
}
|
||||
return strings.Join(lines, "\n")
|
||||
},
|
||||
WidthMax: 50},
|
||||
})
|
||||
|
||||
t.AppendHeader(table.Row{"Request Configuration"})
|
||||
t.AppendRow(table.Row{"URL", rc.URL.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Method", rc.Method})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Timeout", rc.Timeout})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Dodos", rc.DodosCount})
|
||||
t.AppendSeparator()
|
||||
if rc.RequestCount > 0 {
|
||||
t.AppendRow(table.Row{"Requests", rc.RequestCount})
|
||||
} else {
|
||||
t.AppendRow(table.Row{"Requests"})
|
||||
}
|
||||
t.AppendSeparator()
|
||||
if rc.Duration > 0 {
|
||||
t.AppendRow(table.Row{"Duration", rc.Duration})
|
||||
} else {
|
||||
t.AppendRow(table.Row{"Duration"})
|
||||
}
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Params", rc.Params.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Headers", rc.Headers.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Cookies", rc.Cookies.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Proxy", rc.Proxies.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Body", rc.Body.String()})
|
||||
t.AppendSeparator()
|
||||
t.AppendRow(table.Row{"Skip Verify", rc.SkipVerify})
|
||||
|
||||
t.Render()
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Method *string `json:"method" yaml:"method"`
|
||||
URL *types.RequestURL `json:"url" yaml:"url"`
|
||||
Timeout *types.Timeout `json:"timeout" yaml:"timeout"`
|
||||
DodosCount *uint `json:"dodos" yaml:"dodos"`
|
||||
RequestCount *uint `json:"requests" yaml:"requests"`
|
||||
Duration *types.Duration `json:"duration" yaml:"duration"`
|
||||
Yes *bool `json:"yes" yaml:"yes"`
|
||||
SkipVerify *bool `json:"skip_verify" yaml:"skip_verify"`
|
||||
Params types.Params `json:"params" yaml:"params"`
|
||||
Headers types.Headers `json:"headers" yaml:"headers"`
|
||||
Cookies types.Cookies `json:"cookies" yaml:"cookies"`
|
||||
Body types.Body `json:"body" yaml:"body"`
|
||||
Proxies types.Proxies `json:"proxy" yaml:"proxy"`
|
||||
}
|
||||
|
||||
func NewConfig() *Config {
|
||||
return &Config{}
|
||||
}
|
||||
|
||||
func (config *Config) Validate() []error {
|
||||
var errs []error
|
||||
if utils.IsNilOrZero(config.URL) {
|
||||
errs = append(errs, errors.New("request URL is required"))
|
||||
} else {
|
||||
if config.URL.Scheme == "" {
|
||||
config.URL.Scheme = "http"
|
||||
}
|
||||
if config.URL.Scheme != "http" && config.URL.Scheme != "https" {
|
||||
errs = append(errs, errors.New("request URL scheme must be http or https"))
|
||||
}
|
||||
|
||||
urlParams := types.Params{}
|
||||
for key, values := range config.URL.Query() {
|
||||
for _, value := range values {
|
||||
urlParams = append(urlParams, types.KeyValue[string, []string]{
|
||||
Key: key,
|
||||
Value: []string{value},
|
||||
})
|
||||
}
|
||||
}
|
||||
config.Params = append(urlParams, config.Params...)
|
||||
config.URL.RawQuery = ""
|
||||
}
|
||||
|
||||
if utils.IsNilOrZero(config.Method) {
|
||||
errs = append(errs, errors.New("request method is required"))
|
||||
}
|
||||
if utils.IsNilOrZero(config.Timeout) {
|
||||
errs = append(errs, errors.New("request timeout must be greater than 0"))
|
||||
}
|
||||
if utils.IsNilOrZero(config.DodosCount) {
|
||||
errs = append(errs, errors.New("dodos count must be greater than 0"))
|
||||
}
|
||||
if utils.IsNilOrZero(config.Duration) && utils.IsNilOrZero(config.RequestCount) {
|
||||
errs = append(errs, errors.New("you should provide at least one of duration or request count"))
|
||||
}
|
||||
|
||||
for i, proxy := range config.Proxies {
|
||||
if proxy.String() == "" {
|
||||
errs = append(errs, fmt.Errorf("proxies[%d]: proxy cannot be empty", i))
|
||||
} else if schema := proxy.Scheme; !slices.Contains(SupportedProxySchemes, schema) {
|
||||
errs = append(errs,
|
||||
fmt.Errorf("proxies[%d]: proxy has unsupported scheme \"%s\" (supported schemes: %s)",
|
||||
i, proxy.String(), strings.Join(SupportedProxySchemes, ", "),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
funcMap := *utils.NewFuncMapGenerator(
|
||||
rand.New(
|
||||
rand.NewSource(
|
||||
time.Now().UnixNano(),
|
||||
),
|
||||
),
|
||||
).GetFuncMap()
|
||||
|
||||
for _, header := range config.Headers {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(header.Key)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("header key (%s) parse error: %v", header.Key, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("header key (%s) parse error: %v", header.Key, err))
|
||||
}
|
||||
}
|
||||
|
||||
for _, value := range header.Value {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("header value (%s) parse error: %v", value, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("header value (%s) parse error: %v", value, err))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, cookie := range config.Cookies {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(cookie.Key)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("cookie key (%s) parse error: %v", cookie.Key, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("cookie key (%s) parse error: %v", cookie.Key, err))
|
||||
}
|
||||
}
|
||||
|
||||
for _, value := range cookie.Value {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("cookie value (%s) parse error: %v", value, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("cookie value (%s) parse error: %v", value, err))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, param := range config.Params {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(param.Key)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("param key (%s) parse error: %v", param.Key, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("param key (%s) parse error: %v", param.Key, err))
|
||||
}
|
||||
}
|
||||
|
||||
for _, value := range param.Value {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("param value (%s) parse error: %v", value, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("param value (%s) parse error: %v", value, err))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, body := range config.Body {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(body)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("body (%s) parse error: %v", body, err))
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
if err = t.Execute(&buf, nil); err != nil {
|
||||
errs = append(errs, fmt.Errorf("body (%s) parse error: %v", body, err))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return errs
|
||||
}
|
||||
|
||||
func (config *Config) MergeConfig(newConfig *Config) {
|
||||
if newConfig.Method != nil {
|
||||
config.Method = newConfig.Method
|
||||
}
|
||||
if newConfig.URL != nil {
|
||||
config.URL = newConfig.URL
|
||||
}
|
||||
if newConfig.Timeout != nil {
|
||||
config.Timeout = newConfig.Timeout
|
||||
}
|
||||
if newConfig.DodosCount != nil {
|
||||
config.DodosCount = newConfig.DodosCount
|
||||
}
|
||||
if newConfig.RequestCount != nil {
|
||||
config.RequestCount = newConfig.RequestCount
|
||||
}
|
||||
if newConfig.Duration != nil {
|
||||
config.Duration = newConfig.Duration
|
||||
}
|
||||
if newConfig.Yes != nil {
|
||||
config.Yes = newConfig.Yes
|
||||
}
|
||||
if newConfig.SkipVerify != nil {
|
||||
config.SkipVerify = newConfig.SkipVerify
|
||||
}
|
||||
if len(newConfig.Params) != 0 {
|
||||
config.Params = newConfig.Params
|
||||
}
|
||||
if len(newConfig.Headers) != 0 {
|
||||
config.Headers = newConfig.Headers
|
||||
}
|
||||
if len(newConfig.Cookies) != 0 {
|
||||
config.Cookies = newConfig.Cookies
|
||||
}
|
||||
if len(newConfig.Body) != 0 {
|
||||
config.Body = newConfig.Body
|
||||
}
|
||||
if len(newConfig.Proxies) != 0 {
|
||||
config.Proxies = newConfig.Proxies
|
||||
}
|
||||
}
|
||||
|
||||
func (config *Config) SetDefaults() {
|
||||
if config.Method == nil {
|
||||
config.Method = utils.ToPtr(DefaultMethod)
|
||||
}
|
||||
if config.Timeout == nil {
|
||||
config.Timeout = &types.Timeout{Duration: DefaultTimeout}
|
||||
}
|
||||
if config.DodosCount == nil {
|
||||
config.DodosCount = utils.ToPtr(DefaultDodosCount)
|
||||
}
|
||||
if config.RequestCount == nil {
|
||||
config.RequestCount = utils.ToPtr(DefaultRequestCount)
|
||||
}
|
||||
if config.Duration == nil {
|
||||
config.Duration = &types.Duration{Duration: DefaultDuration}
|
||||
}
|
||||
if config.Yes == nil {
|
||||
config.Yes = utils.ToPtr(DefaultYes)
|
||||
}
|
||||
if config.SkipVerify == nil {
|
||||
config.SkipVerify = utils.ToPtr(DefaultSkipVerify)
|
||||
}
|
||||
config.Headers.SetIfNotExists("User-Agent", DefaultUserAgent)
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/types"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var supportedFileTypes = []string{"json", "yaml", "yml"}
|
||||
|
||||
func (config *Config) ReadFile(filePath types.ConfigFile) error {
|
||||
var (
|
||||
data []byte
|
||||
err error
|
||||
)
|
||||
|
||||
fileExt := filePath.Extension()
|
||||
if slices.Contains(supportedFileTypes, fileExt) {
|
||||
if filePath.LocationType() == types.FileLocationTypeRemoteHTTP {
|
||||
client := &http.Client{
|
||||
Timeout: 10 * time.Second,
|
||||
}
|
||||
|
||||
resp, err := client.Get(filePath.String())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to fetch config file from %s", filePath)
|
||||
}
|
||||
defer func() { _ = resp.Body.Close() }()
|
||||
|
||||
data, err = io.ReadAll(io.Reader(resp.Body))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read config file from %s", filePath)
|
||||
}
|
||||
} else {
|
||||
data, err = os.ReadFile(filePath.String())
|
||||
if err != nil {
|
||||
return errors.New("failed to read config file from " + filePath.String())
|
||||
}
|
||||
}
|
||||
|
||||
switch fileExt {
|
||||
case "json":
|
||||
return parseJSONConfig(data, config)
|
||||
case "yml", "yaml":
|
||||
return parseYAMLConfig(data, config)
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Errorf("unsupported config file type (supported types: %v)", strings.Join(supportedFileTypes, ", "))
|
||||
}
|
||||
|
||||
func parseJSONConfig(data []byte, config *Config) error {
|
||||
err := json.Unmarshal(data, &config)
|
||||
if err != nil {
|
||||
switch parsedErr := err.(type) {
|
||||
case *json.SyntaxError:
|
||||
return fmt.Errorf("JSON Config file: invalid syntax at byte offset %d", parsedErr.Offset)
|
||||
case *json.UnmarshalTypeError:
|
||||
return fmt.Errorf("JSON Config file: invalid type %v for field %s, expected %v", parsedErr.Value, parsedErr.Field, parsedErr.Type)
|
||||
default:
|
||||
return fmt.Errorf("JSON Config file: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseYAMLConfig(data []byte, config *Config) error {
|
||||
err := yaml.Unmarshal(data, &config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("YAML Config file: %s", err.Error())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
535
docs/configuration.md
Normal file
535
docs/configuration.md
Normal file
@@ -0,0 +1,535 @@
|
||||
# Configuration
|
||||
|
||||
Sarin supports environment variables, CLI flags, and YAML files. However, they are not exactly equivalent—YAML files have the most configuration options, followed by CLI flags, and then environment variables.
|
||||
|
||||
When the same option is specified in multiple sources, the following priority order applies:
|
||||
|
||||
```
|
||||
CLI Flags (Highest) > YAML > Environment Variables (Lowest)
|
||||
```
|
||||
|
||||
Use `-s` or `--show-config` to see the final merged configuration before sending requests.
|
||||
|
||||
## Properties
|
||||
|
||||
> **Note:** For CLI flags with `string / []string` type, the flag can be used once with a single value or multiple times to provide multiple values.
|
||||
|
||||
| Name | YAML | CLI | ENV | Default | Description |
|
||||
| --------------------------- | ----------------------------------- | -------------------------------------------- | -------------------------------- | ------- | ---------------------------- |
|
||||
| [Help](#help) | - | `-help` / `-h` | - | - | Show help message |
|
||||
| [Version](#version) | - | `-version` / `-v` | - | - | Show version and build info |
|
||||
| [Show Config](#show-config) | `showConfig`<br>(boolean) | `-show-config` / `-s`<br>(boolean) | `SARIN_SHOW_CONFIG`<br>(boolean) | `false` | Show merged configuration |
|
||||
| [Config File](#config-file) | `configFile`<br>(string / []string) | `-config-file` / `-f`<br>(string / []string) | `SARIN_CONFIG_FILE`<br>(string) | - | Path to config file(s) |
|
||||
| [URL](#url) | `url`<br>(string) | `-url` / `-U`<br>(string) | `SARIN_URL`<br>(string) | - | Target URL (HTTP/HTTPS) |
|
||||
| [Method](#method) | `method`<br>(string / []string) | `-method` / `-M`<br>(string / []string) | `SARIN_METHOD`<br>(string) | `GET` | HTTP method(s) |
|
||||
| [Timeout](#timeout) | `timeout`<br>(duration) | `-timeout` / `-T`<br>(duration) | `SARIN_TIMEOUT`<br>(duration) | `10s` | Request timeout |
|
||||
| [Concurrency](#concurrency) | `concurrency`<br>(number) | `-concurrency` / `-c`<br>(number) | `SARIN_CONCURRENCY`<br>(number) | `1` | Number of concurrent workers |
|
||||
| [Requests](#requests) | `requests`<br>(number) | `-requests` / `-r`<br>(number) | `SARIN_REQUESTS`<br>(number) | - | Total requests to send |
|
||||
| [Duration](#duration) | `duration`<br>(duration) | `-duration` / `-d`<br>(duration) | `SARIN_DURATION`<br>(duration) | - | Test duration |
|
||||
| [Quiet](#quiet) | `quiet`<br>(boolean) | `-quiet` / `-q`<br>(boolean) | `SARIN_QUIET`<br>(boolean) | `false` | Hide progress bar and logs |
|
||||
| [Output](#output) | `output`<br>(string) | `-output` / `-o`<br>(string) | `SARIN_OUTPUT`<br>(string) | `table` | Output format for stats |
|
||||
| [Dry Run](#dry-run) | `dryRun`<br>(boolean) | `-dry-run` / `-z`<br>(boolean) | `SARIN_DRY_RUN`<br>(boolean) | `false` | Generate without sending |
|
||||
| [Insecure](#insecure) | `insecure`<br>(boolean) | `-insecure` / `-I`<br>(boolean) | `SARIN_INSECURE`<br>(boolean) | `false` | Skip TLS verification |
|
||||
| [Body](#body) | `body`<br>(string / []string) | `-body` / `-B`<br>(string / []string) | `SARIN_BODY`<br>(string) | - | Request body |
|
||||
| [Params](#params) | `params`<br>(object) | `-param` / `-P`<br>(string / []string) | `SARIN_PARAM`<br>(string) | - | URL query parameters |
|
||||
| [Headers](#headers) | `headers`<br>(object) | `-header` / `-H`<br>(string / []string) | `SARIN_HEADER`<br>(string) | - | HTTP headers |
|
||||
| [Cookies](#cookies) | `cookies`<br>(object) | `-cookie` / `-C`<br>(string / []string) | `SARIN_COOKIE`<br>(string) | - | HTTP cookies |
|
||||
| [Proxy](#proxy) | `proxy`<br>(string / []string) | `-proxy` / `-X`<br>(string / []string) | `SARIN_PROXY`<br>(string) | - | Proxy URL(s) |
|
||||
| [Values](#values) | `values`<br>(string / []string) | `-values` / `-V`<br>(string / []string) | `SARIN_VALUES`<br>(string) | - | Template values (key=value) |
|
||||
| [Lua](#lua) | `lua`<br>(string / []string) | `-lua`<br>(string / []string) | `SARIN_LUA`<br>(string) | - | Lua script(s) |
|
||||
| [Js](#js) | `js`<br>(string / []string) | `-js`<br>(string / []string) | `SARIN_JS`<br>(string) | - | JavaScript script(s) |
|
||||
|
||||
---
|
||||
|
||||
## Help
|
||||
|
||||
Show help message.
|
||||
|
||||
```sh
|
||||
sarin -help
|
||||
```
|
||||
|
||||
## Version
|
||||
|
||||
Show version and build information.
|
||||
|
||||
```sh
|
||||
sarin -version
|
||||
```
|
||||
|
||||
## Show Config
|
||||
|
||||
Show the final merged configuration before sending requests.
|
||||
|
||||
```sh
|
||||
sarin -show-config
|
||||
```
|
||||
|
||||
## Config File
|
||||
|
||||
Path to configuration file(s). Supports local paths and remote URLs.
|
||||
|
||||
**Priority Rules:**
|
||||
|
||||
1. **CLI flags** (`-f`) have highest priority, processed left to right (rightmost wins)
|
||||
2. **Included files** (via `configFile` property) are processed with lower priority than their parent
|
||||
3. **Environment variable** (`SARIN_CONFIG_FILE`) has lowest priority
|
||||
|
||||
**Example:**
|
||||
|
||||
```yaml
|
||||
# config2.yaml
|
||||
configFile: /config4.yaml
|
||||
url: http://from-config2.com
|
||||
```
|
||||
|
||||
```sh
|
||||
SARIN_CONFIG_FILE=/config1.yaml sarin -f /config2.yaml -f https://example.com/config3.yaml
|
||||
```
|
||||
|
||||
**Resolution order (lowest to highest priority):**
|
||||
|
||||
| Source | File | Priority |
|
||||
| ------------------------ | ------------ | -------- |
|
||||
| ENV (SARIN_CONFIG_FILE) | config1.yaml | Lowest |
|
||||
| Included by config2.yaml | config4.yaml | ↑ |
|
||||
| CLI -f (first) | config2.yaml | ↑ |
|
||||
| CLI -f (second) | config3.yaml | Highest |
|
||||
|
||||
**Why this order?**
|
||||
|
||||
- `config1.yaml` comes from ENV → lowest priority
|
||||
- `config2.yaml` comes from CLI → higher than ENV
|
||||
- `config4.yaml` is included BY `config2.yaml` → inherits position below its parent
|
||||
- `config3.yaml` comes from CLI after `config2.yaml` → highest priority
|
||||
|
||||
If all four files define `url`, the value from `config3.yaml` wins.
|
||||
|
||||
**Merge behavior by field:**
|
||||
|
||||
- **Scalar fields** (`url`, `requests`, `duration`, `timeout`, `concurrency`, etc.) — higher priority overrides lower priority
|
||||
- **Method and Body** — higher priority overrides lower priority (no merging)
|
||||
- **Headers, Params, Cookies, Proxies, Values, Lua, and Js** — accumulated across all config files
|
||||
|
||||
## URL
|
||||
|
||||
Target URL. Must be HTTP or HTTPS. The URL path supports [templating](templating.md), allowing dynamic path generation per request.
|
||||
|
||||
> **Note:** Templating is only supported in the URL path. Host and scheme must be static.
|
||||
|
||||
**Example with dynamic path:**
|
||||
|
||||
```yaml
|
||||
url: http://example.com/users/{{ fakeit_UUID }}/profile
|
||||
```
|
||||
|
||||
**CLI example with dynamic path:**
|
||||
|
||||
```sh
|
||||
sarin -U "http://example.com/users/{{ fakeit_UUID }}" -r 1000 -c 10
|
||||
```
|
||||
|
||||
## Method
|
||||
|
||||
HTTP method(s). If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request. Supports [templating](templating.md).
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
method: GET
|
||||
|
||||
# OR
|
||||
|
||||
method:
|
||||
- GET
|
||||
- POST
|
||||
- PUT
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-method GET -method POST -method PUT
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_METHOD=GET
|
||||
```
|
||||
|
||||
## Timeout
|
||||
|
||||
Request timeout. Must be greater than 0.
|
||||
|
||||
Valid time units: `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`
|
||||
|
||||
**Examples:** `5s`, `300ms`, `1m20s`
|
||||
|
||||
## Concurrency
|
||||
|
||||
Number of concurrent workers. Must be between 1 and 100,000,000.
|
||||
|
||||
## Requests
|
||||
|
||||
Total number of requests to send. At least one of `requests` or `duration` must be specified. If both are provided, the test stops when either limit is reached first.
|
||||
|
||||
## Duration
|
||||
|
||||
Test duration. At least one of `requests` or `duration` must be specified. If both are provided, the test stops when either limit is reached first.
|
||||
|
||||
Valid time units: `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`
|
||||
|
||||
**Examples:** `1m30s`, `25s`, `1h`
|
||||
|
||||
## Quiet
|
||||
|
||||
Hide the progress bar and runtime logs.
|
||||
|
||||
## Output
|
||||
|
||||
Output format for response statistics.
|
||||
|
||||
Valid formats: `table`, `json`, `yaml`, `none`
|
||||
|
||||
Using `none` disables output and reduces memory usage since response statistics are not stored.
|
||||
|
||||
## Dry Run
|
||||
|
||||
Generate requests without sending them. Useful for testing templates.
|
||||
|
||||
## Insecure
|
||||
|
||||
Skip TLS certificate verification.
|
||||
|
||||
## Body
|
||||
|
||||
Request body. If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request. Supports [templating](templating.md).
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
body: '{"product": "car"}'
|
||||
|
||||
# OR
|
||||
|
||||
body:
|
||||
- '{"product": "car"}'
|
||||
- '{"product": "phone"}'
|
||||
- '{"product": "watch"}'
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-body '{"product": "car"}' -body '{"product": "phone"}' -body '{"product": "watch"}'
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_BODY='{"product": "car"}'
|
||||
```
|
||||
|
||||
## Params
|
||||
|
||||
URL query parameters. Supports [templating](templating.md).
|
||||
|
||||
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
params:
|
||||
key1: value1
|
||||
key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# OR
|
||||
|
||||
params:
|
||||
- key1: value1
|
||||
- key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# To send both values in every request, use separate entries:
|
||||
params:
|
||||
- key2: value2
|
||||
- key2: value3 # both sent in every request
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-param "key1=value1" -param "key2=value2" -param "key2=value3" # sends both value2 and value3
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_PARAM="key1=value1"
|
||||
```
|
||||
|
||||
## Headers
|
||||
|
||||
HTTP headers. Supports [templating](templating.md).
|
||||
|
||||
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
headers:
|
||||
key1: value1
|
||||
key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# OR
|
||||
|
||||
headers:
|
||||
- key1: value1
|
||||
- key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# To send both values in every request, use separate entries:
|
||||
headers:
|
||||
- key2: value2
|
||||
- key2: value3 # both sent in every request
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-header "key1: value1" -header "key2: value2" -header "key2: value3" # sends both value2 and value3
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_HEADER="key1: value1"
|
||||
```
|
||||
|
||||
## Cookies
|
||||
|
||||
HTTP cookies. Supports [templating](templating.md).
|
||||
|
||||
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
cookies:
|
||||
key1: value1
|
||||
key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# OR
|
||||
|
||||
cookies:
|
||||
- key1: value1
|
||||
- key2: [value2, value3] # cycles between value2 and value3
|
||||
|
||||
# To send both values in every request, use separate entries:
|
||||
cookies:
|
||||
- key2: value2
|
||||
- key2: value3 # both sent in every request
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-cookie "key1=value1" -cookie "key2=value2" -cookie "key2=value3" # sends both value2 and value3
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_COOKIE="key1=value1"
|
||||
```
|
||||
|
||||
## Proxy
|
||||
|
||||
Proxy URL(s). If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request.
|
||||
|
||||
Supported protocols: `http`, `https`, `socks5`, `socks5h`
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
proxy: http://proxy1.com
|
||||
|
||||
# OR
|
||||
|
||||
proxy:
|
||||
- http://proxy1.com
|
||||
- socks5://proxy2.com
|
||||
- socks5h://proxy3.com
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-proxy http://proxy1.com -proxy socks5://proxy2.com -proxy socks5h://proxy3.com
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_PROXY="http://proxy1.com"
|
||||
```
|
||||
|
||||
## Values
|
||||
|
||||
Template values in key=value format. Supports [templating](templating.md). Multiple values can be specified and all are rendered for each request.
|
||||
|
||||
See the [Templating Guide](templating.md) for more details on using values and available template functions.
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
values: "key=value"
|
||||
|
||||
# OR
|
||||
|
||||
values: |
|
||||
key1=value1
|
||||
key2=value2
|
||||
key3=value3
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-values "key1=value1" -values "key2=value2" -values "key3=value3"
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_VALUES="key1=value1"
|
||||
```
|
||||
|
||||
## Lua
|
||||
|
||||
Lua script(s) for request transformation. Each script must define a global `transform` function that receives a request object and returns the modified request object. Scripts run after template rendering, before the request is sent.
|
||||
|
||||
If multiple Lua scripts are provided, they are chained in order—the output of one becomes the input to the next. When both Lua and JavaScript scripts are specified, all Lua scripts run first, then all JavaScript scripts.
|
||||
|
||||
**Script sources:**
|
||||
|
||||
Scripts can be provided as:
|
||||
|
||||
- **Inline script:** Direct script code
|
||||
- **File reference:** `@/path/to/script.lua` or `@./relative/path.lua`
|
||||
- **URL reference:** `@http://...` or `@https://...`
|
||||
- **Escaped `@`:** `@@...` for inline scripts that start with a literal `@`
|
||||
|
||||
**The `transform` function:**
|
||||
|
||||
```lua
|
||||
function transform(req)
|
||||
-- req.method (string) - HTTP method (e.g. "GET", "POST")
|
||||
-- req.path (string) - URL path (e.g. "/api/users")
|
||||
-- req.body (string) - Request body
|
||||
-- req.headers (table of string/arrays) - HTTP headers (e.g. {["X-Key"] = "value"})
|
||||
-- req.params (table of string/arrays) - Query parameters (e.g. {["id"] = "123"})
|
||||
-- req.cookies (table of string/arrays) - Cookies (e.g. {["session"] = "abc"})
|
||||
|
||||
req.headers["X-Custom"] = "my-value"
|
||||
return req
|
||||
end
|
||||
```
|
||||
|
||||
> **Note:** Header, parameter, and cookie values can be a single string or a table (array) for multiple values per key (e.g. `{"val1", "val2"}`).
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
lua: |
|
||||
function transform(req)
|
||||
req.headers["X-Custom"] = "my-value"
|
||||
return req
|
||||
end
|
||||
|
||||
# OR
|
||||
|
||||
lua:
|
||||
- "@/path/to/script1.lua"
|
||||
- "@/path/to/script2.lua"
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-lua 'function transform(req) req.headers["X-Custom"] = "my-value" return req end'
|
||||
|
||||
# OR
|
||||
|
||||
-lua @/path/to/script1.lua -lua @/path/to/script2.lua
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_LUA='function transform(req) req.headers["X-Custom"] = "my-value" return req end'
|
||||
```
|
||||
|
||||
## Js
|
||||
|
||||
JavaScript script(s) for request transformation. Each script must define a global `transform` function that receives a request object and returns the modified request object. Scripts run after template rendering, before the request is sent.
|
||||
|
||||
If multiple JavaScript scripts are provided, they are chained in order—the output of one becomes the input to the next. When both Lua and JavaScript scripts are specified, all Lua scripts run first, then all JavaScript scripts.
|
||||
|
||||
**Script sources:**
|
||||
|
||||
Scripts can be provided as:
|
||||
|
||||
- **Inline script:** Direct script code
|
||||
- **File reference:** `@/path/to/script.js` or `@./relative/path.js`
|
||||
- **URL reference:** `@http://...` or `@https://...`
|
||||
- **Escaped `@`:** `@@...` for inline scripts that start with a literal `@`
|
||||
|
||||
**The `transform` function:**
|
||||
|
||||
```javascript
|
||||
function transform(req) {
|
||||
// req.method (string) - HTTP method (e.g. "GET", "POST")
|
||||
// req.path (string) - URL path (e.g. "/api/users")
|
||||
// req.body (string) - Request body
|
||||
// req.headers (object of string/arrays) - HTTP headers (e.g. {"X-Key": "value"})
|
||||
// req.params (object of string/arrays) - Query parameters (e.g. {"id": "123"})
|
||||
// req.cookies (object of string/arrays) - Cookies (e.g. {"session": "abc"})
|
||||
|
||||
req.headers["X-Custom"] = "my-value";
|
||||
return req;
|
||||
}
|
||||
```
|
||||
|
||||
> **Note:** Header, parameter, and cookie values can be a single string or an array for multiple values per key (e.g. `["val1", "val2"]`).
|
||||
|
||||
**YAML example:**
|
||||
|
||||
```yaml
|
||||
js: |
|
||||
function transform(req) {
|
||||
req.headers["X-Custom"] = "my-value";
|
||||
return req;
|
||||
}
|
||||
|
||||
# OR
|
||||
|
||||
js:
|
||||
- "@/path/to/script1.js"
|
||||
- "@/path/to/script2.js"
|
||||
```
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
-js 'function transform(req) { req.headers["X-Custom"] = "my-value"; return req; }'
|
||||
|
||||
# OR
|
||||
|
||||
-js @/path/to/script1.js -js @/path/to/script2.js
|
||||
```
|
||||
|
||||
**ENV example:**
|
||||
|
||||
```sh
|
||||
SARIN_JS='function transform(req) { req.headers["X-Custom"] = "my-value"; return req; }'
|
||||
```
|
||||
1030
docs/examples.md
Normal file
1030
docs/examples.md
Normal file
File diff suppressed because it is too large
Load Diff
BIN
docs/static/demo.gif
vendored
Normal file
BIN
docs/static/demo.gif
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 96 KiB |
667
docs/templating.md
Normal file
667
docs/templating.md
Normal file
@@ -0,0 +1,667 @@
|
||||
# Templating
|
||||
|
||||
Sarin supports Go templates in URL paths, methods, bodies, headers, params, cookies, and values.
|
||||
|
||||
> **Note:** Templating in URL host and scheme is not supported. Only the path portion of the URL can contain templates.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Using Values](#using-values)
|
||||
- [General Functions](#general-functions)
|
||||
- [String Functions](#string-functions)
|
||||
- [Collection Functions](#collection-functions)
|
||||
- [Body Functions](#body-functions)
|
||||
- [File Functions](#file-functions)
|
||||
- [Fake Data Functions](#fake-data-functions)
|
||||
- [File](#file)
|
||||
- [ID](#id)
|
||||
- [Product](#product)
|
||||
- [Person](#person)
|
||||
- [Generate](#generate)
|
||||
- [Auth](#auth)
|
||||
- [Address](#address)
|
||||
- [Game](#game)
|
||||
- [Beer](#beer)
|
||||
- [Car](#car)
|
||||
- [Words](#words)
|
||||
- [Text](#text)
|
||||
- [Foods](#foods)
|
||||
- [Misc](#misc)
|
||||
- [Color](#color)
|
||||
- [Image](#image)
|
||||
- [Internet](#internet)
|
||||
- [HTML](#html)
|
||||
- [Date/Time](#datetime)
|
||||
- [Payment](#payment)
|
||||
- [Finance](#finance)
|
||||
- [Company](#company)
|
||||
- [Hacker](#hacker)
|
||||
- [Hipster](#hipster)
|
||||
- [App](#app)
|
||||
- [Animal](#animal)
|
||||
- [Emoji](#emoji)
|
||||
- [Language](#language)
|
||||
- [Number](#number)
|
||||
- [String](#string)
|
||||
- [Celebrity](#celebrity)
|
||||
- [Minecraft](#minecraft)
|
||||
- [Book](#book)
|
||||
- [Movie](#movie)
|
||||
- [Error](#error)
|
||||
- [School](#school)
|
||||
- [Song](#song)
|
||||
|
||||
## Using Values
|
||||
|
||||
Values are generated once per request and can be referenced in multiple fields using `{{ .Values.KEY }}` syntax. This is useful when you need to use the same generated value (e.g., a UUID) in both headers and body within the same request.
|
||||
|
||||
**Example:**
|
||||
|
||||
```yaml
|
||||
values: |
|
||||
REQUEST_ID={{ fakeit_UUID }}
|
||||
USER_ID={{ fakeit_UUID }}
|
||||
|
||||
headers:
|
||||
X-Request-ID: "{{ .Values.REQUEST_ID }}"
|
||||
body: |
|
||||
{
|
||||
"requestId": "{{ .Values.REQUEST_ID }}",
|
||||
"userId": "{{ .Values.USER_ID }}"
|
||||
}
|
||||
```
|
||||
|
||||
In this example, `REQUEST_ID` is generated once and the same value is used in both the header and body. Each new request generates a new `REQUEST_ID`.
|
||||
|
||||
**CLI example:**
|
||||
|
||||
```sh
|
||||
sarin -U http://example.com/users \
|
||||
-V "ID={{ fakeit_UUID }}" \
|
||||
-H "X-Request-ID: {{ .Values.ID }}" \
|
||||
-B '{"id": "{{ .Values.ID }}"}'
|
||||
```
|
||||
|
||||
## General Functions
|
||||
|
||||
### String Functions
|
||||
|
||||
| Function | Description | Example |
|
||||
| ---------------------------------------------------------- | ------------------------------------------------------------------- | --------------------------------------------------------- |
|
||||
| `strings_ToUpper` | Convert string to uppercase | `{{ strings_ToUpper "hello" }}` → `HELLO` |
|
||||
| `strings_ToLower` | Convert string to lowercase | `{{ strings_ToLower "HELLO" }}` → `hello` |
|
||||
| `strings_RemoveSpaces` | Remove all spaces from string | `{{ strings_RemoveSpaces "hello world" }}` → `helloworld` |
|
||||
| `strings_Replace(s string, old string, new string, n int)` | Replace first `n` occurrences of `old` with `new`. Use `-1` for all | `{{ strings_Replace "hello" "l" "L" -1 }}` → `heLLo` |
|
||||
| `strings_ToDate(date string)` | Parse date string (YYYY-MM-DD format) | `{{ strings_ToDate "2024-01-15" }}` |
|
||||
| `strings_First(s string, n int)` | Get first `n` characters | `{{ strings_First "hello" 2 }}` → `he` |
|
||||
| `strings_Last(s string, n int)` | Get last `n` characters | `{{ strings_Last "hello" 2 }}` → `lo` |
|
||||
| `strings_Truncate(s string, n int)` | Truncate to `n` characters with ellipsis | `{{ strings_Truncate "hello world" 5 }}` → `hello...` |
|
||||
| `strings_TrimPrefix(s string, prefix string)` | Remove prefix from string | `{{ strings_TrimPrefix "hello" "he" }}` → `llo` |
|
||||
| `strings_TrimSuffix(s string, suffix string)` | Remove suffix from string | `{{ strings_TrimSuffix "hello" "lo" }}` → `hel` |
|
||||
|
||||
### Collection Functions
|
||||
|
||||
| Function | Description | Example |
|
||||
| ---------------------------------------- | --------------------------------------------- | -------------------------------------------------------- |
|
||||
| `dict_Str(pairs ...string)` | Create string dictionary from key-value pairs | `{{ dict_Str "key1" "val1" "key2" "val2" }}` |
|
||||
| `slice_Str(values ...string)` | Create string slice | `{{ slice_Str "a" "b" "c" }}` |
|
||||
| `slice_Join(slice []string, sep string)` | Join string slice with separator | `{{ slice_Join (slice_Str "a" "b" "c") "-" }}` → `a-b-c` |
|
||||
| `slice_Int(values ...int)` | Create int slice | `{{ slice_Int 1 2 3 }}` |
|
||||
| `slice_Uint(values ...uint)` | Create uint slice | `{{ slice_Uint 1 2 3 }}` |
|
||||
|
||||
### Body Functions
|
||||
|
||||
| Function | Description | Example |
|
||||
| -------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------- |
|
||||
| `body_FormData(pairs ...string)` | Create multipart form data from key-value pairs. Automatically sets the `Content-Type` header. Values starting with `@` are treated as file references (local path or URL). Use `@@` to escape literal `@`. | `{{ body_FormData "field1" "value1" "file" "@/path/to/file.pdf" }}` |
|
||||
|
||||
**`body_FormData` Details:**
|
||||
|
||||
```yaml
|
||||
# Text fields only
|
||||
body: '{{ body_FormData "username" "john" "email" "john@example.com" }}'
|
||||
|
||||
# Single file upload
|
||||
body: '{{ body_FormData "document" "@/path/to/file.pdf" }}'
|
||||
|
||||
# File from URL
|
||||
body: '{{ body_FormData "image" "@https://example.com/photo.jpg" }}'
|
||||
|
||||
# Mixed text fields and files
|
||||
body: |
|
||||
{{ body_FormData
|
||||
"title" "My Report"
|
||||
"author" "John Doe"
|
||||
"cover" "@/path/to/cover.jpg"
|
||||
"document" "@/path/to/report.pdf"
|
||||
}}
|
||||
|
||||
# Multiple files with same field name
|
||||
body: |
|
||||
{{ body_FormData
|
||||
"files" "@/path/to/file1.pdf"
|
||||
"files" "@/path/to/file2.pdf"
|
||||
}}
|
||||
|
||||
# Escape @ for literal value (sends "@username")
|
||||
body: '{{ body_FormData "twitter" "@@username" }}'
|
||||
```
|
||||
|
||||
> **Note:** Files are cached in memory after the first read. Subsequent requests reuse the cached content, avoiding repeated disk/network I/O.
|
||||
|
||||
### File Functions
|
||||
|
||||
| Function | Description | Example |
|
||||
| ---------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------- |
|
||||
| `file_Base64(source string)` | Read a file (local path or URL) and return its Base64 encoded content. Files are cached after first read. | `{{ file_Base64 "/path/to/file.pdf" }}` |
|
||||
|
||||
**`file_Base64` Details:**
|
||||
|
||||
```yaml
|
||||
# Local file as Base64 in JSON body
|
||||
body: '{"file": "{{ file_Base64 "/path/to/document.pdf" }}", "filename": "document.pdf"}'
|
||||
|
||||
# Remote file as Base64
|
||||
body: '{"image": "{{ file_Base64 "https://example.com/photo.jpg" }}"}'
|
||||
|
||||
# Combined with values for reuse
|
||||
values: "FILE_DATA={{ file_Base64 \"/path/to/file.bin\" }}"
|
||||
body: '{"data": "{{ .Values.FILE_DATA }}"}'
|
||||
```
|
||||
|
||||
## Fake Data Functions
|
||||
|
||||
These functions are powered by [gofakeit](https://github.com/brianvoe/gofakeit) library.
|
||||
|
||||
### File
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ---------------------- | -------------- | -------------------- |
|
||||
| `fakeit_FileExtension` | File extension | `"nes"` |
|
||||
| `fakeit_FileMimeType` | MIME type | `"application/json"` |
|
||||
|
||||
### ID
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------- | --------------------------------- | ---------------------------------------- |
|
||||
| `fakeit_ID` | Generate random unique identifier | `"pfsfktb87rcmj6bqha2fz9"` |
|
||||
| `fakeit_UUID` | Generate UUID v4 | `"b4ddf623-4ea6-48e5-9292-541f028d1fdb"` |
|
||||
|
||||
### Product
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------------------- | ------------------- | --------------------------------- |
|
||||
| `fakeit_ProductName` | Product name | `"olive copper monitor"` |
|
||||
| `fakeit_ProductDescription` | Product description | `"Backwards caused quarterly..."` |
|
||||
| `fakeit_ProductCategory` | Product category | `"clothing"` |
|
||||
| `fakeit_ProductFeature` | Product feature | `"ultra-lightweight"` |
|
||||
| `fakeit_ProductMaterial` | Product material | `"brass"` |
|
||||
| `fakeit_ProductUPC` | UPC code | `"012780949980"` |
|
||||
| `fakeit_ProductAudience` | Target audience | `["adults"]` |
|
||||
| `fakeit_ProductDimension` | Product dimension | `"medium"` |
|
||||
| `fakeit_ProductUseCase` | Use case | `"home"` |
|
||||
| `fakeit_ProductBenefit` | Product benefit | `"comfort"` |
|
||||
| `fakeit_ProductSuffix` | Product suffix | `"pro"` |
|
||||
| `fakeit_ProductISBN` | ISBN number | `"978-1-4028-9462-6"` |
|
||||
|
||||
### Person
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ----------------------- | ---------------------- | ------------------------ |
|
||||
| `fakeit_Name` | Full name | `"Markus Moen"` |
|
||||
| `fakeit_NamePrefix` | Name prefix | `"Mr."` |
|
||||
| `fakeit_NameSuffix` | Name suffix | `"Jr."` |
|
||||
| `fakeit_FirstName` | First name | `"Markus"` |
|
||||
| `fakeit_MiddleName` | Middle name | `"Belinda"` |
|
||||
| `fakeit_LastName` | Last name | `"Daniel"` |
|
||||
| `fakeit_Gender` | Gender | `"male"` |
|
||||
| `fakeit_Age` | Age | `40` |
|
||||
| `fakeit_Ethnicity` | Ethnicity | `"German"` |
|
||||
| `fakeit_SSN` | Social Security Number | `"296446360"` |
|
||||
| `fakeit_EIN` | Employer ID Number | `"12-3456789"` |
|
||||
| `fakeit_Hobby` | Hobby | `"Swimming"` |
|
||||
| `fakeit_Email` | Email address | `"markusmoen@pagac.net"` |
|
||||
| `fakeit_Phone` | Phone number | `"6136459948"` |
|
||||
| `fakeit_PhoneFormatted` | Formatted phone | `"136-459-9489"` |
|
||||
|
||||
### Generate
|
||||
|
||||
| Function | Description | Example |
|
||||
| ------------------------------ | -------------------------------------- | ------------------------------------------------------ |
|
||||
| `fakeit_Regex(pattern string)` | Generate string matching regex pattern | `{{ fakeit_Regex "[a-z]{5}[0-9]{3}" }}` → `"abcde123"` |
|
||||
|
||||
### Auth
|
||||
|
||||
| Function | Description | Example |
|
||||
| --------------------------------------------------------------------------------------------- | ----------------------------------------------------------- | ----------------------------------------------------- |
|
||||
| `fakeit_Username` | Username | `"Daniel1364"` |
|
||||
| `fakeit_Password(upper bool, lower bool, numeric bool, special bool, space bool, length int)` | Generate password with specified character types and length | `{{ fakeit_Password true true true false false 16 }}` |
|
||||
|
||||
### Address
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------------------------------------------- | ---------------------------- | ---------------------------------------------------- |
|
||||
| `fakeit_City` | City name | `"Marcelside"` |
|
||||
| `fakeit_Country` | Country name | `"United States of America"` |
|
||||
| `fakeit_CountryAbr` | Country abbreviation | `"US"` |
|
||||
| `fakeit_State` | State name | `"Illinois"` |
|
||||
| `fakeit_StateAbr` | State abbreviation | `"IL"` |
|
||||
| `fakeit_Street` | Full street | `"364 East Rapidsborough"` |
|
||||
| `fakeit_StreetName` | Street name | `"View"` |
|
||||
| `fakeit_StreetNumber` | Street number | `"13645"` |
|
||||
| `fakeit_StreetPrefix` | Street prefix | `"East"` |
|
||||
| `fakeit_StreetSuffix` | Street suffix | `"Ave"` |
|
||||
| `fakeit_Unit` | Unit | `"Apt 123"` |
|
||||
| `fakeit_Zip` | ZIP code | `"13645"` |
|
||||
| `fakeit_Latitude` | Random latitude | `-73.534056` |
|
||||
| `fakeit_Longitude` | Random longitude | `-147.068112` |
|
||||
| `fakeit_LatitudeInRange(min float64, max float64)` | Latitude in specified range | `{{ fakeit_LatitudeInRange 0 90 }}` → `22.921026` |
|
||||
| `fakeit_LongitudeInRange(min float64, max float64)` | Longitude in specified range | `{{ fakeit_LongitudeInRange 0 180 }}` → `122.471830` |
|
||||
|
||||
### Game
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ----------------- | ----------- | ------------------- |
|
||||
| `fakeit_Gamertag` | Gamer tag | `"footinterpret63"` |
|
||||
|
||||
### Beer
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| -------------------- | --------------- | ----------------------------- |
|
||||
| `fakeit_BeerAlcohol` | Alcohol content | `"2.7%"` |
|
||||
| `fakeit_BeerBlg` | Blg | `"6.4°Blg"` |
|
||||
| `fakeit_BeerHop` | Hop | `"Glacier"` |
|
||||
| `fakeit_BeerIbu` | IBU | `"29 IBU"` |
|
||||
| `fakeit_BeerMalt` | Malt | `"Munich"` |
|
||||
| `fakeit_BeerName` | Beer name | `"Duvel"` |
|
||||
| `fakeit_BeerStyle` | Beer style | `"European Amber Lager"` |
|
||||
| `fakeit_BeerYeast` | Yeast | `"1388 - Belgian Strong Ale"` |
|
||||
|
||||
### Car
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ---------------------------- | ------------ | ---------------------- |
|
||||
| `fakeit_CarMaker` | Car maker | `"Nissan"` |
|
||||
| `fakeit_CarModel` | Car model | `"Aveo"` |
|
||||
| `fakeit_CarType` | Car type | `"Passenger car mini"` |
|
||||
| `fakeit_CarFuelType` | Fuel type | `"CNG"` |
|
||||
| `fakeit_CarTransmissionType` | Transmission | `"Manual"` |
|
||||
|
||||
### Words
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ---------------------------------- | --------------------------- | ---------------- |
|
||||
| `fakeit_Word` | Random word | `"example"` |
|
||||
| `fakeit_Noun` | Random noun | `"computer"` |
|
||||
| `fakeit_NounCommon` | Common noun | `"table"` |
|
||||
| `fakeit_NounConcrete` | Concrete noun | `"chair"` |
|
||||
| `fakeit_NounAbstract` | Abstract noun | `"freedom"` |
|
||||
| `fakeit_NounCollectivePeople` | Collective noun (people) | `"team"` |
|
||||
| `fakeit_NounCollectiveAnimal` | Collective noun (animal) | `"herd"` |
|
||||
| `fakeit_NounCollectiveThing` | Collective noun (thing) | `"bunch"` |
|
||||
| `fakeit_NounCountable` | Countable noun | `"book"` |
|
||||
| `fakeit_NounUncountable` | Uncountable noun | `"water"` |
|
||||
| `fakeit_Verb` | Random verb | `"run"` |
|
||||
| `fakeit_VerbAction` | Action verb | `"jump"` |
|
||||
| `fakeit_VerbLinking` | Linking verb | `"is"` |
|
||||
| `fakeit_VerbHelping` | Helping verb | `"can"` |
|
||||
| `fakeit_Adverb` | Random adverb | `"quickly"` |
|
||||
| `fakeit_AdverbManner` | Manner adverb | `"carefully"` |
|
||||
| `fakeit_AdverbDegree` | Degree adverb | `"very"` |
|
||||
| `fakeit_AdverbPlace` | Place adverb | `"here"` |
|
||||
| `fakeit_AdverbTimeDefinite` | Definite time adverb | `"yesterday"` |
|
||||
| `fakeit_AdverbTimeIndefinite` | Indefinite time adverb | `"soon"` |
|
||||
| `fakeit_AdverbFrequencyDefinite` | Definite frequency adverb | `"daily"` |
|
||||
| `fakeit_AdverbFrequencyIndefinite` | Indefinite frequency adverb | `"often"` |
|
||||
| `fakeit_Preposition` | Random preposition | `"on"` |
|
||||
| `fakeit_PrepositionSimple` | Simple preposition | `"in"` |
|
||||
| `fakeit_PrepositionDouble` | Double preposition | `"out of"` |
|
||||
| `fakeit_PrepositionCompound` | Compound preposition | `"according to"` |
|
||||
| `fakeit_Adjective` | Random adjective | `"beautiful"` |
|
||||
| `fakeit_AdjectiveDescriptive` | Descriptive adjective | `"large"` |
|
||||
| `fakeit_AdjectiveQuantitative` | Quantitative adjective | `"many"` |
|
||||
| `fakeit_AdjectiveProper` | Proper adjective | `"American"` |
|
||||
| `fakeit_AdjectiveDemonstrative` | Demonstrative adjective | `"this"` |
|
||||
| `fakeit_AdjectivePossessive` | Possessive adjective | `"my"` |
|
||||
| `fakeit_AdjectiveInterrogative` | Interrogative adjective | `"which"` |
|
||||
| `fakeit_AdjectiveIndefinite` | Indefinite adjective | `"some"` |
|
||||
| `fakeit_Pronoun` | Random pronoun | `"he"` |
|
||||
| `fakeit_PronounPersonal` | Personal pronoun | `"I"` |
|
||||
| `fakeit_PronounObject` | Object pronoun | `"him"` |
|
||||
| `fakeit_PronounPossessive` | Possessive pronoun | `"mine"` |
|
||||
| `fakeit_PronounReflective` | Reflective pronoun | `"myself"` |
|
||||
| `fakeit_PronounDemonstrative` | Demonstrative pronoun | `"that"` |
|
||||
| `fakeit_PronounInterrogative` | Interrogative pronoun | `"who"` |
|
||||
| `fakeit_PronounRelative` | Relative pronoun | `"which"` |
|
||||
| `fakeit_Connective` | Random connective | `"however"` |
|
||||
| `fakeit_ConnectiveTime` | Time connective | `"then"` |
|
||||
| `fakeit_ConnectiveComparative` | Comparative connective | `"similarly"` |
|
||||
| `fakeit_ConnectiveComplaint` | Complaint connective | `"although"` |
|
||||
| `fakeit_ConnectiveListing` | Listing connective | `"firstly"` |
|
||||
| `fakeit_ConnectiveCasual` | Casual connective | `"because"` |
|
||||
| `fakeit_ConnectiveExamplify` | Examplify connective | `"for example"` |
|
||||
|
||||
### Text
|
||||
|
||||
| Function | Description | Example |
|
||||
| ---------------------------------------------------------------------------------------- | ----------------------------------------------- | ----------------------------------------------------- |
|
||||
| `fakeit_Sentence(wordCount ...int)` | Random sentence (optional word count) | `{{ fakeit_Sentence }}` or `{{ fakeit_Sentence 10 }}` |
|
||||
| `fakeit_Paragraph` | Random paragraph | `{{ fakeit_Paragraph }}` |
|
||||
| `fakeit_LoremIpsumWord` | Lorem ipsum word | `"lorem"` |
|
||||
| `fakeit_LoremIpsumSentence(wordCount int)` | Lorem ipsum sentence with specified word count | `{{ fakeit_LoremIpsumSentence 5 }}` |
|
||||
| `fakeit_LoremIpsumParagraph(paragraphs int, sentences int, words int, separator string)` | Lorem ipsum paragraphs with specified structure | `{{ fakeit_LoremIpsumParagraph 1 3 5 "\n" }}` |
|
||||
| `fakeit_Question` | Random question | `"What is your name?"` |
|
||||
| `fakeit_Quote` | Random quote | `"Life is what happens..."` |
|
||||
| `fakeit_Phrase` | Random phrase | `"a piece of cake"` |
|
||||
|
||||
### Foods
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------ | -------------- | ---------------------------------------- |
|
||||
| `fakeit_Fruit` | Fruit | `"Peach"` |
|
||||
| `fakeit_Vegetable` | Vegetable | `"Amaranth Leaves"` |
|
||||
| `fakeit_Breakfast` | Breakfast food | `"Blueberry banana happy face pancakes"` |
|
||||
| `fakeit_Lunch` | Lunch food | `"No bake hersheys bar pie"` |
|
||||
| `fakeit_Dinner` | Dinner food | `"Wild addicting dip"` |
|
||||
| `fakeit_Snack` | Snack | `"Trail mix"` |
|
||||
| `fakeit_Dessert` | Dessert | `"French napoleons"` |
|
||||
|
||||
### Misc
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------ | -------------- | -------------- |
|
||||
| `fakeit_Bool` | Random boolean | `true` |
|
||||
| `fakeit_FlipACoin` | Flip a coin | `"Heads"` |
|
||||
|
||||
### Color
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ------------------ | --------------------------------------------------------- |
|
||||
| `fakeit_Color` | Color name | `"MediumOrchid"` |
|
||||
| `fakeit_HexColor` | Hex color | `"#a99fb4"` |
|
||||
| `fakeit_RGBColor` | RGB color | `[85, 224, 195]` |
|
||||
| `fakeit_SafeColor` | Safe color | `"black"` |
|
||||
| `fakeit_NiceColors` | Nice color palette | `["#cfffdd", "#b4dec1", "#5c5863", "#a85163", "#ff1f4c"]` |
|
||||
|
||||
### Image
|
||||
|
||||
| Function | Description | Example |
|
||||
| ----------------------------------------- | ------------------------- | -------------------------------- |
|
||||
| `fakeit_ImageJpeg(width int, height int)` | Generate JPEG image bytes | `{{ fakeit_ImageJpeg 100 100 }}` |
|
||||
| `fakeit_ImagePng(width int, height int)` | Generate PNG image bytes | `{{ fakeit_ImagePng 100 100 }}` |
|
||||
|
||||
### Internet
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------------------------- | ------------------------------------------ | ----------------------------------------------------- |
|
||||
| `fakeit_URL` | Random URL | `"http://www.principalproductize.biz/target"` |
|
||||
| `fakeit_UrlSlug(words int)` | URL slug with specified word count | `{{ fakeit_UrlSlug 3 }}` → `"bathe-regularly-quiver"` |
|
||||
| `fakeit_DomainName` | Domain name | `"centraltarget.biz"` |
|
||||
| `fakeit_DomainSuffix` | Domain suffix | `"org"` |
|
||||
| `fakeit_IPv4Address` | IPv4 address | `"222.83.191.222"` |
|
||||
| `fakeit_IPv6Address` | IPv6 address | `"2001:cafe:8898:ee17:bc35:9064:5866:d019"` |
|
||||
| `fakeit_MacAddress` | MAC address | `"cb:ce:06:94:22:e9"` |
|
||||
| `fakeit_HTTPStatusCode` | HTTP status code | `200` |
|
||||
| `fakeit_HTTPStatusCodeSimple` | Simple status code | `404` |
|
||||
| `fakeit_LogLevel(logType string)` | Log level (types: general, syslog, apache) | `{{ fakeit_LogLevel "general" }}` → `"error"` |
|
||||
| `fakeit_HTTPMethod` | HTTP method | `"HEAD"` |
|
||||
| `fakeit_HTTPVersion` | HTTP version | `"HTTP/1.1"` |
|
||||
| `fakeit_UserAgent` | Random User-Agent | `"Mozilla/5.0..."` |
|
||||
| `fakeit_ChromeUserAgent` | Chrome User-Agent | `"Mozilla/5.0 (X11; Linux i686)..."` |
|
||||
| `fakeit_FirefoxUserAgent` | Firefox User-Agent | `"Mozilla/5.0 (Macintosh; U;..."` |
|
||||
| `fakeit_OperaUserAgent` | Opera User-Agent | `"Opera/8.39..."` |
|
||||
| `fakeit_SafariUserAgent` | Safari User-Agent | `"Mozilla/5.0 (iPad;..."` |
|
||||
| `fakeit_APIUserAgent` | API User-Agent | `"curl/8.2.5"` |
|
||||
|
||||
### HTML
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------ | --------------- | ------------------ |
|
||||
| `fakeit_InputName` | HTML input name | `"email"` |
|
||||
| `fakeit_Svg` | SVG image | `"<svg>...</svg>"` |
|
||||
|
||||
### Date/Time
|
||||
|
||||
| Function | Description | Example |
|
||||
| -------------------------------------------------- | --------------------------------- | ------------------------------------------------------------------------------------ |
|
||||
| `fakeit_Date` | Random date | `2023-06-15 14:30:00` |
|
||||
| `fakeit_PastDate` | Past date | `2022-03-10 09:15:00` |
|
||||
| `fakeit_FutureDate` | Future date | `2025-12-20 18:45:00` |
|
||||
| `fakeit_DateRange(start time.Time, end time.Time)` | Random date between start and end | `{{ fakeit_DateRange (strings_ToDate "2020-01-01") (strings_ToDate "2025-12-31") }}` |
|
||||
| `fakeit_NanoSecond` | Nanosecond | `123456789` |
|
||||
| `fakeit_Second` | Second (0-59) | `45` |
|
||||
| `fakeit_Minute` | Minute (0-59) | `30` |
|
||||
| `fakeit_Hour` | Hour (0-23) | `14` |
|
||||
| `fakeit_Month` | Month (1-12) | `6` |
|
||||
| `fakeit_MonthString` | Month name | `"June"` |
|
||||
| `fakeit_Day` | Day (1-31) | `15` |
|
||||
| `fakeit_WeekDay` | Weekday | `"Monday"` |
|
||||
| `fakeit_Year` | Year | `2024` |
|
||||
| `fakeit_TimeZone` | Timezone | `"America/New_York"` |
|
||||
| `fakeit_TimeZoneAbv` | Timezone abbreviation | `"EST"` |
|
||||
| `fakeit_TimeZoneFull` | Full timezone | `"Eastern Standard Time"` |
|
||||
| `fakeit_TimeZoneOffset` | Timezone offset | `-5` |
|
||||
| `fakeit_TimeZoneRegion` | Timezone region | `"America"` |
|
||||
|
||||
### Payment
|
||||
|
||||
| Function | Description | Example |
|
||||
| ---------------------------------------- | ----------------------------------------------------- | -------------------------------------------------------------- |
|
||||
| `fakeit_Price(min float64, max float64)` | Random price in range | `{{ fakeit_Price 1 100 }}` → `92.26` |
|
||||
| `fakeit_CreditCardCvv` | CVV | `"513"` |
|
||||
| `fakeit_CreditCardExp` | Expiration date | `"01/27"` |
|
||||
| `fakeit_CreditCardNumber(gaps bool)` | Credit card number. `gaps`: add spaces between groups | `{{ fakeit_CreditCardNumber true }}` → `"4111 1111 1111 1111"` |
|
||||
| `fakeit_CreditCardType` | Card type | `"Visa"` |
|
||||
| `fakeit_CurrencyLong` | Currency name | `"United States Dollar"` |
|
||||
| `fakeit_CurrencyShort` | Currency code | `"USD"` |
|
||||
| `fakeit_AchRouting` | ACH routing number | `"513715684"` |
|
||||
| `fakeit_AchAccount` | ACH account number | `"491527954328"` |
|
||||
| `fakeit_BitcoinAddress` | Bitcoin address | `"1BoatSLRHtKNngkdXEeobR76b53LETtpyT"` |
|
||||
| `fakeit_BitcoinPrivateKey` | Bitcoin private key | `"5HueCGU8rMjxEXxiPuD5BDuG6o5xjA7QkbPp"` |
|
||||
| `fakeit_BankName` | Bank name | `"Wells Fargo"` |
|
||||
| `fakeit_BankType` | Bank type | `"Investment Bank"` |
|
||||
|
||||
### Finance
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| -------------- | ---------------- | ---------------- |
|
||||
| `fakeit_Cusip` | CUSIP identifier | `"38259P508"` |
|
||||
| `fakeit_Isin` | ISIN identifier | `"US38259P5089"` |
|
||||
|
||||
### Company
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ---------------------- | -------------- | ------------------------------------------ |
|
||||
| `fakeit_BS` | Business speak | `"front-end"` |
|
||||
| `fakeit_Blurb` | Company blurb | `"word"` |
|
||||
| `fakeit_BuzzWord` | Buzzword | `"disintermediate"` |
|
||||
| `fakeit_Company` | Company name | `"Moen, Pagac and Wuckert"` |
|
||||
| `fakeit_CompanySuffix` | Company suffix | `"Inc"` |
|
||||
| `fakeit_JobDescriptor` | Job descriptor | `"Central"` |
|
||||
| `fakeit_JobLevel` | Job level | `"Assurance"` |
|
||||
| `fakeit_JobTitle` | Job title | `"Director"` |
|
||||
| `fakeit_Slogan` | Company slogan | `"Universal seamless Focus, interactive."` |
|
||||
|
||||
### Hacker
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------------------- | ------------------- | --------------------------------------------------------------------------------------------- |
|
||||
| `fakeit_HackerAbbreviation` | Hacker abbreviation | `"ADP"` |
|
||||
| `fakeit_HackerAdjective` | Hacker adjective | `"wireless"` |
|
||||
| `fakeit_HackeringVerb` | Hackering verb | `"connecting"` |
|
||||
| `fakeit_HackerNoun` | Hacker noun | `"driver"` |
|
||||
| `fakeit_HackerPhrase` | Hacker phrase | `"If we calculate the program, we can get to the AI pixel through the redundant XSS matrix!"` |
|
||||
| `fakeit_HackerVerb` | Hacker verb | `"synthesize"` |
|
||||
|
||||
### Hipster
|
||||
|
||||
| Function | Description | Example |
|
||||
| ------------------------- | ----------------- | ------------------------------------------------------------------- |
|
||||
| `fakeit_HipsterWord` | Hipster word | `"microdosing"` |
|
||||
| `fakeit_HipsterSentence` | Hipster sentence | `"Soul loops with you probably haven't heard of them undertones."` |
|
||||
| `fakeit_HipsterParagraph` | Hipster paragraph | `"Single-origin austin, double why. Tag it Yuccie, keep it any..."` |
|
||||
|
||||
### App
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ----------- | --------------------- |
|
||||
| `fakeit_AppName` | App name | `"Parkrespond"` |
|
||||
| `fakeit_AppVersion` | App version | `"1.12.14"` |
|
||||
| `fakeit_AppAuthor` | App author | `"Qado Energy, Inc."` |
|
||||
|
||||
### Animal
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ----------- | ------------------- |
|
||||
| `fakeit_PetName` | Pet name | `"Ozzy Pawsborne"` |
|
||||
| `fakeit_Animal` | Animal | `"elk"` |
|
||||
| `fakeit_AnimalType` | Animal type | `"amphibians"` |
|
||||
| `fakeit_FarmAnimal` | Farm animal | `"Chicken"` |
|
||||
| `fakeit_Cat` | Cat breed | `"Chausie"` |
|
||||
| `fakeit_Dog` | Dog breed | `"Norwich Terrier"` |
|
||||
| `fakeit_Bird` | Bird | `"goose"` |
|
||||
|
||||
### Emoji
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------------- | ---------------------------------------------- | ------------------------------------------------------ |
|
||||
| `fakeit_Emoji` | Random emoji | `"🤣"` |
|
||||
| `fakeit_EmojiCategory` | Emoji category | `"Smileys & Emotion"` |
|
||||
| `fakeit_EmojiAlias` | Emoji alias | `"smile"` |
|
||||
| `fakeit_EmojiTag` | Emoji tag | `"happy"` |
|
||||
| `fakeit_EmojiFlag` | Flag emoji | `"🇺🇸"` |
|
||||
| `fakeit_EmojiAnimal` | Animal emoji | `"🐱"` |
|
||||
| `fakeit_EmojiFood` | Food emoji | `"🍕"` |
|
||||
| `fakeit_EmojiPlant` | Plant emoji | `"🌸"` |
|
||||
| `fakeit_EmojiMusic` | Music emoji | `"🎵"` |
|
||||
| `fakeit_EmojiVehicle` | Vehicle emoji | `"🚗"` |
|
||||
| `fakeit_EmojiSport` | Sport emoji | `"⚽"` |
|
||||
| `fakeit_EmojiFace` | Face emoji | `"😊"` |
|
||||
| `fakeit_EmojiHand` | Hand emoji | `"👋"` |
|
||||
| `fakeit_EmojiClothing` | Clothing emoji | `"👕"` |
|
||||
| `fakeit_EmojiLandmark` | Landmark emoji | `"🗽"` |
|
||||
| `fakeit_EmojiElectronics` | Electronics emoji | `"📱"` |
|
||||
| `fakeit_EmojiGame` | Game emoji | `"🎮"` |
|
||||
| `fakeit_EmojiTools` | Tools emoji | `"🔧"` |
|
||||
| `fakeit_EmojiWeather` | Weather emoji | `"☀️"` |
|
||||
| `fakeit_EmojiJob` | Job emoji | `"👨💻"` |
|
||||
| `fakeit_EmojiPerson` | Person emoji | `"👤"` |
|
||||
| `fakeit_EmojiGesture` | Gesture emoji | `"🙌"` |
|
||||
| `fakeit_EmojiCostume` | Costume emoji | `"🎃"` |
|
||||
| `fakeit_EmojiSentence` | Emoji sentence with random emojis interspersed | `"Weekends reserve time for 🖼️ Disc 🏨 golf and day."` |
|
||||
|
||||
### Language
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ----------------------------- | --------------------- | -------------- |
|
||||
| `fakeit_Language` | Language | `"English"` |
|
||||
| `fakeit_LanguageAbbreviation` | Language abbreviation | `"en"` |
|
||||
| `fakeit_ProgrammingLanguage` | Programming language | `"Go"` |
|
||||
|
||||
### Number
|
||||
|
||||
| Function | Description | Example |
|
||||
| ----------------------------------------------- | ----------------------------------- | ------------------------------------------ |
|
||||
| `fakeit_Number(min int, max int)` | Random number in range | `{{ fakeit_Number 1 100 }}` → `42` |
|
||||
| `fakeit_Int` | Random int | `{{ fakeit_Int }}` |
|
||||
| `fakeit_IntN(n int)` | Random int from 0 to n | `{{ fakeit_IntN 100 }}` |
|
||||
| `fakeit_Int8` | Random int8 | `{{ fakeit_Int8 }}` |
|
||||
| `fakeit_Int16` | Random int16 | `{{ fakeit_Int16 }}` |
|
||||
| `fakeit_Int32` | Random int32 | `{{ fakeit_Int32 }}` |
|
||||
| `fakeit_Int64` | Random int64 | `{{ fakeit_Int64 }}` |
|
||||
| `fakeit_Uint` | Random uint | `{{ fakeit_Uint }}` |
|
||||
| `fakeit_UintN(n uint)` | Random uint from 0 to n | `{{ fakeit_UintN 100 }}` |
|
||||
| `fakeit_Uint8` | Random uint8 | `{{ fakeit_Uint8 }}` |
|
||||
| `fakeit_Uint16` | Random uint16 | `{{ fakeit_Uint16 }}` |
|
||||
| `fakeit_Uint32` | Random uint32 | `{{ fakeit_Uint32 }}` |
|
||||
| `fakeit_Uint64` | Random uint64 | `{{ fakeit_Uint64 }}` |
|
||||
| `fakeit_Float32` | Random float32 | `{{ fakeit_Float32 }}` |
|
||||
| `fakeit_Float32Range(min float32, max float32)` | Random float32 in range | `{{ fakeit_Float32Range 0 100 }}` |
|
||||
| `fakeit_Float64` | Random float64 | `{{ fakeit_Float64 }}` |
|
||||
| `fakeit_Float64Range(min float64, max float64)` | Random float64 in range | `{{ fakeit_Float64Range 0 100 }}` |
|
||||
| `fakeit_RandomInt(slice []int)` | Random int from slice | `{{ fakeit_RandomInt (slice_Int 1 2 3) }}` |
|
||||
| `fakeit_HexUint(bits int)` | Random hex uint with specified bits | `{{ fakeit_HexUint 8 }}` → `"0xff"` |
|
||||
|
||||
### String
|
||||
|
||||
| Function | Description | Example |
|
||||
| ------------------------------------- | ------------------------------- | --------------------------------------------------------------- |
|
||||
| `fakeit_Digit` | Single random digit | `"0"` |
|
||||
| `fakeit_DigitN(n uint)` | Generate `n` random digits | `{{ fakeit_DigitN 5 }}` → `"71364"` |
|
||||
| `fakeit_Letter` | Single random letter | `"g"` |
|
||||
| `fakeit_LetterN(n uint)` | Generate `n` random letters | `{{ fakeit_LetterN 10 }}` → `"gbRMaRxHki"` |
|
||||
| `fakeit_Lexify(pattern string)` | Replace `?` with random letters | `{{ fakeit_Lexify "?????@??????.com" }}` → `"billy@mister.com"` |
|
||||
| `fakeit_Numerify(pattern string)` | Replace `#` with random digits | `{{ fakeit_Numerify "(###)###-####" }}` → `"(555)867-5309"` |
|
||||
| `fakeit_RandomString(slice []string)` | Random string from slice | `{{ fakeit_RandomString (slice_Str "a" "b" "c") }}` |
|
||||
|
||||
### Celebrity
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| -------------------------- | ------------------ | ------------------ |
|
||||
| `fakeit_CelebrityActor` | Celebrity actor | `"Brad Pitt"` |
|
||||
| `fakeit_CelebrityBusiness` | Celebrity business | `"Elon Musk"` |
|
||||
| `fakeit_CelebritySport` | Celebrity sport | `"Michael Phelps"` |
|
||||
|
||||
### Minecraft
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------------------------- | ----------------- | ---------------- |
|
||||
| `fakeit_MinecraftOre` | Minecraft ore | `"coal"` |
|
||||
| `fakeit_MinecraftWood` | Minecraft wood | `"oak"` |
|
||||
| `fakeit_MinecraftArmorTier` | Armor tier | `"iron"` |
|
||||
| `fakeit_MinecraftArmorPart` | Armor part | `"helmet"` |
|
||||
| `fakeit_MinecraftWeapon` | Minecraft weapon | `"bow"` |
|
||||
| `fakeit_MinecraftTool` | Minecraft tool | `"shovel"` |
|
||||
| `fakeit_MinecraftDye` | Minecraft dye | `"white"` |
|
||||
| `fakeit_MinecraftFood` | Minecraft food | `"apple"` |
|
||||
| `fakeit_MinecraftAnimal` | Minecraft animal | `"chicken"` |
|
||||
| `fakeit_MinecraftVillagerJob` | Villager job | `"farmer"` |
|
||||
| `fakeit_MinecraftVillagerStation` | Villager station | `"furnace"` |
|
||||
| `fakeit_MinecraftVillagerLevel` | Villager level | `"master"` |
|
||||
| `fakeit_MinecraftMobPassive` | Passive mob | `"cow"` |
|
||||
| `fakeit_MinecraftMobNeutral` | Neutral mob | `"bee"` |
|
||||
| `fakeit_MinecraftMobHostile` | Hostile mob | `"spider"` |
|
||||
| `fakeit_MinecraftMobBoss` | Boss mob | `"ender dragon"` |
|
||||
| `fakeit_MinecraftBiome` | Minecraft biome | `"forest"` |
|
||||
| `fakeit_MinecraftWeather` | Minecraft weather | `"rain"` |
|
||||
|
||||
### Book
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ----------- | -------------- |
|
||||
| `fakeit_BookTitle` | Book title | `"Hamlet"` |
|
||||
| `fakeit_BookAuthor` | Book author | `"Mark Twain"` |
|
||||
| `fakeit_BookGenre` | Book genre | `"Adventure"` |
|
||||
|
||||
### Movie
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ----------- | -------------- |
|
||||
| `fakeit_MovieName` | Movie name | `"Inception"` |
|
||||
| `fakeit_MovieGenre` | Movie genre | `"Sci-Fi"` |
|
||||
|
||||
### Error
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------------ | ----------------- | ---------------------------------- |
|
||||
| `fakeit_Error` | Random error | `"connection refused"` |
|
||||
| `fakeit_ErrorDatabase` | Database error | `"database connection failed"` |
|
||||
| `fakeit_ErrorGRPC` | gRPC error | `"rpc error: code = Unavailable"` |
|
||||
| `fakeit_ErrorHTTP` | HTTP error | `"HTTP 500 Internal Server Error"` |
|
||||
| `fakeit_ErrorHTTPClient` | HTTP client error | `"HTTP 404 Not Found"` |
|
||||
| `fakeit_ErrorHTTPServer` | HTTP server error | `"HTTP 503 Service Unavailable"` |
|
||||
| `fakeit_ErrorRuntime` | Runtime error | `"panic: runtime error"` |
|
||||
|
||||
### School
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| --------------- | ----------- | ---------------------- |
|
||||
| `fakeit_School` | School name | `"Harvard University"` |
|
||||
|
||||
### Song
|
||||
|
||||
| Function | Description | Example Output |
|
||||
| ------------------- | ----------- | --------------------- |
|
||||
| `fakeit_SongName` | Song name | `"Bohemian Rhapsody"` |
|
||||
| `fakeit_SongArtist` | Song artist | `"Queen"` |
|
||||
| `fakeit_SongGenre` | Song genre | `"Rock"` |
|
||||
220
e2e/basic_test.go
Normal file
220
e2e/basic_test.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNoArgs(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run()
|
||||
assertExitCode(t, res, 1)
|
||||
// With no args and no env vars, validation should fail on required fields
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestHelp(t *testing.T) {
|
||||
t.Parallel()
|
||||
for _, flag := range []string{"-h", "-help"} {
|
||||
t.Run(flag, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run(flag)
|
||||
assertContains(t, res.Stdout, "Usage:")
|
||||
assertContains(t, res.Stdout, "-url")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestVersion(t *testing.T) {
|
||||
t.Parallel()
|
||||
for _, flag := range []string{"-v", "-version"} {
|
||||
t.Run(flag, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run(flag)
|
||||
assertExitCode(t, res, 0)
|
||||
assertContains(t, res.Stdout, "Version:")
|
||||
assertContains(t, res.Stdout, "Git Commit:")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnexpectedArgs(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "unexpected")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Unexpected CLI arguments")
|
||||
}
|
||||
|
||||
func TestSimpleRequest(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "3", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
|
||||
func TestDryRun(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "5", "-q", "-o", "json", "-z")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
assertResponseCount(t, out, 5)
|
||||
}
|
||||
|
||||
func TestDryRunDoesNotSendRequests(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "5", "-q", "-o", "json", "-z")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if cs.requestCount() != 0 {
|
||||
t.Errorf("dry-run should not send any requests, but server received %d", cs.requestCount())
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuietMode(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if res.Stderr != "" {
|
||||
t.Errorf("expected empty stderr in quiet mode, got: %s", res.Stderr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOutputNone(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "none")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if res.Stdout != "" {
|
||||
t.Errorf("expected empty stdout with -o none, got: %s", res.Stdout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOutputJSON(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
if out.Responses == nil {
|
||||
t.Fatal("responses field is nil")
|
||||
}
|
||||
if out.Total.Min == "" || out.Total.Max == "" || out.Total.Average == "" {
|
||||
t.Errorf("total stats are incomplete: %+v", out.Total)
|
||||
}
|
||||
if out.Total.P90 == "" || out.Total.P95 == "" || out.Total.P99 == "" {
|
||||
t.Errorf("total percentiles are incomplete: %+v", out.Total)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOutputYAML(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "yaml")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "responses:")
|
||||
assertContains(t, res.Stdout, "total:")
|
||||
assertContains(t, res.Stdout, "count:")
|
||||
}
|
||||
|
||||
func TestOutputTable(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "table")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "Response")
|
||||
assertContains(t, res.Stdout, "Count")
|
||||
assertContains(t, res.Stdout, "Min")
|
||||
assertContains(t, res.Stdout, "P99")
|
||||
}
|
||||
|
||||
func TestInvalidOutputFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-o", "invalid")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Output")
|
||||
}
|
||||
|
||||
func TestStatusCodes(t *testing.T) {
|
||||
t.Parallel()
|
||||
codes := []int{200, 201, 204, 301, 400, 404, 500, 502}
|
||||
for _, code := range codes {
|
||||
t.Run(strconv.Itoa(code), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := statusServer(code)
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, strconv.Itoa(code))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConcurrency(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "10", "-c", "5", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 10)
|
||||
}
|
||||
|
||||
func TestDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-d", "1s", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
count, _ := out.Total.Count.Int64()
|
||||
if count < 1 {
|
||||
t.Errorf("expected at least 1 request during 1s duration, got %d", count)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRequestsAndDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
// Both -r and -d set: should stop at whichever comes first
|
||||
res := run("-U", srv.URL, "-r", "3", "-d", "10s", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
401
e2e/config_file_test.go
Normal file
401
e2e/config_file_test.go
Normal file
@@ -0,0 +1,401 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestConfigFileBasic(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
assertResponseCount(t, out, 1)
|
||||
}
|
||||
|
||||
func TestConfigFileWithMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
method: POST
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPost {
|
||||
t.Errorf("expected method POST from config, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileWithHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-Config: config-value
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Config"]; len(v) == 0 || v[0] != "config-value" {
|
||||
t.Errorf("expected X-Config: config-value, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileWithParams(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
params:
|
||||
- key1: value1
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["key1"]; len(v) == 0 || v[0] != "value1" {
|
||||
t.Errorf("expected key1=value1, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileWithCookies(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
cookies:
|
||||
- session: abc123
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["session"]; !ok || v != "abc123" {
|
||||
t.Errorf("expected cookie session=abc123, got %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileWithBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
method: POST
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
body: "hello from config"
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "hello from config" {
|
||||
t.Errorf("expected body 'hello from config', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileCLIOverridesScalars(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "http://should-be-overridden.invalid"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
// CLI -U should override the config file URL (scalar override)
|
||||
res := run("-f", configPath, "-U", cs.URL)
|
||||
assertExitCode(t, res, 0)
|
||||
assertResponseCount(t, res.jsonOutput(t), 1)
|
||||
|
||||
// Verify it actually hit our server
|
||||
if cs.requestCount() != 1 {
|
||||
t.Errorf("expected 1 request to capture server, got %d", cs.requestCount())
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileCLIOverridesMethods(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
method: GET
|
||||
requests: 4
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
// CLI -M POST overrides config file's method: GET
|
||||
res := run("-f", configPath, "-M", "POST")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
for _, r := range cs.allRequests() {
|
||||
if r.Method != http.MethodPost {
|
||||
t.Errorf("expected all requests to be POST (CLI overrides config), got %s", r.Method)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileInvalidYAML(t *testing.T) {
|
||||
t.Parallel()
|
||||
configPath := writeTemp(t, "bad.yaml", `{{{not valid yaml`)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestConfigFileNotFound(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-f", "/nonexistent/path/config.yaml")
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestConfigFileWithDryRun(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
config := `
|
||||
url: "http://example.com"
|
||||
requests: 3
|
||||
quiet: true
|
||||
output: json
|
||||
dryRun: true
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
|
||||
func TestConfigFileWithConcurrency(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 6
|
||||
concurrency: 3
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 6)
|
||||
}
|
||||
|
||||
func TestConfigFileNestedIncludes(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Create inner config
|
||||
innerConfig := `
|
||||
headers:
|
||||
- X-Inner: from-inner
|
||||
`
|
||||
innerPath := writeTemp(t, "inner.yaml", innerConfig)
|
||||
|
||||
// Create outer config that includes inner
|
||||
outerConfig := `
|
||||
configFile: "` + innerPath + `"
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
outerPath := writeTemp(t, "outer.yaml", outerConfig)
|
||||
|
||||
res := run("-f", outerPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Inner"]; len(v) == 0 || v[0] != "from-inner" {
|
||||
t.Errorf("expected X-Inner: from-inner from nested config, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileFromHTTPURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-Remote-Config: yes
|
||||
`
|
||||
// Serve config via HTTP
|
||||
configServer := statusServerWithBody(config)
|
||||
defer configServer.Close()
|
||||
|
||||
res := run("-f", configServer.URL)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Remote-Config"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Remote-Config: yes from HTTP config, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileMultiValueHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-Multi:
|
||||
- val1
|
||||
- val2
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
// With multiple values, sarin cycles through them (random start).
|
||||
// With -r 1, we should see exactly one of them.
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
v, ok := req.Headers["X-Multi"]
|
||||
if !ok || len(v) == 0 {
|
||||
t.Fatalf("expected X-Multi header, got headers: %v", req.Headers)
|
||||
}
|
||||
if v[0] != "val1" && v[0] != "val2" {
|
||||
t.Errorf("expected X-Multi to be val1 or val2, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileWithTimeout(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
timeout: 5s
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
assertResponseCount(t, res.jsonOutput(t), 1)
|
||||
}
|
||||
|
||||
func TestConfigFileWithInsecure(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
config := `
|
||||
url: "http://example.com"
|
||||
requests: 1
|
||||
insecure: true
|
||||
quiet: true
|
||||
output: json
|
||||
dryRun: true
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
}
|
||||
|
||||
func TestConfigFileWithLuaScript(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
scriptContent := `function transform(req) req.headers["X-Config-Lua"] = {"yes"} return req end`
|
||||
scriptPath := writeTemp(t, "script.lua", scriptContent)
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
lua: "@` + scriptPath + `"
|
||||
`
|
||||
configPath := writeTemp(t, "config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Config-Lua"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Config-Lua: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
282
e2e/config_merge_test.go
Normal file
282
e2e/config_merge_test.go
Normal file
@@ -0,0 +1,282 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// --- Multiple config files ---
|
||||
|
||||
func TestMultipleConfigFiles(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config1 := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-From-File1: yes
|
||||
`
|
||||
config2 := `
|
||||
headers:
|
||||
- X-From-File2: yes
|
||||
`
|
||||
path1 := writeTemp(t, "merge1.yaml", config1)
|
||||
path2 := writeTemp(t, "merge2.yaml", config2)
|
||||
|
||||
res := run("-f", path1, "-f", path2)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-From-File1"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-File1: yes, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-From-File2"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-File2: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleConfigFilesScalarOverride(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Second config file overrides URL from first
|
||||
config1 := `
|
||||
url: "http://should-be-overridden.invalid"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
config2 := `
|
||||
url: "` + cs.URL + `"
|
||||
`
|
||||
path1 := writeTemp(t, "merge_scalar1.yaml", config1)
|
||||
path2 := writeTemp(t, "merge_scalar2.yaml", config2)
|
||||
|
||||
res := run("-f", path1, "-f", path2)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if cs.requestCount() != 1 {
|
||||
t.Errorf("expected request to go to second config's URL, got %d requests", cs.requestCount())
|
||||
}
|
||||
}
|
||||
|
||||
// --- Three-way merge: env + config file + CLI ---
|
||||
|
||||
func TestThreeWayMergePriority(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
method: PUT
|
||||
headers:
|
||||
- X-From-Config: config-value
|
||||
`
|
||||
configPath := writeTemp(t, "three_way.yaml", config)
|
||||
|
||||
// ENV sets URL and header, config file sets method and header, CLI overrides URL
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_HEADER": "X-From-Env: env-value",
|
||||
}, "-U", cs.URL, "-r", "1", "-q", "-o", "json", "-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
// Method should be PUT from config (not default GET)
|
||||
if req.Method != http.MethodPut {
|
||||
t.Errorf("expected method PUT from config, got %s", req.Method)
|
||||
}
|
||||
// Header from config file should be present
|
||||
if v := req.Headers["X-From-Config"]; len(v) == 0 || v[0] != "config-value" {
|
||||
t.Errorf("expected X-From-Config from config file, got %v", v)
|
||||
}
|
||||
// Header from env should be present
|
||||
if v := req.Headers["X-From-Env"]; len(v) == 0 || v[0] != "env-value" {
|
||||
t.Errorf("expected X-From-Env from env, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
// --- Config file nesting depth ---
|
||||
|
||||
func TestConfigFileNestedMaxDepth(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Create a chain of 12 config files (exceeds max depth of 10)
|
||||
// The innermost file has the actual URL config
|
||||
// When depth is exceeded, inner files are silently ignored
|
||||
|
||||
files := make([]string, 12)
|
||||
|
||||
// Innermost file (index 11) - has the real config
|
||||
files[11] = writeTemp(t, "depth11.yaml", `
|
||||
url: "`+cs.URL+`"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-Depth: deep
|
||||
`)
|
||||
|
||||
// Chain each file to include the next one
|
||||
for i := 10; i >= 0; i-- {
|
||||
content := `configFile: "` + files[i+1] + `"`
|
||||
files[i] = writeTemp(t, "depth"+string(rune('0'+i))+".yaml", content)
|
||||
}
|
||||
|
||||
// The outermost file: this will recurse but max depth will prevent
|
||||
// reaching the innermost file with the URL
|
||||
res := run("-f", files[0], "-q")
|
||||
// This should fail because URL is never reached (too deep)
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
// --- YAML format flexibility ---
|
||||
|
||||
func TestConfigFileParamsMapFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
params:
|
||||
key1: value1
|
||||
key2: value2
|
||||
`
|
||||
configPath := writeTemp(t, "params_map.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["key1"]; len(v) == 0 || v[0] != "value1" {
|
||||
t.Errorf("expected key1=value1, got %v", v)
|
||||
}
|
||||
if v := req.Query["key2"]; len(v) == 0 || v[0] != "value2" {
|
||||
t.Errorf("expected key2=value2, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileHeadersMapFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
X-Map-A: map-val-a
|
||||
X-Map-B: map-val-b
|
||||
`
|
||||
configPath := writeTemp(t, "headers_map.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Map-A"]; len(v) == 0 || v[0] != "map-val-a" {
|
||||
t.Errorf("expected X-Map-A: map-val-a, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-Map-B"]; len(v) == 0 || v[0] != "map-val-b" {
|
||||
t.Errorf("expected X-Map-B: map-val-b, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileCookiesMapFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
cookies:
|
||||
sess: abc
|
||||
token: xyz
|
||||
`
|
||||
configPath := writeTemp(t, "cookies_map.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["sess"]; !ok || v != "abc" {
|
||||
t.Errorf("expected cookie sess=abc, got %v", req.Cookies)
|
||||
}
|
||||
if v, ok := req.Cookies["token"]; !ok || v != "xyz" {
|
||||
t.Errorf("expected cookie token=xyz, got %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileMultipleBodies(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 10
|
||||
concurrency: 1
|
||||
method: POST
|
||||
quiet: true
|
||||
output: json
|
||||
body:
|
||||
- "body-one"
|
||||
- "body-two"
|
||||
`
|
||||
configPath := writeTemp(t, "multi_body.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
bodies := map[string]bool{}
|
||||
for _, req := range cs.allRequests() {
|
||||
bodies[req.Body] = true
|
||||
}
|
||||
if !bodies["body-one"] || !bodies["body-two"] {
|
||||
t.Errorf("expected both body-one and body-two to appear, got %v", bodies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFileMultipleMethods(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 10
|
||||
concurrency: 1
|
||||
quiet: true
|
||||
output: json
|
||||
method:
|
||||
- GET
|
||||
- POST
|
||||
`
|
||||
configPath := writeTemp(t, "multi_method.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
methods := map[string]bool{}
|
||||
for _, req := range cs.allRequests() {
|
||||
methods[req.Method] = true
|
||||
}
|
||||
if !methods["GET"] || !methods["POST"] {
|
||||
t.Errorf("expected both GET and POST, got %v", methods)
|
||||
}
|
||||
}
|
||||
37
e2e/config_nested_http_test.go
Normal file
37
e2e/config_nested_http_test.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestConfigFileNestedHTTPInclude(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Inner config served via HTTP
|
||||
innerConfig := `
|
||||
headers:
|
||||
- X-From-HTTP-Nested: yes
|
||||
`
|
||||
innerServer := statusServerWithBody(innerConfig)
|
||||
defer innerServer.Close()
|
||||
|
||||
// Outer config references the inner config via HTTP URL
|
||||
outerConfig := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
configFile: "` + innerServer.URL + `"
|
||||
`
|
||||
outerPath := writeTemp(t, "outer_http.yaml", outerConfig)
|
||||
|
||||
res := run("-f", outerPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-From-Http-Nested"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-Http-Nested: yes from nested HTTP config, got %v", v)
|
||||
}
|
||||
}
|
||||
117
e2e/coverage_gaps_test.go
Normal file
117
e2e/coverage_gaps_test.go
Normal file
@@ -0,0 +1,117 @@
|
||||
package e2e
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestValidation_InvalidTemplateInMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-M", "{{ invalid_func }}")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Method[0]")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInParamKey(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-P", "{{ invalid_func }}=value")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Param[0].Key")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInCookieValue(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-C", "session={{ invalid_func }}")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Cookie[0].Value[0]")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInURLPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com/{{ invalid_func }}", "-r", "1")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "URL.Path")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInValues(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-V", "A={{ invalid_func }}")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Values[0]")
|
||||
}
|
||||
|
||||
func TestValidation_ScriptURLWithoutHost(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-lua", "@http://")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "host")
|
||||
}
|
||||
|
||||
func TestEnvInvalidURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "://bad-url",
|
||||
"SARIN_REQUESTS": "1",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "SARIN_URL")
|
||||
}
|
||||
|
||||
func TestEnvInvalidProxy(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_PROXY": "://bad-proxy",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "SARIN_PROXY")
|
||||
}
|
||||
|
||||
func TestConfigFileInvalidURLParse(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configPath := writeTemp(t, "invalid_url.yaml", `
|
||||
url: "://bad-url"
|
||||
requests: 1
|
||||
`)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Field 'url'")
|
||||
}
|
||||
|
||||
func TestConfigFileInvalidProxyParse(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configPath := writeTemp(t, "invalid_proxy.yaml", `
|
||||
url: "http://example.com"
|
||||
requests: 1
|
||||
proxy: "://bad-proxy"
|
||||
`)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "proxy[0]")
|
||||
}
|
||||
|
||||
func TestConfigFileInvalidHeadersType(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configPath := writeTemp(t, "invalid_headers_type.yaml", `
|
||||
url: "http://example.com"
|
||||
requests: 1
|
||||
headers:
|
||||
- X-Test: value
|
||||
- 42
|
||||
`)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Failed to parse config file")
|
||||
}
|
||||
316
e2e/e2e_test.go
Normal file
316
e2e/e2e_test.go
Normal file
@@ -0,0 +1,316 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
var binaryPath string
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
// Build the binary once before all tests.
|
||||
tmpDir, err := os.MkdirTemp("", "sarin-e2e-*")
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to create temp dir: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
binaryPath = filepath.Join(tmpDir, "sarin")
|
||||
if runtime.GOOS == "windows" {
|
||||
binaryPath += ".exe"
|
||||
}
|
||||
|
||||
cmd := exec.Command("go", "build", "-o", binaryPath, "../cmd/cli/main.go")
|
||||
cmd.Stderr = os.Stderr
|
||||
if err := cmd.Run(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to build binary: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
code := m.Run()
|
||||
os.RemoveAll(tmpDir)
|
||||
os.Exit(code)
|
||||
}
|
||||
|
||||
// --- Result type ---
|
||||
|
||||
// runResult holds the output of a sarin binary execution.
|
||||
type runResult struct {
|
||||
Stdout string
|
||||
Stderr string
|
||||
ExitCode int
|
||||
}
|
||||
|
||||
// jsonOutput parses the stdout as JSON output from sarin.
|
||||
// Fails the test if parsing fails.
|
||||
func (r runResult) jsonOutput(t *testing.T) outputData {
|
||||
t.Helper()
|
||||
var out outputData
|
||||
if err := json.Unmarshal([]byte(r.Stdout), &out); err != nil {
|
||||
t.Fatalf("failed to parse JSON output: %v\nstdout: %s", err, r.Stdout)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// --- JSON output structures ---
|
||||
|
||||
type responseStat struct {
|
||||
Count json.Number `json:"count"`
|
||||
Min string `json:"min"`
|
||||
Max string `json:"max"`
|
||||
Average string `json:"average"`
|
||||
P90 string `json:"p90"`
|
||||
P95 string `json:"p95"`
|
||||
P99 string `json:"p99"`
|
||||
}
|
||||
|
||||
type outputData struct {
|
||||
Responses map[string]responseStat `json:"responses"`
|
||||
Total responseStat `json:"total"`
|
||||
}
|
||||
|
||||
// --- echoResponse is the JSON structure returned by echoServer ---
|
||||
|
||||
type echoResponse struct {
|
||||
Method string `json:"method"`
|
||||
Path string `json:"path"`
|
||||
Query map[string][]string `json:"query"`
|
||||
Headers map[string][]string `json:"headers"`
|
||||
Cookies map[string]string `json:"cookies"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
// --- Helpers ---
|
||||
|
||||
// run executes the sarin binary with the given args and returns the result.
|
||||
func run(args ...string) runResult {
|
||||
cmd := exec.Command(binaryPath, args...)
|
||||
var stdout, stderr strings.Builder
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
|
||||
err := cmd.Run()
|
||||
exitCode := 0
|
||||
if err != nil {
|
||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
||||
exitCode = exitErr.ExitCode()
|
||||
} else {
|
||||
exitCode = -1
|
||||
}
|
||||
}
|
||||
|
||||
return runResult{
|
||||
Stdout: stdout.String(),
|
||||
Stderr: stderr.String(),
|
||||
ExitCode: exitCode,
|
||||
}
|
||||
}
|
||||
|
||||
// runWithEnv executes the sarin binary with the given args and environment variables.
|
||||
func runWithEnv(env map[string]string, args ...string) runResult {
|
||||
cmd := exec.Command(binaryPath, args...)
|
||||
var stdout, stderr strings.Builder
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
|
||||
// Start with a clean env, then add the requested vars
|
||||
cmd.Env = os.Environ()
|
||||
for k, v := range env {
|
||||
cmd.Env = append(cmd.Env, k+"="+v)
|
||||
}
|
||||
|
||||
err := cmd.Run()
|
||||
exitCode := 0
|
||||
if err != nil {
|
||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
||||
exitCode = exitErr.ExitCode()
|
||||
} else {
|
||||
exitCode = -1
|
||||
}
|
||||
}
|
||||
|
||||
return runResult{
|
||||
Stdout: stdout.String(),
|
||||
Stderr: stderr.String(),
|
||||
ExitCode: exitCode,
|
||||
}
|
||||
}
|
||||
|
||||
// startProcess starts the sarin binary and returns the exec.Cmd without waiting.
|
||||
// The caller is responsible for managing the process lifecycle.
|
||||
func startProcess(args ...string) (*exec.Cmd, *strings.Builder) {
|
||||
cmd := exec.Command(binaryPath, args...)
|
||||
var stdout strings.Builder
|
||||
cmd.Stdout = &stdout
|
||||
return cmd, &stdout
|
||||
}
|
||||
|
||||
// slowServer returns a server that delays each response by the given duration.
|
||||
func slowServer(delay time.Duration) *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
time.Sleep(delay)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
}
|
||||
|
||||
// echoServer starts an HTTP test server that echoes request details back as JSON.
|
||||
// The response includes method, path, headers, query params, cookies, and body.
|
||||
func echoServer() *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
body, _ := io.ReadAll(r.Body)
|
||||
|
||||
cookies := make(map[string]string)
|
||||
for _, c := range r.Cookies() {
|
||||
cookies[c.Name] = c.Value
|
||||
}
|
||||
|
||||
resp := echoResponse{
|
||||
Method: r.Method,
|
||||
Path: r.URL.Path,
|
||||
Query: r.URL.Query(),
|
||||
Headers: r.Header,
|
||||
Cookies: cookies,
|
||||
Body: string(body),
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(resp)
|
||||
}))
|
||||
}
|
||||
|
||||
// captureServer records every request it receives and responds with 200.
|
||||
// Use lastRequest() to inspect the most recent request.
|
||||
type captureServer struct {
|
||||
*httptest.Server
|
||||
|
||||
mu sync.Mutex
|
||||
requests []echoResponse
|
||||
}
|
||||
|
||||
func newCaptureServer() *captureServer {
|
||||
cs := &captureServer{}
|
||||
cs.Server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
body, _ := io.ReadAll(r.Body)
|
||||
|
||||
cookies := make(map[string]string)
|
||||
for _, c := range r.Cookies() {
|
||||
cookies[c.Name] = c.Value
|
||||
}
|
||||
|
||||
cs.mu.Lock()
|
||||
cs.requests = append(cs.requests, echoResponse{
|
||||
Method: r.Method,
|
||||
Path: r.URL.Path,
|
||||
Query: r.URL.Query(),
|
||||
Headers: r.Header,
|
||||
Cookies: cookies,
|
||||
Body: string(body),
|
||||
})
|
||||
cs.mu.Unlock()
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
return cs
|
||||
}
|
||||
|
||||
func (cs *captureServer) lastRequest() echoResponse {
|
||||
cs.mu.Lock()
|
||||
defer cs.mu.Unlock()
|
||||
if len(cs.requests) == 0 {
|
||||
return echoResponse{}
|
||||
}
|
||||
return cs.requests[len(cs.requests)-1]
|
||||
}
|
||||
|
||||
func (cs *captureServer) allRequests() []echoResponse {
|
||||
cs.mu.Lock()
|
||||
defer cs.mu.Unlock()
|
||||
copied := make([]echoResponse, len(cs.requests))
|
||||
copy(copied, cs.requests)
|
||||
return copied
|
||||
}
|
||||
|
||||
func (cs *captureServer) requestCount() int {
|
||||
cs.mu.Lock()
|
||||
defer cs.mu.Unlock()
|
||||
return len(cs.requests)
|
||||
}
|
||||
|
||||
// statusServer returns a server that always responds with the given status code.
|
||||
func statusServer(code int) *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(code)
|
||||
}))
|
||||
}
|
||||
|
||||
// statusServerWithBody returns a server that responds with 200 and the given body.
|
||||
func statusServerWithBody(body string) *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte(body))
|
||||
}))
|
||||
}
|
||||
|
||||
// writeTemp creates a temporary file with the given content and returns its path.
|
||||
// The file is automatically cleaned up when the test finishes.
|
||||
func writeTemp(t *testing.T, name, content string) string {
|
||||
t.Helper()
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, name)
|
||||
if err := os.WriteFile(path, []byte(content), 0644); err != nil {
|
||||
t.Fatalf("failed to write temp file: %v", err)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
// --- Assertion helpers ---
|
||||
|
||||
func assertExitCode(t *testing.T, res runResult, want int) {
|
||||
t.Helper()
|
||||
if res.ExitCode != want {
|
||||
t.Errorf("expected exit code %d, got %d\nstdout: %s\nstderr: %s", want, res.ExitCode, res.Stdout, res.Stderr)
|
||||
}
|
||||
}
|
||||
|
||||
func assertContains(t *testing.T, s, substr string) {
|
||||
t.Helper()
|
||||
if !strings.Contains(s, substr) {
|
||||
t.Errorf("expected output to contain %q, got:\n%s", substr, s)
|
||||
}
|
||||
}
|
||||
|
||||
func assertResponseCount(t *testing.T, out outputData, wantTotal int) {
|
||||
t.Helper()
|
||||
got, err := out.Total.Count.Int64()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse total count: %v", err)
|
||||
}
|
||||
if got != int64(wantTotal) {
|
||||
t.Errorf("expected total count %d, got %d", wantTotal, got)
|
||||
}
|
||||
}
|
||||
|
||||
func assertHasResponseKey(t *testing.T, out outputData, key string) {
|
||||
t.Helper()
|
||||
if _, ok := out.Responses[key]; !ok {
|
||||
t.Errorf("expected %q in responses, got keys: %v", key, responseKeys(out))
|
||||
}
|
||||
}
|
||||
|
||||
func responseKeys(out outputData) []string {
|
||||
keys := make([]string, 0, len(out.Responses))
|
||||
for k := range out.Responses {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
87
e2e/env_errors_test.go
Normal file
87
e2e/env_errors_test.go
Normal file
@@ -0,0 +1,87 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEnvInvalidConcurrency(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_CONCURRENCY": "not-a-number",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value for unsigned integer")
|
||||
}
|
||||
|
||||
func TestEnvInvalidRequests(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "abc",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value for unsigned integer")
|
||||
}
|
||||
|
||||
func TestEnvInvalidDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_DURATION": "not-a-duration",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value duration")
|
||||
}
|
||||
|
||||
func TestEnvInvalidTimeout(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_TIMEOUT": "xyz",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value duration")
|
||||
}
|
||||
|
||||
func TestEnvInvalidInsecure(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_INSECURE": "maybe",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value for boolean")
|
||||
}
|
||||
|
||||
func TestEnvInvalidDryRun(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_DRY_RUN": "yes",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value for boolean")
|
||||
}
|
||||
|
||||
func TestEnvInvalidShowConfig(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_SHOW_CONFIG": "nope",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "invalid value for boolean")
|
||||
}
|
||||
348
e2e/env_test.go
Normal file
348
e2e/env_test.go
Normal file
@@ -0,0 +1,348 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEnvURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
assertResponseCount(t, out, 1)
|
||||
}
|
||||
|
||||
func TestEnvMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_METHOD": "POST",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPost {
|
||||
t.Errorf("expected method POST from env, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvConcurrency(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "6",
|
||||
"SARIN_CONCURRENCY": "3",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 6)
|
||||
}
|
||||
|
||||
func TestEnvDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_DURATION": "1s",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
count, _ := out.Total.Count.Int64()
|
||||
if count < 1 {
|
||||
t.Errorf("expected at least 1 request during 1s, got %d", count)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvTimeout(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_TIMEOUT": "5s",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
assertResponseCount(t, res.jsonOutput(t), 1)
|
||||
}
|
||||
|
||||
func TestEnvHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_HEADER": "X-From-Env: env-value",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-From-Env"]; len(v) == 0 || v[0] != "env-value" {
|
||||
t.Errorf("expected X-From-Env: env-value, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvParam(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_PARAM": "env_key=env_val",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["env_key"]; len(v) == 0 || v[0] != "env_val" {
|
||||
t.Errorf("expected env_key=env_val, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvCookie(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_COOKIE": "env_session=env_abc",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["env_session"]; !ok || v != "env_abc" {
|
||||
t.Errorf("expected cookie env_session=env_abc, got %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_METHOD": "POST",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_BODY": "env-body-content",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "env-body-content" {
|
||||
t.Errorf("expected body 'env-body-content', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvDryRun(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "3",
|
||||
"SARIN_DRY_RUN": "true",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
|
||||
func TestEnvInsecure(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_INSECURE": "true",
|
||||
"SARIN_DRY_RUN": "true",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
}
|
||||
|
||||
func TestEnvOutputNone(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "none",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if res.Stdout != "" {
|
||||
t.Errorf("expected empty stdout with output=none, got: %s", res.Stdout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvConfigFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-From-Env-Config: yes
|
||||
`
|
||||
configPath := writeTemp(t, "env_config.yaml", config)
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_CONFIG_FILE": configPath,
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-From-Env-Config"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-Env-Config: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvCLIOverridesEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// CLI should take priority over env vars
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://should-be-overridden.invalid",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
}, "-U", cs.URL)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
if cs.requestCount() != 1 {
|
||||
t.Errorf("expected CLI URL to override env, but server got %d requests", cs.requestCount())
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvInvalidBool(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "not-a-bool",
|
||||
})
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestEnvLuaScript(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.headers["X-Env-Lua"] = {"yes"} return req end`
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
"SARIN_LUA": script,
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Env-Lua"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Env-Lua: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvJsScript(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.headers["X-Env-Js"] = ["yes"]; return req; }`
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
"SARIN_JS": script,
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Env-Js"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Env-Js: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnvValues(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": cs.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
"SARIN_VALUES": "MY_KEY=my_val",
|
||||
}, "-H", "X-Val: {{ .Values.MY_KEY }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Val"]; len(v) == 0 || v[0] != "my_val" {
|
||||
t.Errorf("expected X-Val: my_val, got %v", v)
|
||||
}
|
||||
}
|
||||
149
e2e/formdata_test.go
Normal file
149
e2e/formdata_test.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestBodyFormDataSimple(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "name" "John" "age" "30" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
// Body should contain multipart form data
|
||||
assertContains(t, req.Body, "name")
|
||||
assertContains(t, req.Body, "John")
|
||||
assertContains(t, req.Body, "age")
|
||||
assertContains(t, req.Body, "30")
|
||||
|
||||
// Content-Type should be multipart/form-data
|
||||
ct := req.Headers["Content-Type"]
|
||||
if len(ct) == 0 {
|
||||
t.Fatal("expected Content-Type header for form data")
|
||||
}
|
||||
assertContains(t, ct[0], "multipart/form-data")
|
||||
}
|
||||
|
||||
func TestBodyFormDataWithFileUpload(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Create a temp file to upload
|
||||
filePath := writeTemp(t, "upload.txt", "file content here")
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "description" "test file" "document" "@`+filePath+`" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
assertContains(t, req.Body, "description")
|
||||
assertContains(t, req.Body, "test file")
|
||||
assertContains(t, req.Body, "file content here")
|
||||
assertContains(t, req.Body, "upload.txt")
|
||||
}
|
||||
|
||||
func TestBodyFormDataWithRemoteFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Serve a file via HTTP
|
||||
fileServer := statusServerWithBody("remote file content")
|
||||
defer fileServer.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "file" "@`+fileServer.URL+`" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
assertContains(t, req.Body, "remote file content")
|
||||
}
|
||||
|
||||
func TestBodyFormDataEscapedAt(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// @@ should send literal @ prefixed value
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "email" "@@user@example.com" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
assertContains(t, req.Body, "@user@example.com")
|
||||
}
|
||||
|
||||
func TestBodyFormDataOddArgsError(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Odd number of args should cause an error
|
||||
res := run("-U", "http://example.com", "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "key_only" }}`)
|
||||
// This should either fail at validation or produce an error in output
|
||||
// The template is valid syntax but body_FormData returns an error at runtime
|
||||
if res.ExitCode == 0 {
|
||||
out := res.jsonOutput(t)
|
||||
// If it didn't exit 1, the error should show up as a response key
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected error for odd form data args, but got 200")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileBase64(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
content := "hello base64 world"
|
||||
filePath := writeTemp(t, "base64test.txt", content)
|
||||
expected := base64.StdEncoding.EncodeToString([]byte(content))
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ file_Base64 "`+filePath+`" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != expected {
|
||||
t.Errorf("expected base64 %q, got %q", expected, req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileBase64RemoteFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
content := "remote base64 content"
|
||||
fileServer := statusServerWithBody(content)
|
||||
defer fileServer.Close()
|
||||
|
||||
expected := base64.StdEncoding.EncodeToString([]byte(content))
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ file_Base64 "`+fileServer.URL+`" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != expected {
|
||||
t.Errorf("expected base64 %q, got %q", expected, req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBodyFormDataMultipleRequests(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "3", "-c", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ body_FormData "id" "{{ fakeit_UUID }}" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertResponseCount(t, res.jsonOutput(t), 3)
|
||||
}
|
||||
226
e2e/multi_value_test.go
Normal file
226
e2e/multi_value_test.go
Normal file
@@ -0,0 +1,226 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// --- CLI: multiple same-key values are all sent in every request ---
|
||||
|
||||
func TestMultipleHeadersSameKeyCLI(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", "X-Multi: value1", "-H", "X-Multi: value2")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals := req.Headers["X-Multi"]
|
||||
if len(vals) < 2 {
|
||||
t.Fatalf("expected 2 values for X-Multi, got %v", vals)
|
||||
}
|
||||
found := map[string]bool{}
|
||||
for _, v := range vals {
|
||||
found[v] = true
|
||||
}
|
||||
if !found["value1"] || !found["value2"] {
|
||||
t.Errorf("expected both value1 and value2, got %v", vals)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleParamsSameKeyCLI(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-P", "color=red", "-P", "color=blue")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals := req.Query["color"]
|
||||
if len(vals) < 2 {
|
||||
t.Fatalf("expected 2 values for color param, got %v", vals)
|
||||
}
|
||||
found := map[string]bool{}
|
||||
for _, v := range vals {
|
||||
found[v] = true
|
||||
}
|
||||
if !found["red"] || !found["blue"] {
|
||||
t.Errorf("expected both red and blue, got %v", vals)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleCookiesSameKeyCLI(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-C", "token=abc", "-C", "token=def")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
cookieHeader := ""
|
||||
if v := req.Headers["Cookie"]; len(v) > 0 {
|
||||
cookieHeader = v[0]
|
||||
}
|
||||
assertContains(t, cookieHeader, "token=abc")
|
||||
assertContains(t, cookieHeader, "token=def")
|
||||
}
|
||||
|
||||
// --- Config file: multiple values for same key cycle across requests ---
|
||||
|
||||
func TestMultipleHeadersSameKeyYAMLCycle(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 20
|
||||
concurrency: 1
|
||||
quiet: true
|
||||
output: json
|
||||
headers:
|
||||
- X-Multi: [val-a, val-b]
|
||||
`
|
||||
configPath := writeTemp(t, "multi_header.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
seen := map[string]bool{}
|
||||
for _, req := range cs.allRequests() {
|
||||
if vals := req.Headers["X-Multi"]; len(vals) > 0 {
|
||||
seen[vals[0]] = true
|
||||
}
|
||||
}
|
||||
if !seen["val-a"] {
|
||||
t.Error("expected val-a to appear in some requests")
|
||||
}
|
||||
if !seen["val-b"] {
|
||||
t.Error("expected val-b to appear in some requests")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleParamsSameKeyYAMLCycle(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
config := `
|
||||
url: "` + cs.URL + `"
|
||||
requests: 20
|
||||
concurrency: 1
|
||||
quiet: true
|
||||
output: json
|
||||
params:
|
||||
- tag: [go, rust]
|
||||
`
|
||||
configPath := writeTemp(t, "multi_param.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
seen := map[string]bool{}
|
||||
for _, req := range cs.allRequests() {
|
||||
if vals := req.Query["tag"]; len(vals) > 0 {
|
||||
seen[vals[0]] = true
|
||||
}
|
||||
}
|
||||
if !seen["go"] {
|
||||
t.Error("expected 'go' to appear in some requests")
|
||||
}
|
||||
if !seen["rust"] {
|
||||
t.Error("expected 'rust' to appear in some requests")
|
||||
}
|
||||
}
|
||||
|
||||
// --- Multiple bodies cycle ---
|
||||
|
||||
func TestMultipleBodiesCycle(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "10", "-c", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", "body-alpha", "-B", "body-beta")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
bodies := map[string]bool{}
|
||||
for _, req := range cs.allRequests() {
|
||||
bodies[req.Body] = true
|
||||
}
|
||||
if !bodies["body-alpha"] {
|
||||
t.Error("expected body-alpha to appear in requests")
|
||||
}
|
||||
if !bodies["body-beta"] {
|
||||
t.Error("expected body-beta to appear in requests")
|
||||
}
|
||||
}
|
||||
|
||||
// --- Multiple methods cycling ---
|
||||
|
||||
func TestMultipleMethodsCycleDistribution(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "20", "-c", "1", "-q", "-o", "json",
|
||||
"-M", "GET", "-M", "POST", "-M", "PUT")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
methods := map[string]int{}
|
||||
for _, req := range cs.allRequests() {
|
||||
methods[req.Method]++
|
||||
}
|
||||
if methods["GET"] == 0 {
|
||||
t.Error("expected GET to appear")
|
||||
}
|
||||
if methods["POST"] == 0 {
|
||||
t.Error("expected POST to appear")
|
||||
}
|
||||
if methods["PUT"] == 0 {
|
||||
t.Error("expected PUT to appear")
|
||||
}
|
||||
}
|
||||
|
||||
// --- Template in method ---
|
||||
|
||||
func TestTemplateInMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-M", `{{ strings_ToUpper "post" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPost {
|
||||
t.Errorf("expected method POST from template, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
// --- Template in cookie value ---
|
||||
|
||||
func TestTemplateInCookie(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-C", `session={{ fakeit_UUID }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Cookies["session"] == "" {
|
||||
t.Error("expected session cookie with UUID value, got empty")
|
||||
}
|
||||
if len(req.Cookies["session"]) < 10 {
|
||||
t.Errorf("expected UUID-like session cookie, got %q", req.Cookies["session"])
|
||||
}
|
||||
}
|
||||
198
e2e/output_test.go
Normal file
198
e2e/output_test.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
// --- JSON output structure verification ---
|
||||
|
||||
func TestJSONOutputHasStatFields(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "3", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
|
||||
// Verify total has all stat fields
|
||||
if out.Total.Count.String() != "3" {
|
||||
t.Errorf("expected count 3, got %s", out.Total.Count.String())
|
||||
}
|
||||
if out.Total.Min == "" {
|
||||
t.Error("expected min to be non-empty")
|
||||
}
|
||||
if out.Total.Max == "" {
|
||||
t.Error("expected max to be non-empty")
|
||||
}
|
||||
if out.Total.Average == "" {
|
||||
t.Error("expected average to be non-empty")
|
||||
}
|
||||
if out.Total.P90 == "" {
|
||||
t.Error("expected p90 to be non-empty")
|
||||
}
|
||||
if out.Total.P95 == "" {
|
||||
t.Error("expected p95 to be non-empty")
|
||||
}
|
||||
if out.Total.P99 == "" {
|
||||
t.Error("expected p99 to be non-empty")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONOutputResponseStatFields(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "5", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
stat, ok := out.Responses["200"]
|
||||
if !ok {
|
||||
t.Fatal("expected 200 in responses")
|
||||
}
|
||||
|
||||
if stat.Count.String() != "5" {
|
||||
t.Errorf("expected response count 5, got %s", stat.Count.String())
|
||||
}
|
||||
if stat.Min == "" || stat.Max == "" || stat.Average == "" {
|
||||
t.Error("expected min/max/average to be non-empty")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONOutputMultipleStatusCodes(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Create servers with different status codes
|
||||
srv200 := statusServer(200)
|
||||
defer srv200.Close()
|
||||
srv404 := statusServer(404)
|
||||
defer srv404.Close()
|
||||
|
||||
// We can only target one URL, so use a single server
|
||||
// Instead, test that dry-run produces the expected structure
|
||||
res := run("-U", "http://example.com", "-r", "3", "-z", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
// dry-run should have "dry-run" key
|
||||
stat := out.Responses["dry-run"]
|
||||
if stat.Count.String() != "3" {
|
||||
t.Errorf("expected dry-run count 3, got %s", stat.Count.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONOutputIsValidJSON(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Verify it's valid JSON
|
||||
var raw map[string]any
|
||||
if err := json.Unmarshal([]byte(res.Stdout), &raw); err != nil {
|
||||
t.Fatalf("stdout is not valid JSON: %v", err)
|
||||
}
|
||||
|
||||
// Verify top-level structure
|
||||
if _, ok := raw["responses"]; !ok {
|
||||
t.Error("expected 'responses' key in JSON output")
|
||||
}
|
||||
if _, ok := raw["total"]; !ok {
|
||||
t.Error("expected 'total' key in JSON output")
|
||||
}
|
||||
}
|
||||
|
||||
// --- YAML output structure verification ---
|
||||
|
||||
func TestYAMLOutputIsValidYAML(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "yaml")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
var raw map[string]any
|
||||
if err := yaml.Unmarshal([]byte(res.Stdout), &raw); err != nil {
|
||||
t.Fatalf("stdout is not valid YAML: %v", err)
|
||||
}
|
||||
|
||||
if _, ok := raw["responses"]; !ok {
|
||||
t.Error("expected 'responses' key in YAML output")
|
||||
}
|
||||
if _, ok := raw["total"]; !ok {
|
||||
t.Error("expected 'total' key in YAML output")
|
||||
}
|
||||
}
|
||||
|
||||
func TestYAMLOutputHasStatFields(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "yaml")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "count:")
|
||||
assertContains(t, res.Stdout, "min:")
|
||||
assertContains(t, res.Stdout, "max:")
|
||||
assertContains(t, res.Stdout, "average:")
|
||||
assertContains(t, res.Stdout, "p90:")
|
||||
assertContains(t, res.Stdout, "p95:")
|
||||
assertContains(t, res.Stdout, "p99:")
|
||||
}
|
||||
|
||||
// --- Table output content verification ---
|
||||
|
||||
func TestTableOutputContainsHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "table")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Table should contain column headers
|
||||
assertContains(t, res.Stdout, "Response")
|
||||
assertContains(t, res.Stdout, "Count")
|
||||
assertContains(t, res.Stdout, "Min")
|
||||
assertContains(t, res.Stdout, "Max")
|
||||
}
|
||||
|
||||
func TestTableOutputContainsStatusCode(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "table")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "200")
|
||||
}
|
||||
|
||||
// --- Version output format ---
|
||||
|
||||
func TestVersionOutputFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-v")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
lines := strings.Split(strings.TrimSpace(res.Stdout), "\n")
|
||||
if len(lines) < 4 {
|
||||
t.Fatalf("expected at least 4 lines in version output, got %d: %s", len(lines), res.Stdout)
|
||||
}
|
||||
assertContains(t, lines[0], "Version:")
|
||||
assertContains(t, lines[1], "Git Commit:")
|
||||
assertContains(t, lines[2], "Build Date:")
|
||||
assertContains(t, lines[3], "Go Version:")
|
||||
}
|
||||
103
e2e/proxy_test.go
Normal file
103
e2e/proxy_test.go
Normal file
@@ -0,0 +1,103 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Note: We can't easily test actual proxy connections in E2E tests without
|
||||
// setting up real proxy servers. These tests verify the validation and
|
||||
// error handling around proxy configuration.
|
||||
|
||||
func TestProxyValidSchemes(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Valid proxy scheme should not cause a validation error
|
||||
// (will fail at connection time since no proxy is running, but should pass validation)
|
||||
for _, scheme := range []string{"http", "https", "socks5", "socks5h"} {
|
||||
t.Run(scheme, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-z", "-q", "-o", "json",
|
||||
"-X", scheme+"://127.0.0.1:9999")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestProxyInvalidScheme(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-q", "-o", "json",
|
||||
"-X", "ftp://proxy.example.com:8080")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestMultipleProxiesDryRun(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Multiple proxies with dry-run to verify they're accepted
|
||||
res := run("-U", "http://example.com", "-r", "3", "-z", "-q", "-o", "json",
|
||||
"-X", "http://127.0.0.1:8080",
|
||||
"-X", "http://127.0.0.1:8081")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
|
||||
func TestProxyConnectionFailure(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Use a proxy that doesn't exist — should get a connection error
|
||||
res := run("-U", "http://example.com", "-r", "1", "-q", "-o", "json",
|
||||
"-X", "http://127.0.0.1:1")
|
||||
// The process should still exit (may exit 0 with error in output or exit 1)
|
||||
if res.ExitCode == 0 {
|
||||
out := res.jsonOutput(t)
|
||||
// Should NOT get a 200 — should have a proxy error
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected proxy connection error, but got 200")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestProxyFromConfigFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
config := `
|
||||
url: "http://example.com"
|
||||
requests: 1
|
||||
quiet: true
|
||||
output: json
|
||||
dryRun: true
|
||||
proxy:
|
||||
- http://127.0.0.1:8080
|
||||
`
|
||||
configPath := writeTemp(t, "proxy_config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
}
|
||||
|
||||
func TestProxyFromEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_DRY_RUN": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
"SARIN_PROXY": "http://127.0.0.1:8080",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "dry-run")
|
||||
}
|
||||
331
e2e/request_test.go
Normal file
331
e2e/request_test.go
Normal file
@@ -0,0 +1,331 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"slices"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMethodGET(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodGet {
|
||||
t.Errorf("expected default method GET, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMethodPOST(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPost {
|
||||
t.Errorf("expected method POST, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMethodExplicit(t *testing.T) {
|
||||
t.Parallel()
|
||||
methods := []string{"GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"}
|
||||
for _, method := range methods {
|
||||
t.Run(method, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", method, "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != method {
|
||||
t.Errorf("expected method %s, got %s", method, req.Method)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleMethods(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// With multiple methods, sarin cycles through them
|
||||
res := run("-U", cs.URL, "-r", "4", "-M", "GET", "-M", "POST", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
reqs := cs.allRequests()
|
||||
if len(reqs) != 4 {
|
||||
t.Fatalf("expected 4 requests, got %d", len(reqs))
|
||||
}
|
||||
|
||||
// Should see both GET and POST used
|
||||
methods := make(map[string]bool)
|
||||
for _, r := range reqs {
|
||||
methods[r.Method] = true
|
||||
}
|
||||
if !methods["GET"] || !methods["POST"] {
|
||||
t.Errorf("expected both GET and POST to be used, got methods: %v", methods)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSingleHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-H", "X-Custom: hello", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Headers["X-Custom"]
|
||||
if !ok {
|
||||
t.Fatalf("expected X-Custom header, got headers: %v", req.Headers)
|
||||
}
|
||||
if len(vals) != 1 || vals[0] != "hello" {
|
||||
t.Errorf("expected X-Custom: [hello], got %v", vals)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1",
|
||||
"-H", "X-First: one",
|
||||
"-H", "X-Second: two",
|
||||
"-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-First"]; len(v) == 0 || v[0] != "one" {
|
||||
t.Errorf("expected X-First: one, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-Second"]; len(v) == 0 || v[0] != "two" {
|
||||
t.Errorf("expected X-Second: two, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHeaderWithEmptyValue(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Header without ": " separator should have empty value
|
||||
res := run("-U", cs.URL, "-r", "1", "-H", "X-Empty", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if _, ok := req.Headers["X-Empty"]; !ok {
|
||||
t.Errorf("expected X-Empty header to be present, got headers: %v", req.Headers)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDefaultUserAgentHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
ua, ok := req.Headers["User-Agent"]
|
||||
if !ok || len(ua) == 0 {
|
||||
t.Fatalf("expected User-Agent header, got headers: %v", req.Headers)
|
||||
}
|
||||
assertContains(t, ua[0], "Sarin/")
|
||||
}
|
||||
|
||||
func TestCustomUserAgentOverridesDefault(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-H", "User-Agent: MyAgent/1.0", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
ua := req.Headers["User-Agent"]
|
||||
if len(ua) == 0 {
|
||||
t.Fatal("expected User-Agent header")
|
||||
}
|
||||
// When user sets User-Agent, the default should not be added
|
||||
if slices.Contains(ua, "MyAgent/1.0") {
|
||||
return // found the custom one
|
||||
}
|
||||
t.Errorf("expected custom User-Agent 'MyAgent/1.0', got %v", ua)
|
||||
}
|
||||
|
||||
func TestSingleParam(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-P", "key1=value1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Query["key1"]
|
||||
if !ok {
|
||||
t.Fatalf("expected key1 param, got query: %v", req.Query)
|
||||
}
|
||||
if len(vals) != 1 || vals[0] != "value1" {
|
||||
t.Errorf("expected key1=[value1], got %v", vals)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleParams(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1",
|
||||
"-P", "a=1",
|
||||
"-P", "b=2",
|
||||
"-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["a"]; len(v) == 0 || v[0] != "1" {
|
||||
t.Errorf("expected a=1, got %v", v)
|
||||
}
|
||||
if v := req.Query["b"]; len(v) == 0 || v[0] != "2" {
|
||||
t.Errorf("expected b=2, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParamsFromURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Params in the URL itself should be extracted and sent
|
||||
res := run("-U", cs.URL+"?fromurl=yes", "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["fromurl"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected fromurl=yes from URL query, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParamsFromURLAndFlag(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Both URL params and -P params should be sent
|
||||
res := run("-U", cs.URL+"?fromurl=yes", "-r", "1", "-P", "fromflag=also", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["fromurl"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected fromurl=yes, got %v", v)
|
||||
}
|
||||
if v := req.Query["fromflag"]; len(v) == 0 || v[0] != "also" {
|
||||
t.Errorf("expected fromflag=also, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSingleCookie(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-C", "session=abc123", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["session"]; !ok || v != "abc123" {
|
||||
t.Errorf("expected cookie session=abc123, got cookies: %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleCookies(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1",
|
||||
"-C", "session=abc",
|
||||
"-C", "token=xyz",
|
||||
"-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["session"]; !ok || v != "abc" {
|
||||
t.Errorf("expected cookie session=abc, got %v", req.Cookies)
|
||||
}
|
||||
if v, ok := req.Cookies["token"]; !ok || v != "xyz" {
|
||||
t.Errorf("expected cookie token=xyz, got %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-B", "hello world", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "hello world" {
|
||||
t.Errorf("expected body 'hello world', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBodyJSON(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
jsonBody := `{"name":"test","value":42}`
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-B", jsonBody, "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != jsonBody {
|
||||
t.Errorf("expected body %q, got %q", jsonBody, req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestURLPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL+"/api/v1/users", "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Path != "/api/v1/users" {
|
||||
t.Errorf("expected path /api/v1/users, got %s", req.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParamWithEmptyValue(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Param without = value
|
||||
res := run("-U", cs.URL, "-r", "1", "-P", "empty", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if _, ok := req.Query["empty"]; !ok {
|
||||
t.Errorf("expected 'empty' param to be present, got query: %v", req.Query)
|
||||
}
|
||||
}
|
||||
137
e2e/script_errors_test.go
Normal file
137
e2e/script_errors_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestJsScriptModifiesPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.path = "/js-modified"; return req; }`
|
||||
scriptPath := writeTemp(t, "modify_path.js", script)
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json", "-js", "@"+scriptPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Path != "/js-modified" {
|
||||
t.Errorf("expected path /js-modified from JS script, got %s", req.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptRuntimeError(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// This script throws an error at runtime
|
||||
script := `function transform(req) { throw new Error("runtime boom"); }`
|
||||
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json", "-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// The request should fail with a script error, not a 200
|
||||
out := res.jsonOutput(t)
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected script runtime error, but got 200")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptRuntimeError(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Script that will error at runtime
|
||||
script := `function transform(req) error("lua runtime boom") end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json", "-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected script runtime error, but got 200")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptReturnsNull(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// transform returns null instead of object
|
||||
script := `function transform(req) { return null; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json", "-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected error for null return, but got 200")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptReturnsUndefined(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// transform returns nothing (undefined)
|
||||
script := `function transform(req) { }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json", "-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected error for undefined return, but got 200")
|
||||
}
|
||||
}
|
||||
|
||||
func TestScriptFromNonexistentFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-q", "-o", "json",
|
||||
"-lua", "@/nonexistent/path/script.lua")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
assertContains(t, res.Stderr, "failed to load script")
|
||||
}
|
||||
|
||||
func TestScriptFromNonexistentURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-q", "-o", "json",
|
||||
"-js", "@http://127.0.0.1:1/nonexistent.js")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
assertContains(t, res.Stderr, "failed to load script")
|
||||
}
|
||||
|
||||
func TestMultipleLuaAndJsScripts(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
lua1 := `function transform(req) req.headers["X-Lua-1"] = {"yes"} return req end`
|
||||
lua2 := `function transform(req) req.headers["X-Lua-2"] = {"yes"} return req end`
|
||||
js1 := `function transform(req) { req.headers["X-Js-1"] = ["yes"]; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", lua1, "-lua", lua2, "-js", js1)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Lua-1"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Lua-1: yes, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-Lua-2"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Lua-2: yes, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-Js-1"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Js-1: yes, got %v", v)
|
||||
}
|
||||
}
|
||||
392
e2e/script_test.go
Normal file
392
e2e/script_test.go
Normal file
@@ -0,0 +1,392 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLuaScriptInline(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.headers["X-Lua"] = {"from-lua"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Headers["X-Lua"]; !ok || len(v) == 0 || v[0] != "from-lua" {
|
||||
t.Errorf("expected X-Lua: from-lua, got headers: %v", req.Headers)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptInline(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.headers["X-Js"] = ["from-js"]; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Headers["X-Js"]; !ok || len(v) == 0 || v[0] != "from-js" {
|
||||
t.Errorf("expected X-Js: from-js, got headers: %v", req.Headers)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptFromFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
scriptContent := `function transform(req)
|
||||
req.headers["X-From-File"] = {"yes"}
|
||||
return req
|
||||
end`
|
||||
scriptPath := writeTemp(t, "test.lua", scriptContent)
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", "@"+scriptPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Headers["X-From-File"]; !ok || len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-File: yes, got headers: %v", req.Headers)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptFromFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
scriptContent := `function transform(req) {
|
||||
req.headers["X-From-File"] = ["yes"];
|
||||
return req;
|
||||
}`
|
||||
scriptPath := writeTemp(t, "test.js", scriptContent)
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-js", "@"+scriptPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Headers["X-From-File"]; !ok || len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-From-File: yes, got headers: %v", req.Headers)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptModifiesMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.method = "PUT" return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPut {
|
||||
t.Errorf("expected method PUT after Lua transform, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptModifiesMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.method = "DELETE"; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodDelete {
|
||||
t.Errorf("expected method DELETE after JS transform, got %s", req.Method)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptModifiesPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.path = "/modified" return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Path != "/modified" {
|
||||
t.Errorf("expected path /modified, got %s", req.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptModifiesBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.body = "lua-body" return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "lua-body" {
|
||||
t.Errorf("expected body 'lua-body', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptModifiesBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.body = "js-body"; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "js-body" {
|
||||
t.Errorf("expected body 'js-body', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptModifiesParams(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.params["lua_param"] = {"lua_value"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Query["lua_param"]; !ok || len(v) == 0 || v[0] != "lua_value" {
|
||||
t.Errorf("expected lua_param=lua_value, got query: %v", req.Query)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptModifiesParams(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.params["js_param"] = ["js_value"]; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Query["js_param"]; !ok || len(v) == 0 || v[0] != "js_value" {
|
||||
t.Errorf("expected js_param=js_value, got query: %v", req.Query)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptModifiesCookies(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.cookies["lua_cookie"] = {"lua_val"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["lua_cookie"]; !ok || v != "lua_val" {
|
||||
t.Errorf("expected cookie lua_cookie=lua_val, got cookies: %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptModifiesCookies(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) { req.cookies["js_cookie"] = ["js_val"]; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Cookies["js_cookie"]; !ok || v != "js_val" {
|
||||
t.Errorf("expected cookie js_cookie=js_val, got cookies: %v", req.Cookies)
|
||||
}
|
||||
}
|
||||
|
||||
func TestScriptChainLuaThenJs(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
luaScript := `function transform(req) req.headers["X-Step"] = {"lua"} return req end`
|
||||
jsScript := `function transform(req) { req.headers["X-Js-Step"] = ["js"]; return req; }`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", luaScript,
|
||||
"-js", jsScript)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v, ok := req.Headers["X-Step"]; !ok || len(v) == 0 || v[0] != "lua" {
|
||||
t.Errorf("expected X-Step: lua from Lua script, got %v", req.Headers["X-Step"])
|
||||
}
|
||||
if v, ok := req.Headers["X-Js-Step"]; !ok || len(v) == 0 || v[0] != "js" {
|
||||
t.Errorf("expected X-Js-Step: js from JS script, got %v", req.Headers["X-Js-Step"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleLuaScriptsChained(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
lua1 := `function transform(req) req.headers["X-First"] = {"1"} return req end`
|
||||
lua2 := `function transform(req) req.headers["X-Second"] = {"2"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", lua1,
|
||||
"-lua", lua2)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-First"]; len(v) == 0 || v[0] != "1" {
|
||||
t.Errorf("expected X-First: 1, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-Second"]; len(v) == 0 || v[0] != "2" {
|
||||
t.Errorf("expected X-Second: 2, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestScriptWithEscapedAt(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// @@ means the first @ is stripped, rest is treated as inline script
|
||||
script := `@@function transform(req) req.headers["X-At"] = {"escaped"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
// The @@ prefix strips one @, leaving "@function transform..." which is valid Lua?
|
||||
// Actually no — after stripping the first @, it becomes:
|
||||
// "@function transform(req) ..." which would be interpreted as a file reference.
|
||||
// Wait — the code says: strings starting with "@@" → content = source[1:] = "@function..."
|
||||
// Then it's returned as inline content "@function transform..."
|
||||
// Lua would fail because "@" is not valid Lua syntax.
|
||||
// So this test just validates that the @@ mechanism doesn't crash.
|
||||
// It should fail at the validation step since "@function..." is not valid Lua.
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestLuaScriptMultipleHeaderValues(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
script := `function transform(req) req.headers["X-Multi"] = {"val1", "val2"} return req end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Headers["X-Multi"]
|
||||
if !ok {
|
||||
t.Fatalf("expected X-Multi header, got headers: %v", req.Headers)
|
||||
}
|
||||
if len(vals) != 2 || vals[0] != "val1" || vals[1] != "val2" {
|
||||
t.Errorf("expected X-Multi: [val1, val2], got %v", vals)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJsScriptCanReadExistingHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Set a header via CLI, then read it in JS and set a new one based on it
|
||||
script := `function transform(req) {
|
||||
var original = req.headers["X-Original"];
|
||||
if (original && original.length > 0) {
|
||||
req.headers["X-Copy"] = [original[0]];
|
||||
}
|
||||
return req;
|
||||
}`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", "X-Original: hello",
|
||||
"-js", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Copy"]; len(v) == 0 || v[0] != "hello" {
|
||||
t.Errorf("expected X-Copy: hello (copied from X-Original), got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaScriptCanReadExistingParams(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Set a param via CLI, then read it in Lua
|
||||
script := `function transform(req)
|
||||
local original = req.params["key1"]
|
||||
if original and #original > 0 then
|
||||
req.params["key1_copy"] = {original[1]}
|
||||
end
|
||||
return req
|
||||
end`
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-P", "key1=val1",
|
||||
"-lua", script)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["key1_copy"]; len(v) == 0 || v[0] != "val1" {
|
||||
t.Errorf("expected key1_copy=val1 (copied from key1), got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestScriptFromHTTPURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Serve a Lua script via HTTP
|
||||
scriptContent := `function transform(req) req.headers["X-Remote"] = {"yes"} return req end`
|
||||
scriptServer := statusServerWithBody(scriptContent)
|
||||
defer scriptServer.Close()
|
||||
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-lua", "@"+scriptServer.URL)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Remote"]; len(v) == 0 || v[0] != "yes" {
|
||||
t.Errorf("expected X-Remote: yes from remote script, got %v", req.Headers)
|
||||
}
|
||||
}
|
||||
36
e2e/show_config_extra_test.go
Normal file
36
e2e/show_config_extra_test.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestShowConfigFromYAML(t *testing.T) {
|
||||
t.Parallel()
|
||||
config := `
|
||||
url: "http://example.com"
|
||||
requests: 1
|
||||
showConfig: true
|
||||
`
|
||||
configPath := writeTemp(t, "show_config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Non-TTY: should output raw YAML config
|
||||
assertContains(t, res.Stdout, "url:")
|
||||
assertContains(t, res.Stdout, "example.com")
|
||||
}
|
||||
|
||||
func TestShowConfigFromEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_SHOW_CONFIG": "true",
|
||||
}, "-q")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "url:")
|
||||
assertContains(t, res.Stdout, "example.com")
|
||||
}
|
||||
61
e2e/show_config_test.go
Normal file
61
e2e/show_config_test.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestShowConfigNonTTY(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// In non-TTY mode (like tests), -s should output raw YAML and exit
|
||||
res := run("-U", "http://example.com", "-r", "1", "-s")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Should contain YAML-formatted config
|
||||
assertContains(t, res.Stdout, "url:")
|
||||
assertContains(t, res.Stdout, "example.com")
|
||||
assertContains(t, res.Stdout, "requests:")
|
||||
}
|
||||
|
||||
func TestShowConfigContainsMethod(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-M", "POST", "-s")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "method:")
|
||||
assertContains(t, res.Stdout, "POST")
|
||||
}
|
||||
|
||||
func TestShowConfigContainsHeaders(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-s",
|
||||
"-H", "X-Custom: test-value")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "X-Custom")
|
||||
assertContains(t, res.Stdout, "test-value")
|
||||
}
|
||||
|
||||
func TestShowConfigContainsTimeout(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-T", "5s", "-s")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "timeout:")
|
||||
}
|
||||
|
||||
func TestShowConfigWithEnvVars(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": "http://example.com",
|
||||
"SARIN_REQUESTS": "5",
|
||||
}, "-s")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
assertContains(t, res.Stdout, "example.com")
|
||||
assertContains(t, res.Stdout, "requests:")
|
||||
}
|
||||
116
e2e/signal_test.go
Normal file
116
e2e/signal_test.go
Normal file
@@ -0,0 +1,116 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"syscall"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestSIGINTGracefulShutdown(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := slowServer(100 * time.Millisecond)
|
||||
defer srv.Close()
|
||||
|
||||
// Start a duration-based test that would run for a long time
|
||||
cmd, stdout := startProcess(
|
||||
"-U", srv.URL, "-d", "30s", "-q", "-o", "json",
|
||||
)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("failed to start process: %v", err)
|
||||
}
|
||||
|
||||
// Let it run for a bit so some requests complete
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
|
||||
// Send SIGINT for graceful shutdown
|
||||
if err := cmd.Process.Signal(syscall.SIGINT); err != nil {
|
||||
t.Fatalf("failed to send SIGINT: %v", err)
|
||||
}
|
||||
|
||||
// Wait for process to exit
|
||||
err := cmd.Wait()
|
||||
_ = err // May exit with 0 or non-zero depending on timing
|
||||
|
||||
// Should have produced valid JSON output with partial results
|
||||
output := stdout.String()
|
||||
if output == "" {
|
||||
t.Fatal("expected JSON output after SIGINT, got empty stdout")
|
||||
}
|
||||
|
||||
var out outputData
|
||||
if err := json.Unmarshal([]byte(output), &out); err != nil {
|
||||
t.Fatalf("expected valid JSON after graceful shutdown: %v\nstdout: %s", err, output)
|
||||
}
|
||||
|
||||
count, _ := out.Total.Count.Int64()
|
||||
if count < 1 {
|
||||
t.Errorf("expected at least 1 request before shutdown, got %d", count)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSIGTERMGracefulShutdown(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := slowServer(100 * time.Millisecond)
|
||||
defer srv.Close()
|
||||
|
||||
cmd, stdout := startProcess(
|
||||
"-U", srv.URL, "-d", "30s", "-q", "-o", "json",
|
||||
)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("failed to start process: %v", err)
|
||||
}
|
||||
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
|
||||
if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
|
||||
t.Fatalf("failed to send SIGTERM: %v", err)
|
||||
}
|
||||
|
||||
err := cmd.Wait()
|
||||
_ = err
|
||||
|
||||
output := stdout.String()
|
||||
if output == "" {
|
||||
t.Fatal("expected JSON output after SIGTERM, got empty stdout")
|
||||
}
|
||||
|
||||
var out outputData
|
||||
if err := json.Unmarshal([]byte(output), &out); err != nil {
|
||||
t.Fatalf("expected valid JSON after graceful shutdown: %v\nstdout: %s", err, output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSIGINTExitsInReasonableTime(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := slowServer(50 * time.Millisecond)
|
||||
defer srv.Close()
|
||||
|
||||
cmd, _ := startProcess(
|
||||
"-U", srv.URL, "-d", "60s", "-q", "-o", "none",
|
||||
)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("failed to start process: %v", err)
|
||||
}
|
||||
|
||||
time.Sleep(300 * time.Millisecond)
|
||||
|
||||
if err := cmd.Process.Signal(syscall.SIGINT); err != nil {
|
||||
t.Fatalf("failed to send SIGINT: %v", err)
|
||||
}
|
||||
|
||||
// Should exit within 5 seconds
|
||||
done := make(chan error, 1)
|
||||
go func() { done <- cmd.Wait() }()
|
||||
|
||||
select {
|
||||
case <-done:
|
||||
// Good — exited in time
|
||||
case <-time.After(5 * time.Second):
|
||||
cmd.Process.Kill()
|
||||
t.Fatal("process did not exit within 5 seconds after SIGINT")
|
||||
}
|
||||
}
|
||||
116
e2e/template_funcs_extra_test.go
Normal file
116
e2e/template_funcs_extra_test.go
Normal file
@@ -0,0 +1,116 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestDictStr(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// dict_Str creates a map; use with index to retrieve a value
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ $d := dict_Str "name" "alice" "role" "admin" }}{{ index $d "name" }}-{{ index $d "role" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "alice-admin" {
|
||||
t.Errorf("expected body alice-admin, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringsToDate(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// strings_ToDate parses a date string; verify it produces a non-empty result
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", `X-Date: {{ strings_ToDate "2024-06-15" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Date"]; len(v) == 0 || v[0] == "" {
|
||||
t.Error("expected X-Date to have a non-empty value")
|
||||
} else {
|
||||
assertContains(t, v[0], "2024")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileBase64NonexistentFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// file_Base64 errors at runtime, the error becomes the response key
|
||||
res := run("-U", "http://example.com", "-r", "1", "-z", "-q", "-o", "json",
|
||||
"-B", `{{ file_Base64 "/nonexistent/file.txt" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
// Should have a template rendering error as response key, not "dry-run"
|
||||
if _, ok := out.Responses["dry-run"]; ok {
|
||||
t.Error("expected template error, but got dry-run response")
|
||||
}
|
||||
assertResponseCount(t, out, 1)
|
||||
}
|
||||
|
||||
func TestFileBase64FailedHTTP(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-z", "-q", "-o", "json",
|
||||
"-B", `{{ file_Base64 "http://127.0.0.1:1/nonexistent" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
if _, ok := out.Responses["dry-run"]; ok {
|
||||
t.Error("expected template error, but got dry-run response")
|
||||
}
|
||||
assertResponseCount(t, out, 1)
|
||||
}
|
||||
|
||||
func TestMultipleValuesFlags(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-V", "KEY1=val1", "-V", "KEY2=val2",
|
||||
"-H", "X-K1: {{ .Values.KEY1 }}",
|
||||
"-H", "X-K2: {{ .Values.KEY2 }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-K1"]; len(v) == 0 || v[0] != "val1" {
|
||||
t.Errorf("expected X-K1: val1, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-K2"]; len(v) == 0 || v[0] != "val2" {
|
||||
t.Errorf("expected X-K2: val2, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesUsedInBodyAndHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Same value used in both header and body within the same request
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-V", "ID={{ fakeit_UUID }}",
|
||||
"-H", "X-Request-Id: {{ .Values.ID }}",
|
||||
"-B", `{"id":"{{ .Values.ID }}"}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
headerID := ""
|
||||
if v := req.Headers["X-Request-Id"]; len(v) > 0 {
|
||||
headerID = v[0]
|
||||
}
|
||||
if headerID == "" {
|
||||
t.Fatal("expected X-Request-Id to have a value")
|
||||
}
|
||||
// Body should contain the same UUID as the header
|
||||
if !strings.Contains(req.Body, headerID) {
|
||||
t.Errorf("expected body to contain same ID as header (%s), got body: %s", headerID, req.Body)
|
||||
}
|
||||
}
|
||||
170
e2e/template_funcs_test.go
Normal file
170
e2e/template_funcs_test.go
Normal file
@@ -0,0 +1,170 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestStringToUpper(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", `X-Upper: {{ strings_ToUpper "hello" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Upper"]; len(v) == 0 || v[0] != "HELLO" {
|
||||
t.Errorf("expected X-Upper: HELLO, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringToLower(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", `X-Lower: {{ strings_ToLower "WORLD" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Lower"]; len(v) == 0 || v[0] != "world" {
|
||||
t.Errorf("expected X-Lower: world, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringReplace(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_Replace "foo-bar-baz" "-" "_" -1 }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "foo_bar_baz" {
|
||||
t.Errorf("expected body foo_bar_baz, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringRemoveSpaces(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_RemoveSpaces "hello world foo" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "helloworldfoo" {
|
||||
t.Errorf("expected body helloworldfoo, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringTrimPrefix(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_TrimPrefix "hello-world" "hello-" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "world" {
|
||||
t.Errorf("expected body world, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringTrimSuffix(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_TrimSuffix "hello-world" "-world" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "hello" {
|
||||
t.Errorf("expected body hello, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSliceJoin(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ slice_Join (slice_Str "a" "b" "c") ", " }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "a, b, c" {
|
||||
t.Errorf("expected body 'a, b, c', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringFirst(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_First "abcdef" 3 }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "abc" {
|
||||
t.Errorf("expected body abc, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringLast(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_Last "abcdef" 3 }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "def" {
|
||||
t.Errorf("expected body def, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringTruncate(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ strings_Truncate "hello world" 5 }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "hello..." {
|
||||
t.Errorf("expected body 'hello...', got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSliceStr(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{{ slice_Join (slice_Str "a" "b" "c") "-" }}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != "a-b-c" {
|
||||
t.Errorf("expected body a-b-c, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
241
e2e/template_test.go
Normal file
241
e2e/template_test.go
Normal file
@@ -0,0 +1,241 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTemplateInHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Use a template function that generates a UUID
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", "X-Request-Id: {{ fakeit_UUID }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Headers["X-Request-Id"]
|
||||
if !ok || len(vals) == 0 {
|
||||
t.Fatalf("expected X-Request-Id header, got headers: %v", req.Headers)
|
||||
}
|
||||
// UUID format: 8-4-4-4-12
|
||||
if len(vals[0]) != 36 {
|
||||
t.Errorf("expected UUID (36 chars), got %q (%d chars)", vals[0], len(vals[0]))
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateInParam(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-P", "id={{ fakeit_UUID }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Query["id"]
|
||||
if !ok || len(vals) == 0 {
|
||||
t.Fatalf("expected 'id' param, got query: %v", req.Query)
|
||||
}
|
||||
if len(vals[0]) != 36 {
|
||||
t.Errorf("expected UUID in param value, got %q", vals[0])
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateInBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-B", `{"id":"{{ fakeit_UUID }}"}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if len(req.Body) < 36 {
|
||||
t.Errorf("expected body to contain a UUID, got %q", req.Body)
|
||||
}
|
||||
assertContains(t, req.Body, `"id":"`)
|
||||
}
|
||||
|
||||
func TestTemplateInURLPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL+"/api/{{ fakeit_UUID }}", "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if len(req.Path) < 5+36 { // "/api/" + UUID
|
||||
t.Errorf("expected path to contain a UUID, got %q", req.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesBasic(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-V", "MY_VAR=hello",
|
||||
"-H", "X-Val: {{ .Values.MY_VAR }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Val"]; len(v) == 0 || v[0] != "hello" {
|
||||
t.Errorf("expected X-Val: hello from Values, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesMultiple(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-V", "A=first",
|
||||
"-V", "B=second",
|
||||
"-H", "X-A: {{ .Values.A }}",
|
||||
"-H", "X-B: {{ .Values.B }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-A"]; len(v) == 0 || v[0] != "first" {
|
||||
t.Errorf("expected X-A: first, got %v", v)
|
||||
}
|
||||
if v := req.Headers["X-B"]; len(v) == 0 || v[0] != "second" {
|
||||
t.Errorf("expected X-B: second, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesWithTemplate(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
// Values themselves can contain templates
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-V", "REQ_ID={{ fakeit_UUID }}",
|
||||
"-H", "X-Request-Id: {{ .Values.REQ_ID }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
vals, ok := req.Headers["X-Request-Id"]
|
||||
if !ok || len(vals) == 0 {
|
||||
t.Fatalf("expected X-Request-Id header, got %v", req.Headers)
|
||||
}
|
||||
if len(vals[0]) != 36 {
|
||||
t.Errorf("expected UUID from value template, got %q", vals[0])
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesInParam(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-V", "TOKEN=abc123",
|
||||
"-P", "token={{ .Values.TOKEN }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Query["token"]; len(v) == 0 || v[0] != "abc123" {
|
||||
t.Errorf("expected token=abc123, got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesInBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-M", "POST", "-q", "-o", "json",
|
||||
"-V", "NAME=test-user",
|
||||
"-B", `{"name":"{{ .Values.NAME }}"}`)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Body != `{"name":"test-user"}` {
|
||||
t.Errorf("expected body with interpolated value, got %q", req.Body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValuesInURLPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL+"/users/{{ .Values.USER_ID }}", "-r", "1", "-q", "-o", "json",
|
||||
"-V", "USER_ID=42")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Path != "/users/42" {
|
||||
t.Errorf("expected path /users/42, got %s", req.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateGeneratesDifferentValues(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "5", "-c", "1", "-q", "-o", "json",
|
||||
"-H", "X-Unique: {{ fakeit_UUID }}")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
reqs := cs.allRequests()
|
||||
if len(reqs) < 5 {
|
||||
t.Fatalf("expected 5 requests, got %d", len(reqs))
|
||||
}
|
||||
|
||||
// UUIDs should be unique across requests
|
||||
seen := make(map[string]bool)
|
||||
for _, r := range reqs {
|
||||
vals := r.Headers["X-Unique"]
|
||||
if len(vals) > 0 {
|
||||
seen[vals[0]] = true
|
||||
}
|
||||
}
|
||||
if len(seen) < 2 {
|
||||
t.Errorf("expected template to generate different UUIDs across requests, got %d unique values", len(seen))
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateFunctionFakeit(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
t.Cleanup(cs.Close)
|
||||
|
||||
// Test various fakeit functions
|
||||
tests := []struct {
|
||||
name string
|
||||
template string
|
||||
}{
|
||||
{"UUID", "{{ fakeit_UUID }}"},
|
||||
{"Name", "{{ fakeit_Name }}"},
|
||||
{"Email", "{{ fakeit_Email }}"},
|
||||
{"Number", "{{ fakeit_Number 1 100 }}"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := newCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL, "-r", "1", "-q", "-o", "json",
|
||||
"-H", "X-Test: "+tt.template)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if v := req.Headers["X-Test"]; len(v) == 0 || v[0] == "" {
|
||||
t.Errorf("expected non-empty value from %s, got %v", tt.template, v)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
110
e2e/timeout_test.go
Normal file
110
e2e/timeout_test.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestRequestTimeout(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Server that takes 2 seconds to respond
|
||||
srv := slowServer(2 * time.Second)
|
||||
defer srv.Close()
|
||||
|
||||
// Timeout of 200ms — should fail with timeout error
|
||||
res := run("-U", srv.URL, "-r", "1", "-T", "200ms", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
// Should NOT have "200" — should have a timeout error
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected timeout error, but got 200")
|
||||
}
|
||||
// Total count should still be 1 (the timed-out request is counted)
|
||||
assertResponseCount(t, out, 1)
|
||||
}
|
||||
|
||||
func TestRequestTimeoutMultiple(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
srv := slowServer(2 * time.Second)
|
||||
defer srv.Close()
|
||||
|
||||
res := run("-U", srv.URL, "-r", "3", "-c", "3", "-T", "200ms", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 3)
|
||||
|
||||
// None should be 200
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected all requests to timeout, but got some 200s")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTimeoutDoesNotAffectFastRequests(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
// Short timeout but server responds instantly — should succeed
|
||||
res := run("-U", srv.URL, "-r", "3", "-T", "5s", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
assertResponseCount(t, out, 3)
|
||||
}
|
||||
|
||||
func TestDurationStopsAfterTime(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
start := time.Now()
|
||||
res := run("-U", srv.URL, "-d", "1s", "-q", "-o", "json")
|
||||
elapsed := time.Since(start)
|
||||
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Should finish roughly around 1s (allow some tolerance)
|
||||
if elapsed < 900*time.Millisecond {
|
||||
t.Errorf("expected test to run ~1s, but finished in %v", elapsed)
|
||||
}
|
||||
if elapsed > 3*time.Second {
|
||||
t.Errorf("expected test to finish around 1s, but took %v", elapsed)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDurationWithRequestLimit(t *testing.T) {
|
||||
t.Parallel()
|
||||
srv := echoServer()
|
||||
defer srv.Close()
|
||||
|
||||
// Request limit reached before duration — should stop early
|
||||
res := run("-U", srv.URL, "-r", "2", "-d", "30s", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertResponseCount(t, out, 2)
|
||||
}
|
||||
|
||||
func TestDurationWithSlowServerStopsAtDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Server delays 500ms per request
|
||||
srv := slowServer(500 * time.Millisecond)
|
||||
defer srv.Close()
|
||||
|
||||
start := time.Now()
|
||||
res := run("-U", srv.URL, "-d", "1s", "-c", "1", "-q", "-o", "json")
|
||||
elapsed := time.Since(start)
|
||||
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
// Should stop after ~1s even though requests are slow
|
||||
if elapsed > 3*time.Second {
|
||||
t.Errorf("expected to stop around 1s duration, took %v", elapsed)
|
||||
}
|
||||
}
|
||||
164
e2e/tls_test.go
Normal file
164
e2e/tls_test.go
Normal file
@@ -0,0 +1,164 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestHTTPSWithInsecureFlag(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Create a TLS server with a self-signed cert
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
// Without --insecure, it should fail (cert not trusted)
|
||||
// With --insecure, it should succeed
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json", "-I")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
}
|
||||
|
||||
func TestHTTPSWithoutInsecureFails(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
// Without --insecure, should get a TLS error (not a clean 200)
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json")
|
||||
assertExitCode(t, res, 0) // Process still exits 0, but response key is an error
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
// Should NOT have a "200" key — should have a TLS error
|
||||
if _, ok := out.Responses["200"]; ok {
|
||||
t.Error("expected TLS error without --insecure, but got 200")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHTTPSInsecureViaCLILongFlag(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
// Use the long form flag
|
||||
res := run("-U", srv.URL, "-r", "1", "-q", "-o", "json", "-insecure")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
}
|
||||
|
||||
func TestHTTPSInsecureViaConfigFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
config := `
|
||||
url: "` + srv.URL + `"
|
||||
requests: 1
|
||||
insecure: true
|
||||
quiet: true
|
||||
output: json
|
||||
`
|
||||
configPath := writeTemp(t, "tls_config.yaml", config)
|
||||
|
||||
res := run("-f", configPath)
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
}
|
||||
|
||||
func TestHTTPSInsecureViaEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
res := runWithEnv(map[string]string{
|
||||
"SARIN_URL": srv.URL,
|
||||
"SARIN_REQUESTS": "1",
|
||||
"SARIN_INSECURE": "true",
|
||||
"SARIN_QUIET": "true",
|
||||
"SARIN_OUTPUT": "json",
|
||||
})
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
}
|
||||
|
||||
func TestHTTPSEchoServer(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// TLS echo server that returns request details
|
||||
srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
resp := map[string]any{
|
||||
"method": r.Method,
|
||||
"path": r.URL.Path,
|
||||
"tls": r.TLS != nil,
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(resp)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
// Verify request was received over TLS
|
||||
res := run("-U", srv.URL+"/secure-path", "-r", "1", "-q", "-o", "json", "-I")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
out := res.jsonOutput(t)
|
||||
assertHasResponseKey(t, out, "200")
|
||||
}
|
||||
|
||||
// tlsCaptureServer is like captureServer but with TLS
|
||||
func tlsCaptureServer() *captureServer {
|
||||
cs := &captureServer{}
|
||||
cs.Server = httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
cs.mu.Lock()
|
||||
cs.requests = append(cs.requests, echoResponse{
|
||||
Method: r.Method,
|
||||
Path: r.URL.Path,
|
||||
})
|
||||
cs.mu.Unlock()
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
cs.TLS = &tls.Config{}
|
||||
cs.StartTLS()
|
||||
return cs
|
||||
}
|
||||
|
||||
func TestHTTPSHeadersSentCorrectly(t *testing.T) {
|
||||
t.Parallel()
|
||||
cs := tlsCaptureServer()
|
||||
defer cs.Close()
|
||||
|
||||
res := run("-U", cs.URL+"/api/test", "-r", "1", "-M", "POST", "-q", "-o", "json", "-I")
|
||||
assertExitCode(t, res, 0)
|
||||
|
||||
req := cs.lastRequest()
|
||||
if req.Method != http.MethodPost {
|
||||
t.Errorf("expected POST over HTTPS, got %s", req.Method)
|
||||
}
|
||||
if req.Path != "/api/test" {
|
||||
t.Errorf("expected path /api/test over HTTPS, got %s", req.Path)
|
||||
}
|
||||
}
|
||||
13
e2e/validation_extra_test.go
Normal file
13
e2e/validation_extra_test.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestValidation_ConcurrencyExceedsMax(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
res := run("-U", "http://example.com", "-r", "1", "-q", "-c", "200000000")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "concurrency must not exceed 100,000,000")
|
||||
}
|
||||
168
e2e/validation_test.go
Normal file
168
e2e/validation_test.go
Normal file
@@ -0,0 +1,168 @@
|
||||
package e2e
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestValidation_MissingURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-r", "1")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "URL")
|
||||
assertContains(t, res.Stderr, "required")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidURLScheme(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "ftp://example.com", "-r", "1")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "URL")
|
||||
assertContains(t, res.Stderr, "scheme")
|
||||
}
|
||||
|
||||
func TestValidation_URLWithoutHost(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://", "-r", "1")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "URL")
|
||||
}
|
||||
|
||||
func TestValidation_NoRequestsOrDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "request count or duration")
|
||||
}
|
||||
|
||||
func TestValidation_ZeroRequests(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "0")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Requests")
|
||||
}
|
||||
|
||||
func TestValidation_ZeroDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-d", "0s")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Duration")
|
||||
}
|
||||
|
||||
func TestValidation_ZeroRequestsAndZeroDuration(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "0", "-d", "0s")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_ConcurrencyZero(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-c", "0")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "concurrency")
|
||||
}
|
||||
|
||||
func TestValidation_TimeoutZero(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Timeout of 0 is invalid (must be > 0)
|
||||
res := run("-U", "http://example.com", "-r", "1", "-T", "0s")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "timeout")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidOutputFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-o", "xml")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "Output")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidProxyScheme(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-X", "ftp://proxy.example.com:8080")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "proxy")
|
||||
}
|
||||
|
||||
func TestValidation_EmptyLuaScript(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-lua", "")
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestValidation_EmptyJsScript(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1", "-js", "")
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestValidation_LuaScriptMissingTransform(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-lua", `print("hello")`)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_JsScriptMissingTransform(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-js", `console.log("hello")`)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_LuaScriptSyntaxError(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-lua", `function transform(req invalid syntax`)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_JsScriptSyntaxError(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-js", `function transform(req { invalid`)
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_ScriptEmptyFileRef(t *testing.T) {
|
||||
t.Parallel()
|
||||
// "@" with nothing after it
|
||||
res := run("-U", "http://example.com", "-r", "1", "-lua", "@")
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestValidation_ScriptNonexistentFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-lua", "@/nonexistent/path/script.lua")
|
||||
assertExitCode(t, res, 1)
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInHeader(t *testing.T) {
|
||||
t.Parallel()
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-H", "X-Test: {{ invalid_func }}")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_InvalidTemplateInBody(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Use a template with invalid syntax (unclosed action)
|
||||
res := run("-U", "http://example.com", "-r", "1",
|
||||
"-B", "{{ invalid_func_xyz }}")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "VALIDATION")
|
||||
}
|
||||
|
||||
func TestValidation_MultipleErrors(t *testing.T) {
|
||||
t.Parallel()
|
||||
// No URL, no requests/duration — should report multiple validation errors
|
||||
res := run("-c", "1")
|
||||
assertExitCode(t, res, 1)
|
||||
assertContains(t, res.Stderr, "URL")
|
||||
}
|
||||
63
go.mod
63
go.mod
@@ -1,22 +1,59 @@
|
||||
module github.com/aykhans/dodo
|
||||
module go.aykhans.me/sarin
|
||||
|
||||
go 1.24.2
|
||||
go 1.26.0
|
||||
|
||||
require (
|
||||
github.com/brianvoe/gofakeit/v7 v7.2.1
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.7
|
||||
github.com/valyala/fasthttp v1.62.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
github.com/brianvoe/gofakeit/v7 v7.14.0
|
||||
github.com/charmbracelet/bubbles v1.0.0
|
||||
github.com/charmbracelet/bubbletea v1.3.10
|
||||
github.com/charmbracelet/glamour v0.10.0
|
||||
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834
|
||||
github.com/charmbracelet/x/term v0.2.2
|
||||
github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3
|
||||
github.com/joho/godotenv v1.5.1
|
||||
github.com/valyala/fasthttp v1.69.0
|
||||
github.com/yuin/gopher-lua v1.1.1
|
||||
go.aykhans.me/utils v1.0.7
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3
|
||||
golang.org/x/net v0.50.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/alecthomas/chroma/v2 v2.21.1 // indirect
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/aymerick/douceur v0.2.0 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.4.1 // indirect
|
||||
github.com/charmbracelet/harmonica v0.2.0 // indirect
|
||||
github.com/charmbracelet/x/ansi v0.11.6 // indirect
|
||||
github.com/charmbracelet/x/cellbuf v0.0.15 // indirect
|
||||
github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f // indirect
|
||||
github.com/clipperhouse/displaywidth v0.9.0 // indirect
|
||||
github.com/clipperhouse/stringish v0.1.1 // indirect
|
||||
github.com/clipperhouse/uax29/v2 v2.5.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
|
||||
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
|
||||
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/klauspost/compress v1.18.2 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-localereader v0.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.19 // indirect
|
||||
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
|
||||
github.com/muesli/cancelreader v0.2.2 // indirect
|
||||
github.com/muesli/reflow v0.3.0 // indirect
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
golang.org/x/net v0.40.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/term v0.32.0 // indirect
|
||||
golang.org/x/text v0.25.0 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||
github.com/yuin/goldmark v1.7.16 // indirect
|
||||
github.com/yuin/goldmark-emoji v1.0.6 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/term v0.40.0 // indirect
|
||||
golang.org/x/text v0.34.0 // indirect
|
||||
)
|
||||
|
||||
146
go.sum
146
go.sum
@@ -1,37 +1,127 @@
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/brianvoe/gofakeit/v7 v7.2.1 h1:AGojgaaCdgq4Adzrd2uWdbGNDyX6MWNhHdQBraNfOHI=
|
||||
github.com/brianvoe/gofakeit/v7 v7.2.1/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
|
||||
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
|
||||
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
||||
github.com/alecthomas/chroma/v2 v2.21.1 h1:FaSDrp6N+3pphkNKU6HPCiYLgm8dbe5UXIXcoBhZSWA=
|
||||
github.com/alecthomas/chroma/v2 v2.21.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
|
||||
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
|
||||
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||
github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY=
|
||||
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
|
||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
||||
github.com/brianvoe/gofakeit/v7 v7.14.0 h1:R8tmT/rTDJmD2ngpqBL9rAKydiL7Qr2u3CXPqRt59pk=
|
||||
github.com/brianvoe/gofakeit/v7 v7.14.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA=
|
||||
github.com/charmbracelet/bubbles v1.0.0 h1:12J8/ak/uCZEMQ6KU7pcfwceyjLlWsDLAxB5fXonfvc=
|
||||
github.com/charmbracelet/bubbles v1.0.0/go.mod h1:9d/Zd5GdnauMI5ivUIVisuEm3ave1XwXtD1ckyV6r3E=
|
||||
github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw=
|
||||
github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4=
|
||||
github.com/charmbracelet/colorprofile v0.4.1 h1:a1lO03qTrSIRaK8c3JRxJDZOvhvIeSco3ej+ngLk1kk=
|
||||
github.com/charmbracelet/colorprofile v0.4.1/go.mod h1:U1d9Dljmdf9DLegaJ0nGZNJvoXAhayhmidOdcBwAvKk=
|
||||
github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY=
|
||||
github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk=
|
||||
github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
|
||||
github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
|
||||
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE=
|
||||
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA=
|
||||
github.com/charmbracelet/x/ansi v0.11.6 h1:GhV21SiDz/45W9AnV2R61xZMRri5NlLnl6CVF7ihZW8=
|
||||
github.com/charmbracelet/x/ansi v0.11.6/go.mod h1:2JNYLgQUsyqaiLovhU2Rv/pb8r6ydXKS3NIttu3VGZQ=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.15 h1:ur3pZy0o6z/R7EylET877CBxaiE1Sp1GMxoFPAIztPI=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.15/go.mod h1:J1YVbR7MUuEGIFPCaaZ96KDl5NoS0DAWkskup+mOY+Q=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
|
||||
github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f h1:kvAY8ffwhFuxWqtVI6+9E5vmgTApG96hswFLXJfsxHI=
|
||||
github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f/go.mod h1:vqEfX6xzqW1pKKZUUiFOKg0OQ7bCh54Q2vR/tserrRA=
|
||||
github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk=
|
||||
github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI=
|
||||
github.com/clipperhouse/displaywidth v0.9.0 h1:Qb4KOhYwRiN3viMv1v/3cTBlz3AcAZX3+y9OLhMtAtA=
|
||||
github.com/clipperhouse/displaywidth v0.9.0/go.mod h1:aCAAqTlh4GIVkhQnJpbL0T/WfcrJXHcj8C0yjYcjOZA=
|
||||
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
|
||||
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
|
||||
github.com/clipperhouse/uax29/v2 v2.5.0 h1:x7T0T4eTHDONxFJsL94uKNKPHrclyFI0lm7+w94cO8U=
|
||||
github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 h1:bVp3yUzvSAJzu9GqID+Z96P+eu5TKnIMJSV4QaZMauM=
|
||||
github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
|
||||
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
||||
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U=
|
||||
github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
|
||||
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
|
||||
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
|
||||
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
|
||||
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
|
||||
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
|
||||
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
|
||||
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
|
||||
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasthttp v1.62.0 h1:8dKRBX/y2rCzyc6903Zu1+3qN0H/d2MsxPPmVNamiH0=
|
||||
github.com/valyala/fasthttp v1.62.0/go.mod h1:FCINgr4GKdKqV8Q0xv8b+UxPV+H/O5nNFo3D+r54Htg=
|
||||
github.com/valyala/fasthttp v1.69.0 h1:fNLLESD2SooWeh2cidsuFtOcrEi4uB4m1mPrkJMZyVI=
|
||||
github.com/valyala/fasthttp v1.69.0/go.mod h1:4wA4PfAraPlAsJ5jMSqCE2ug5tqUPwKXxVj8oNECGcw=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
|
||||
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE=
|
||||
github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
github.com/yuin/goldmark-emoji v1.0.6 h1:QWfF2FYaXwL74tfGOW5izeiZepUDroDJfWubQI9HTHs=
|
||||
github.com/yuin/goldmark-emoji v1.0.6/go.mod h1:ukxJDKFpdFb5x0a5HqbdlcKtebh086iJpI31LTKmWuA=
|
||||
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
go.aykhans.me/utils v1.0.7 h1:ClHXHlWmkjfFlD7+w5BQY29lKCEztxY/yCf543x4hZw=
|
||||
go.aykhans.me/utils v1.0.7/go.mod h1:0Jz8GlZLN35cCHLOLx39sazWwEe33bF6SYlSeqzEXoI=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
|
||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
|
||||
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
276
internal/config/cli.go
Normal file
276
internal/config/cli.go
Normal file
@@ -0,0 +1,276 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
versionpkg "go.aykhans.me/sarin/internal/version"
|
||||
)
|
||||
|
||||
const cliUsageText = `Usage:
|
||||
sarin [flags]
|
||||
|
||||
Simple usage:
|
||||
sarin -U https://example.com -r 1
|
||||
|
||||
Flags:
|
||||
General Config:
|
||||
-h, -help Help for sarin
|
||||
-v, -version Version for sarin
|
||||
-s, -show-config bool Show the final config after parsing all sources (default %v)
|
||||
-f, -config-file string Path to the config file (local file / http URL)
|
||||
-c, -concurrency uint Number of concurrent requests (default %d)
|
||||
-r, -requests uint Number of total requests
|
||||
-d, -duration time Maximum duration for the test (e.g. 30s, 1m, 5h)
|
||||
-q, -quiet bool Hide the progress bar and runtime logs (default %v)
|
||||
-o, -output string Output format (possible values: table, json, yaml, none) (default '%v')
|
||||
-z, -dry-run bool Run without sending requests (default %v)
|
||||
|
||||
Request Config:
|
||||
-U, -url string Target URL for the request
|
||||
-M, -method []string HTTP method for the request (default %s)
|
||||
-B, -body []string Body for the request (e.g. "body text")
|
||||
-P, -param []string URL parameter for the request (e.g. "key1=value1")
|
||||
-H, -header []string Header for the request (e.g. "key1: value1")
|
||||
-C, -cookie []string Cookie for the request (e.g. "key1=value1")
|
||||
-X, -proxy []string Proxy for the request (e.g. "http://proxy.example.com:8080")
|
||||
-V, -values []string List of values for templating (e.g. "key1=value1")
|
||||
-T, -timeout time Timeout for the request (e.g. 400ms, 3s, 1m10s) (default %v)
|
||||
-I, -insecure bool Skip SSL/TLS certificate verification (default %v)
|
||||
-lua []string Lua script for request transformation (inline or @file/@url)
|
||||
-js []string JavaScript script for request transformation (inline or @file/@url)`
|
||||
|
||||
var _ IParser = ConfigCLIParser{}
|
||||
|
||||
type ConfigCLIParser struct {
|
||||
args []string
|
||||
}
|
||||
|
||||
func NewConfigCLIParser(args []string) *ConfigCLIParser {
|
||||
if args == nil {
|
||||
args = []string{}
|
||||
}
|
||||
return &ConfigCLIParser{args: args}
|
||||
}
|
||||
|
||||
type stringSliceArg []string
|
||||
|
||||
func (arg *stringSliceArg) String() string {
|
||||
return strings.Join(*arg, ",")
|
||||
}
|
||||
|
||||
func (arg *stringSliceArg) Set(value string) error {
|
||||
*arg = append(*arg, value)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Parse parses command-line arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.CLIUnexpectedArgsError
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigCLIParser) Parse() (*Config, error) {
|
||||
flagSet := flag.NewFlagSet("sarin", flag.ExitOnError)
|
||||
|
||||
flagSet.Usage = func() { parser.PrintHelp() }
|
||||
|
||||
var (
|
||||
config = &Config{}
|
||||
|
||||
// General config
|
||||
version bool
|
||||
showConfig bool
|
||||
configFiles = stringSliceArg{}
|
||||
concurrency uint
|
||||
requestCount uint64
|
||||
duration time.Duration
|
||||
quiet bool
|
||||
output string
|
||||
dryRun bool
|
||||
|
||||
// Request config
|
||||
urlInput string
|
||||
methods = stringSliceArg{}
|
||||
bodies = stringSliceArg{}
|
||||
params = stringSliceArg{}
|
||||
headers = stringSliceArg{}
|
||||
cookies = stringSliceArg{}
|
||||
proxies = stringSliceArg{}
|
||||
values = stringSliceArg{}
|
||||
timeout time.Duration
|
||||
insecure bool
|
||||
luaScripts = stringSliceArg{}
|
||||
jsScripts = stringSliceArg{}
|
||||
)
|
||||
|
||||
{
|
||||
// General config
|
||||
flagSet.BoolVar(&version, "version", false, "Version for sarin")
|
||||
flagSet.BoolVar(&version, "v", false, "Version for sarin")
|
||||
|
||||
flagSet.BoolVar(&showConfig, "show-config", false, "Show the final config after parsing all sources")
|
||||
flagSet.BoolVar(&showConfig, "s", false, "Show the final config after parsing all sources")
|
||||
|
||||
flagSet.Var(&configFiles, "config-file", "Path to the config file")
|
||||
flagSet.Var(&configFiles, "f", "Path to the config file")
|
||||
|
||||
flagSet.UintVar(&concurrency, "concurrency", 0, "Number of concurrent requests")
|
||||
flagSet.UintVar(&concurrency, "c", 0, "Number of concurrent requests")
|
||||
|
||||
flagSet.Uint64Var(&requestCount, "requests", 0, "Number of total requests")
|
||||
flagSet.Uint64Var(&requestCount, "r", 0, "Number of total requests")
|
||||
|
||||
flagSet.DurationVar(&duration, "duration", 0, "Maximum duration for the test")
|
||||
flagSet.DurationVar(&duration, "d", 0, "Maximum duration for the test")
|
||||
|
||||
flagSet.BoolVar(&quiet, "quiet", false, "Hide the progress bar and runtime logs")
|
||||
flagSet.BoolVar(&quiet, "q", false, "Hide the progress bar and runtime logs")
|
||||
|
||||
flagSet.StringVar(&output, "output", "", "Output format (possible values: table, json, yaml, none)")
|
||||
flagSet.StringVar(&output, "o", "", "Output format (possible values: table, json, yaml, none)")
|
||||
|
||||
flagSet.BoolVar(&dryRun, "dry-run", false, "Run without sending requests")
|
||||
flagSet.BoolVar(&dryRun, "z", false, "Run without sending requests")
|
||||
|
||||
// Request config
|
||||
flagSet.StringVar(&urlInput, "url", "", "Target URL for the request")
|
||||
flagSet.StringVar(&urlInput, "U", "", "Target URL for the request")
|
||||
|
||||
flagSet.Var(&methods, "method", "HTTP method for the request")
|
||||
flagSet.Var(&methods, "M", "HTTP method for the request")
|
||||
|
||||
flagSet.Var(&bodies, "body", "Body for the request")
|
||||
flagSet.Var(&bodies, "B", "Body for the request")
|
||||
|
||||
flagSet.Var(¶ms, "param", "URL parameter for the request")
|
||||
flagSet.Var(¶ms, "P", "URL parameter for the request")
|
||||
|
||||
flagSet.Var(&headers, "header", "Header for the request")
|
||||
flagSet.Var(&headers, "H", "Header for the request")
|
||||
|
||||
flagSet.Var(&cookies, "cookie", "Cookie for the request")
|
||||
flagSet.Var(&cookies, "C", "Cookie for the request")
|
||||
|
||||
flagSet.Var(&proxies, "proxy", "Proxy for the request")
|
||||
flagSet.Var(&proxies, "X", "Proxy for the request")
|
||||
|
||||
flagSet.Var(&values, "values", "List of values for templating")
|
||||
flagSet.Var(&values, "V", "List of values for templating")
|
||||
|
||||
flagSet.DurationVar(&timeout, "timeout", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||
flagSet.DurationVar(&timeout, "T", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||
|
||||
flagSet.BoolVar(&insecure, "insecure", false, "Skip SSL/TLS certificate verification")
|
||||
flagSet.BoolVar(&insecure, "I", false, "Skip SSL/TLS certificate verification")
|
||||
|
||||
flagSet.Var(&luaScripts, "lua", "Lua script for request transformation (inline or @file/@url)")
|
||||
|
||||
flagSet.Var(&jsScripts, "js", "JavaScript script for request transformation (inline or @file/@url)")
|
||||
}
|
||||
|
||||
// Parse the specific arguments provided to the parser, skipping the program name.
|
||||
if err := flagSet.Parse(parser.args[1:]); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Check for any unexpected non-flag arguments remaining after parsing.
|
||||
if args := flagSet.Args(); len(args) > 0 {
|
||||
return nil, types.NewCLIUnexpectedArgsError(args)
|
||||
}
|
||||
|
||||
if version {
|
||||
fmt.Printf("Version: %s\nGit Commit: %s\nBuild Date: %s\nGo Version: %s\n",
|
||||
versionpkg.Version, versionpkg.GitCommit, versionpkg.BuildDate, versionpkg.GoVersion)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
var fieldParseErrors []types.FieldParseError
|
||||
// Iterate over flags that were explicitly set on the command line.
|
||||
flagSet.Visit(func(flagVar *flag.Flag) {
|
||||
switch flagVar.Name {
|
||||
// General config
|
||||
case "show-config", "s":
|
||||
config.ShowConfig = new(showConfig)
|
||||
case "config-file", "f":
|
||||
for _, configFile := range configFiles {
|
||||
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||
}
|
||||
case "concurrency", "c":
|
||||
config.Concurrency = new(concurrency)
|
||||
case "requests", "r":
|
||||
config.Requests = new(requestCount)
|
||||
case "duration", "d":
|
||||
config.Duration = new(duration)
|
||||
case "quiet", "q":
|
||||
config.Quiet = new(quiet)
|
||||
case "output", "o":
|
||||
config.Output = new(ConfigOutputType(output))
|
||||
case "dry-run", "z":
|
||||
config.DryRun = new(dryRun)
|
||||
|
||||
// Request config
|
||||
case "url", "U":
|
||||
urlParsed, err := url.Parse(urlInput)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", urlInput, err))
|
||||
} else {
|
||||
config.URL = urlParsed
|
||||
}
|
||||
case "method", "M":
|
||||
config.Methods = append(config.Methods, methods...)
|
||||
case "body", "B":
|
||||
config.Bodies = append(config.Bodies, bodies...)
|
||||
case "param", "P":
|
||||
config.Params.Parse(params...)
|
||||
case "header", "H":
|
||||
config.Headers.Parse(headers...)
|
||||
case "cookie", "C":
|
||||
config.Cookies.Parse(cookies...)
|
||||
case "proxy", "X":
|
||||
for i, proxy := range proxies {
|
||||
err := config.Proxies.Parse(proxy)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
|
||||
)
|
||||
}
|
||||
}
|
||||
case "values", "V":
|
||||
config.Values = append(config.Values, values...)
|
||||
case "timeout", "T":
|
||||
config.Timeout = new(timeout)
|
||||
case "insecure", "I":
|
||||
config.Insecure = new(insecure)
|
||||
case "lua":
|
||||
config.Lua = append(config.Lua, luaScripts...)
|
||||
case "js":
|
||||
config.Js = append(config.Js, jsScripts...)
|
||||
}
|
||||
})
|
||||
|
||||
if len(fieldParseErrors) > 0 {
|
||||
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (parser ConfigCLIParser) PrintHelp() {
|
||||
fmt.Printf(
|
||||
cliUsageText+"\n",
|
||||
Defaults.ShowConfig,
|
||||
Defaults.Concurrency,
|
||||
Defaults.Quiet,
|
||||
Defaults.Output,
|
||||
Defaults.DryRun,
|
||||
|
||||
Defaults.Method,
|
||||
Defaults.RequestTimeout,
|
||||
Defaults.Insecure,
|
||||
)
|
||||
}
|
||||
848
internal/config/config.go
Normal file
848
internal/config/config.go
Normal file
@@ -0,0 +1,848 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/bubbles/viewport"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/glamour"
|
||||
"github.com/charmbracelet/glamour/styles"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/x/term"
|
||||
"go.aykhans.me/sarin/internal/script"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
"go.aykhans.me/sarin/internal/version"
|
||||
"go.aykhans.me/utils/common"
|
||||
utilsErr "go.aykhans.me/utils/errors"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
var Defaults = struct {
|
||||
UserAgent string
|
||||
Method string
|
||||
RequestTimeout time.Duration
|
||||
Concurrency uint
|
||||
ShowConfig bool
|
||||
Quiet bool
|
||||
Insecure bool
|
||||
Output ConfigOutputType
|
||||
DryRun bool
|
||||
}{
|
||||
UserAgent: "Sarin/" + version.Version,
|
||||
Method: "GET",
|
||||
RequestTimeout: time.Second * 10,
|
||||
Concurrency: 1,
|
||||
ShowConfig: false,
|
||||
Quiet: false,
|
||||
Insecure: false,
|
||||
Output: ConfigOutputTypeTable,
|
||||
DryRun: false,
|
||||
}
|
||||
|
||||
var (
|
||||
ValidProxySchemes = []string{"http", "https", "socks5", "socks5h"}
|
||||
ValidRequestURLSchemes = []string{"http", "https"}
|
||||
)
|
||||
|
||||
var (
|
||||
StyleYellow = lipgloss.NewStyle().Foreground(lipgloss.Color("220"))
|
||||
StyleRed = lipgloss.NewStyle().Foreground(lipgloss.Color("196"))
|
||||
)
|
||||
|
||||
type IParser interface {
|
||||
Parse() (*Config, error)
|
||||
}
|
||||
|
||||
type ConfigOutputType string
|
||||
|
||||
var (
|
||||
ConfigOutputTypeTable ConfigOutputType = "table"
|
||||
ConfigOutputTypeJSON ConfigOutputType = "json"
|
||||
ConfigOutputTypeYAML ConfigOutputType = "yaml"
|
||||
ConfigOutputTypeNone ConfigOutputType = "none"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
ShowConfig *bool `yaml:"showConfig,omitempty"`
|
||||
Files []types.ConfigFile `yaml:"files,omitempty"`
|
||||
Methods []string `yaml:"methods,omitempty"`
|
||||
URL *url.URL `yaml:"url,omitempty"`
|
||||
Timeout *time.Duration `yaml:"timeout,omitempty"`
|
||||
Concurrency *uint `yaml:"concurrency,omitempty"`
|
||||
Requests *uint64 `yaml:"requests,omitempty"`
|
||||
Duration *time.Duration `yaml:"duration,omitempty"`
|
||||
Quiet *bool `yaml:"quiet,omitempty"`
|
||||
Output *ConfigOutputType `yaml:"output,omitempty"`
|
||||
Insecure *bool `yaml:"insecure,omitempty"`
|
||||
DryRun *bool `yaml:"dryRun,omitempty"`
|
||||
Params types.Params `yaml:"params,omitempty"`
|
||||
Headers types.Headers `yaml:"headers,omitempty"`
|
||||
Cookies types.Cookies `yaml:"cookies,omitempty"`
|
||||
Bodies []string `yaml:"bodies,omitempty"`
|
||||
Proxies types.Proxies `yaml:"proxies,omitempty"`
|
||||
Values []string `yaml:"values,omitempty"`
|
||||
Lua []string `yaml:"lua,omitempty"`
|
||||
Js []string `yaml:"js,omitempty"`
|
||||
}
|
||||
|
||||
func (config Config) MarshalYAML() (any, error) {
|
||||
const randomValueComment = "Cycles through all values, with a new random start each round"
|
||||
|
||||
toNode := func(v any) *yaml.Node {
|
||||
node := &yaml.Node{}
|
||||
_ = node.Encode(v)
|
||||
return node
|
||||
}
|
||||
|
||||
addField := func(content *[]*yaml.Node, key string, value *yaml.Node, comment string) {
|
||||
if value.Kind == 0 || (value.Kind == yaml.ScalarNode && value.Value == "") ||
|
||||
(value.Kind == yaml.SequenceNode && len(value.Content) == 0) {
|
||||
return
|
||||
}
|
||||
keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: key, LineComment: comment}
|
||||
*content = append(*content, keyNode, value)
|
||||
}
|
||||
|
||||
addStringSlice := func(content *[]*yaml.Node, key string, items []string, withComment bool) {
|
||||
comment := ""
|
||||
if withComment && len(items) > 1 {
|
||||
comment = randomValueComment
|
||||
}
|
||||
switch len(items) {
|
||||
case 1:
|
||||
addField(content, key, toNode(items[0]), "")
|
||||
default:
|
||||
addField(content, key, toNode(items), comment)
|
||||
}
|
||||
}
|
||||
|
||||
marshalKeyValues := func(items []types.KeyValue[string, []string]) *yaml.Node {
|
||||
seqNode := &yaml.Node{Kind: yaml.SequenceNode}
|
||||
for _, item := range items {
|
||||
keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: item.Key}
|
||||
var valueNode *yaml.Node
|
||||
|
||||
switch len(item.Value) {
|
||||
case 1:
|
||||
valueNode = &yaml.Node{Kind: yaml.ScalarNode, Value: item.Value[0]}
|
||||
default:
|
||||
valueNode = &yaml.Node{Kind: yaml.SequenceNode}
|
||||
for _, v := range item.Value {
|
||||
valueNode.Content = append(valueNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: v})
|
||||
}
|
||||
if len(item.Value) > 1 {
|
||||
keyNode.LineComment = randomValueComment
|
||||
}
|
||||
}
|
||||
|
||||
mapNode := &yaml.Node{Kind: yaml.MappingNode, Content: []*yaml.Node{keyNode, valueNode}}
|
||||
seqNode.Content = append(seqNode.Content, mapNode)
|
||||
}
|
||||
return seqNode
|
||||
}
|
||||
|
||||
root := &yaml.Node{Kind: yaml.MappingNode}
|
||||
content := &root.Content
|
||||
|
||||
if config.ShowConfig != nil {
|
||||
addField(content, "showConfig", toNode(*config.ShowConfig), "")
|
||||
}
|
||||
|
||||
addStringSlice(content, "method", config.Methods, true)
|
||||
|
||||
if config.URL != nil {
|
||||
addField(content, "url", toNode(config.URL.String()), "")
|
||||
}
|
||||
if config.Timeout != nil {
|
||||
addField(content, "timeout", toNode(*config.Timeout), "")
|
||||
}
|
||||
if config.Concurrency != nil {
|
||||
addField(content, "concurrency", toNode(*config.Concurrency), "")
|
||||
}
|
||||
if config.Requests != nil {
|
||||
addField(content, "requests", toNode(*config.Requests), "")
|
||||
}
|
||||
if config.Duration != nil {
|
||||
addField(content, "duration", toNode(*config.Duration), "")
|
||||
}
|
||||
if config.Quiet != nil {
|
||||
addField(content, "quiet", toNode(*config.Quiet), "")
|
||||
}
|
||||
if config.Output != nil {
|
||||
addField(content, "output", toNode(string(*config.Output)), "")
|
||||
}
|
||||
if config.Insecure != nil {
|
||||
addField(content, "insecure", toNode(*config.Insecure), "")
|
||||
}
|
||||
if config.DryRun != nil {
|
||||
addField(content, "dryRun", toNode(*config.DryRun), "")
|
||||
}
|
||||
|
||||
if len(config.Params) > 0 {
|
||||
items := make([]types.KeyValue[string, []string], len(config.Params))
|
||||
for i, p := range config.Params {
|
||||
items[i] = types.KeyValue[string, []string](p)
|
||||
}
|
||||
addField(content, "params", marshalKeyValues(items), "")
|
||||
}
|
||||
if len(config.Headers) > 0 {
|
||||
items := make([]types.KeyValue[string, []string], len(config.Headers))
|
||||
for i, h := range config.Headers {
|
||||
items[i] = types.KeyValue[string, []string](h)
|
||||
}
|
||||
addField(content, "headers", marshalKeyValues(items), "")
|
||||
}
|
||||
if len(config.Cookies) > 0 {
|
||||
items := make([]types.KeyValue[string, []string], len(config.Cookies))
|
||||
for i, c := range config.Cookies {
|
||||
items[i] = types.KeyValue[string, []string](c)
|
||||
}
|
||||
addField(content, "cookies", marshalKeyValues(items), "")
|
||||
}
|
||||
|
||||
addStringSlice(content, "body", config.Bodies, true)
|
||||
|
||||
if len(config.Proxies) > 0 {
|
||||
proxyStrings := make([]string, len(config.Proxies))
|
||||
for i, p := range config.Proxies {
|
||||
proxyStrings[i] = p.String()
|
||||
}
|
||||
addStringSlice(content, "proxy", proxyStrings, true)
|
||||
}
|
||||
|
||||
addStringSlice(content, "values", config.Values, false)
|
||||
addStringSlice(content, "lua", config.Lua, false)
|
||||
addStringSlice(content, "js", config.Js, false)
|
||||
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func (config Config) Print() bool {
|
||||
configYAML, err := yaml.Marshal(config)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, StyleRed.Render("Error marshaling config to yaml: "+err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Pipe mode: output raw content directly
|
||||
if !term.IsTerminal(os.Stdout.Fd()) {
|
||||
fmt.Println(string(configYAML))
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
style := styles.TokyoNightStyleConfig
|
||||
style.Document.Margin = common.ToPtr[uint](0)
|
||||
style.CodeBlock.Margin = common.ToPtr[uint](0)
|
||||
|
||||
renderer, err := glamour.NewTermRenderer(
|
||||
glamour.WithStyles(style),
|
||||
glamour.WithWordWrap(0),
|
||||
)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
content, err := renderer.Render("```yaml\n" + string(configYAML) + "```")
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
p := tea.NewProgram(
|
||||
printConfigModel{content: strings.Trim(content, "\n"), rawContent: configYAML},
|
||||
tea.WithAltScreen(),
|
||||
tea.WithMouseCellMotion(),
|
||||
)
|
||||
|
||||
m, err := p.Run()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
return m.(printConfigModel).start //nolint:forcetypeassert // m is guaranteed to be of type printConfigModel as it was the only model passed to tea.NewProgram
|
||||
}
|
||||
|
||||
func (config *Config) Merge(newConfig *Config) {
|
||||
config.Files = append(config.Files, newConfig.Files...)
|
||||
if len(newConfig.Methods) > 0 {
|
||||
config.Methods = newConfig.Methods
|
||||
}
|
||||
if newConfig.URL != nil {
|
||||
config.URL = newConfig.URL
|
||||
}
|
||||
if newConfig.Timeout != nil {
|
||||
config.Timeout = newConfig.Timeout
|
||||
}
|
||||
if newConfig.Concurrency != nil {
|
||||
config.Concurrency = newConfig.Concurrency
|
||||
}
|
||||
if newConfig.Requests != nil {
|
||||
config.Requests = newConfig.Requests
|
||||
}
|
||||
if newConfig.Duration != nil {
|
||||
config.Duration = newConfig.Duration
|
||||
}
|
||||
if newConfig.ShowConfig != nil {
|
||||
config.ShowConfig = newConfig.ShowConfig
|
||||
}
|
||||
if newConfig.Quiet != nil {
|
||||
config.Quiet = newConfig.Quiet
|
||||
}
|
||||
if newConfig.Output != nil {
|
||||
config.Output = newConfig.Output
|
||||
}
|
||||
if newConfig.Insecure != nil {
|
||||
config.Insecure = newConfig.Insecure
|
||||
}
|
||||
if newConfig.DryRun != nil {
|
||||
config.DryRun = newConfig.DryRun
|
||||
}
|
||||
if len(newConfig.Params) != 0 {
|
||||
config.Params = append(config.Params, newConfig.Params...)
|
||||
}
|
||||
if len(newConfig.Headers) != 0 {
|
||||
config.Headers = append(config.Headers, newConfig.Headers...)
|
||||
}
|
||||
if len(newConfig.Cookies) != 0 {
|
||||
config.Cookies = append(config.Cookies, newConfig.Cookies...)
|
||||
}
|
||||
if len(newConfig.Bodies) != 0 {
|
||||
config.Bodies = newConfig.Bodies
|
||||
}
|
||||
if len(newConfig.Proxies) != 0 {
|
||||
config.Proxies.Append(newConfig.Proxies...)
|
||||
}
|
||||
if len(newConfig.Values) != 0 {
|
||||
config.Values = append(config.Values, newConfig.Values...)
|
||||
}
|
||||
if len(newConfig.Lua) != 0 {
|
||||
config.Lua = append(config.Lua, newConfig.Lua...)
|
||||
}
|
||||
if len(newConfig.Js) != 0 {
|
||||
config.Js = append(config.Js, newConfig.Js...)
|
||||
}
|
||||
}
|
||||
|
||||
func (config *Config) SetDefaults() {
|
||||
if config.URL != nil && len(config.URL.Query()) > 0 {
|
||||
urlParams := types.Params{}
|
||||
for key, values := range config.URL.Query() {
|
||||
for _, value := range values {
|
||||
urlParams = append(urlParams, types.Param{
|
||||
Key: key,
|
||||
Value: []string{value},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
config.Params = append(urlParams, config.Params...)
|
||||
config.URL.RawQuery = ""
|
||||
}
|
||||
|
||||
if len(config.Methods) == 0 {
|
||||
config.Methods = []string{Defaults.Method}
|
||||
}
|
||||
if config.Timeout == nil {
|
||||
config.Timeout = &Defaults.RequestTimeout
|
||||
}
|
||||
if config.Concurrency == nil {
|
||||
config.Concurrency = new(Defaults.Concurrency)
|
||||
}
|
||||
if config.ShowConfig == nil {
|
||||
config.ShowConfig = new(Defaults.ShowConfig)
|
||||
}
|
||||
if config.Quiet == nil {
|
||||
config.Quiet = new(Defaults.Quiet)
|
||||
}
|
||||
if config.Insecure == nil {
|
||||
config.Insecure = new(Defaults.Insecure)
|
||||
}
|
||||
if config.DryRun == nil {
|
||||
config.DryRun = new(Defaults.DryRun)
|
||||
}
|
||||
if !config.Headers.Has("User-Agent") {
|
||||
config.Headers = append(config.Headers, types.Header{Key: "User-Agent", Value: []string{Defaults.UserAgent}})
|
||||
}
|
||||
|
||||
if config.Output == nil {
|
||||
config.Output = new(Defaults.Output)
|
||||
}
|
||||
}
|
||||
|
||||
// Validate validates the config fields.
|
||||
// It can return the following errors:
|
||||
// - types.FieldValidationErrors
|
||||
func (config Config) Validate() error {
|
||||
validationErrors := make([]types.FieldValidationError, 0)
|
||||
|
||||
if len(config.Methods) == 0 {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Method", "", errors.New("method is required")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.URL == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", "", errors.New("URL is required")))
|
||||
case !slices.Contains(ValidRequestURLSchemes, config.URL.Scheme):
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), fmt.Errorf("URL scheme must be one of: %s", strings.Join(ValidRequestURLSchemes, ", "))))
|
||||
case config.URL.Host == "":
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), errors.New("URL must have a host")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.Concurrency == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "", errors.New("concurrency count is required")))
|
||||
case *config.Concurrency == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "0", errors.New("concurrency must be greater than 0")))
|
||||
case *config.Concurrency > 100_000_000:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", strconv.FormatUint(uint64(*config.Concurrency), 10), errors.New("concurrency must not exceed 100,000,000")))
|
||||
}
|
||||
|
||||
switch {
|
||||
case config.Requests == nil && config.Duration == nil:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "", errors.New("either request count or duration must be specified")))
|
||||
case (config.Requests != nil && config.Duration != nil) && (*config.Requests == 0 && *config.Duration == 0):
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "0", errors.New("both request count and duration cannot be zero")))
|
||||
case config.Requests != nil && config.Duration == nil && *config.Requests == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests", "0", errors.New("request count must be greater than 0")))
|
||||
case config.Requests == nil && config.Duration != nil && *config.Duration == 0:
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Duration", "0", errors.New("duration must be greater than 0")))
|
||||
}
|
||||
|
||||
if *config.Timeout < 1 {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Timeout", "0", errors.New("timeout must be greater than 0")))
|
||||
}
|
||||
|
||||
if config.ShowConfig == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("ShowConfig", "", errors.New("showConfig field is required")))
|
||||
}
|
||||
|
||||
if config.Quiet == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Quiet", "", errors.New("quiet field is required")))
|
||||
}
|
||||
|
||||
if config.Output == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||
} else {
|
||||
switch *config.Output {
|
||||
case "":
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||
case ConfigOutputTypeTable, ConfigOutputTypeJSON, ConfigOutputTypeYAML, ConfigOutputTypeNone:
|
||||
default:
|
||||
validOutputs := []string{string(ConfigOutputTypeTable), string(ConfigOutputTypeJSON), string(ConfigOutputTypeYAML), string(ConfigOutputTypeNone)}
|
||||
validationErrors = append(validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
"Output",
|
||||
string(*config.Output),
|
||||
fmt.Errorf(
|
||||
"output type must be one of: %s",
|
||||
strings.Join(validOutputs, ", "),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if config.Insecure == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("Insecure", "", errors.New("insecure field is required")))
|
||||
}
|
||||
|
||||
if config.DryRun == nil {
|
||||
validationErrors = append(validationErrors, types.NewFieldValidationError("DryRun", "", errors.New("dryRun field is required")))
|
||||
}
|
||||
|
||||
for i, proxy := range config.Proxies {
|
||||
if !slices.Contains(ValidProxySchemes, proxy.Scheme) {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Proxy[%d]", i),
|
||||
proxy.String(),
|
||||
fmt.Errorf("proxy scheme must be one of: %v", ValidProxySchemes),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Create a context with timeout for script validation (loading from URLs)
|
||||
scriptCtx, scriptCancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer scriptCancel()
|
||||
|
||||
for i, scriptSrc := range config.Lua {
|
||||
if err := validateScriptSource(scriptSrc); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(fmt.Sprintf("Lua[%d]", i), scriptSrc, err),
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Validate script syntax
|
||||
if err := script.ValidateScript(scriptCtx, scriptSrc, script.EngineTypeLua); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(fmt.Sprintf("Lua[%d]", i), scriptSrc, err),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
for i, scriptSrc := range config.Js {
|
||||
if err := validateScriptSource(scriptSrc); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(fmt.Sprintf("Js[%d]", i), scriptSrc, err),
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Validate script syntax
|
||||
if err := script.ValidateScript(scriptCtx, scriptSrc, script.EngineTypeJavaScript); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(fmt.Sprintf("Js[%d]", i), scriptSrc, err),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
templateErrors := ValidateTemplates(&config)
|
||||
validationErrors = append(validationErrors, templateErrors...)
|
||||
|
||||
if len(validationErrors) > 0 {
|
||||
return types.NewFieldValidationErrors(validationErrors)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ReadAllConfigs() *Config {
|
||||
envParser := NewConfigENVParser("SARIN")
|
||||
envConfig, err := envParser.Parse()
|
||||
_ = utilsErr.MustHandle(err,
|
||||
utilsErr.OnType(func(err types.FieldParseErrors) error {
|
||||
printParseErrors("ENV", err.Errors...)
|
||||
fmt.Println()
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
)
|
||||
|
||||
cliParser := NewConfigCLIParser(os.Args)
|
||||
cliConf, err := cliParser.Parse()
|
||||
_ = utilsErr.MustHandle(err,
|
||||
utilsErr.OnType(func(err types.CLIUnexpectedArgsError) error {
|
||||
cliParser.PrintHelp()
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(
|
||||
"\nUnexpected CLI arguments provided: ",
|
||||
)+strings.Join(err.Args, ", "),
|
||||
)
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
utilsErr.OnType(func(err types.FieldParseErrors) error {
|
||||
cliParser.PrintHelp()
|
||||
fmt.Println()
|
||||
printParseErrors("CLI", err.Errors...)
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
)
|
||||
|
||||
for _, configFile := range append(envConfig.Files, cliConf.Files...) {
|
||||
fileConfig, err := parseConfigFile(configFile, 10)
|
||||
_ = utilsErr.MustHandle(err,
|
||||
utilsErr.OnType(func(err types.ConfigFileReadError) error {
|
||||
cliParser.PrintHelp()
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(
|
||||
fmt.Sprintf("\nFailed to read config file (%s): ", configFile.Path())+err.Error(),
|
||||
),
|
||||
)
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
utilsErr.OnType(func(err types.UnmarshalError) error {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(
|
||||
fmt.Sprintf("\nFailed to parse config file (%s): ", configFile.Path())+err.Error(),
|
||||
),
|
||||
)
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
utilsErr.OnType(func(err types.FieldParseErrors) error {
|
||||
printParseErrors(fmt.Sprintf("CONFIG FILE '%s'", configFile.Path()), err.Errors...)
|
||||
os.Exit(1)
|
||||
return nil
|
||||
}),
|
||||
)
|
||||
|
||||
envConfig.Merge(fileConfig)
|
||||
}
|
||||
|
||||
envConfig.Merge(cliConf)
|
||||
|
||||
return envConfig
|
||||
}
|
||||
|
||||
// parseConfigFile recursively parses a config file and its nested files up to maxDepth levels.
|
||||
// Returns the merged configuration or an error if parsing fails.
|
||||
// It can return the following errors:
|
||||
// - types.ConfigFileReadError
|
||||
// - types.UnmarshalError
|
||||
// - types.FieldParseErrors
|
||||
func parseConfigFile(configFile types.ConfigFile, maxDepth int) (*Config, error) {
|
||||
configFileParser := NewConfigFileParser(configFile)
|
||||
fileConfig, err := configFileParser.Parse()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if maxDepth <= 0 {
|
||||
return fileConfig, nil
|
||||
}
|
||||
|
||||
for _, c := range fileConfig.Files {
|
||||
innerFileConfig, err := parseConfigFile(c, maxDepth-1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
innerFileConfig.Merge(fileConfig)
|
||||
fileConfig = innerFileConfig
|
||||
}
|
||||
|
||||
return fileConfig, nil
|
||||
}
|
||||
|
||||
// validateScriptSource validates a script source string.
|
||||
// Scripts can be:
|
||||
// - Inline script: any string not starting with "@"
|
||||
// - Escaped "@": strings starting with "@@" (literal "@" at start)
|
||||
// - File reference: "@/path/to/file" or "@./relative/path"
|
||||
// - URL reference: "@http://..." or "@https://..."
|
||||
//
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptEmpty
|
||||
// - types.ErrScriptSourceEmpty
|
||||
// - types.ErrScriptURLNoHost
|
||||
// - types.URLParseError
|
||||
func validateScriptSource(script string) error {
|
||||
// Empty script is invalid
|
||||
if script == "" {
|
||||
return types.ErrScriptEmpty
|
||||
}
|
||||
|
||||
// Not a file/URL reference - it's an inline script
|
||||
if !strings.HasPrefix(script, "@") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Escaped @ - it's an inline script starting with literal @
|
||||
if strings.HasPrefix(script, "@@") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// It's a file or URL reference - validate the source
|
||||
source := script[1:] // Remove the @ prefix
|
||||
|
||||
if source == "" {
|
||||
return types.ErrScriptSourceEmpty
|
||||
}
|
||||
|
||||
// Check if it's a http(s) URL
|
||||
if strings.HasPrefix(source, "http://") || strings.HasPrefix(source, "https://") {
|
||||
parsedURL, err := url.Parse(source)
|
||||
if err != nil {
|
||||
return types.NewURLParseError(source, err)
|
||||
}
|
||||
if parsedURL.Host == "" {
|
||||
return types.ErrScriptURLNoHost
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// It's a file path - basic validation (not empty, checked above)
|
||||
return nil
|
||||
}
|
||||
|
||||
func printParseErrors(parserName string, errors ...types.FieldParseError) {
|
||||
for _, fieldErr := range errors {
|
||||
if fieldErr.Value == "" {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s': ", parserName, fieldErr.Field))+fieldErr.Err.Error(),
|
||||
)
|
||||
} else {
|
||||
fmt.Fprintln(os.Stderr,
|
||||
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s' (%s): ", parserName, fieldErr.Field, fieldErr.Value))+fieldErr.Err.Error(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
scrollbarWidth = 1
|
||||
scrollbarBottomSpace = 1
|
||||
statusDisplayTime = 3 * time.Second
|
||||
)
|
||||
|
||||
var (
|
||||
printConfigBorderStyle = func() lipgloss.Border {
|
||||
b := lipgloss.RoundedBorder()
|
||||
return b
|
||||
}()
|
||||
|
||||
printConfigHelpStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1)
|
||||
printConfigSuccessStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("10"))
|
||||
printConfigErrorStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("9"))
|
||||
printConfigKeyStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("12")).Bold(true)
|
||||
printConfigDescStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("252"))
|
||||
)
|
||||
|
||||
type printConfigClearStatusMsg struct{}
|
||||
|
||||
type printConfigModel struct {
|
||||
viewport viewport.Model
|
||||
content string
|
||||
rawContent []byte
|
||||
statusMsg string
|
||||
ready bool
|
||||
start bool
|
||||
}
|
||||
|
||||
func (m printConfigModel) Init() tea.Cmd { return nil }
|
||||
|
||||
func (m printConfigModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var cmd tea.Cmd
|
||||
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
switch msg.String() {
|
||||
case "ctrl+c", "esc":
|
||||
return m, tea.Quit
|
||||
case "ctrl+s":
|
||||
return m.saveContent()
|
||||
case "enter":
|
||||
m.start = true
|
||||
return m, tea.Quit
|
||||
}
|
||||
|
||||
case printConfigClearStatusMsg:
|
||||
m.statusMsg = ""
|
||||
return m, nil
|
||||
|
||||
case tea.WindowSizeMsg:
|
||||
m.handleResize(msg)
|
||||
}
|
||||
|
||||
m.viewport, cmd = m.viewport.Update(msg)
|
||||
return m, cmd
|
||||
}
|
||||
|
||||
func (m printConfigModel) View() string {
|
||||
if !m.ready {
|
||||
return "\n Initializing..."
|
||||
}
|
||||
|
||||
content := lipgloss.JoinHorizontal(lipgloss.Top, m.viewport.View(), m.scrollbar())
|
||||
return fmt.Sprintf("%s\n%s\n%s", m.headerView(), content, m.footerView())
|
||||
}
|
||||
|
||||
func (m *printConfigModel) saveContent() (printConfigModel, tea.Cmd) {
|
||||
filename := fmt.Sprintf("sarin_config_%s.yaml", time.Now().Format("2006-01-02_15-04-05"))
|
||||
if err := os.WriteFile(filename, m.rawContent, 0600); err != nil {
|
||||
m.statusMsg = printConfigErrorStatusStyle.Render("✗ Error saving file: " + err.Error())
|
||||
} else {
|
||||
m.statusMsg = printConfigSuccessStatusStyle.Render("✓ Saved to " + filename)
|
||||
}
|
||||
return *m, tea.Tick(statusDisplayTime, func(time.Time) tea.Msg { return printConfigClearStatusMsg{} })
|
||||
}
|
||||
|
||||
func (m *printConfigModel) handleResize(msg tea.WindowSizeMsg) {
|
||||
headerHeight := lipgloss.Height(m.headerView())
|
||||
footerHeight := lipgloss.Height(m.footerView())
|
||||
height := msg.Height - headerHeight - footerHeight
|
||||
width := msg.Width - scrollbarWidth
|
||||
|
||||
if !m.ready {
|
||||
m.viewport = viewport.New(width, height)
|
||||
m.viewport.SetContent(m.contentWithLineNumbers())
|
||||
m.ready = true
|
||||
} else {
|
||||
m.viewport.Width = width
|
||||
m.viewport.Height = height
|
||||
}
|
||||
}
|
||||
|
||||
func (m printConfigModel) headerView() string {
|
||||
var title string
|
||||
if m.statusMsg != "" {
|
||||
title = ("" + m.statusMsg)
|
||||
} else {
|
||||
sep := printConfigDescStyle.Render(" / ")
|
||||
help := printConfigKeyStyle.Render("ENTER") + printConfigDescStyle.Render(" start") + sep +
|
||||
printConfigKeyStyle.Render("CTRL+S") + printConfigDescStyle.Render(" save") + sep +
|
||||
printConfigKeyStyle.Render("ESC") + printConfigDescStyle.Render(" exit")
|
||||
title = printConfigHelpStyle.Render(help)
|
||||
}
|
||||
line := strings.Repeat("─", max(0, m.viewport.Width+scrollbarWidth-lipgloss.Width(title)))
|
||||
return lipgloss.JoinHorizontal(lipgloss.Center, title, line)
|
||||
}
|
||||
|
||||
func (m printConfigModel) footerView() string {
|
||||
return strings.Repeat("─", m.viewport.Width+scrollbarWidth)
|
||||
}
|
||||
|
||||
func (m printConfigModel) contentWithLineNumbers() string {
|
||||
lines := strings.Split(m.content, "\n")
|
||||
width := len(strconv.Itoa(len(lines)))
|
||||
lineNumStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("246"))
|
||||
|
||||
var sb strings.Builder
|
||||
for i, line := range lines {
|
||||
lineNum := lineNumStyle.Render(fmt.Sprintf("%*d", width, i+1))
|
||||
sb.WriteString(lineNum)
|
||||
sb.WriteString(" ")
|
||||
sb.WriteString(line)
|
||||
if i < len(lines)-1 {
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (m printConfigModel) scrollbar() string {
|
||||
height := m.viewport.Height
|
||||
trackHeight := height - scrollbarBottomSpace
|
||||
totalLines := m.viewport.TotalLineCount()
|
||||
|
||||
if totalLines <= height {
|
||||
return strings.Repeat(" \n", trackHeight) + " "
|
||||
}
|
||||
|
||||
thumbSize := max(1, (height*trackHeight)/totalLines)
|
||||
thumbPos := int(m.viewport.ScrollPercent() * float64(trackHeight-thumbSize))
|
||||
|
||||
var sb strings.Builder
|
||||
for i := range trackHeight {
|
||||
if i >= thumbPos && i < thumbPos+thumbSize {
|
||||
sb.WriteByte('\xe2') // █ (U+2588)
|
||||
sb.WriteByte('\x96')
|
||||
sb.WriteByte('\x88')
|
||||
} else {
|
||||
sb.WriteByte('\xe2') // ░ (U+2591)
|
||||
sb.WriteByte('\x96')
|
||||
sb.WriteByte('\x91')
|
||||
}
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
sb.WriteByte(' ')
|
||||
return sb.String()
|
||||
}
|
||||
242
internal/config/env.go
Normal file
242
internal/config/env.go
Normal file
@@ -0,0 +1,242 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/url"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsParse "go.aykhans.me/utils/parser"
|
||||
)
|
||||
|
||||
var _ IParser = ConfigENVParser{}
|
||||
|
||||
type ConfigENVParser struct {
|
||||
envPrefix string
|
||||
}
|
||||
|
||||
func NewConfigENVParser(envPrefix string) *ConfigENVParser {
|
||||
return &ConfigENVParser{envPrefix}
|
||||
}
|
||||
|
||||
// Parse parses env arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigENVParser) Parse() (*Config, error) {
|
||||
var (
|
||||
config = &Config{}
|
||||
fieldParseErrors []types.FieldParseError
|
||||
)
|
||||
|
||||
if showConfig := parser.getEnv("SHOW_CONFIG"); showConfig != "" {
|
||||
showConfigParsed, err := utilsParse.ParseString[bool](showConfig)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("SHOW_CONFIG"),
|
||||
showConfig,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.ShowConfig = &showConfigParsed
|
||||
}
|
||||
}
|
||||
|
||||
if configFile := parser.getEnv("CONFIG_FILE"); configFile != "" {
|
||||
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||
}
|
||||
|
||||
if quiet := parser.getEnv("QUIET"); quiet != "" {
|
||||
quietParsed, err := utilsParse.ParseString[bool](quiet)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("QUIET"),
|
||||
quiet,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Quiet = &quietParsed
|
||||
}
|
||||
}
|
||||
|
||||
if output := parser.getEnv("OUTPUT"); output != "" {
|
||||
config.Output = new(ConfigOutputType(output))
|
||||
}
|
||||
|
||||
if insecure := parser.getEnv("INSECURE"); insecure != "" {
|
||||
insecureParsed, err := utilsParse.ParseString[bool](insecure)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("INSECURE"),
|
||||
insecure,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Insecure = &insecureParsed
|
||||
}
|
||||
}
|
||||
|
||||
if dryRun := parser.getEnv("DRY_RUN"); dryRun != "" {
|
||||
dryRunParsed, err := utilsParse.ParseString[bool](dryRun)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("DRY_RUN"),
|
||||
dryRun,
|
||||
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.DryRun = &dryRunParsed
|
||||
}
|
||||
}
|
||||
|
||||
if method := parser.getEnv("METHOD"); method != "" {
|
||||
config.Methods = []string{method}
|
||||
}
|
||||
|
||||
if urlEnv := parser.getEnv("URL"); urlEnv != "" {
|
||||
urlEnvParsed, err := url.Parse(urlEnv)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(parser.getFullEnvName("URL"), urlEnv, err),
|
||||
)
|
||||
} else {
|
||||
config.URL = urlEnvParsed
|
||||
}
|
||||
}
|
||||
|
||||
if concurrency := parser.getEnv("CONCURRENCY"); concurrency != "" {
|
||||
concurrencyParsed, err := utilsParse.ParseString[uint](concurrency)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("CONCURRENCY"),
|
||||
concurrency,
|
||||
errors.New("invalid value for unsigned integer"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Concurrency = &concurrencyParsed
|
||||
}
|
||||
}
|
||||
|
||||
if requests := parser.getEnv("REQUESTS"); requests != "" {
|
||||
requestsParsed, err := utilsParse.ParseString[uint64](requests)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("REQUESTS"),
|
||||
requests,
|
||||
errors.New("invalid value for unsigned integer"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Requests = &requestsParsed
|
||||
}
|
||||
}
|
||||
|
||||
if duration := parser.getEnv("DURATION"); duration != "" {
|
||||
durationParsed, err := utilsParse.ParseString[time.Duration](duration)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("DURATION"),
|
||||
duration,
|
||||
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Duration = &durationParsed
|
||||
}
|
||||
}
|
||||
|
||||
if timeout := parser.getEnv("TIMEOUT"); timeout != "" {
|
||||
timeoutParsed, err := utilsParse.ParseString[time.Duration](timeout)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("TIMEOUT"),
|
||||
timeout,
|
||||
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
config.Timeout = &timeoutParsed
|
||||
}
|
||||
}
|
||||
|
||||
if param := parser.getEnv("PARAM"); param != "" {
|
||||
config.Params.Parse(param)
|
||||
}
|
||||
|
||||
if header := parser.getEnv("HEADER"); header != "" {
|
||||
config.Headers.Parse(header)
|
||||
}
|
||||
|
||||
if cookie := parser.getEnv("COOKIE"); cookie != "" {
|
||||
config.Cookies.Parse(cookie)
|
||||
}
|
||||
|
||||
if body := parser.getEnv("BODY"); body != "" {
|
||||
config.Bodies = []string{body}
|
||||
}
|
||||
|
||||
if proxy := parser.getEnv("PROXY"); proxy != "" {
|
||||
err := config.Proxies.Parse(proxy)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(
|
||||
parser.getFullEnvName("PROXY"),
|
||||
proxy,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if values := parser.getEnv("VALUES"); values != "" {
|
||||
config.Values = []string{values}
|
||||
}
|
||||
|
||||
if lua := parser.getEnv("LUA"); lua != "" {
|
||||
config.Lua = []string{lua}
|
||||
}
|
||||
|
||||
if js := parser.getEnv("JS"); js != "" {
|
||||
config.Js = []string{js}
|
||||
}
|
||||
|
||||
if len(fieldParseErrors) > 0 {
|
||||
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (parser ConfigENVParser) getFullEnvName(envName string) string {
|
||||
if parser.envPrefix == "" {
|
||||
return envName
|
||||
}
|
||||
return parser.envPrefix + "_" + envName
|
||||
}
|
||||
|
||||
func (parser ConfigENVParser) getEnv(envName string) string {
|
||||
return os.Getenv(parser.getFullEnvName(envName))
|
||||
}
|
||||
292
internal/config/file.go
Normal file
292
internal/config/file.go
Normal file
@@ -0,0 +1,292 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
var _ IParser = ConfigFileParser{}
|
||||
|
||||
type ConfigFileParser struct {
|
||||
configFile types.ConfigFile
|
||||
}
|
||||
|
||||
func NewConfigFileParser(configFile types.ConfigFile) *ConfigFileParser {
|
||||
return &ConfigFileParser{configFile}
|
||||
}
|
||||
|
||||
// Parse parses config file arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.ConfigFileReadError
|
||||
// - types.UnmarshalError
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigFileParser) Parse() (*Config, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second*30)
|
||||
defer cancel()
|
||||
|
||||
configFileData, err := fetchFile(ctx, parser.configFile.Path())
|
||||
if err != nil {
|
||||
return nil, types.NewConfigFileReadError(err)
|
||||
}
|
||||
|
||||
switch parser.configFile.Type() {
|
||||
case types.ConfigFileTypeYAML, types.ConfigFileTypeUnknown:
|
||||
return parser.ParseYAML(configFileData)
|
||||
default:
|
||||
panic("unhandled config file type")
|
||||
}
|
||||
}
|
||||
|
||||
// fetchFile retrieves file contents from a local path or HTTP/HTTPS URL.
|
||||
// It can return the following errors:
|
||||
// - types.FileReadError
|
||||
// - types.HTTPFetchError
|
||||
// - types.HTTPStatusError
|
||||
func fetchFile(ctx context.Context, src string) ([]byte, error) {
|
||||
if strings.HasPrefix(src, "http://") || strings.HasPrefix(src, "https://") {
|
||||
return fetchHTTP(ctx, src)
|
||||
}
|
||||
return fetchLocal(src)
|
||||
}
|
||||
|
||||
// fetchHTTP downloads file contents from an HTTP/HTTPS URL.
|
||||
// It can return the following errors:
|
||||
// - types.HTTPFetchError
|
||||
// - types.HTTPStatusError
|
||||
func fetchHTTP(ctx context.Context, url string) ([]byte, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
defer resp.Body.Close() //nolint:errcheck
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// fetchLocal reads file contents from the local filesystem.
|
||||
// It resolves relative paths from the current working directory.
|
||||
// It can return the following errors:
|
||||
// - types.FileReadError
|
||||
func fetchLocal(src string) ([]byte, error) {
|
||||
path := src
|
||||
if !filepath.IsAbs(src) {
|
||||
pwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, types.NewFileReadError(src, err)
|
||||
}
|
||||
path = filepath.Join(pwd, src)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(path) //nolint:gosec
|
||||
if err != nil {
|
||||
return nil, types.NewFileReadError(path, err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
type stringOrSliceField []string
|
||||
|
||||
func (ss *stringOrSliceField) UnmarshalYAML(node *yaml.Node) error {
|
||||
switch node.Kind {
|
||||
case yaml.ScalarNode:
|
||||
// Handle single string value
|
||||
*ss = []string{node.Value}
|
||||
return nil
|
||||
case yaml.SequenceNode:
|
||||
// Handle array of strings
|
||||
var slice []string
|
||||
if err := node.Decode(&slice); err != nil {
|
||||
return err //nolint:wrapcheck
|
||||
}
|
||||
*ss = slice
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("expected a string or a sequence of strings, but got %v", node.Kind)
|
||||
}
|
||||
}
|
||||
|
||||
// keyValuesField handles flexible YAML formats for key-value pairs.
|
||||
// Supported formats:
|
||||
// - Sequence of maps: [{key1: value1}, {key2: [value2, value3]}]
|
||||
// - Single map: {key1: value1, key2: [value2, value3]}
|
||||
//
|
||||
// Values can be either a single string or an array of strings.
|
||||
type keyValuesField []types.KeyValue[string, []string]
|
||||
|
||||
func (kv *keyValuesField) UnmarshalYAML(node *yaml.Node) error {
|
||||
switch node.Kind {
|
||||
case yaml.MappingNode:
|
||||
// Handle single map: {key1: value1, key2: [value2]}
|
||||
return kv.unmarshalMapping(node)
|
||||
case yaml.SequenceNode:
|
||||
// Handle sequence of maps: [{key1: value1}, {key2: value2}]
|
||||
for _, item := range node.Content {
|
||||
if item.Kind != yaml.MappingNode {
|
||||
return fmt.Errorf("expected a mapping in sequence, but got %v", item.Kind)
|
||||
}
|
||||
if err := kv.unmarshalMapping(item); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("expected a mapping or sequence of mappings, but got %v", node.Kind)
|
||||
}
|
||||
}
|
||||
|
||||
func (kv *keyValuesField) unmarshalMapping(node *yaml.Node) error {
|
||||
// MappingNode content is [key1, value1, key2, value2, ...]
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
|
||||
if keyNode.Kind != yaml.ScalarNode {
|
||||
return fmt.Errorf("expected a string key, but got %v", keyNode.Kind)
|
||||
}
|
||||
|
||||
key := keyNode.Value
|
||||
var values []string
|
||||
|
||||
switch valueNode.Kind {
|
||||
case yaml.ScalarNode:
|
||||
values = []string{valueNode.Value}
|
||||
case yaml.SequenceNode:
|
||||
for _, v := range valueNode.Content {
|
||||
if v.Kind != yaml.ScalarNode {
|
||||
return fmt.Errorf("expected string values in array for key %q, but got %v", key, v.Kind)
|
||||
}
|
||||
values = append(values, v.Value)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("expected a string or array of strings for key %q, but got %v", key, valueNode.Kind)
|
||||
}
|
||||
|
||||
*kv = append(*kv, types.KeyValue[string, []string]{Key: key, Value: values})
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type configYAML struct {
|
||||
ConfigFiles stringOrSliceField `yaml:"configFile"`
|
||||
Method stringOrSliceField `yaml:"method"`
|
||||
URL *string `yaml:"url"`
|
||||
Timeout *time.Duration `yaml:"timeout"`
|
||||
Concurrency *uint `yaml:"concurrency"`
|
||||
RequestCount *uint64 `yaml:"requests"`
|
||||
Duration *time.Duration `yaml:"duration"`
|
||||
Quiet *bool `yaml:"quiet"`
|
||||
Output *string `yaml:"output"`
|
||||
Insecure *bool `yaml:"insecure"`
|
||||
ShowConfig *bool `yaml:"showConfig"`
|
||||
DryRun *bool `yaml:"dryRun"`
|
||||
Params keyValuesField `yaml:"params"`
|
||||
Headers keyValuesField `yaml:"headers"`
|
||||
Cookies keyValuesField `yaml:"cookies"`
|
||||
Bodies stringOrSliceField `yaml:"body"`
|
||||
Proxies stringOrSliceField `yaml:"proxy"`
|
||||
Values stringOrSliceField `yaml:"values"`
|
||||
Lua stringOrSliceField `yaml:"lua"`
|
||||
Js stringOrSliceField `yaml:"js"`
|
||||
}
|
||||
|
||||
// ParseYAML parses YAML config file arguments into a Config object.
|
||||
// It can return the following errors:
|
||||
// - types.UnmarshalError
|
||||
// - types.FieldParseErrors
|
||||
func (parser ConfigFileParser) ParseYAML(data []byte) (*Config, error) {
|
||||
var (
|
||||
config = &Config{}
|
||||
parsedData = &configYAML{}
|
||||
)
|
||||
|
||||
err := yaml.Unmarshal(data, &parsedData)
|
||||
if err != nil {
|
||||
return nil, types.NewUnmarshalError(err)
|
||||
}
|
||||
|
||||
var fieldParseErrors []types.FieldParseError
|
||||
|
||||
config.Methods = append(config.Methods, parsedData.Method...)
|
||||
config.Timeout = parsedData.Timeout
|
||||
config.Concurrency = parsedData.Concurrency
|
||||
config.Requests = parsedData.RequestCount
|
||||
config.Duration = parsedData.Duration
|
||||
config.ShowConfig = parsedData.ShowConfig
|
||||
config.Quiet = parsedData.Quiet
|
||||
|
||||
if parsedData.Output != nil {
|
||||
config.Output = new(ConfigOutputType(*parsedData.Output))
|
||||
}
|
||||
|
||||
config.Insecure = parsedData.Insecure
|
||||
config.DryRun = parsedData.DryRun
|
||||
for _, kv := range parsedData.Params {
|
||||
config.Params = append(config.Params, types.Param(kv))
|
||||
}
|
||||
for _, kv := range parsedData.Headers {
|
||||
config.Headers = append(config.Headers, types.Header(kv))
|
||||
}
|
||||
for _, kv := range parsedData.Cookies {
|
||||
config.Cookies = append(config.Cookies, types.Cookie(kv))
|
||||
}
|
||||
config.Bodies = append(config.Bodies, parsedData.Bodies...)
|
||||
config.Values = append(config.Values, parsedData.Values...)
|
||||
config.Lua = append(config.Lua, parsedData.Lua...)
|
||||
config.Js = append(config.Js, parsedData.Js...)
|
||||
|
||||
if len(parsedData.ConfigFiles) > 0 {
|
||||
for _, configFile := range parsedData.ConfigFiles {
|
||||
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||
}
|
||||
}
|
||||
|
||||
if parsedData.URL != nil {
|
||||
urlParsed, err := url.Parse(*parsedData.URL)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", *parsedData.URL, err))
|
||||
} else {
|
||||
config.URL = urlParsed
|
||||
}
|
||||
}
|
||||
|
||||
for i, proxy := range parsedData.Proxies {
|
||||
err := config.Proxies.Parse(proxy)
|
||||
if err != nil {
|
||||
fieldParseErrors = append(
|
||||
fieldParseErrors,
|
||||
types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if len(fieldParseErrors) > 0 {
|
||||
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
229
internal/config/template_validator.go
Normal file
229
internal/config/template_validator.go
Normal file
@@ -0,0 +1,229 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"text/template"
|
||||
|
||||
"go.aykhans.me/sarin/internal/sarin"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// It can return the following errors:
|
||||
// - types.TemplateParseError
|
||||
func validateTemplateString(value string, funcMap template.FuncMap) error {
|
||||
if value == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := template.New("").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
return types.NewTemplateParseError(err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateTemplateMethods(methods []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, method := range methods {
|
||||
if err := validateTemplateString(method, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Method[%d]", i),
|
||||
method,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateParams(params types.Params, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for paramIndex, param := range params {
|
||||
// Validate param key
|
||||
if err := validateTemplateString(param.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Param[%d].Key", paramIndex),
|
||||
param.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate param values
|
||||
for valueIndex, value := range param.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Param[%d].Value[%d]", paramIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateHeaders(headers types.Headers, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for headerIndex, header := range headers {
|
||||
// Validate header key
|
||||
if err := validateTemplateString(header.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Header[%d].Key", headerIndex),
|
||||
header.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate header values
|
||||
for valueIndex, value := range header.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Header[%d].Value[%d]", headerIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateCookies(cookies types.Cookies, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for cookieIndex, cookie := range cookies {
|
||||
// Validate cookie key
|
||||
if err := validateTemplateString(cookie.Key, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Cookie[%d].Key", cookieIndex),
|
||||
cookie.Key,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate cookie values
|
||||
for valueIndex, value := range cookie.Value {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Cookie[%d].Value[%d]", cookieIndex, valueIndex),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateBodies(bodies []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, body := range bodies {
|
||||
if err := validateTemplateString(body, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Body[%d]", i),
|
||||
body,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateValues(values []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
var validationErrors []types.FieldValidationError
|
||||
|
||||
for i, value := range values {
|
||||
if err := validateTemplateString(value, funcMap); err != nil {
|
||||
validationErrors = append(
|
||||
validationErrors,
|
||||
types.NewFieldValidationError(
|
||||
fmt.Sprintf("Values[%d]", i),
|
||||
value,
|
||||
err,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return validationErrors
|
||||
}
|
||||
|
||||
func validateTemplateURLPath(urlPath string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||
if err := validateTemplateString(urlPath, funcMap); err != nil {
|
||||
return []types.FieldValidationError{
|
||||
types.NewFieldValidationError("URL.Path", urlPath, err),
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func ValidateTemplates(config *Config) []types.FieldValidationError {
|
||||
// Create template function map using the same functions as sarin package
|
||||
// Use nil for fileCache during validation - templates are only parsed, not executed
|
||||
randSource := sarin.NewDefaultRandSource()
|
||||
funcMap := sarin.NewDefaultTemplateFuncMap(randSource, nil)
|
||||
|
||||
bodyFuncMapData := &sarin.BodyTemplateFuncMapData{}
|
||||
bodyFuncMap := sarin.NewDefaultBodyTemplateFuncMap(randSource, bodyFuncMapData, nil)
|
||||
|
||||
var allErrors []types.FieldValidationError
|
||||
|
||||
// Validate URL path
|
||||
if config.URL != nil {
|
||||
allErrors = append(allErrors, validateTemplateURLPath(config.URL.Path, funcMap)...)
|
||||
}
|
||||
|
||||
// Validate methods
|
||||
allErrors = append(allErrors, validateTemplateMethods(config.Methods, funcMap)...)
|
||||
|
||||
// Validate params
|
||||
allErrors = append(allErrors, validateTemplateParams(config.Params, funcMap)...)
|
||||
|
||||
// Validate headers
|
||||
allErrors = append(allErrors, validateTemplateHeaders(config.Headers, funcMap)...)
|
||||
|
||||
// Validate cookies
|
||||
allErrors = append(allErrors, validateTemplateCookies(config.Cookies, funcMap)...)
|
||||
|
||||
// Validate bodies
|
||||
allErrors = append(allErrors, validateTemplateBodies(config.Bodies, bodyFuncMap)...)
|
||||
|
||||
// Validate values
|
||||
allErrors = append(allErrors, validateTemplateValues(config.Values, funcMap)...)
|
||||
|
||||
return allErrors
|
||||
}
|
||||
324
internal/sarin/client.go
Normal file
324
internal/sarin/client.go
Normal file
@@ -0,0 +1,324 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"encoding/base64"
|
||||
"math"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/valyala/fasthttp"
|
||||
"github.com/valyala/fasthttp/fasthttpproxy"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
"golang.org/x/net/proxy"
|
||||
)
|
||||
|
||||
type HostClientGenerator func() *fasthttp.HostClient
|
||||
|
||||
func safeUintToInt(u uint) int {
|
||||
if u > math.MaxInt {
|
||||
return math.MaxInt
|
||||
}
|
||||
return int(u)
|
||||
}
|
||||
|
||||
// NewHostClients creates a list of fasthttp.HostClient instances for the given proxies.
|
||||
// If no proxies are provided, a single client without a proxy is returned.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func NewHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies []url.URL,
|
||||
maxConns uint,
|
||||
requestURL *url.URL,
|
||||
skipVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
isTLS := requestURL.Scheme == "https"
|
||||
|
||||
if proxiesLen := len(proxies); proxiesLen > 0 {
|
||||
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
||||
addr := requestURL.Host
|
||||
if isTLS && requestURL.Port() == "" {
|
||||
addr += ":443"
|
||||
}
|
||||
|
||||
for _, proxy := range proxies {
|
||||
dialFunc, err := NewProxyDialFunc(ctx, &proxy, timeout)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxy.String(), err)
|
||||
}
|
||||
|
||||
clients = append(clients, &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: addr,
|
||||
Dial: dialFunc,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
return clients, nil
|
||||
}
|
||||
|
||||
client := &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: requestURL.Host,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
}
|
||||
return []*fasthttp.HostClient{client}, nil
|
||||
}
|
||||
|
||||
// NewProxyDialFunc creates a dial function for the given proxy URL.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyUnsupportedSchemeError
|
||||
func NewProxyDialFunc(ctx context.Context, proxyURL *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
||||
var (
|
||||
dialer fasthttp.DialFunc
|
||||
err error
|
||||
)
|
||||
|
||||
switch proxyURL.Scheme {
|
||||
case "socks5":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "socks5h":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "http":
|
||||
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxyURL.String(), timeout)
|
||||
case "https":
|
||||
dialer = fasthttpHTTPSDialerDualStackTimeout(proxyURL, timeout)
|
||||
default:
|
||||
return nil, types.NewProxyUnsupportedSchemeError(proxyURL.Scheme)
|
||||
}
|
||||
|
||||
return dialer, nil
|
||||
}
|
||||
|
||||
// The returned dial function can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func fasthttpSocksDialerDualStackTimeout(ctx context.Context, proxyURL *url.URL, timeout time.Duration, resolveLocally bool) (fasthttp.DialFunc, error) {
|
||||
netDialer := &net.Dialer{}
|
||||
|
||||
// Parse auth from proxy URL if present
|
||||
var auth *proxy.Auth
|
||||
if proxyURL.User != nil {
|
||||
auth = &proxy.Auth{
|
||||
User: proxyURL.User.Username(),
|
||||
}
|
||||
if password, ok := proxyURL.User.Password(); ok {
|
||||
auth.Password = password
|
||||
}
|
||||
}
|
||||
|
||||
// Create SOCKS5 dialer with net.Dialer as forward dialer
|
||||
socksDialer, err := proxy.SOCKS5("tcp", proxyURL.Host, auth, netDialer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proxyStr := proxyURL.String()
|
||||
|
||||
// Assert to ContextDialer for timeout support
|
||||
contextDialer, ok := socksDialer.(proxy.ContextDialer)
|
||||
if !ok {
|
||||
// Fallback without timeout (should not happen with net.Dialer)
|
||||
return func(addr string) (net.Conn, error) {
|
||||
conn, err := socksDialer.Dial("tcp", addr)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
return conn, nil
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Return dial function that uses context with timeout
|
||||
return func(addr string) (net.Conn, error) {
|
||||
deadline := time.Now().Add(timeout)
|
||||
|
||||
if resolveLocally {
|
||||
host, port, err := net.SplitHostPort(addr)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
// Cap DNS resolution to half the timeout to reserve time for dial
|
||||
dnsCtx, dnsCancel := context.WithTimeout(ctx, timeout)
|
||||
ips, err := net.DefaultResolver.LookupIP(dnsCtx, "ip", host)
|
||||
dnsCancel()
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
if len(ips) == 0 {
|
||||
return nil, types.NewProxyDialError(proxyStr, types.NewProxyResolveError(host))
|
||||
}
|
||||
|
||||
// Use the first resolved IP
|
||||
addr = net.JoinHostPort(ips[0].String(), port)
|
||||
}
|
||||
|
||||
// Use remaining time for dial
|
||||
remaining := time.Until(deadline)
|
||||
if remaining <= 0 {
|
||||
return nil, types.NewProxyDialError(proxyStr, context.DeadlineExceeded)
|
||||
}
|
||||
|
||||
dialCtx, dialCancel := context.WithTimeout(ctx, remaining)
|
||||
defer dialCancel()
|
||||
|
||||
conn, err := contextDialer.DialContext(dialCtx, "tcp", addr)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
return conn, nil
|
||||
}, nil
|
||||
}
|
||||
|
||||
// The returned dial function can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func fasthttpHTTPSDialerDualStackTimeout(proxyURL *url.URL, timeout time.Duration) fasthttp.DialFunc {
|
||||
proxyAddr := proxyURL.Host
|
||||
if proxyURL.Port() == "" {
|
||||
proxyAddr = net.JoinHostPort(proxyURL.Hostname(), "443")
|
||||
}
|
||||
|
||||
// Build Proxy-Authorization header if auth is present
|
||||
var proxyAuth string
|
||||
if proxyURL.User != nil {
|
||||
username := proxyURL.User.Username()
|
||||
password, _ := proxyURL.User.Password()
|
||||
credentials := username + ":" + password
|
||||
proxyAuth = "Basic " + base64.StdEncoding.EncodeToString([]byte(credentials))
|
||||
}
|
||||
|
||||
proxyStr := proxyURL.String()
|
||||
|
||||
return func(addr string) (net.Conn, error) {
|
||||
// Establish TCP connection to proxy with timeout
|
||||
start := time.Now()
|
||||
conn, err := fasthttp.DialDualStackTimeout(proxyAddr, timeout)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
remaining := timeout - time.Since(start)
|
||||
if remaining <= 0 {
|
||||
conn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, context.DeadlineExceeded)
|
||||
}
|
||||
|
||||
// Set deadline for the TLS handshake and CONNECT request
|
||||
if err := conn.SetDeadline(time.Now().Add(remaining)); err != nil {
|
||||
conn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
// Upgrade to TLS
|
||||
tlsConn := tls.Client(conn, &tls.Config{ //nolint:gosec
|
||||
ServerName: proxyURL.Hostname(),
|
||||
})
|
||||
if err := tlsConn.Handshake(); err != nil {
|
||||
tlsConn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
// Build and send CONNECT request
|
||||
connectReq := &http.Request{
|
||||
Method: http.MethodConnect,
|
||||
URL: &url.URL{Opaque: addr},
|
||||
Host: addr,
|
||||
Header: make(http.Header),
|
||||
}
|
||||
if proxyAuth != "" {
|
||||
connectReq.Header.Set("Proxy-Authorization", proxyAuth)
|
||||
}
|
||||
|
||||
if err := connectReq.Write(tlsConn); err != nil {
|
||||
tlsConn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
// Read response using buffered reader, but return wrapped connection
|
||||
// to preserve any buffered data
|
||||
bufReader := bufio.NewReader(tlsConn)
|
||||
resp, err := http.ReadResponse(bufReader, connectReq)
|
||||
if err != nil {
|
||||
tlsConn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
resp.Body.Close() //nolint:errcheck,gosec
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
tlsConn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, types.NewProxyConnectError(resp.Status))
|
||||
}
|
||||
|
||||
// Clear deadline for the tunneled connection
|
||||
if err := tlsConn.SetDeadline(time.Time{}); err != nil {
|
||||
tlsConn.Close() //nolint:errcheck,gosec
|
||||
return nil, types.NewProxyDialError(proxyStr, err)
|
||||
}
|
||||
|
||||
// Return wrapped connection that uses the buffered reader
|
||||
// to avoid losing any data that was read ahead
|
||||
return &bufferedConn{Conn: tlsConn, reader: bufReader}, nil
|
||||
}
|
||||
}
|
||||
|
||||
// bufferedConn wraps a net.Conn with a buffered reader to preserve
|
||||
// any data that was read during HTTP response parsing.
|
||||
type bufferedConn struct {
|
||||
net.Conn
|
||||
|
||||
reader *bufio.Reader
|
||||
}
|
||||
|
||||
func (c *bufferedConn) Read(b []byte) (int, error) {
|
||||
return c.reader.Read(b)
|
||||
}
|
||||
|
||||
func NewHostClientGenerator(clients ...*fasthttp.HostClient) HostClientGenerator {
|
||||
switch len(clients) {
|
||||
case 0:
|
||||
hostClient := &fasthttp.HostClient{}
|
||||
return func() *fasthttp.HostClient {
|
||||
return hostClient
|
||||
}
|
||||
case 1:
|
||||
return func() *fasthttp.HostClient {
|
||||
return clients[0]
|
||||
}
|
||||
default:
|
||||
return utilsSlice.RandomCycle(nil, clients...)
|
||||
}
|
||||
}
|
||||
114
internal/sarin/filecache.go
Normal file
114
internal/sarin/filecache.go
Normal file
@@ -0,0 +1,114 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// CachedFile holds the cached content and metadata of a file.
|
||||
type CachedFile struct {
|
||||
Content []byte
|
||||
Filename string
|
||||
}
|
||||
|
||||
type FileCache struct {
|
||||
cache sync.Map // map[string]*CachedFile
|
||||
requestTimeout time.Duration
|
||||
}
|
||||
|
||||
func NewFileCache(requestTimeout time.Duration) *FileCache {
|
||||
return &FileCache{
|
||||
requestTimeout: requestTimeout,
|
||||
}
|
||||
}
|
||||
|
||||
// GetOrLoad retrieves a file from cache or loads it using the provided source.
|
||||
// The source can be a local file path or an HTTP/HTTPS URL.
|
||||
// It can return the following errors:
|
||||
// - types.FileReadError
|
||||
// - types.HTTPFetchError
|
||||
// - types.HTTPStatusError
|
||||
func (fc *FileCache) GetOrLoad(source string) (*CachedFile, error) {
|
||||
if val, ok := fc.cache.Load(source); ok {
|
||||
return val.(*CachedFile), nil
|
||||
}
|
||||
|
||||
var (
|
||||
content []byte
|
||||
filename string
|
||||
err error
|
||||
)
|
||||
if strings.HasPrefix(source, "http://") || strings.HasPrefix(source, "https://") {
|
||||
content, filename, err = fc.fetchURL(source)
|
||||
} else {
|
||||
content, filename, err = fc.readLocalFile(source)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
file := &CachedFile{Content: content, Filename: filename}
|
||||
|
||||
// LoadOrStore handles race condition - if another goroutine
|
||||
// cached it first, we get theirs (no duplicate storage)
|
||||
actual, _ := fc.cache.LoadOrStore(source, file)
|
||||
return actual.(*CachedFile), nil
|
||||
}
|
||||
|
||||
// readLocalFile reads a file from the local filesystem and returns its content and filename.
|
||||
// It can return the following errors:
|
||||
// - types.FileReadError
|
||||
func (fc *FileCache) readLocalFile(filePath string) ([]byte, string, error) {
|
||||
content, err := os.ReadFile(filePath) //nolint:gosec
|
||||
if err != nil {
|
||||
return nil, "", types.NewFileReadError(filePath, err)
|
||||
}
|
||||
return content, filepath.Base(filePath), nil
|
||||
}
|
||||
|
||||
// fetchURL downloads file contents from an HTTP/HTTPS URL.
|
||||
// It can return the following errors:
|
||||
// - types.HTTPFetchError
|
||||
// - types.HTTPStatusError
|
||||
func (fc *FileCache) fetchURL(url string) ([]byte, string, error) {
|
||||
client := &http.Client{
|
||||
Timeout: fc.requestTimeout,
|
||||
}
|
||||
|
||||
resp, err := client.Get(url)
|
||||
if err != nil {
|
||||
return nil, "", types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
defer resp.Body.Close() //nolint:errcheck
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, "", types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||
}
|
||||
|
||||
content, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, "", types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
|
||||
// Extract filename from URL path
|
||||
filename := path.Base(url)
|
||||
if filename == "" || filename == "/" || filename == "." {
|
||||
filename = "downloaded_file"
|
||||
}
|
||||
|
||||
// Remove query string from filename if present
|
||||
if idx := strings.Index(filename, "?"); idx != -1 {
|
||||
filename = filename[:idx]
|
||||
}
|
||||
|
||||
return content, filename, nil
|
||||
}
|
||||
14
internal/sarin/helpers.go
Normal file
14
internal/sarin/helpers.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"math/rand/v2"
|
||||
"time"
|
||||
)
|
||||
|
||||
func NewDefaultRandSource() rand.Source {
|
||||
now := time.Now().UnixNano()
|
||||
return rand.NewPCG(
|
||||
uint64(now), //nolint:gosec // G115: Safe conversion; UnixNano timestamp used as random seed, bit pattern is intentional
|
||||
uint64(now>>32), //nolint:gosec // G115: Safe conversion; right-shifted timestamp for seed entropy, overflow is acceptable
|
||||
)
|
||||
}
|
||||
404
internal/sarin/request.go
Normal file
404
internal/sarin/request.go
Normal file
@@ -0,0 +1,404 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"maps"
|
||||
"math/rand/v2"
|
||||
"net/url"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/script"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
)
|
||||
|
||||
type RequestGenerator func(*fasthttp.Request) error
|
||||
|
||||
type requestDataGenerator func(*script.RequestData, any) error
|
||||
|
||||
type valuesData struct {
|
||||
Values map[string]string
|
||||
}
|
||||
|
||||
// NewRequestGenerator creates a new RequestGenerator function that generates HTTP requests
|
||||
// with the specified configuration. The returned RequestGenerator is NOT safe for concurrent
|
||||
// use by multiple goroutines.
|
||||
//
|
||||
// Note: Scripts must be validated before calling this function (e.g., in NewSarin).
|
||||
// The caller is responsible for managing the scriptTransformer lifecycle.
|
||||
func NewRequestGenerator(
|
||||
methods []string,
|
||||
requestURL *url.URL,
|
||||
params types.Params,
|
||||
headers types.Headers,
|
||||
cookies types.Cookies,
|
||||
bodies []string,
|
||||
values []string,
|
||||
fileCache *FileCache,
|
||||
scriptTransformer *script.Transformer,
|
||||
) (RequestGenerator, bool) {
|
||||
randSource := NewDefaultRandSource()
|
||||
//nolint:gosec // G404: Using non-cryptographic rand for load testing, not security
|
||||
localRand := rand.New(randSource)
|
||||
templateFuncMap := NewDefaultTemplateFuncMap(randSource, fileCache)
|
||||
|
||||
pathGenerator, isPathGeneratorDynamic := createTemplateFunc(requestURL.Path, templateFuncMap)
|
||||
methodGenerator, isMethodGeneratorDynamic := NewMethodGeneratorFunc(localRand, methods, templateFuncMap)
|
||||
paramsGenerator, isParamsGeneratorDynamic := NewParamsGeneratorFunc(localRand, params, templateFuncMap)
|
||||
headersGenerator, isHeadersGeneratorDynamic := NewHeadersGeneratorFunc(localRand, headers, templateFuncMap)
|
||||
cookiesGenerator, isCookiesGeneratorDynamic := NewCookiesGeneratorFunc(localRand, cookies, templateFuncMap)
|
||||
|
||||
bodyTemplateFuncMapData := &BodyTemplateFuncMapData{}
|
||||
bodyTemplateFuncMap := NewDefaultBodyTemplateFuncMap(randSource, bodyTemplateFuncMapData, fileCache)
|
||||
bodyGenerator, isBodyGeneratorDynamic := NewBodyGeneratorFunc(localRand, bodies, bodyTemplateFuncMap)
|
||||
|
||||
valuesGenerator := NewValuesGeneratorFunc(values, templateFuncMap)
|
||||
|
||||
hasScripts := scriptTransformer != nil && !scriptTransformer.IsEmpty()
|
||||
|
||||
host := requestURL.Host
|
||||
scheme := requestURL.Scheme
|
||||
|
||||
reqData := &script.RequestData{
|
||||
Headers: make(map[string][]string),
|
||||
Params: make(map[string][]string),
|
||||
Cookies: make(map[string][]string),
|
||||
}
|
||||
|
||||
var (
|
||||
data valuesData
|
||||
path string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request) error {
|
||||
resetRequestData(reqData)
|
||||
|
||||
data, err = valuesGenerator()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
path, err = pathGenerator(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
reqData.Path = path
|
||||
|
||||
if err = methodGenerator(reqData, data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
bodyTemplateFuncMapData.ClearFormDataContenType()
|
||||
if err = bodyGenerator(reqData, data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = headersGenerator(reqData, data); err != nil {
|
||||
return err
|
||||
}
|
||||
if bodyTemplateFuncMapData.GetFormDataContenType() != "" {
|
||||
reqData.Headers["Content-Type"] = append(reqData.Headers["Content-Type"], bodyTemplateFuncMapData.GetFormDataContenType())
|
||||
}
|
||||
|
||||
if err = paramsGenerator(reqData, data); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = cookiesGenerator(reqData, data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if hasScripts {
|
||||
if err = scriptTransformer.Transform(reqData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
applyRequestDataToFastHTTP(reqData, req, host, scheme)
|
||||
|
||||
return nil
|
||||
}, isPathGeneratorDynamic ||
|
||||
isMethodGeneratorDynamic ||
|
||||
isParamsGeneratorDynamic ||
|
||||
isHeadersGeneratorDynamic ||
|
||||
isCookiesGeneratorDynamic ||
|
||||
isBodyGeneratorDynamic ||
|
||||
hasScripts
|
||||
}
|
||||
|
||||
func resetRequestData(reqData *script.RequestData) {
|
||||
reqData.Method = ""
|
||||
reqData.Path = ""
|
||||
reqData.Body = ""
|
||||
clear(reqData.Headers)
|
||||
clear(reqData.Params)
|
||||
clear(reqData.Cookies)
|
||||
}
|
||||
|
||||
func applyRequestDataToFastHTTP(reqData *script.RequestData, req *fasthttp.Request, host, scheme string) {
|
||||
req.Header.SetHost(host)
|
||||
req.SetRequestURI(reqData.Path)
|
||||
req.Header.SetMethod(reqData.Method)
|
||||
req.SetBody([]byte(reqData.Body))
|
||||
|
||||
for k, values := range reqData.Headers {
|
||||
for _, v := range values {
|
||||
req.Header.Add(k, v)
|
||||
}
|
||||
}
|
||||
|
||||
for k, values := range reqData.Params {
|
||||
for _, v := range values {
|
||||
req.URI().QueryArgs().Add(k, v)
|
||||
}
|
||||
}
|
||||
|
||||
if len(reqData.Cookies) > 0 {
|
||||
cookieStrings := make([]string, 0, len(reqData.Cookies))
|
||||
for k, values := range reqData.Cookies {
|
||||
for _, v := range values {
|
||||
cookieStrings = append(cookieStrings, k+"="+v)
|
||||
}
|
||||
}
|
||||
req.Header.Add("Cookie", strings.Join(cookieStrings, "; "))
|
||||
}
|
||||
|
||||
if scheme == "https" {
|
||||
req.URI().SetScheme("https")
|
||||
}
|
||||
}
|
||||
|
||||
func NewMethodGeneratorFunc(localRand *rand.Rand, methods []string, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||
methodGenerator, isDynamic := buildStringSliceGenerator(localRand, methods, templateFunctions)
|
||||
|
||||
var (
|
||||
method string
|
||||
err error
|
||||
)
|
||||
return func(reqData *script.RequestData, data any) error {
|
||||
method, err = methodGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reqData.Method = method
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewBodyGeneratorFunc(localRand *rand.Rand, bodies []string, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||
bodyGenerator, isDynamic := buildStringSliceGenerator(localRand, bodies, templateFunctions)
|
||||
|
||||
var (
|
||||
body string
|
||||
err error
|
||||
)
|
||||
return func(reqData *script.RequestData, data any) error {
|
||||
body, err = bodyGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reqData.Body = body
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewParamsGeneratorFunc(localRand *rand.Rand, params types.Params, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, params, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(reqData *script.RequestData, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reqData.Params[key] = append(reqData.Params[key], value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewHeadersGeneratorFunc(localRand *rand.Rand, headers types.Headers, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, headers, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(reqData *script.RequestData, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reqData.Headers[key] = append(reqData.Headers[key], value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewCookiesGeneratorFunc(localRand *rand.Rand, cookies types.Cookies, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, cookies, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(reqData *script.RequestData, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reqData.Cookies[key] = append(reqData.Cookies[key], value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewValuesGeneratorFunc(values []string, templateFunctions template.FuncMap) func() (valuesData, error) {
|
||||
generators := make([]func(_ any) (string, error), len(values))
|
||||
|
||||
for i, v := range values {
|
||||
generators[i], _ = createTemplateFunc(v, templateFunctions)
|
||||
}
|
||||
|
||||
var (
|
||||
rendered string
|
||||
data map[string]string
|
||||
err error
|
||||
)
|
||||
return func() (valuesData, error) {
|
||||
result := make(map[string]string)
|
||||
for _, generator := range generators {
|
||||
rendered, err = generator(nil)
|
||||
if err != nil {
|
||||
return valuesData{}, types.NewTemplateRenderError(err)
|
||||
}
|
||||
|
||||
data, err = godotenv.Unmarshal(rendered)
|
||||
if err != nil {
|
||||
return valuesData{}, types.NewTemplateRenderError(err)
|
||||
}
|
||||
|
||||
maps.Copy(result, data)
|
||||
}
|
||||
|
||||
return valuesData{Values: result}, nil
|
||||
}
|
||||
}
|
||||
|
||||
func createTemplateFunc(value string, templateFunctions template.FuncMap) (func(data any) (string, error), bool) {
|
||||
tmpl, err := template.New("").Funcs(templateFunctions).Parse(value)
|
||||
if err == nil && hasTemplateActions(tmpl) {
|
||||
var err error
|
||||
return func(data any) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
if err = tmpl.Execute(&buf, data); err != nil {
|
||||
return "", types.NewTemplateRenderError(err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}, true
|
||||
}
|
||||
return func(_ any) (string, error) { return value, nil }, false
|
||||
}
|
||||
|
||||
type keyValueGenerator struct {
|
||||
Key func(data any) (string, error)
|
||||
Value func() func(data any) (string, error)
|
||||
}
|
||||
|
||||
type keyValueItem interface {
|
||||
types.Param | types.Header | types.Cookie
|
||||
}
|
||||
|
||||
func buildKeyValueGenerators[T keyValueItem](
|
||||
localRand *rand.Rand,
|
||||
items []T,
|
||||
templateFunctions template.FuncMap,
|
||||
) ([]keyValueGenerator, bool) {
|
||||
isDynamic := false
|
||||
generators := make([]keyValueGenerator, len(items))
|
||||
|
||||
for generatorIndex, item := range items {
|
||||
// Convert to KeyValue to access fields
|
||||
keyValue := types.KeyValue[string, []string](item)
|
||||
|
||||
// Generate key function
|
||||
keyFunc, keyIsDynamic := createTemplateFunc(keyValue.Key, templateFunctions)
|
||||
if keyIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
|
||||
// Generate value functions
|
||||
valueFuncs := make([]func(data any) (string, error), len(keyValue.Value))
|
||||
for j, v := range keyValue.Value {
|
||||
valueFunc, valueIsDynamic := createTemplateFunc(v, templateFunctions)
|
||||
if valueIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
valueFuncs[j] = valueFunc
|
||||
}
|
||||
|
||||
generators[generatorIndex] = keyValueGenerator{
|
||||
Key: keyFunc,
|
||||
Value: utilsSlice.RandomCycle(localRand, valueFuncs...),
|
||||
}
|
||||
|
||||
if len(keyValue.Value) > 1 {
|
||||
isDynamic = true
|
||||
}
|
||||
}
|
||||
|
||||
return generators, isDynamic
|
||||
}
|
||||
|
||||
func buildStringSliceGenerator(
|
||||
localRand *rand.Rand,
|
||||
values []string,
|
||||
templateFunctions template.FuncMap,
|
||||
) (func() func(data any) (string, error), bool) {
|
||||
// Return a function that returns an empty string generator if values is empty
|
||||
if len(values) == 0 {
|
||||
emptyFunc := func(_ any) (string, error) { return "", nil }
|
||||
return func() func(_ any) (string, error) { return emptyFunc }, false
|
||||
}
|
||||
|
||||
isDynamic := len(values) > 1
|
||||
valueFuncs := make([]func(data any) (string, error), len(values))
|
||||
|
||||
for i, value := range values {
|
||||
valueFunc, valueIsDynamic := createTemplateFunc(value, templateFunctions)
|
||||
if valueIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
valueFuncs[i] = valueFunc
|
||||
}
|
||||
|
||||
return utilsSlice.RandomCycle(localRand, valueFuncs...), isDynamic
|
||||
}
|
||||
348
internal/sarin/response.go
Normal file
348
internal/sarin/response.go
Normal file
@@ -0,0 +1,348 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/lipgloss/table"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
const DefaultResponseDurationAccuracy uint32 = 1
|
||||
const DefaultResponseColumnMaxWidth = 50
|
||||
|
||||
// Duration wraps time.Duration to provide consistent JSON/YAML marshaling as human-readable strings.
|
||||
type Duration time.Duration
|
||||
|
||||
func (d Duration) MarshalJSON() ([]byte, error) {
|
||||
//nolint:wrapcheck
|
||||
return json.Marshal(time.Duration(d).String())
|
||||
}
|
||||
|
||||
func (d Duration) MarshalYAML() (any, error) {
|
||||
return time.Duration(d).String(), nil
|
||||
}
|
||||
|
||||
func (d Duration) String() string {
|
||||
dur := time.Duration(d)
|
||||
switch {
|
||||
case dur >= time.Second:
|
||||
return dur.Round(time.Millisecond).String()
|
||||
case dur >= time.Millisecond:
|
||||
return dur.Round(time.Microsecond).String()
|
||||
default:
|
||||
return dur.String()
|
||||
}
|
||||
}
|
||||
|
||||
// BigInt wraps big.Int to provide consistent JSON/YAML marshaling as numbers.
|
||||
type BigInt struct {
|
||||
*big.Int
|
||||
}
|
||||
|
||||
func (b BigInt) MarshalJSON() ([]byte, error) {
|
||||
return []byte(b.Int.String()), nil
|
||||
}
|
||||
|
||||
func (b BigInt) MarshalYAML() (any, error) {
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: b.Int.String(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (b BigInt) String() string {
|
||||
return b.Int.String()
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
durations map[time.Duration]uint64
|
||||
}
|
||||
|
||||
type SarinResponseData struct {
|
||||
sync.Mutex
|
||||
|
||||
Responses map[string]*Response
|
||||
|
||||
// accuracy is the time bucket size in nanoseconds for storing response durations.
|
||||
// Larger values (e.g., 1000) save memory but reduce accuracy by grouping more durations together.
|
||||
// Smaller values (e.g., 10) improve accuracy but increase memory usage.
|
||||
// Minimum value is 1 (most accurate, highest memory usage).
|
||||
// Default value is 1.
|
||||
accuracy time.Duration
|
||||
}
|
||||
|
||||
func NewSarinResponseData(accuracy uint32) *SarinResponseData {
|
||||
if accuracy == 0 {
|
||||
accuracy = DefaultResponseDurationAccuracy
|
||||
}
|
||||
|
||||
return &SarinResponseData{
|
||||
Responses: make(map[string]*Response),
|
||||
accuracy: time.Duration(accuracy),
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) Add(responseKey string, responseTime time.Duration) {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
response, ok := data.Responses[responseKey]
|
||||
if !ok {
|
||||
data.Responses[responseKey] = &Response{
|
||||
durations: map[time.Duration]uint64{
|
||||
responseTime / data.accuracy: 1,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
response.durations[responseTime/data.accuracy]++
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintTable() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
|
||||
headerStyle := lipgloss.NewStyle().
|
||||
Bold(true).
|
||||
Foreground(lipgloss.Color("246")).
|
||||
Padding(0, 1)
|
||||
|
||||
cellStyle := lipgloss.NewStyle().
|
||||
Padding(0, 1)
|
||||
|
||||
rows := make([][]string, 0, len(output.Responses)+1)
|
||||
for key, stats := range output.Responses {
|
||||
rows = append(rows, []string{
|
||||
wrapText(key, DefaultResponseColumnMaxWidth),
|
||||
stats.Count.String(),
|
||||
stats.Min.String(),
|
||||
stats.Max.String(),
|
||||
stats.Average.String(),
|
||||
stats.P90.String(),
|
||||
stats.P95.String(),
|
||||
stats.P99.String(),
|
||||
})
|
||||
}
|
||||
|
||||
rows = append(rows, []string{
|
||||
"Total",
|
||||
output.Total.Count.String(),
|
||||
output.Total.Min.String(),
|
||||
output.Total.Max.String(),
|
||||
output.Total.Average.String(),
|
||||
output.Total.P90.String(),
|
||||
output.Total.P95.String(),
|
||||
output.Total.P99.String(),
|
||||
})
|
||||
|
||||
tbl := table.New().
|
||||
Border(lipgloss.NormalBorder()).
|
||||
BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("240"))).
|
||||
BorderRow(true).
|
||||
Headers("Response", "Count", "Min", "Max", "Average", "P90", "P95", "P99").
|
||||
Rows(rows...).
|
||||
StyleFunc(func(row, col int) lipgloss.Style {
|
||||
if row == table.HeaderRow {
|
||||
return headerStyle
|
||||
}
|
||||
return cellStyle
|
||||
})
|
||||
|
||||
fmt.Println(tbl)
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintJSON() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
encoder := json.NewEncoder(os.Stdout)
|
||||
encoder.SetIndent("", " ")
|
||||
if err := encoder.Encode(output); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) PrintYAML() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
encoder := yaml.NewEncoder(os.Stdout)
|
||||
encoder.SetIndent(2)
|
||||
if err := encoder.Encode(output); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
type responseStat struct {
|
||||
Count BigInt `json:"count" yaml:"count"`
|
||||
Min Duration `json:"min" yaml:"min"`
|
||||
Max Duration `json:"max" yaml:"max"`
|
||||
Average Duration `json:"average" yaml:"average"`
|
||||
P90 Duration `json:"p90" yaml:"p90"`
|
||||
P95 Duration `json:"p95" yaml:"p95"`
|
||||
P99 Duration `json:"p99" yaml:"p99"`
|
||||
}
|
||||
|
||||
type responseStats map[string]responseStat
|
||||
|
||||
type outputData struct {
|
||||
Responses map[string]responseStat `json:"responses" yaml:"responses"`
|
||||
Total responseStat `json:"total" yaml:"total"`
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) prepareOutputData() outputData {
|
||||
switch len(data.Responses) {
|
||||
case 0:
|
||||
return outputData{
|
||||
Responses: make(map[string]responseStat),
|
||||
Total: responseStat{},
|
||||
}
|
||||
case 1:
|
||||
var (
|
||||
responseKey string
|
||||
stats responseStat
|
||||
)
|
||||
for key, response := range data.Responses {
|
||||
stats = calculateStats(response.durations, data.accuracy)
|
||||
responseKey = key
|
||||
}
|
||||
return outputData{
|
||||
Responses: responseStats{
|
||||
responseKey: stats,
|
||||
},
|
||||
Total: stats,
|
||||
}
|
||||
default:
|
||||
// Calculate stats for each response
|
||||
allStats := make(responseStats)
|
||||
var totalDurations = make(map[time.Duration]uint64)
|
||||
|
||||
for key, response := range data.Responses {
|
||||
stats := calculateStats(response.durations, data.accuracy)
|
||||
allStats[key] = stats
|
||||
|
||||
// Aggregate for total row
|
||||
for duration, count := range response.durations {
|
||||
totalDurations[duration] += count
|
||||
}
|
||||
}
|
||||
|
||||
return outputData{
|
||||
Responses: allStats,
|
||||
Total: calculateStats(totalDurations, data.accuracy),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func calculateStats(durations map[time.Duration]uint64, accuracy time.Duration) responseStat {
|
||||
if len(durations) == 0 {
|
||||
return responseStat{}
|
||||
}
|
||||
|
||||
// Extract and sort unique durations
|
||||
sortedDurations := make([]time.Duration, 0, len(durations))
|
||||
for duration := range durations {
|
||||
sortedDurations = append(sortedDurations, duration)
|
||||
}
|
||||
slices.Sort(sortedDurations)
|
||||
|
||||
sum := new(big.Int)
|
||||
totalCount := new(big.Int)
|
||||
minDuration := sortedDurations[0] * accuracy
|
||||
maxDuration := sortedDurations[len(sortedDurations)-1] * accuracy
|
||||
|
||||
for _, duration := range sortedDurations {
|
||||
actualDuration := duration * accuracy
|
||||
count := durations[duration]
|
||||
|
||||
totalCount.Add(
|
||||
totalCount,
|
||||
new(big.Int).SetUint64(count),
|
||||
)
|
||||
|
||||
sum.Add(
|
||||
sum,
|
||||
new(big.Int).Mul(
|
||||
new(big.Int).SetInt64(int64(actualDuration)),
|
||||
new(big.Int).SetUint64(count),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// Calculate percentiles
|
||||
p90 := calculatePercentile(sortedDurations, durations, totalCount, 90, accuracy)
|
||||
p95 := calculatePercentile(sortedDurations, durations, totalCount, 95, accuracy)
|
||||
p99 := calculatePercentile(sortedDurations, durations, totalCount, 99, accuracy)
|
||||
|
||||
return responseStat{
|
||||
Count: BigInt{totalCount},
|
||||
Min: Duration(minDuration),
|
||||
Max: Duration(maxDuration),
|
||||
Average: Duration(div(sum, totalCount).Int64()),
|
||||
P90: p90,
|
||||
P95: p95,
|
||||
P99: p99,
|
||||
}
|
||||
}
|
||||
|
||||
func calculatePercentile(sortedDurations []time.Duration, durations map[time.Duration]uint64, totalCount *big.Int, percentile int, accuracy time.Duration) Duration {
|
||||
// Calculate the target position for the percentile
|
||||
// Using ceiling method: position = ceil(totalCount * percentile / 100)
|
||||
target := new(big.Int).Mul(totalCount, big.NewInt(int64(percentile)))
|
||||
target.Add(target, big.NewInt(99)) // Add 99 to achieve ceiling division by 100
|
||||
target.Div(target, big.NewInt(100))
|
||||
|
||||
// Accumulate counts until we reach the target position
|
||||
cumulative := new(big.Int)
|
||||
for _, duration := range sortedDurations {
|
||||
count := durations[duration]
|
||||
cumulative.Add(cumulative, new(big.Int).SetUint64(count))
|
||||
|
||||
if cumulative.Cmp(target) >= 0 {
|
||||
return Duration(duration * accuracy)
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to the last duration (shouldn't happen with valid data)
|
||||
return Duration(sortedDurations[len(sortedDurations)-1] * accuracy)
|
||||
}
|
||||
|
||||
// div performs division with rounding to the nearest integer.
|
||||
func div(x, y *big.Int) *big.Int {
|
||||
quotient, remainder := new(big.Int).DivMod(x, y, new(big.Int))
|
||||
if remainder.Mul(remainder, big.NewInt(2)).Cmp(y) >= 0 {
|
||||
quotient.Add(quotient, big.NewInt(1))
|
||||
}
|
||||
return quotient
|
||||
}
|
||||
|
||||
// wrapText wraps a string to multiple lines if it exceeds maxWidth.
|
||||
func wrapText(s string, maxWidth int) string {
|
||||
if len(s) <= maxWidth {
|
||||
return s
|
||||
}
|
||||
|
||||
var lines []string
|
||||
for len(s) > maxWidth {
|
||||
lines = append(lines, s[:maxWidth])
|
||||
s = s[maxWidth:]
|
||||
}
|
||||
if len(s) > 0 {
|
||||
lines = append(lines, s)
|
||||
}
|
||||
|
||||
return strings.Join(lines, "\n")
|
||||
}
|
||||
818
internal/sarin/sarin.go
Normal file
818
internal/sarin/sarin.go
Normal file
@@ -0,0 +1,818 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/bubbles/progress"
|
||||
"github.com/charmbracelet/bubbles/spinner"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/x/term"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/script"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
type runtimeMessageLevel uint8
|
||||
|
||||
const (
|
||||
runtimeMessageLevelWarning runtimeMessageLevel = iota
|
||||
runtimeMessageLevelError
|
||||
)
|
||||
|
||||
type runtimeMessage struct {
|
||||
timestamp time.Time
|
||||
level runtimeMessageLevel
|
||||
text string
|
||||
}
|
||||
|
||||
type messageSender func(level runtimeMessageLevel, text string)
|
||||
|
||||
type sarin struct {
|
||||
workers uint
|
||||
requestURL *url.URL
|
||||
methods []string
|
||||
params types.Params
|
||||
headers types.Headers
|
||||
cookies types.Cookies
|
||||
bodies []string
|
||||
totalRequests *uint64
|
||||
totalDuration *time.Duration
|
||||
timeout time.Duration
|
||||
quiet bool
|
||||
skipCertVerify bool
|
||||
values []string
|
||||
collectStats bool
|
||||
dryRun bool
|
||||
|
||||
hostClients []*fasthttp.HostClient
|
||||
responses *SarinResponseData
|
||||
fileCache *FileCache
|
||||
scriptChain *script.Chain
|
||||
}
|
||||
|
||||
// NewSarin creates a new sarin instance for load testing.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
// - types.ErrScriptEmpty
|
||||
// - types.ScriptLoadError
|
||||
func NewSarin(
|
||||
ctx context.Context,
|
||||
methods []string,
|
||||
requestURL *url.URL,
|
||||
timeout time.Duration,
|
||||
workers uint,
|
||||
totalRequests *uint64,
|
||||
totalDuration *time.Duration,
|
||||
quiet bool,
|
||||
skipCertVerify bool,
|
||||
params types.Params,
|
||||
headers types.Headers,
|
||||
cookies types.Cookies,
|
||||
bodies []string,
|
||||
proxies types.Proxies,
|
||||
values []string,
|
||||
collectStats bool,
|
||||
dryRun bool,
|
||||
luaScripts []string,
|
||||
jsScripts []string,
|
||||
) (*sarin, error) {
|
||||
if workers == 0 {
|
||||
workers = 1
|
||||
}
|
||||
|
||||
hostClients, err := newHostClients(ctx, timeout, proxies, workers, requestURL, skipCertVerify)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Load script sources
|
||||
luaSources, err := script.LoadSources(ctx, luaScripts, script.EngineTypeLua)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
jsSources, err := script.LoadSources(ctx, jsScripts, script.EngineTypeJavaScript)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
scriptChain := script.NewChain(luaSources, jsSources)
|
||||
|
||||
srn := &sarin{
|
||||
workers: workers,
|
||||
requestURL: requestURL,
|
||||
methods: methods,
|
||||
params: params,
|
||||
headers: headers,
|
||||
cookies: cookies,
|
||||
bodies: bodies,
|
||||
totalRequests: totalRequests,
|
||||
totalDuration: totalDuration,
|
||||
timeout: timeout,
|
||||
quiet: quiet,
|
||||
skipCertVerify: skipCertVerify,
|
||||
values: values,
|
||||
collectStats: collectStats,
|
||||
dryRun: dryRun,
|
||||
hostClients: hostClients,
|
||||
fileCache: NewFileCache(time.Second * 10),
|
||||
scriptChain: scriptChain,
|
||||
}
|
||||
|
||||
if collectStats {
|
||||
srn.responses = NewSarinResponseData(uint32(100))
|
||||
}
|
||||
|
||||
return srn, nil
|
||||
}
|
||||
|
||||
func (q sarin) GetResponses() *SarinResponseData {
|
||||
return q.responses
|
||||
}
|
||||
|
||||
func (q sarin) Start(ctx context.Context) {
|
||||
jobsCtx, jobsCancel := context.WithCancel(ctx)
|
||||
|
||||
var workersWG sync.WaitGroup
|
||||
jobsCh := make(chan struct{}, max(q.workers, 1))
|
||||
|
||||
var counter atomic.Uint64
|
||||
|
||||
totalRequests := uint64(0)
|
||||
if q.totalRequests != nil {
|
||||
totalRequests = *q.totalRequests
|
||||
}
|
||||
|
||||
var streamCtx context.Context
|
||||
var streamCancel context.CancelFunc
|
||||
var streamCh chan struct{}
|
||||
var messageChannel chan runtimeMessage
|
||||
var sendMessage messageSender
|
||||
|
||||
if !q.quiet && !term.IsTerminal(os.Stdout.Fd()) {
|
||||
q.quiet = true
|
||||
}
|
||||
|
||||
if q.quiet {
|
||||
sendMessage = func(level runtimeMessageLevel, text string) {}
|
||||
} else {
|
||||
streamCtx, streamCancel = context.WithCancel(context.Background())
|
||||
defer streamCancel()
|
||||
streamCh = make(chan struct{})
|
||||
messageChannel = make(chan runtimeMessage, max(q.workers, 1))
|
||||
sendMessage = func(level runtimeMessageLevel, text string) {
|
||||
messageChannel <- runtimeMessage{
|
||||
timestamp: time.Now(),
|
||||
level: level,
|
||||
text: text,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Start workers
|
||||
q.startWorkers(&workersWG, jobsCh, q.hostClients, &counter, sendMessage)
|
||||
|
||||
if !q.quiet {
|
||||
// Start streaming to terminal
|
||||
//nolint:contextcheck // streamCtx must remain active until all workers complete to ensure all collected data is streamed
|
||||
go q.streamProgress(streamCtx, jobsCancel, streamCh, totalRequests, &counter, messageChannel)
|
||||
}
|
||||
|
||||
// Setup duration-based cancellation
|
||||
q.setupDurationTimeout(ctx, jobsCancel)
|
||||
// Distribute jobs to workers.
|
||||
// This blocks until all jobs are sent or the context is canceled.
|
||||
q.sendJobs(jobsCtx, jobsCh)
|
||||
|
||||
// Close the jobs channel so workers stop after completing their current job
|
||||
close(jobsCh)
|
||||
// Wait until all workers stopped
|
||||
workersWG.Wait()
|
||||
if messageChannel != nil {
|
||||
close(messageChannel)
|
||||
}
|
||||
|
||||
if !q.quiet {
|
||||
// Stop the progress streaming
|
||||
streamCancel()
|
||||
// Wait until progress streaming has completely stopped
|
||||
<-streamCh
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) Worker(
|
||||
jobs <-chan struct{},
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
req := fasthttp.AcquireRequest()
|
||||
resp := fasthttp.AcquireResponse()
|
||||
defer fasthttp.ReleaseRequest(req)
|
||||
defer fasthttp.ReleaseResponse(resp)
|
||||
|
||||
// Create script transformer for this worker (engines are not thread-safe)
|
||||
// Scripts are pre-validated in NewSarin, so this should not fail
|
||||
var scriptTransformer *script.Transformer
|
||||
if !q.scriptChain.IsEmpty() {
|
||||
var err error
|
||||
scriptTransformer, err = q.scriptChain.NewTransformer()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer scriptTransformer.Close()
|
||||
}
|
||||
|
||||
requestGenerator, isDynamic := NewRequestGenerator(
|
||||
q.methods, q.requestURL, q.params, q.headers, q.cookies, q.bodies, q.values, q.fileCache, scriptTransformer,
|
||||
)
|
||||
|
||||
if q.dryRun {
|
||||
switch {
|
||||
case q.collectStats && isDynamic:
|
||||
q.workerDryRunStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
|
||||
case q.collectStats && !isDynamic:
|
||||
q.workerDryRunStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
|
||||
case !q.collectStats && isDynamic:
|
||||
q.workerDryRunNoStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
|
||||
default:
|
||||
q.workerDryRunNoStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
|
||||
}
|
||||
} else {
|
||||
switch {
|
||||
case q.collectStats && isDynamic:
|
||||
q.workerStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
|
||||
case q.collectStats && !isDynamic:
|
||||
q.workerStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
|
||||
case !q.collectStats && isDynamic:
|
||||
q.workerNoStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
|
||||
default:
|
||||
q.workerNoStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
resp.Reset()
|
||||
|
||||
if err := requestGenerator(req); err != nil {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
|
||||
startTime := time.Now()
|
||||
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
if err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
} else {
|
||||
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
// Static request generation failed - record all jobs as errors
|
||||
for range jobs {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
resp.Reset()
|
||||
|
||||
startTime := time.Now()
|
||||
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
if err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
} else {
|
||||
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
resp.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerNoStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
|
||||
// Static request generation failed - just count the jobs without sending
|
||||
for range jobs {
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
resp.Reset()
|
||||
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
const dryRunResponseKey = "dry-run"
|
||||
|
||||
// statusCodeStrings contains pre-computed string representations for HTTP status codes 100-599.
|
||||
var statusCodeStrings = func() map[int]string {
|
||||
m := make(map[int]string, 500)
|
||||
for i := 100; i < 600; i++ {
|
||||
m[i] = strconv.Itoa(i)
|
||||
}
|
||||
return m
|
||||
}()
|
||||
|
||||
// statusCodeToString returns a string representation of the HTTP status code.
|
||||
// Uses a pre-computed map for codes 100-599, falls back to strconv.Itoa for others.
|
||||
func statusCodeToString(code int) string {
|
||||
if s, ok := statusCodeStrings[code]; ok {
|
||||
return s
|
||||
}
|
||||
return strconv.Itoa(code)
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
startTime := time.Now()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
q.responses.Add(err.Error(), time.Since(startTime))
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
q.responses.Add(dryRunResponseKey, time.Since(startTime))
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
// Static request generation failed - record all jobs as errors
|
||||
for range jobs {
|
||||
q.responses.Add(err.Error(), 0)
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
q.responses.Add(dryRunResponseKey, 0)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
// newHostClients initializes HTTP clients for the given configuration.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func newHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies types.Proxies,
|
||||
workers uint,
|
||||
requestURL *url.URL,
|
||||
skipCertVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
proxiesRaw := make([]url.URL, len(proxies))
|
||||
for i, proxy := range proxies {
|
||||
proxiesRaw[i] = url.URL(proxy)
|
||||
}
|
||||
|
||||
maxConns := max(fasthttp.DefaultMaxConnsPerHost, workers)
|
||||
maxConns = ((maxConns * 50 / 100) + maxConns)
|
||||
return NewHostClients(
|
||||
ctx,
|
||||
timeout,
|
||||
proxiesRaw,
|
||||
maxConns,
|
||||
requestURL,
|
||||
skipCertVerify,
|
||||
)
|
||||
}
|
||||
|
||||
func (q sarin) startWorkers(wg *sync.WaitGroup, jobs <-chan struct{}, hostClients []*fasthttp.HostClient, counter *atomic.Uint64, sendMessage messageSender) {
|
||||
for range max(q.workers, 1) {
|
||||
wg.Go(func() {
|
||||
q.Worker(jobs, NewHostClientGenerator(hostClients...), counter, sendMessage)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) setupDurationTimeout(ctx context.Context, cancel context.CancelFunc) {
|
||||
if q.totalDuration != nil {
|
||||
go func() {
|
||||
timer := time.NewTimer(*q.totalDuration)
|
||||
defer timer.Stop()
|
||||
select {
|
||||
case <-timer.C:
|
||||
cancel()
|
||||
case <-ctx.Done():
|
||||
// Context cancelled, cleanup
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) sendJobs(ctx context.Context, jobs chan<- struct{}) {
|
||||
if q.totalRequests != nil && *q.totalRequests > 0 {
|
||||
for range *q.totalRequests {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
} else {
|
||||
for ctx.Err() == nil {
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type tickMsg time.Time
|
||||
|
||||
var (
|
||||
helpStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#d1d1d1"))
|
||||
errorStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#FC5B5B")).Bold(true)
|
||||
warningStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#FFD93D")).Bold(true)
|
||||
messageChannelStyle = lipgloss.NewStyle().
|
||||
Border(lipgloss.ThickBorder(), false, false, false, true).
|
||||
BorderForeground(lipgloss.Color("#757575")).
|
||||
PaddingLeft(1).
|
||||
Margin(1, 0, 0, 0).
|
||||
Foreground(lipgloss.Color("#888888"))
|
||||
)
|
||||
|
||||
type progressModel struct {
|
||||
progress progress.Model
|
||||
startTime time.Time
|
||||
messages []string
|
||||
counter *atomic.Uint64
|
||||
current uint64
|
||||
maxValue uint64
|
||||
ctx context.Context //nolint:containedctx
|
||||
cancel context.CancelFunc
|
||||
cancelling bool
|
||||
}
|
||||
|
||||
func (m progressModel) Init() tea.Cmd {
|
||||
return tea.Batch(progressTickCmd())
|
||||
}
|
||||
|
||||
func (m progressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
if msg.Type == tea.KeyCtrlC {
|
||||
m.cancelling = true
|
||||
m.cancel()
|
||||
}
|
||||
return m, nil
|
||||
|
||||
case tea.WindowSizeMsg:
|
||||
m.progress.Width = max(10, msg.Width-1)
|
||||
if m.ctx.Err() != nil {
|
||||
return m, tea.Quit
|
||||
}
|
||||
return m, nil
|
||||
|
||||
case runtimeMessage:
|
||||
var msgBuilder strings.Builder
|
||||
msgBuilder.WriteString("[")
|
||||
msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
|
||||
msgBuilder.WriteString("] ")
|
||||
switch msg.level {
|
||||
case runtimeMessageLevelError:
|
||||
msgBuilder.WriteString(errorStyle.Render("ERROR: "))
|
||||
case runtimeMessageLevelWarning:
|
||||
msgBuilder.WriteString(warningStyle.Render("WARNING: "))
|
||||
}
|
||||
msgBuilder.WriteString(msg.text)
|
||||
m.messages = append(m.messages[1:], msgBuilder.String())
|
||||
if m.ctx.Err() != nil {
|
||||
return m, tea.Quit
|
||||
}
|
||||
return m, nil
|
||||
|
||||
case tickMsg:
|
||||
if m.ctx.Err() != nil {
|
||||
return m, tea.Quit
|
||||
}
|
||||
return m, progressTickCmd()
|
||||
|
||||
default:
|
||||
if m.ctx.Err() != nil {
|
||||
return m, tea.Quit
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (m progressModel) View() string {
|
||||
var messagesBuilder strings.Builder
|
||||
for i, msg := range m.messages {
|
||||
if len(msg) > 0 {
|
||||
messagesBuilder.WriteString(msg)
|
||||
if i < len(m.messages)-1 {
|
||||
messagesBuilder.WriteString("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var finalBuilder strings.Builder
|
||||
if messagesBuilder.Len() > 0 {
|
||||
finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
|
||||
finalBuilder.WriteString("\n")
|
||||
}
|
||||
|
||||
m.current = m.counter.Load()
|
||||
finalBuilder.WriteString("\n ")
|
||||
finalBuilder.WriteString(strconv.FormatUint(m.current, 10))
|
||||
finalBuilder.WriteString("/")
|
||||
finalBuilder.WriteString(strconv.FormatUint(m.maxValue, 10))
|
||||
finalBuilder.WriteString(" - ")
|
||||
finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
|
||||
finalBuilder.WriteString("\n ")
|
||||
finalBuilder.WriteString(m.progress.ViewAs(float64(m.current) / float64(m.maxValue)))
|
||||
finalBuilder.WriteString("\n\n ")
|
||||
if m.cancelling {
|
||||
finalBuilder.WriteString(helpStyle.Render("Stopping..."))
|
||||
} else {
|
||||
finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
|
||||
}
|
||||
return finalBuilder.String()
|
||||
}
|
||||
|
||||
func progressTickCmd() tea.Cmd {
|
||||
return tea.Tick(time.Millisecond*250, func(t time.Time) tea.Msg {
|
||||
return tickMsg(t)
|
||||
})
|
||||
}
|
||||
|
||||
var infiniteProgressStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#00D4FF"))
|
||||
|
||||
type infiniteProgressModel struct {
|
||||
spinner spinner.Model
|
||||
startTime time.Time
|
||||
counter *atomic.Uint64
|
||||
messages []string
|
||||
ctx context.Context //nolint:containedctx
|
||||
quit bool
|
||||
cancel context.CancelFunc
|
||||
cancelling bool
|
||||
}
|
||||
|
||||
func (m infiniteProgressModel) Init() tea.Cmd {
|
||||
return m.spinner.Tick
|
||||
}
|
||||
|
||||
func (m infiniteProgressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
if msg.Type == tea.KeyCtrlC {
|
||||
m.cancelling = true
|
||||
m.cancel()
|
||||
}
|
||||
return m, nil
|
||||
|
||||
case runtimeMessage:
|
||||
var msgBuilder strings.Builder
|
||||
msgBuilder.WriteString("[")
|
||||
msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
|
||||
msgBuilder.WriteString("] ")
|
||||
switch msg.level {
|
||||
case runtimeMessageLevelError:
|
||||
msgBuilder.WriteString(errorStyle.Render("ERROR: "))
|
||||
case runtimeMessageLevelWarning:
|
||||
msgBuilder.WriteString(warningStyle.Render("WARNING: "))
|
||||
}
|
||||
msgBuilder.WriteString(msg.text)
|
||||
m.messages = append(m.messages[1:], msgBuilder.String())
|
||||
if m.ctx.Err() != nil {
|
||||
m.quit = true
|
||||
return m, tea.Quit
|
||||
}
|
||||
return m, nil
|
||||
|
||||
default:
|
||||
if m.ctx.Err() != nil {
|
||||
m.quit = true
|
||||
return m, tea.Quit
|
||||
}
|
||||
var cmd tea.Cmd
|
||||
m.spinner, cmd = m.spinner.Update(msg)
|
||||
return m, cmd
|
||||
}
|
||||
}
|
||||
|
||||
func (m infiniteProgressModel) View() string {
|
||||
var messagesBuilder strings.Builder
|
||||
for i, msg := range m.messages {
|
||||
if len(msg) > 0 {
|
||||
messagesBuilder.WriteString(msg)
|
||||
if i < len(m.messages)-1 {
|
||||
messagesBuilder.WriteString("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var finalBuilder strings.Builder
|
||||
if messagesBuilder.Len() > 0 {
|
||||
finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
|
||||
finalBuilder.WriteString("\n")
|
||||
}
|
||||
|
||||
if m.quit {
|
||||
finalBuilder.WriteString("\n ")
|
||||
finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
|
||||
finalBuilder.WriteString(" ")
|
||||
finalBuilder.WriteString(infiniteProgressStyle.Render("∙∙∙∙∙"))
|
||||
finalBuilder.WriteString(" ")
|
||||
finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
|
||||
finalBuilder.WriteString("\n\n")
|
||||
} else {
|
||||
finalBuilder.WriteString("\n ")
|
||||
finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
|
||||
finalBuilder.WriteString(" ")
|
||||
finalBuilder.WriteString(m.spinner.View())
|
||||
finalBuilder.WriteString(" ")
|
||||
finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
|
||||
finalBuilder.WriteString("\n\n ")
|
||||
if m.cancelling {
|
||||
finalBuilder.WriteString(helpStyle.Render("Stopping..."))
|
||||
} else {
|
||||
finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
|
||||
}
|
||||
}
|
||||
return finalBuilder.String()
|
||||
}
|
||||
|
||||
func (q sarin) streamProgress(
|
||||
ctx context.Context,
|
||||
cancel context.CancelFunc,
|
||||
done chan<- struct{},
|
||||
total uint64,
|
||||
counter *atomic.Uint64,
|
||||
messageChannel <-chan runtimeMessage,
|
||||
) {
|
||||
var program *tea.Program
|
||||
if total > 0 {
|
||||
model := progressModel{
|
||||
progress: progress.New(progress.WithGradient("#151594", "#00D4FF")),
|
||||
startTime: time.Now(),
|
||||
messages: make([]string, 8),
|
||||
counter: counter,
|
||||
current: 0,
|
||||
maxValue: total,
|
||||
ctx: ctx,
|
||||
cancel: cancel,
|
||||
}
|
||||
|
||||
program = tea.NewProgram(model)
|
||||
} else {
|
||||
model := infiniteProgressModel{
|
||||
spinner: spinner.New(
|
||||
spinner.WithSpinner(
|
||||
spinner.Spinner{
|
||||
Frames: []string{
|
||||
"●∙∙∙∙",
|
||||
"∙●∙∙∙",
|
||||
"∙∙●∙∙",
|
||||
"∙∙∙●∙",
|
||||
"∙∙∙∙●",
|
||||
"∙∙∙●∙",
|
||||
"∙∙●∙∙",
|
||||
"∙●∙∙∙",
|
||||
},
|
||||
FPS: time.Second / 8, //nolint:mnd
|
||||
},
|
||||
),
|
||||
spinner.WithStyle(infiniteProgressStyle),
|
||||
),
|
||||
startTime: time.Now(),
|
||||
counter: counter,
|
||||
messages: make([]string, 8),
|
||||
ctx: ctx,
|
||||
cancel: cancel,
|
||||
quit: false,
|
||||
}
|
||||
|
||||
program = tea.NewProgram(model)
|
||||
}
|
||||
|
||||
go func() {
|
||||
for msg := range messageChannel {
|
||||
program.Send(msg)
|
||||
}
|
||||
}()
|
||||
|
||||
if _, err := program.Run(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
done <- struct{}{}
|
||||
}
|
||||
647
internal/sarin/template.go
Normal file
647
internal/sarin/template.go
Normal file
@@ -0,0 +1,647 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"math/rand/v2"
|
||||
"mime/multipart"
|
||||
"strings"
|
||||
"text/template"
|
||||
"text/template/parse"
|
||||
"time"
|
||||
|
||||
"github.com/brianvoe/gofakeit/v7"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
func NewDefaultTemplateFuncMap(randSource rand.Source, fileCache *FileCache) template.FuncMap {
|
||||
fakeit := gofakeit.NewFaker(randSource, false)
|
||||
|
||||
return template.FuncMap{
|
||||
// Strings
|
||||
"strings_ToUpper": strings.ToUpper,
|
||||
"strings_ToLower": strings.ToLower,
|
||||
"strings_RemoveSpaces": func(s string) string { return strings.ReplaceAll(s, " ", "") },
|
||||
"strings_Replace": strings.Replace,
|
||||
"strings_ToDate": func(dateString string) time.Time {
|
||||
date, err := time.Parse("2006-01-02", dateString)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return date
|
||||
},
|
||||
"strings_First": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n])
|
||||
},
|
||||
"strings_Last": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[len(runes)-n:])
|
||||
},
|
||||
"strings_Truncate": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return "..."
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n]) + "..."
|
||||
},
|
||||
"strings_TrimPrefix": strings.TrimPrefix,
|
||||
"strings_TrimSuffix": strings.TrimSuffix,
|
||||
// Dict
|
||||
"dict_Str": func(values ...string) map[string]string {
|
||||
dict := make(map[string]string)
|
||||
for i := 0; i < len(values); i += 2 {
|
||||
if i+1 < len(values) {
|
||||
key := values[i]
|
||||
value := values[i+1]
|
||||
dict[key] = value
|
||||
}
|
||||
}
|
||||
return dict
|
||||
},
|
||||
|
||||
// Slice
|
||||
"slice_Str": func(values ...string) []string { return values },
|
||||
"slice_Int": func(values ...int) []int { return values },
|
||||
"slice_Uint": func(values ...uint) []uint { return values },
|
||||
"slice_Join": strings.Join,
|
||||
|
||||
// File
|
||||
// file_Base64 reads a file (local or remote URL) and returns its Base64 encoded content.
|
||||
// Usage: {{ file_Base64 "/path/to/file.pdf" }}
|
||||
// {{ file_Base64 "https://example.com/image.png" }}
|
||||
"file_Base64": func(source string) (string, error) {
|
||||
if fileCache == nil {
|
||||
return "", types.ErrFileCacheNotInitialized
|
||||
}
|
||||
cached, err := fileCache.GetOrLoad(source)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(cached.Content), nil
|
||||
},
|
||||
|
||||
// Fakeit / File
|
||||
// "fakeit_CSV": fakeit.CSV(nil),
|
||||
// "fakeit_JSON": fakeit.JSON(nil),
|
||||
// "fakeit_XML": fakeit.XML(nil),
|
||||
"fakeit_FileExtension": fakeit.FileExtension,
|
||||
"fakeit_FileMimeType": fakeit.FileMimeType,
|
||||
|
||||
// Fakeit / ID
|
||||
"fakeit_ID": fakeit.ID,
|
||||
"fakeit_UUID": fakeit.UUID,
|
||||
|
||||
// Fakeit / Template
|
||||
// "fakeit_Template": fakeit.Template(nil) (string, error),
|
||||
// "fakeit_Markdown": fakeit.Markdown(nil) (string, error),
|
||||
// "fakeit_EmailText": fakeit.EmailText(nil) (string, error),
|
||||
// "fakeit_FixedWidth": fakeit.FixedWidth(nil) (string, error),
|
||||
|
||||
// Fakeit / Product
|
||||
// "fakeit_Product": fakeit.Product() *ProductInfo,
|
||||
"fakeit_ProductName": fakeit.ProductName,
|
||||
"fakeit_ProductDescription": fakeit.ProductDescription,
|
||||
"fakeit_ProductCategory": fakeit.ProductCategory,
|
||||
"fakeit_ProductFeature": fakeit.ProductFeature,
|
||||
"fakeit_ProductMaterial": fakeit.ProductMaterial,
|
||||
"fakeit_ProductUPC": fakeit.ProductUPC,
|
||||
"fakeit_ProductAudience": fakeit.ProductAudience,
|
||||
"fakeit_ProductDimension": fakeit.ProductDimension,
|
||||
"fakeit_ProductUseCase": fakeit.ProductUseCase,
|
||||
"fakeit_ProductBenefit": fakeit.ProductBenefit,
|
||||
"fakeit_ProductSuffix": fakeit.ProductSuffix,
|
||||
"fakeit_ProductISBN": func() string { return fakeit.ProductISBN(nil) },
|
||||
|
||||
// Fakeit / Person
|
||||
// "fakeit_Person": fakeit.Person() *PersonInfo,
|
||||
"fakeit_Name": fakeit.Name,
|
||||
"fakeit_NamePrefix": fakeit.NamePrefix,
|
||||
"fakeit_NameSuffix": fakeit.NameSuffix,
|
||||
"fakeit_FirstName": fakeit.FirstName,
|
||||
"fakeit_MiddleName": fakeit.MiddleName,
|
||||
"fakeit_LastName": fakeit.LastName,
|
||||
"fakeit_Gender": fakeit.Gender,
|
||||
"fakeit_Age": fakeit.Age,
|
||||
"fakeit_Ethnicity": fakeit.Ethnicity,
|
||||
"fakeit_SSN": fakeit.SSN,
|
||||
"fakeit_EIN": fakeit.EIN,
|
||||
"fakeit_Hobby": fakeit.Hobby,
|
||||
// "fakeit_Contact": fakeit.Contact() *ContactInfo,
|
||||
"fakeit_Email": fakeit.Email,
|
||||
"fakeit_Phone": fakeit.Phone,
|
||||
"fakeit_PhoneFormatted": fakeit.PhoneFormatted,
|
||||
// "fakeit_Teams": fakeit.Teams(peopleArray []string, teamsArray []string) map[string][]string,
|
||||
|
||||
// Fakeit / Generate
|
||||
// "fakeit_Struct": fakeit.Struct(v any),
|
||||
// "fakeit_Slice": fakeit.Slice(v any),
|
||||
// "fakeit_Map": fakeit.Map() map[string]any,
|
||||
// "fakeit_Generate": fakeit.Generate(value string) string,
|
||||
"fakeit_Regex": fakeit.Regex,
|
||||
|
||||
// Fakeit / Auth
|
||||
"fakeit_Username": fakeit.Username,
|
||||
"fakeit_Password": fakeit.Password,
|
||||
|
||||
// Fakeit / Address
|
||||
// "fakeit_Address": fakeit.Address() *AddressInfo,
|
||||
"fakeit_City": fakeit.City,
|
||||
"fakeit_Country": fakeit.Country,
|
||||
"fakeit_CountryAbr": fakeit.CountryAbr,
|
||||
"fakeit_State": fakeit.State,
|
||||
"fakeit_StateAbr": fakeit.StateAbr,
|
||||
"fakeit_Street": fakeit.Street,
|
||||
"fakeit_StreetName": fakeit.StreetName,
|
||||
"fakeit_StreetNumber": fakeit.StreetNumber,
|
||||
"fakeit_StreetPrefix": fakeit.StreetPrefix,
|
||||
"fakeit_StreetSuffix": fakeit.StreetSuffix,
|
||||
"fakeit_Unit": fakeit.Unit,
|
||||
"fakeit_Zip": fakeit.Zip,
|
||||
"fakeit_Latitude": fakeit.Latitude,
|
||||
"fakeit_LatitudeInRange": func(minLatitude, maxLatitude float64) float64 {
|
||||
value, err := fakeit.LatitudeInRange(minLatitude, maxLatitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
"fakeit_Longitude": fakeit.Longitude,
|
||||
"fakeit_LongitudeInRange": func(minLongitude, maxLongitude float64) float64 {
|
||||
value, err := fakeit.LongitudeInRange(minLongitude, maxLongitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
|
||||
// Fakeit / Game
|
||||
"fakeit_Gamertag": fakeit.Gamertag,
|
||||
// "fakeit_Dice": fakeit.Dice(numDice uint, sides []uint) []uint,
|
||||
|
||||
// Fakeit / Beer
|
||||
"fakeit_BeerAlcohol": fakeit.BeerAlcohol,
|
||||
"fakeit_BeerBlg": fakeit.BeerBlg,
|
||||
"fakeit_BeerHop": fakeit.BeerHop,
|
||||
"fakeit_BeerIbu": fakeit.BeerIbu,
|
||||
"fakeit_BeerMalt": fakeit.BeerMalt,
|
||||
"fakeit_BeerName": fakeit.BeerName,
|
||||
"fakeit_BeerStyle": fakeit.BeerStyle,
|
||||
"fakeit_BeerYeast": fakeit.BeerYeast,
|
||||
|
||||
// Fakeit / Car
|
||||
// "fakeit_Car": fakeit.Car() *CarInfo,
|
||||
"fakeit_CarMaker": fakeit.CarMaker,
|
||||
"fakeit_CarModel": fakeit.CarModel,
|
||||
"fakeit_CarType": fakeit.CarType,
|
||||
"fakeit_CarFuelType": fakeit.CarFuelType,
|
||||
"fakeit_CarTransmissionType": fakeit.CarTransmissionType,
|
||||
|
||||
// Fakeit / Words
|
||||
// Nouns
|
||||
"fakeit_Noun": fakeit.Noun,
|
||||
"fakeit_NounCommon": fakeit.NounCommon,
|
||||
"fakeit_NounConcrete": fakeit.NounConcrete,
|
||||
"fakeit_NounAbstract": fakeit.NounAbstract,
|
||||
"fakeit_NounCollectivePeople": fakeit.NounCollectivePeople,
|
||||
"fakeit_NounCollectiveAnimal": fakeit.NounCollectiveAnimal,
|
||||
"fakeit_NounCollectiveThing": fakeit.NounCollectiveThing,
|
||||
"fakeit_NounCountable": fakeit.NounCountable,
|
||||
"fakeit_NounUncountable": fakeit.NounUncountable,
|
||||
|
||||
// Verbs
|
||||
"fakeit_Verb": fakeit.Verb,
|
||||
"fakeit_VerbAction": fakeit.VerbAction,
|
||||
"fakeit_VerbLinking": fakeit.VerbLinking,
|
||||
"fakeit_VerbHelping": fakeit.VerbHelping,
|
||||
|
||||
// Adverbs
|
||||
"fakeit_Adverb": fakeit.Adverb,
|
||||
"fakeit_AdverbManner": fakeit.AdverbManner,
|
||||
"fakeit_AdverbDegree": fakeit.AdverbDegree,
|
||||
"fakeit_AdverbPlace": fakeit.AdverbPlace,
|
||||
"fakeit_AdverbTimeDefinite": fakeit.AdverbTimeDefinite,
|
||||
"fakeit_AdverbTimeIndefinite": fakeit.AdverbTimeIndefinite,
|
||||
"fakeit_AdverbFrequencyDefinite": fakeit.AdverbFrequencyDefinite,
|
||||
"fakeit_AdverbFrequencyIndefinite": fakeit.AdverbFrequencyIndefinite,
|
||||
|
||||
// Propositions
|
||||
"fakeit_Preposition": fakeit.Preposition,
|
||||
"fakeit_PrepositionSimple": fakeit.PrepositionSimple,
|
||||
"fakeit_PrepositionDouble": fakeit.PrepositionDouble,
|
||||
"fakeit_PrepositionCompound": fakeit.PrepositionCompound,
|
||||
|
||||
// Adjectives
|
||||
"fakeit_Adjective": fakeit.Adjective,
|
||||
"fakeit_AdjectiveDescriptive": fakeit.AdjectiveDescriptive,
|
||||
"fakeit_AdjectiveQuantitative": fakeit.AdjectiveQuantitative,
|
||||
"fakeit_AdjectiveProper": fakeit.AdjectiveProper,
|
||||
"fakeit_AdjectiveDemonstrative": fakeit.AdjectiveDemonstrative,
|
||||
"fakeit_AdjectivePossessive": fakeit.AdjectivePossessive,
|
||||
"fakeit_AdjectiveInterrogative": fakeit.AdjectiveInterrogative,
|
||||
"fakeit_AdjectiveIndefinite": fakeit.AdjectiveIndefinite,
|
||||
|
||||
// Pronouns
|
||||
"fakeit_Pronoun": fakeit.Pronoun,
|
||||
"fakeit_PronounPersonal": fakeit.PronounPersonal,
|
||||
"fakeit_PronounObject": fakeit.PronounObject,
|
||||
"fakeit_PronounPossessive": fakeit.PronounPossessive,
|
||||
"fakeit_PronounReflective": fakeit.PronounReflective,
|
||||
"fakeit_PronounDemonstrative": fakeit.PronounDemonstrative,
|
||||
"fakeit_PronounInterrogative": fakeit.PronounInterrogative,
|
||||
"fakeit_PronounRelative": fakeit.PronounRelative,
|
||||
|
||||
// Connectives
|
||||
"fakeit_Connective": fakeit.Connective,
|
||||
"fakeit_ConnectiveTime": fakeit.ConnectiveTime,
|
||||
"fakeit_ConnectiveComparative": fakeit.ConnectiveComparative,
|
||||
"fakeit_ConnectiveComplaint": fakeit.ConnectiveComplaint,
|
||||
"fakeit_ConnectiveListing": fakeit.ConnectiveListing,
|
||||
"fakeit_ConnectiveCasual": fakeit.ConnectiveCasual,
|
||||
"fakeit_ConnectiveExamplify": fakeit.ConnectiveExamplify,
|
||||
|
||||
// Words
|
||||
"fakeit_Word": fakeit.Word,
|
||||
|
||||
// Text
|
||||
"fakeit_Sentence": fakeit.Sentence,
|
||||
"fakeit_Paragraph": fakeit.Paragraph,
|
||||
"fakeit_LoremIpsumWord": fakeit.LoremIpsumWord,
|
||||
"fakeit_LoremIpsumSentence": fakeit.LoremIpsumSentence,
|
||||
"fakeit_LoremIpsumParagraph": fakeit.LoremIpsumParagraph,
|
||||
"fakeit_Question": fakeit.Question,
|
||||
"fakeit_Quote": fakeit.Quote,
|
||||
"fakeit_Phrase": fakeit.Phrase,
|
||||
|
||||
// Fakeit / Foods
|
||||
"fakeit_Fruit": fakeit.Fruit,
|
||||
"fakeit_Vegetable": fakeit.Vegetable,
|
||||
"fakeit_Breakfast": fakeit.Breakfast,
|
||||
"fakeit_Lunch": fakeit.Lunch,
|
||||
"fakeit_Dinner": fakeit.Dinner,
|
||||
"fakeit_Snack": fakeit.Snack,
|
||||
"fakeit_Dessert": fakeit.Dessert,
|
||||
|
||||
// Fakeit / Misc
|
||||
"fakeit_Bool": fakeit.Bool,
|
||||
// "fakeit_Weighted": fakeit.Weighted(options []any, weights []float32) (any, error),
|
||||
"fakeit_FlipACoin": fakeit.FlipACoin,
|
||||
// "fakeit_RandomMapKey": fakeit.RandomMapKey(mapI any) any,
|
||||
// "fakeit_ShuffleAnySlice": fakeit.ShuffleAnySlice(v any),
|
||||
|
||||
// Fakeit / Colors
|
||||
"fakeit_Color": fakeit.Color,
|
||||
"fakeit_HexColor": fakeit.HexColor,
|
||||
"fakeit_RGBColor": fakeit.RGBColor,
|
||||
"fakeit_SafeColor": fakeit.SafeColor,
|
||||
"fakeit_NiceColors": fakeit.NiceColors,
|
||||
|
||||
// Fakeit / Images
|
||||
// "fakeit_Image": fakeit.Image(width int, height int) *img.RGBA,
|
||||
"fakeit_ImageJpeg": fakeit.ImageJpeg,
|
||||
"fakeit_ImagePng": fakeit.ImagePng,
|
||||
|
||||
// Fakeit / Internet
|
||||
"fakeit_URL": fakeit.URL,
|
||||
"fakeit_UrlSlug": fakeit.UrlSlug,
|
||||
"fakeit_DomainName": fakeit.DomainName,
|
||||
"fakeit_DomainSuffix": fakeit.DomainSuffix,
|
||||
"fakeit_IPv4Address": fakeit.IPv4Address,
|
||||
"fakeit_IPv6Address": fakeit.IPv6Address,
|
||||
"fakeit_MacAddress": fakeit.MacAddress,
|
||||
"fakeit_HTTPStatusCode": fakeit.HTTPStatusCode,
|
||||
"fakeit_HTTPStatusCodeSimple": fakeit.HTTPStatusCodeSimple,
|
||||
"fakeit_LogLevel": fakeit.LogLevel,
|
||||
"fakeit_HTTPMethod": fakeit.HTTPMethod,
|
||||
"fakeit_HTTPVersion": fakeit.HTTPVersion,
|
||||
"fakeit_UserAgent": fakeit.UserAgent,
|
||||
"fakeit_ChromeUserAgent": fakeit.ChromeUserAgent,
|
||||
"fakeit_FirefoxUserAgent": fakeit.FirefoxUserAgent,
|
||||
"fakeit_OperaUserAgent": fakeit.OperaUserAgent,
|
||||
"fakeit_SafariUserAgent": fakeit.SafariUserAgent,
|
||||
"fakeit_APIUserAgent": fakeit.APIUserAgent,
|
||||
|
||||
// Fakeit / HTML
|
||||
"fakeit_InputName": fakeit.InputName,
|
||||
"fakeit_Svg": func() string { return fakeit.Svg(nil) },
|
||||
|
||||
// Fakeit / Date/Time
|
||||
"fakeit_Date": fakeit.Date,
|
||||
"fakeit_PastDate": fakeit.PastDate,
|
||||
"fakeit_FutureDate": fakeit.FutureDate,
|
||||
"fakeit_DateRange": fakeit.DateRange,
|
||||
"fakeit_NanoSecond": fakeit.NanoSecond,
|
||||
"fakeit_Second": fakeit.Second,
|
||||
"fakeit_Minute": fakeit.Minute,
|
||||
"fakeit_Hour": fakeit.Hour,
|
||||
"fakeit_Month": fakeit.Month,
|
||||
"fakeit_MonthString": fakeit.MonthString,
|
||||
"fakeit_Day": fakeit.Day,
|
||||
"fakeit_WeekDay": fakeit.WeekDay,
|
||||
"fakeit_Year": fakeit.Year,
|
||||
"fakeit_TimeZone": fakeit.TimeZone,
|
||||
"fakeit_TimeZoneAbv": fakeit.TimeZoneAbv,
|
||||
"fakeit_TimeZoneFull": fakeit.TimeZoneFull,
|
||||
"fakeit_TimeZoneOffset": fakeit.TimeZoneOffset,
|
||||
"fakeit_TimeZoneRegion": fakeit.TimeZoneRegion,
|
||||
|
||||
// Fakeit / Payment
|
||||
"fakeit_Price": fakeit.Price,
|
||||
// "fakeit_CreditCard": fakeit.CreditCard() *CreditCardInfo,
|
||||
"fakeit_CreditCardCvv": fakeit.CreditCardCvv,
|
||||
"fakeit_CreditCardExp": fakeit.CreditCardExp,
|
||||
"fakeit_CreditCardNumber": func(gaps bool) string {
|
||||
return fakeit.CreditCardNumber(&gofakeit.CreditCardOptions{Gaps: gaps})
|
||||
},
|
||||
"fakeit_CreditCardType": fakeit.CreditCardType,
|
||||
// "fakeit_Currency": fakeit.Currency() *CurrencyInfo,
|
||||
"fakeit_CurrencyLong": fakeit.CurrencyLong,
|
||||
"fakeit_CurrencyShort": fakeit.CurrencyShort,
|
||||
"fakeit_AchRouting": fakeit.AchRouting,
|
||||
"fakeit_AchAccount": fakeit.AchAccount,
|
||||
"fakeit_BitcoinAddress": fakeit.BitcoinAddress,
|
||||
"fakeit_BitcoinPrivateKey": fakeit.BitcoinPrivateKey,
|
||||
"fakeit_BankName": fakeit.BankName,
|
||||
"fakeit_BankType": fakeit.BankType,
|
||||
|
||||
// Fakeit / Finance
|
||||
"fakeit_Cusip": fakeit.Cusip,
|
||||
"fakeit_Isin": fakeit.Isin,
|
||||
|
||||
// Fakeit / Company
|
||||
"fakeit_BS": fakeit.BS,
|
||||
"fakeit_Blurb": fakeit.Blurb,
|
||||
"fakeit_BuzzWord": fakeit.BuzzWord,
|
||||
"fakeit_Company": fakeit.Company,
|
||||
"fakeit_CompanySuffix": fakeit.CompanySuffix,
|
||||
// "fakeit_Job": fakeit.Job() *JobInfo,
|
||||
"fakeit_JobDescriptor": fakeit.JobDescriptor,
|
||||
"fakeit_JobLevel": fakeit.JobLevel,
|
||||
"fakeit_JobTitle": fakeit.JobTitle,
|
||||
"fakeit_Slogan": fakeit.Slogan,
|
||||
|
||||
// Fakeit / Hacker
|
||||
"fakeit_HackerAbbreviation": fakeit.HackerAbbreviation,
|
||||
"fakeit_HackerAdjective": fakeit.HackerAdjective,
|
||||
"fakeit_HackeringVerb": fakeit.HackeringVerb,
|
||||
"fakeit_HackerNoun": fakeit.HackerNoun,
|
||||
"fakeit_HackerPhrase": fakeit.HackerPhrase,
|
||||
"fakeit_HackerVerb": fakeit.HackerVerb,
|
||||
|
||||
// Fakeit / Hipster
|
||||
"fakeit_HipsterWord": fakeit.HipsterWord,
|
||||
"fakeit_HipsterSentence": fakeit.HipsterSentence,
|
||||
"fakeit_HipsterParagraph": fakeit.HipsterParagraph,
|
||||
|
||||
// Fakeit / App
|
||||
"fakeit_AppName": fakeit.AppName,
|
||||
"fakeit_AppVersion": fakeit.AppVersion,
|
||||
"fakeit_AppAuthor": fakeit.AppAuthor,
|
||||
|
||||
// Fakeit / Animal
|
||||
"fakeit_PetName": fakeit.PetName,
|
||||
"fakeit_Animal": fakeit.Animal,
|
||||
"fakeit_AnimalType": fakeit.AnimalType,
|
||||
"fakeit_FarmAnimal": fakeit.FarmAnimal,
|
||||
"fakeit_Cat": fakeit.Cat,
|
||||
"fakeit_Dog": fakeit.Dog,
|
||||
"fakeit_Bird": fakeit.Bird,
|
||||
|
||||
// Fakeit / Emoji
|
||||
"fakeit_Emoji": fakeit.Emoji,
|
||||
"fakeit_EmojiCategory": fakeit.EmojiCategory,
|
||||
"fakeit_EmojiAlias": fakeit.EmojiAlias,
|
||||
"fakeit_EmojiTag": fakeit.EmojiTag,
|
||||
"fakeit_EmojiFlag": fakeit.EmojiFlag,
|
||||
"fakeit_EmojiAnimal": fakeit.EmojiAnimal,
|
||||
"fakeit_EmojiFood": fakeit.EmojiFood,
|
||||
"fakeit_EmojiPlant": fakeit.EmojiPlant,
|
||||
"fakeit_EmojiMusic": fakeit.EmojiMusic,
|
||||
"fakeit_EmojiVehicle": fakeit.EmojiVehicle,
|
||||
"fakeit_EmojiSport": fakeit.EmojiSport,
|
||||
"fakeit_EmojiFace": fakeit.EmojiFace,
|
||||
"fakeit_EmojiHand": fakeit.EmojiHand,
|
||||
"fakeit_EmojiClothing": fakeit.EmojiClothing,
|
||||
"fakeit_EmojiLandmark": fakeit.EmojiLandmark,
|
||||
"fakeit_EmojiElectronics": fakeit.EmojiElectronics,
|
||||
"fakeit_EmojiGame": fakeit.EmojiGame,
|
||||
"fakeit_EmojiTools": fakeit.EmojiTools,
|
||||
"fakeit_EmojiWeather": fakeit.EmojiWeather,
|
||||
"fakeit_EmojiJob": fakeit.EmojiJob,
|
||||
"fakeit_EmojiPerson": fakeit.EmojiPerson,
|
||||
"fakeit_EmojiGesture": fakeit.EmojiGesture,
|
||||
"fakeit_EmojiCostume": fakeit.EmojiCostume,
|
||||
"fakeit_EmojiSentence": fakeit.EmojiSentence,
|
||||
|
||||
// Fakeit / Language
|
||||
"fakeit_Language": fakeit.Language,
|
||||
"fakeit_LanguageAbbreviation": fakeit.LanguageAbbreviation,
|
||||
"fakeit_ProgrammingLanguage": fakeit.ProgrammingLanguage,
|
||||
|
||||
// Fakeit / Number
|
||||
"fakeit_Number": fakeit.Number,
|
||||
"fakeit_Int": fakeit.Int,
|
||||
"fakeit_IntN": fakeit.IntN,
|
||||
"fakeit_Int8": fakeit.Int8,
|
||||
"fakeit_Int16": fakeit.Int16,
|
||||
"fakeit_Int32": fakeit.Int32,
|
||||
"fakeit_Int64": fakeit.Int64,
|
||||
"fakeit_Uint": fakeit.Uint,
|
||||
"fakeit_UintN": fakeit.UintN,
|
||||
"fakeit_Uint8": fakeit.Uint8,
|
||||
"fakeit_Uint16": fakeit.Uint16,
|
||||
"fakeit_Uint32": fakeit.Uint32,
|
||||
"fakeit_Uint64": fakeit.Uint64,
|
||||
"fakeit_Float32": fakeit.Float32,
|
||||
"fakeit_Float32Range": fakeit.Float32Range,
|
||||
"fakeit_Float64": fakeit.Float64,
|
||||
"fakeit_Float64Range": fakeit.Float64Range,
|
||||
// "fakeit_ShuffleInts": fakeit.ShuffleInts,
|
||||
"fakeit_RandomInt": fakeit.RandomInt,
|
||||
"fakeit_HexUint": fakeit.HexUint,
|
||||
|
||||
// Fakeit / String
|
||||
"fakeit_Digit": fakeit.Digit,
|
||||
"fakeit_DigitN": fakeit.DigitN,
|
||||
"fakeit_Letter": fakeit.Letter,
|
||||
"fakeit_LetterN": fakeit.LetterN,
|
||||
"fakeit_Lexify": fakeit.Lexify,
|
||||
"fakeit_Numerify": fakeit.Numerify,
|
||||
// "fakeit_ShuffleStrings": fakeit.ShuffleStrings,
|
||||
"fakeit_RandomString": fakeit.RandomString,
|
||||
|
||||
// Fakeit / Celebrity
|
||||
"fakeit_CelebrityActor": fakeit.CelebrityActor,
|
||||
"fakeit_CelebrityBusiness": fakeit.CelebrityBusiness,
|
||||
"fakeit_CelebritySport": fakeit.CelebritySport,
|
||||
|
||||
// Fakeit / Minecraft
|
||||
"fakeit_MinecraftOre": fakeit.MinecraftOre,
|
||||
"fakeit_MinecraftWood": fakeit.MinecraftWood,
|
||||
"fakeit_MinecraftArmorTier": fakeit.MinecraftArmorTier,
|
||||
"fakeit_MinecraftArmorPart": fakeit.MinecraftArmorPart,
|
||||
"fakeit_MinecraftWeapon": fakeit.MinecraftWeapon,
|
||||
"fakeit_MinecraftTool": fakeit.MinecraftTool,
|
||||
"fakeit_MinecraftDye": fakeit.MinecraftDye,
|
||||
"fakeit_MinecraftFood": fakeit.MinecraftFood,
|
||||
"fakeit_MinecraftAnimal": fakeit.MinecraftAnimal,
|
||||
"fakeit_MinecraftVillagerJob": fakeit.MinecraftVillagerJob,
|
||||
"fakeit_MinecraftVillagerStation": fakeit.MinecraftVillagerStation,
|
||||
"fakeit_MinecraftVillagerLevel": fakeit.MinecraftVillagerLevel,
|
||||
"fakeit_MinecraftMobPassive": fakeit.MinecraftMobPassive,
|
||||
"fakeit_MinecraftMobNeutral": fakeit.MinecraftMobNeutral,
|
||||
"fakeit_MinecraftMobHostile": fakeit.MinecraftMobHostile,
|
||||
"fakeit_MinecraftMobBoss": fakeit.MinecraftMobBoss,
|
||||
"fakeit_MinecraftBiome": fakeit.MinecraftBiome,
|
||||
"fakeit_MinecraftWeather": fakeit.MinecraftWeather,
|
||||
|
||||
// Fakeit / Book
|
||||
// "fakeit_Book": fakeit.Book() *BookInfo,
|
||||
"fakeit_BookTitle": fakeit.BookTitle,
|
||||
"fakeit_BookAuthor": fakeit.BookAuthor,
|
||||
"fakeit_BookGenre": fakeit.BookGenre,
|
||||
|
||||
// Fakeit / Movie
|
||||
// "fakeit_Movie": fakeit.Movie() *MovieInfo,
|
||||
"fakeit_MovieName": fakeit.MovieName,
|
||||
"fakeit_MovieGenre": fakeit.MovieGenre,
|
||||
|
||||
// Fakeit / Error
|
||||
"fakeit_Error": func() string { return fakeit.Error().Error() },
|
||||
"fakeit_ErrorDatabase": func() string { return fakeit.ErrorDatabase().Error() },
|
||||
"fakeit_ErrorGRPC": func() string { return fakeit.ErrorGRPC().Error() },
|
||||
"fakeit_ErrorHTTP": func() string { return fakeit.ErrorHTTP().Error() },
|
||||
"fakeit_ErrorHTTPClient": func() string { return fakeit.ErrorHTTPClient().Error() },
|
||||
"fakeit_ErrorHTTPServer": func() string { return fakeit.ErrorHTTPServer().Error() },
|
||||
// "fakeit_ErrorInput": func() string { return fakeit.ErrorInput().Error() },
|
||||
"fakeit_ErrorRuntime": func() string { return fakeit.ErrorRuntime().Error() },
|
||||
|
||||
// Fakeit / School
|
||||
"fakeit_School": fakeit.School,
|
||||
|
||||
// Fakeit / Song
|
||||
// "fakeit_Song": fakeit.Song() *SongInfo,
|
||||
"fakeit_SongName": fakeit.SongName,
|
||||
"fakeit_SongArtist": fakeit.SongArtist,
|
||||
"fakeit_SongGenre": fakeit.SongGenre,
|
||||
}
|
||||
}
|
||||
|
||||
// BodyTemplateFuncMapData carries per-request state produced while rendering
// a body template. Currently it only records the multipart Content-Type
// (including the random boundary) written by the body_FormData template
// function, so the caller can set the matching request header afterwards.
type BodyTemplateFuncMapData struct {
	// formDataContenType is set by body_FormData to the value of
	// multipart.Writer.FormDataContentType(); empty when no form-data
	// body has been rendered (or after ClearFormDataContenType).
	formDataContenType string
}
|
||||
|
||||
func (data BodyTemplateFuncMapData) GetFormDataContenType() string {
|
||||
return data.formDataContenType
|
||||
}
|
||||
|
||||
func (data *BodyTemplateFuncMapData) ClearFormDataContenType() {
|
||||
data.formDataContenType = ""
|
||||
}
|
||||
|
||||
// NewDefaultBodyTemplateFuncMap returns the default template FuncMap extended
// with body-specific helpers. It starts from NewDefaultTemplateFuncMap and,
// when data is non-nil, adds the "body_FormData" function described below.
//
// Parameters:
//   - randSource: randomness source forwarded to NewDefaultTemplateFuncMap.
//   - data: receives the multipart Content-Type produced by body_FormData;
//     if nil, body_FormData is not registered at all.
//   - fileCache: used to load "@file" values; may be nil, in which case
//     body_FormData returns ErrFileCacheNotInitialized on any file reference.
func NewDefaultBodyTemplateFuncMap(
	randSource rand.Source,
	data *BodyTemplateFuncMapData,
	fileCache *FileCache,
) template.FuncMap {
	funcMap := NewDefaultTemplateFuncMap(randSource, fileCache)

	if data != nil {
		// body_FormData creates a multipart/form-data body from key-value pairs.
		// Usage: {{ body_FormData "field1" "value1" "field2" "value2" ... }}
		//
		// Values starting with "@" are treated as file references:
		//   - "@/path/to/file.txt" - local file
		//   - "@http://example.com/file" - remote file via HTTP
		//   - "@https://example.com/file" - remote file via HTTPS
		//
		// To send a literal string starting with "@", escape it with "@@":
		//   - "@@literal" sends "@literal"
		//
		// Example with mixed text and files:
		//   {{ body_FormData "name" "John" "avatar" "@/path/to/photo.jpg" "doc" "@https://example.com/file.pdf" }}
		funcMap["body_FormData"] = func(pairs ...string) (string, error) {
			// Arguments must come in key/value pairs.
			if len(pairs)%2 != 0 {
				return "", types.ErrFormDataOddArgs
			}

			var multipartData bytes.Buffer
			writer := multipart.NewWriter(&multipartData)
			// Record the Content-Type (with boundary) immediately so the
			// caller can read it via GetFormDataContenType after rendering.
			// NOTE(review): this is set even if a later step of this call
			// fails with an error — confirm callers tolerate that.
			data.formDataContenType = writer.FormDataContentType()

			for i := 0; i < len(pairs); i += 2 {
				key := pairs[i]
				val := pairs[i+1]

				// "@@" must be checked before "@": an escaped value also
				// has the single-"@" prefix.
				switch {
				case strings.HasPrefix(val, "@@"):
					// Escaped @ - send as literal string without first @
					if err := writer.WriteField(key, val[1:]); err != nil {
						return "", err
					}
				case strings.HasPrefix(val, "@"):
					// File (local path or remote URL)
					if fileCache == nil {
						return "", types.ErrFileCacheNotInitialized
					}
					source := val[1:]
					cached, err := fileCache.GetOrLoad(source)
					if err != nil {
						return "", err
					}
					part, err := writer.CreateFormFile(key, cached.Filename)
					if err != nil {
						return "", err
					}
					if _, err := part.Write(cached.Content); err != nil {
						return "", err
					}
				default:
					// Regular text field
					if err := writer.WriteField(key, val); err != nil {
						return "", err
					}
				}
			}

			// Close writes the terminating boundary; without it the body
			// is not a valid multipart message.
			if err := writer.Close(); err != nil {
				return "", err
			}
			return multipartData.String(), nil
		}
	}

	return funcMap
}
|
||||
|
||||
func hasTemplateActions(tmpl *template.Template) bool {
|
||||
if tmpl.Tree == nil || tmpl.Root == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, node := range tmpl.Root.Nodes {
|
||||
switch node.Type() {
|
||||
case parse.NodeAction, parse.NodeIf, parse.NodeRange,
|
||||
parse.NodeWith, parse.NodeTemplate:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
107
internal/script/chain.go
Normal file
107
internal/script/chain.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package script
|
||||
|
||||
import (
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// Chain holds the loaded script sources and can create engine instances.
// The sources are loaded once, but engines are created per-worker since
// they're not thread-safe.
type Chain struct {
	// luaSources are executed first by a Transformer, in slice order.
	luaSources []*Source
	// jsSources are executed after all Lua sources, in slice order.
	jsSources []*Source
}
|
||||
|
||||
// NewChain creates a new script chain from loaded sources.
|
||||
// Lua scripts run first, then JavaScript scripts, in the order provided.
|
||||
func NewChain(luaSources, jsSources []*Source) *Chain {
|
||||
return &Chain{
|
||||
luaSources: luaSources,
|
||||
jsSources: jsSources,
|
||||
}
|
||||
}
|
||||
|
||||
// IsEmpty returns true if there are no scripts to execute.
|
||||
func (c *Chain) IsEmpty() bool {
|
||||
return len(c.luaSources) == 0 && len(c.jsSources) == 0
|
||||
}
|
||||
|
||||
// Transformer holds instantiated script engines for a single worker.
// It is NOT safe for concurrent use.
type Transformer struct {
	// luaEngines are run first by Transform, in order.
	luaEngines []*LuaEngine
	// jsEngines are run after every Lua engine, in order.
	jsEngines []*JsEngine
}
|
||||
|
||||
// NewTransformer creates engine instances from the chain's sources.
|
||||
// Call this once per worker goroutine.
|
||||
// It can return the following errors:
|
||||
// - types.ScriptChainError
|
||||
func (c *Chain) NewTransformer() (*Transformer, error) {
|
||||
if c.IsEmpty() {
|
||||
return &Transformer{}, nil
|
||||
}
|
||||
|
||||
t := &Transformer{
|
||||
luaEngines: make([]*LuaEngine, 0, len(c.luaSources)),
|
||||
jsEngines: make([]*JsEngine, 0, len(c.jsSources)),
|
||||
}
|
||||
|
||||
// Create Lua engines
|
||||
for i, src := range c.luaSources {
|
||||
engine, err := NewLuaEngine(src.Content)
|
||||
if err != nil {
|
||||
t.Close() // Clean up already created engines
|
||||
return nil, types.NewScriptChainError("lua", i, err)
|
||||
}
|
||||
t.luaEngines = append(t.luaEngines, engine)
|
||||
}
|
||||
|
||||
// Create JS engines
|
||||
for i, src := range c.jsSources {
|
||||
engine, err := NewJsEngine(src.Content)
|
||||
if err != nil {
|
||||
t.Close() // Clean up already created engines
|
||||
return nil, types.NewScriptChainError("js", i, err)
|
||||
}
|
||||
t.jsEngines = append(t.jsEngines, engine)
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// Transform applies all scripts to the request data.
|
||||
// Lua scripts run first, then JavaScript scripts.
|
||||
// It can return the following errors:
|
||||
// - types.ScriptChainError
|
||||
func (t *Transformer) Transform(req *RequestData) error {
|
||||
// Run Lua scripts
|
||||
for i, engine := range t.luaEngines {
|
||||
if err := engine.Transform(req); err != nil {
|
||||
return types.NewScriptChainError("lua", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Run JS scripts
|
||||
for i, engine := range t.jsEngines {
|
||||
if err := engine.Transform(req); err != nil {
|
||||
return types.NewScriptChainError("js", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close releases all engine resources.
|
||||
func (t *Transformer) Close() {
|
||||
for _, engine := range t.luaEngines {
|
||||
engine.Close()
|
||||
}
|
||||
for _, engine := range t.jsEngines {
|
||||
engine.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// IsEmpty returns true if there are no engines.
|
||||
func (t *Transformer) IsEmpty() bool {
|
||||
return len(t.luaEngines) == 0 && len(t.jsEngines) == 0
|
||||
}
|
||||
198
internal/script/js.go
Normal file
198
internal/script/js.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package script
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/dop251/goja"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// JsEngine implements the Engine interface using goja (JavaScript).
// Engines are created per-worker (see Chain) and are not shared.
type JsEngine struct {
	// runtime is the goja VM the user script was loaded into.
	runtime *goja.Runtime
	// transform is the script's global transform(req) function.
	transform goja.Callable
}
|
||||
|
||||
// NewJsEngine creates a new JavaScript script engine with the given script content.
|
||||
// The script must define a global `transform` function that takes a request object
|
||||
// and returns the modified request object.
|
||||
//
|
||||
// Example JavaScript script:
|
||||
//
|
||||
// function transform(req) {
|
||||
// req.headers["X-Custom"] = ["value"];
|
||||
// return req;
|
||||
// }
|
||||
//
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptTransformMissing
|
||||
// - types.ScriptExecutionError
|
||||
func NewJsEngine(scriptContent string) (*JsEngine, error) {
|
||||
vm := goja.New()
|
||||
|
||||
// Execute the script to define the transform function
|
||||
_, err := vm.RunString(scriptContent)
|
||||
if err != nil {
|
||||
return nil, types.NewScriptExecutionError("JavaScript", err)
|
||||
}
|
||||
|
||||
// Get the transform function
|
||||
transformVal := vm.Get("transform")
|
||||
if transformVal == nil || goja.IsUndefined(transformVal) || goja.IsNull(transformVal) {
|
||||
return nil, types.ErrScriptTransformMissing
|
||||
}
|
||||
|
||||
transform, ok := goja.AssertFunction(transformVal)
|
||||
if !ok {
|
||||
return nil, types.NewScriptExecutionError("JavaScript", errors.New("'transform' must be a function"))
|
||||
}
|
||||
|
||||
return &JsEngine{
|
||||
runtime: vm,
|
||||
transform: transform,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Transform executes the JavaScript transform function with the given request data.
|
||||
// It can return the following errors:
|
||||
// - types.ScriptExecutionError
|
||||
func (e *JsEngine) Transform(req *RequestData) error {
|
||||
// Convert RequestData to JavaScript object
|
||||
reqObj := e.requestDataToObject(req)
|
||||
|
||||
// Call transform(req)
|
||||
result, err := e.transform(goja.Undefined(), reqObj)
|
||||
if err != nil {
|
||||
return types.NewScriptExecutionError("JavaScript", err)
|
||||
}
|
||||
|
||||
// Update RequestData from the returned object
|
||||
if err := e.objectToRequestData(result, req); err != nil {
|
||||
return types.NewScriptExecutionError("JavaScript", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close releases the JavaScript runtime resources.
|
||||
func (e *JsEngine) Close() {
|
||||
// goja doesn't have an explicit close method, but we can help GC
|
||||
e.runtime = nil
|
||||
e.transform = nil
|
||||
}
|
||||
|
||||
// requestDataToObject converts RequestData to a goja Value (JavaScript object).
|
||||
func (e *JsEngine) requestDataToObject(req *RequestData) goja.Value {
|
||||
obj := e.runtime.NewObject()
|
||||
|
||||
_ = obj.Set("method", req.Method)
|
||||
_ = obj.Set("path", req.Path)
|
||||
_ = obj.Set("body", req.Body)
|
||||
|
||||
// Headers (map[string][]string -> object of arrays)
|
||||
headers := e.runtime.NewObject()
|
||||
for k, values := range req.Headers {
|
||||
_ = headers.Set(k, e.stringSliceToArray(values))
|
||||
}
|
||||
_ = obj.Set("headers", headers)
|
||||
|
||||
// Params (map[string][]string -> object of arrays)
|
||||
params := e.runtime.NewObject()
|
||||
for k, values := range req.Params {
|
||||
_ = params.Set(k, e.stringSliceToArray(values))
|
||||
}
|
||||
_ = obj.Set("params", params)
|
||||
|
||||
// Cookies (map[string][]string -> object of arrays)
|
||||
cookies := e.runtime.NewObject()
|
||||
for k, values := range req.Cookies {
|
||||
_ = cookies.Set(k, e.stringSliceToArray(values))
|
||||
}
|
||||
_ = obj.Set("cookies", cookies)
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
// objectToRequestData updates RequestData from a JavaScript object.
|
||||
func (e *JsEngine) objectToRequestData(val goja.Value, req *RequestData) error {
|
||||
if val == nil || goja.IsUndefined(val) || goja.IsNull(val) {
|
||||
return types.ErrScriptTransformReturnObject
|
||||
}
|
||||
|
||||
obj := val.ToObject(e.runtime)
|
||||
if obj == nil {
|
||||
return types.ErrScriptTransformReturnObject
|
||||
}
|
||||
|
||||
// Method
|
||||
if v := obj.Get("method"); v != nil && !goja.IsUndefined(v) {
|
||||
req.Method = v.String()
|
||||
}
|
||||
|
||||
// Path
|
||||
if v := obj.Get("path"); v != nil && !goja.IsUndefined(v) {
|
||||
req.Path = v.String()
|
||||
}
|
||||
|
||||
// Body
|
||||
if v := obj.Get("body"); v != nil && !goja.IsUndefined(v) {
|
||||
req.Body = v.String()
|
||||
}
|
||||
|
||||
// Headers
|
||||
if v := obj.Get("headers"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||
req.Headers = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||
}
|
||||
|
||||
// Params
|
||||
if v := obj.Get("params"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||
req.Params = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||
}
|
||||
|
||||
// Cookies
|
||||
if v := obj.Get("cookies"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||
req.Cookies = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// stringSliceToArray converts a Go []string to a JavaScript array.
|
||||
func (e *JsEngine) stringSliceToArray(values []string) *goja.Object {
|
||||
ifaces := make([]any, len(values))
|
||||
for i, v := range values {
|
||||
ifaces[i] = v
|
||||
}
|
||||
return e.runtime.NewArray(ifaces...)
|
||||
}
|
||||
|
||||
// objectToStringSliceMap converts a JavaScript object to a Go
// map[string][]string. Supports both single string values and array values.
//
// Behavioral notes:
//   - Keys whose value is undefined or null are omitted entirely.
//   - Non-string items inside an array are silently skipped, so an array
//     with no string elements yields a nil slice for that key.
//   - A non-array value is stringified and wrapped in a one-element slice.
func (e *JsEngine) objectToStringSliceMap(obj *goja.Object) map[string][]string {
	if obj == nil {
		return make(map[string][]string)
	}

	result := make(map[string][]string)
	for _, key := range obj.Keys() {
		v := obj.Get(key)
		if v == nil || goja.IsUndefined(v) || goja.IsNull(v) {
			continue
		}

		// Check if it's an array (goja exports JS arrays as []any)
		if arr, ok := v.Export().([]any); ok {
			var values []string
			for _, item := range arr {
				if s, ok := item.(string); ok {
					values = append(values, s)
				}
			}
			result[key] = values
		} else {
			// Single value - wrap in slice
			result[key] = []string{v.String()}
		}
	}
	return result
}
|
||||
191
internal/script/lua.go
Normal file
191
internal/script/lua.go
Normal file
@@ -0,0 +1,191 @@
|
||||
package script
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// LuaEngine implements the Engine interface using gopher-lua.
// Engines are created per-worker (see Chain) and are not shared.
type LuaEngine struct {
	// state is the Lua interpreter the user script was loaded into.
	state *lua.LState
	// transform is the script's global transform(req) function.
	transform *lua.LFunction
}
|
||||
|
||||
// NewLuaEngine creates a new Lua script engine with the given script content.
|
||||
// The script must define a global `transform` function that takes a request table
|
||||
// and returns the modified request table.
|
||||
//
|
||||
// Example Lua script:
|
||||
//
|
||||
// function transform(req)
|
||||
// req.headers["X-Custom"] = {"value"}
|
||||
// return req
|
||||
// end
|
||||
//
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptTransformMissing
|
||||
// - types.ScriptExecutionError
|
||||
func NewLuaEngine(scriptContent string) (*LuaEngine, error) {
|
||||
L := lua.NewState()
|
||||
|
||||
// Execute the script to define the transform function
|
||||
if err := L.DoString(scriptContent); err != nil {
|
||||
L.Close()
|
||||
return nil, types.NewScriptExecutionError("Lua", err)
|
||||
}
|
||||
|
||||
// Get the transform function
|
||||
transform := L.GetGlobal("transform")
|
||||
if transform.Type() != lua.LTFunction {
|
||||
L.Close()
|
||||
return nil, types.ErrScriptTransformMissing
|
||||
}
|
||||
|
||||
return &LuaEngine{
|
||||
state: L,
|
||||
transform: transform.(*lua.LFunction),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Transform executes the Lua transform function with the given request data.
// The request is converted to a Lua table, passed to transform(req), and the
// returned table is copied back into req in place.
// It can return the following errors:
//   - types.ScriptExecutionError
func (e *LuaEngine) Transform(req *RequestData) error {
	// Convert RequestData to Lua table
	reqTable := e.requestDataToTable(req)

	// Call transform(req): push the function, then its single argument;
	// PCall(1, 1, nil) consumes both and pushes exactly one return value.
	e.state.Push(e.transform)
	e.state.Push(reqTable)
	if err := e.state.PCall(1, 1, nil); err != nil {
		return types.NewScriptExecutionError("Lua", err)
	}

	// Read the single return value from the top of the stack, then pop it
	// so the stack stays balanced across repeated calls.
	result := e.state.Get(-1)
	e.state.Pop(1)

	if result.Type() != lua.LTTable {
		return types.NewScriptExecutionError("Lua", fmt.Errorf("transform function must return a table, got %s", result.Type()))
	}

	// Update RequestData from the returned table
	e.tableToRequestData(result.(*lua.LTable), req)

	return nil
}
|
||||
|
||||
// Close releases the Lua state resources.
|
||||
func (e *LuaEngine) Close() {
|
||||
if e.state != nil {
|
||||
e.state.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// requestDataToTable converts RequestData to a Lua table.
|
||||
func (e *LuaEngine) requestDataToTable(req *RequestData) *lua.LTable {
|
||||
L := e.state
|
||||
t := L.NewTable()
|
||||
|
||||
t.RawSetString("method", lua.LString(req.Method))
|
||||
t.RawSetString("path", lua.LString(req.Path))
|
||||
t.RawSetString("body", lua.LString(req.Body))
|
||||
|
||||
// Headers (map[string][]string -> table of arrays)
|
||||
headers := L.NewTable()
|
||||
for k, values := range req.Headers {
|
||||
arr := L.NewTable()
|
||||
for _, v := range values {
|
||||
arr.Append(lua.LString(v))
|
||||
}
|
||||
headers.RawSetString(k, arr)
|
||||
}
|
||||
t.RawSetString("headers", headers)
|
||||
|
||||
// Params (map[string][]string -> table of arrays)
|
||||
params := L.NewTable()
|
||||
for k, values := range req.Params {
|
||||
arr := L.NewTable()
|
||||
for _, v := range values {
|
||||
arr.Append(lua.LString(v))
|
||||
}
|
||||
params.RawSetString(k, arr)
|
||||
}
|
||||
t.RawSetString("params", params)
|
||||
|
||||
// Cookies (map[string][]string -> table of arrays)
|
||||
cookies := L.NewTable()
|
||||
for k, values := range req.Cookies {
|
||||
arr := L.NewTable()
|
||||
for _, v := range values {
|
||||
arr.Append(lua.LString(v))
|
||||
}
|
||||
cookies.RawSetString(k, arr)
|
||||
}
|
||||
t.RawSetString("cookies", cookies)
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
// tableToRequestData updates RequestData from a Lua table.
|
||||
func (e *LuaEngine) tableToRequestData(t *lua.LTable, req *RequestData) {
|
||||
// Method
|
||||
if v := t.RawGetString("method"); v.Type() == lua.LTString {
|
||||
req.Method = string(v.(lua.LString))
|
||||
}
|
||||
|
||||
// Path
|
||||
if v := t.RawGetString("path"); v.Type() == lua.LTString {
|
||||
req.Path = string(v.(lua.LString))
|
||||
}
|
||||
|
||||
// Body
|
||||
if v := t.RawGetString("body"); v.Type() == lua.LTString {
|
||||
req.Body = string(v.(lua.LString))
|
||||
}
|
||||
|
||||
// Headers
|
||||
if v := t.RawGetString("headers"); v.Type() == lua.LTTable {
|
||||
req.Headers = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||
}
|
||||
|
||||
// Params
|
||||
if v := t.RawGetString("params"); v.Type() == lua.LTTable {
|
||||
req.Params = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||
}
|
||||
|
||||
// Cookies
|
||||
if v := t.RawGetString("cookies"); v.Type() == lua.LTTable {
|
||||
req.Cookies = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||
}
|
||||
}
|
||||
|
||||
// tableToStringSliceMap converts a Lua table to a Go map[string][]string.
|
||||
// Supports both single string values and array values.
|
||||
func (e *LuaEngine) tableToStringSliceMap(t *lua.LTable) map[string][]string {
|
||||
result := make(map[string][]string)
|
||||
t.ForEach(func(k, v lua.LValue) {
|
||||
if k.Type() != lua.LTString {
|
||||
return
|
||||
}
|
||||
key := string(k.(lua.LString))
|
||||
|
||||
switch v.Type() {
|
||||
case lua.LTString:
|
||||
// Single string value
|
||||
result[key] = []string{string(v.(lua.LString))}
|
||||
case lua.LTTable:
|
||||
// Array of strings
|
||||
var values []string
|
||||
v.(*lua.LTable).ForEach(func(_, item lua.LValue) {
|
||||
if item.Type() == lua.LTString {
|
||||
values = append(values, string(item.(lua.LString)))
|
||||
}
|
||||
})
|
||||
result[key] = values
|
||||
}
|
||||
})
|
||||
return result
|
||||
}
|
||||
197
internal/script/script.go
Normal file
197
internal/script/script.go
Normal file
@@ -0,0 +1,197 @@
|
||||
package script
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// RequestData represents the request data passed to scripts for transformation.
|
||||
// Scripts can modify any field and the changes will be applied to the actual request.
|
||||
// Headers, Params, and Cookies use []string values to support multiple values per key.
|
||||
type RequestData struct {
|
||||
Method string `json:"method"`
|
||||
Path string `json:"path"`
|
||||
Headers map[string][]string `json:"headers"`
|
||||
Params map[string][]string `json:"params"`
|
||||
Cookies map[string][]string `json:"cookies"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
// Engine defines the interface for script engines (Lua, JavaScript).
|
||||
// Each engine must be able to transform request data using a user-provided script.
|
||||
type Engine interface {
|
||||
// Transform executes the script's transform function with the given request data.
|
||||
// The script should modify the RequestData and return it.
|
||||
Transform(req *RequestData) error
|
||||
|
||||
// Close releases any resources held by the engine.
|
||||
Close()
|
||||
}
|
||||
|
||||
// EngineType represents the type of script engine.
|
||||
type EngineType string
|
||||
|
||||
const (
|
||||
EngineTypeLua EngineType = "lua"
|
||||
EngineTypeJavaScript EngineType = "js"
|
||||
)
|
||||
|
||||
// Source represents a loaded script source.
|
||||
type Source struct {
|
||||
Content string
|
||||
EngineType EngineType
|
||||
}
|
||||
|
||||
// LoadSource loads a script from the given source string.
|
||||
// The source can be:
|
||||
// - Inline script: any string not starting with "@"
|
||||
// - Escaped "@": strings starting with "@@" (literal "@" at start, returns string without first @)
|
||||
// - File reference: "@/path/to/file" or "@./relative/path"
|
||||
// - URL reference: "@http://..." or "@https://..."
|
||||
//
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptEmpty
|
||||
// - types.ScriptLoadError
|
||||
func LoadSource(ctx context.Context, source string, engineType EngineType) (*Source, error) {
|
||||
if source == "" {
|
||||
return nil, types.ErrScriptEmpty
|
||||
}
|
||||
|
||||
var content string
|
||||
var err error
|
||||
|
||||
switch {
|
||||
case strings.HasPrefix(source, "@@"):
|
||||
// Escaped @ - it's an inline script starting with literal @
|
||||
content = source[1:] // Remove first @, keep the rest
|
||||
case strings.HasPrefix(source, "@"):
|
||||
// File or URL reference
|
||||
ref := source[1:]
|
||||
if strings.HasPrefix(ref, "http://") || strings.HasPrefix(ref, "https://") {
|
||||
content, err = fetchURL(ctx, ref)
|
||||
} else {
|
||||
content, err = readFile(ref)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, types.NewScriptLoadError(ref, err)
|
||||
}
|
||||
default:
|
||||
// Inline script
|
||||
content = source
|
||||
}
|
||||
|
||||
return &Source{
|
||||
Content: content,
|
||||
EngineType: engineType,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// LoadSources loads multiple script sources.
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptEmpty
|
||||
// - types.ScriptLoadError
|
||||
func LoadSources(ctx context.Context, sources []string, engineType EngineType) ([]*Source, error) {
|
||||
loaded := make([]*Source, 0, len(sources))
|
||||
for _, src := range sources {
|
||||
source, err := LoadSource(ctx, src, engineType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
loaded = append(loaded, source)
|
||||
}
|
||||
return loaded, nil
|
||||
}
|
||||
|
||||
// ValidateScript validates a script source by loading it and checking syntax.
|
||||
// It loads the script (from file/URL/inline), parses it, and verifies
|
||||
// that a 'transform' function is defined.
|
||||
// It can return the following errors:
|
||||
// - types.ErrScriptEmpty
|
||||
// - types.ErrScriptTransformMissing
|
||||
// - types.ScriptLoadError
|
||||
// - types.ScriptExecutionError
|
||||
// - types.ScriptUnknownEngineError
|
||||
func ValidateScript(ctx context.Context, source string, engineType EngineType) error {
|
||||
// Load the script source
|
||||
src, err := LoadSource(ctx, source, engineType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Try to create an engine - this validates syntax and transform function
|
||||
var engine Engine
|
||||
switch engineType {
|
||||
case EngineTypeLua:
|
||||
engine, err = NewLuaEngine(src.Content)
|
||||
case EngineTypeJavaScript:
|
||||
engine, err = NewJsEngine(src.Content)
|
||||
default:
|
||||
return types.NewScriptUnknownEngineError(string(engineType))
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Clean up the engine - we only needed it for validation
|
||||
engine.Close()
|
||||
return nil
|
||||
}
|
||||
|
||||
// fetchURL downloads content from an HTTP/HTTPS URL.
|
||||
// It can return the following errors:
|
||||
// - types.HTTPFetchError
|
||||
// - types.HTTPStatusError
|
||||
func fetchURL(ctx context.Context, url string) (string, error) {
|
||||
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return "", types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return "", types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
defer resp.Body.Close() //nolint:errcheck
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return "", types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", types.NewHTTPFetchError(url, err)
|
||||
}
|
||||
|
||||
return string(data), nil
|
||||
}
|
||||
|
||||
// readFile reads content from a local file.
|
||||
// It can return the following errors:
|
||||
// - types.FileReadError
|
||||
func readFile(path string) (string, error) {
|
||||
if !filepath.IsAbs(path) {
|
||||
pwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", types.NewFileReadError(path, err)
|
||||
}
|
||||
path = filepath.Join(pwd, path)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(path) //nolint:gosec
|
||||
if err != nil {
|
||||
return "", types.NewFileReadError(path, err)
|
||||
}
|
||||
|
||||
return string(data), nil
|
||||
}
|
||||
46
internal/types/config_file.go
Normal file
46
internal/types/config_file.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type ConfigFileType string
|
||||
|
||||
const (
|
||||
ConfigFileTypeUnknown ConfigFileType = "unknown"
|
||||
ConfigFileTypeYAML ConfigFileType = "yaml/yml"
|
||||
)
|
||||
|
||||
type ConfigFile struct {
|
||||
path string
|
||||
_type ConfigFileType
|
||||
}
|
||||
|
||||
func (configFile ConfigFile) Path() string {
|
||||
return configFile.path
|
||||
}
|
||||
|
||||
func (configFile ConfigFile) Type() ConfigFileType {
|
||||
return configFile._type
|
||||
}
|
||||
|
||||
func ParseConfigFile(configFileRaw string) *ConfigFile {
|
||||
// TODO: Improve file type detection
|
||||
// (e.g., use magic bytes or content inspection instead of relying solely on file extension)
|
||||
|
||||
configFileParsed := &ConfigFile{
|
||||
path: configFileRaw,
|
||||
}
|
||||
|
||||
configFileExtension, _ := strings.CutPrefix(filepath.Ext(configFileRaw), ".")
|
||||
|
||||
switch strings.ToLower(configFileExtension) {
|
||||
case "yml", "yaml":
|
||||
configFileParsed._type = ConfigFileTypeYAML
|
||||
default:
|
||||
configFileParsed._type = ConfigFileTypeUnknown
|
||||
}
|
||||
|
||||
return configFileParsed
|
||||
}
|
||||
40
internal/types/cookie.go
Normal file
40
internal/types/cookie.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Cookie KeyValue[string, []string]
|
||||
|
||||
type Cookies []Cookie
|
||||
|
||||
func (cookies Cookies) GetValue(key string) *[]string {
|
||||
for i := range cookies {
|
||||
if cookies[i].Key == key {
|
||||
return &cookies[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cookies *Cookies) Merge(cookie ...Cookie) {
|
||||
for _, c := range cookie {
|
||||
if item := cookies.GetValue(c.Key); item != nil {
|
||||
*item = append(*item, c.Value...)
|
||||
} else {
|
||||
*cookies = append(*cookies, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (cookies *Cookies) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
*cookies = append(*cookies, *ParseCookie(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseCookie(rawValue string) *Cookie {
|
||||
parts := strings.SplitN(rawValue, "=", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Cookie{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Cookie{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
444
internal/types/errors.go
Normal file
444
internal/types/errors.go
Normal file
@@ -0,0 +1,444 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ======================================== General ========================================
|
||||
|
||||
var (
|
||||
errNoError = errors.New("no error (internal)")
|
||||
)
|
||||
|
||||
type FieldParseError struct {
|
||||
Field string
|
||||
Value string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewFieldParseError(field string, value string, err error) FieldParseError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return FieldParseError{field, value, err}
|
||||
}
|
||||
|
||||
func (e FieldParseError) Error() string {
|
||||
return fmt.Sprintf("Field '%s' parse failed: %v", e.Field, e.Err)
|
||||
}
|
||||
|
||||
func (e FieldParseError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type FieldParseErrors struct {
|
||||
Errors []FieldParseError
|
||||
}
|
||||
|
||||
func NewFieldParseErrors(fieldParseErrors []FieldParseError) FieldParseErrors {
|
||||
return FieldParseErrors{fieldParseErrors}
|
||||
}
|
||||
|
||||
func (e FieldParseErrors) Error() string {
|
||||
if len(e.Errors) == 0 {
|
||||
return "No field parse errors"
|
||||
}
|
||||
if len(e.Errors) == 1 {
|
||||
return e.Errors[0].Error()
|
||||
}
|
||||
|
||||
var builder strings.Builder
|
||||
for i, err := range e.Errors {
|
||||
if i > 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
builder.WriteString(err.Error())
|
||||
}
|
||||
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
type FieldValidationError struct {
|
||||
Field string
|
||||
Value string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewFieldValidationError(field string, value string, err error) FieldValidationError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return FieldValidationError{field, value, err}
|
||||
}
|
||||
|
||||
func (e FieldValidationError) Error() string {
|
||||
return fmt.Sprintf("Field '%s' validation failed: %v", e.Field, e.Err)
|
||||
}
|
||||
|
||||
func (e FieldValidationError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type FieldValidationErrors struct {
|
||||
Errors []FieldValidationError
|
||||
}
|
||||
|
||||
func NewFieldValidationErrors(fieldValidationErrors []FieldValidationError) FieldValidationErrors {
|
||||
return FieldValidationErrors{fieldValidationErrors}
|
||||
}
|
||||
|
||||
func (e FieldValidationErrors) Error() string {
|
||||
if len(e.Errors) == 0 {
|
||||
return "No field validation errors"
|
||||
}
|
||||
if len(e.Errors) == 1 {
|
||||
return e.Errors[0].Error()
|
||||
}
|
||||
|
||||
var builder strings.Builder
|
||||
for i, err := range e.Errors {
|
||||
if i > 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
builder.WriteString(err.Error())
|
||||
}
|
||||
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
type UnmarshalError struct {
|
||||
error error
|
||||
}
|
||||
|
||||
func NewUnmarshalError(err error) UnmarshalError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return UnmarshalError{err}
|
||||
}
|
||||
|
||||
func (e UnmarshalError) Error() string {
|
||||
return "Unmarshal error: " + e.error.Error()
|
||||
}
|
||||
|
||||
func (e UnmarshalError) Unwrap() error {
|
||||
return e.error
|
||||
}
|
||||
|
||||
// ======================================== General I/O ========================================
|
||||
|
||||
type FileReadError struct {
|
||||
Path string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewFileReadError(path string, err error) FileReadError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return FileReadError{path, err}
|
||||
}
|
||||
|
||||
func (e FileReadError) Error() string {
|
||||
return fmt.Sprintf("failed to read file %s: %v", e.Path, e.Err)
|
||||
}
|
||||
|
||||
func (e FileReadError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type HTTPFetchError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewHTTPFetchError(url string, err error) HTTPFetchError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return HTTPFetchError{url, err}
|
||||
}
|
||||
|
||||
func (e HTTPFetchError) Error() string {
|
||||
return fmt.Sprintf("failed to fetch %s: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
func (e HTTPFetchError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type HTTPStatusError struct {
|
||||
URL string
|
||||
StatusCode int
|
||||
Status string
|
||||
}
|
||||
|
||||
func NewHTTPStatusError(url string, statusCode int, status string) HTTPStatusError {
|
||||
return HTTPStatusError{url, statusCode, status}
|
||||
}
|
||||
|
||||
func (e HTTPStatusError) Error() string {
|
||||
return fmt.Sprintf("HTTP %d %s (url: %s)", e.StatusCode, e.Status, e.URL)
|
||||
}
|
||||
|
||||
type URLParseError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewURLParseError(url string, err error) URLParseError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return URLParseError{url, err}
|
||||
}
|
||||
|
||||
func (e URLParseError) Error() string {
|
||||
return fmt.Sprintf("invalid URL %q: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
func (e URLParseError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
// ======================================== Template ========================================
|
||||
|
||||
var (
|
||||
ErrFileCacheNotInitialized = errors.New("file cache is not initialized")
|
||||
ErrFormDataOddArgs = errors.New("body_FormData requires an even number of arguments (key-value pairs)")
|
||||
)
|
||||
|
||||
type TemplateParseError struct {
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewTemplateParseError(err error) TemplateParseError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return TemplateParseError{err}
|
||||
}
|
||||
|
||||
func (e TemplateParseError) Error() string {
|
||||
return "template parse error: " + e.Err.Error()
|
||||
}
|
||||
|
||||
func (e TemplateParseError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type TemplateRenderError struct {
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewTemplateRenderError(err error) TemplateRenderError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return TemplateRenderError{err}
|
||||
}
|
||||
|
||||
func (e TemplateRenderError) Error() string {
|
||||
return "template rendering: " + e.Err.Error()
|
||||
}
|
||||
|
||||
func (e TemplateRenderError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
// ======================================== CLI ========================================
|
||||
|
||||
type CLIUnexpectedArgsError struct {
|
||||
Args []string
|
||||
}
|
||||
|
||||
func NewCLIUnexpectedArgsError(args []string) CLIUnexpectedArgsError {
|
||||
return CLIUnexpectedArgsError{args}
|
||||
}
|
||||
|
||||
func (e CLIUnexpectedArgsError) Error() string {
|
||||
return fmt.Sprintf("CLI received unexpected arguments: %v", strings.Join(e.Args, ","))
|
||||
}
|
||||
|
||||
// ======================================== Config File ========================================
|
||||
|
||||
type ConfigFileReadError struct {
|
||||
error error
|
||||
}
|
||||
|
||||
func NewConfigFileReadError(err error) ConfigFileReadError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ConfigFileReadError{err}
|
||||
}
|
||||
|
||||
func (e ConfigFileReadError) Error() string {
|
||||
return "Config file read error: " + e.error.Error()
|
||||
}
|
||||
|
||||
func (e ConfigFileReadError) Unwrap() error {
|
||||
return e.error
|
||||
}
|
||||
|
||||
// ======================================== Proxy ========================================
|
||||
|
||||
type ProxyUnsupportedSchemeError struct {
|
||||
Scheme string
|
||||
}
|
||||
|
||||
func NewProxyUnsupportedSchemeError(scheme string) ProxyUnsupportedSchemeError {
|
||||
return ProxyUnsupportedSchemeError{scheme}
|
||||
}
|
||||
|
||||
func (e ProxyUnsupportedSchemeError) Error() string {
|
||||
return "unsupported proxy scheme: " + e.Scheme
|
||||
}
|
||||
|
||||
type ProxyParseError struct {
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewProxyParseError(err error) ProxyParseError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ProxyParseError{err}
|
||||
}
|
||||
|
||||
func (e ProxyParseError) Error() string {
|
||||
return "failed to parse proxy URL: " + e.Err.Error()
|
||||
}
|
||||
|
||||
func (e ProxyParseError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type ProxyConnectError struct {
|
||||
Status string
|
||||
}
|
||||
|
||||
func NewProxyConnectError(status string) ProxyConnectError {
|
||||
return ProxyConnectError{status}
|
||||
}
|
||||
|
||||
func (e ProxyConnectError) Error() string {
|
||||
return "proxy CONNECT failed: " + e.Status
|
||||
}
|
||||
|
||||
type ProxyResolveError struct {
|
||||
Host string
|
||||
}
|
||||
|
||||
func NewProxyResolveError(host string) ProxyResolveError {
|
||||
return ProxyResolveError{host}
|
||||
}
|
||||
|
||||
func (e ProxyResolveError) Error() string {
|
||||
return "no IP addresses found for host: " + e.Host
|
||||
}
|
||||
|
||||
type ProxyDialError struct {
|
||||
Proxy string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewProxyDialError(proxy string, err error) ProxyDialError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ProxyDialError{proxy, err}
|
||||
}
|
||||
|
||||
func (e ProxyDialError) Error() string {
|
||||
return "proxy \"" + e.Proxy + "\": " + e.Err.Error()
|
||||
}
|
||||
|
||||
func (e ProxyDialError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
// ======================================== Script ========================================
|
||||
|
||||
var (
|
||||
ErrScriptEmpty = errors.New("script cannot be empty")
|
||||
ErrScriptSourceEmpty = errors.New("script source cannot be empty after @")
|
||||
ErrScriptTransformMissing = errors.New("script must define a global 'transform' function")
|
||||
ErrScriptTransformReturnObject = errors.New("transform function must return an object")
|
||||
ErrScriptURLNoHost = errors.New("script URL must have a host")
|
||||
)
|
||||
|
||||
type ScriptLoadError struct {
|
||||
Source string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewScriptLoadError(source string, err error) ScriptLoadError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ScriptLoadError{source, err}
|
||||
}
|
||||
|
||||
func (e ScriptLoadError) Error() string {
|
||||
return fmt.Sprintf("failed to load script from %q: %v", e.Source, e.Err)
|
||||
}
|
||||
|
||||
func (e ScriptLoadError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type ScriptExecutionError struct {
|
||||
EngineType string
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewScriptExecutionError(engineType string, err error) ScriptExecutionError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ScriptExecutionError{engineType, err}
|
||||
}
|
||||
|
||||
func (e ScriptExecutionError) Error() string {
|
||||
return fmt.Sprintf("%s script error: %v", e.EngineType, e.Err)
|
||||
}
|
||||
|
||||
func (e ScriptExecutionError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type ScriptChainError struct {
|
||||
EngineType string
|
||||
Index int
|
||||
Err error
|
||||
}
|
||||
|
||||
func NewScriptChainError(engineType string, index int, err error) ScriptChainError {
|
||||
if err == nil {
|
||||
err = errNoError
|
||||
}
|
||||
return ScriptChainError{engineType, index, err}
|
||||
}
|
||||
|
||||
func (e ScriptChainError) Error() string {
|
||||
return fmt.Sprintf("%s script[%d]: %v", e.EngineType, e.Index, e.Err)
|
||||
}
|
||||
|
||||
func (e ScriptChainError) Unwrap() error {
|
||||
return e.Err
|
||||
}
|
||||
|
||||
type ScriptUnknownEngineError struct {
|
||||
EngineType string
|
||||
}
|
||||
|
||||
func NewScriptUnknownEngineError(engineType string) ScriptUnknownEngineError {
|
||||
return ScriptUnknownEngineError{engineType}
|
||||
}
|
||||
|
||||
func (e ScriptUnknownEngineError) Error() string {
|
||||
return "unknown engine type: " + e.EngineType
|
||||
}
|
||||
49
internal/types/header.go
Normal file
49
internal/types/header.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Header KeyValue[string, []string]
|
||||
|
||||
type Headers []Header
|
||||
|
||||
func (headers Headers) Has(key string) bool {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (headers Headers) GetValue(key string) *[]string {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return &headers[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers *Headers) Merge(header ...Header) {
|
||||
for _, h := range header {
|
||||
if item := headers.GetValue(h.Key); item != nil {
|
||||
*item = append(*item, h.Value...)
|
||||
} else {
|
||||
*headers = append(*headers, h)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (headers *Headers) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
*headers = append(*headers, *ParseHeader(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseHeader(rawValue string) *Header {
|
||||
parts := strings.SplitN(rawValue, ": ", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Header{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Header{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
6
internal/types/key_value.go
Normal file
6
internal/types/key_value.go
Normal file
@@ -0,0 +1,6 @@
|
||||
package types
|
||||
|
||||
type KeyValue[K, V any] struct {
|
||||
Key K
|
||||
Value V
|
||||
}
|
||||
40
internal/types/param.go
Normal file
40
internal/types/param.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type Param KeyValue[string, []string]
|
||||
|
||||
type Params []Param
|
||||
|
||||
func (params Params) GetValue(key string) *[]string {
|
||||
for i := range params {
|
||||
if params[i].Key == key {
|
||||
return ¶ms[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (params *Params) Merge(param ...Param) {
|
||||
for _, p := range param {
|
||||
if item := params.GetValue(p.Key); item != nil {
|
||||
*item = append(*item, p.Value...)
|
||||
} else {
|
||||
*params = append(*params, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (params *Params) Parse(rawValues ...string) {
|
||||
for _, rawValue := range rawValues {
|
||||
*params = append(*params, *ParseParam(rawValue))
|
||||
}
|
||||
}
|
||||
|
||||
func ParseParam(rawValue string) *Param {
|
||||
parts := strings.SplitN(rawValue, "=", 2)
|
||||
if len(parts) == 1 {
|
||||
return &Param{Key: parts[0], Value: []string{""}}
|
||||
}
|
||||
return &Param{Key: parts[0], Value: []string{parts[1]}}
|
||||
}
|
||||
43
internal/types/proxy.go
Normal file
43
internal/types/proxy.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type Proxy url.URL
|
||||
|
||||
func (proxy Proxy) String() string {
|
||||
return (*url.URL)(&proxy).String()
|
||||
}
|
||||
|
||||
type Proxies []Proxy
|
||||
|
||||
func (proxies *Proxies) Append(proxy ...Proxy) {
|
||||
*proxies = append(*proxies, proxy...)
|
||||
}
|
||||
|
||||
// Parse parses a raw proxy string and appends it to the list.
|
||||
// It can return the following errors:
|
||||
// - ProxyParseError
|
||||
func (proxies *Proxies) Parse(rawValue string) error {
|
||||
parsedProxy, err := ParseProxy(rawValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
proxies.Append(*parsedProxy)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ParseProxy parses a raw proxy URL string into a Proxy.
|
||||
// It can return the following errors:
|
||||
// - ProxyParseError
|
||||
func ParseProxy(rawValue string) (*Proxy, error) {
|
||||
urlParsed, err := url.Parse(rawValue)
|
||||
if err != nil {
|
||||
return nil, NewProxyParseError(err)
|
||||
}
|
||||
|
||||
proxyParsed := Proxy(*urlParsed)
|
||||
return &proxyParsed, nil
|
||||
}
|
||||
8
internal/version/version.go
Normal file
8
internal/version/version.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package version
|
||||
|
||||
var (
|
||||
Version = "unknown" // Set via ldflags
|
||||
GitCommit = "unknown" // Set via ldflags
|
||||
BuildDate = "unknown" // Set via ldflags
|
||||
GoVersion = "unknown" // Set via ldflags
|
||||
)
|
||||
69
main.go
69
main.go
@@ -1,69 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
|
||||
"github.com/aykhans/dodo/config"
|
||||
"github.com/aykhans/dodo/requests"
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
func main() {
|
||||
conf := config.NewConfig()
|
||||
configFile, err := conf.ReadCLI()
|
||||
if err != nil {
|
||||
utils.PrintErrAndExit(err)
|
||||
}
|
||||
|
||||
if configFile.String() != "" {
|
||||
tempConf := config.NewConfig()
|
||||
if err := tempConf.ReadFile(configFile); err != nil {
|
||||
utils.PrintErrAndExit(err)
|
||||
}
|
||||
tempConf.MergeConfig(conf)
|
||||
conf = tempConf
|
||||
}
|
||||
conf.SetDefaults()
|
||||
|
||||
if errs := conf.Validate(); len(errs) > 0 {
|
||||
utils.PrintErrAndExit(errors.Join(errs...))
|
||||
}
|
||||
|
||||
requestConf := config.NewRequestConfig(conf)
|
||||
requestConf.Print()
|
||||
|
||||
if !requestConf.Yes {
|
||||
response := config.CLIYesOrNoReader("Do you want to continue?", false)
|
||||
if !response {
|
||||
utils.PrintAndExit("Exiting...\n")
|
||||
}
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
go listenForTermination(func() { cancel() })
|
||||
|
||||
responses, err := requests.Run(ctx, requestConf)
|
||||
if err != nil {
|
||||
if err == types.ErrInterrupt {
|
||||
fmt.Println(text.FgYellow.Sprint(err.Error()))
|
||||
return
|
||||
}
|
||||
utils.PrintErrAndExit(err)
|
||||
}
|
||||
|
||||
responses.Print()
|
||||
}
|
||||
|
||||
func listenForTermination(do func()) {
|
||||
sigChan := make(chan os.Signal, 1)
|
||||
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-sigChan
|
||||
do()
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
package requests
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/valyala/fasthttp"
|
||||
"github.com/valyala/fasthttp/fasthttpproxy"
|
||||
)
|
||||
|
||||
type ClientGeneratorFunc func() *fasthttp.HostClient
|
||||
|
||||
// getClients initializes and returns a slice of fasthttp.HostClient based on the provided parameters.
|
||||
// It can either return clients with proxies or a single client without proxies.
|
||||
func getClients(
|
||||
_ context.Context,
|
||||
timeout time.Duration,
|
||||
proxies []url.URL,
|
||||
maxConns uint,
|
||||
URL url.URL,
|
||||
skipVerify bool,
|
||||
) []*fasthttp.HostClient {
|
||||
isTLS := URL.Scheme == "https"
|
||||
|
||||
if proxiesLen := len(proxies); proxiesLen > 0 {
|
||||
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
||||
addr := URL.Host
|
||||
if isTLS && URL.Port() == "" {
|
||||
addr += ":443"
|
||||
}
|
||||
|
||||
for _, proxy := range proxies {
|
||||
dialFunc, err := getDialFunc(&proxy, timeout)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
clients = append(clients, &fasthttp.HostClient{
|
||||
MaxConns: int(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify,
|
||||
},
|
||||
Addr: addr,
|
||||
Dial: dialFunc,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
},
|
||||
)
|
||||
}
|
||||
return clients
|
||||
}
|
||||
|
||||
client := &fasthttp.HostClient{
|
||||
MaxConns: int(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify,
|
||||
},
|
||||
Addr: URL.Host,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
}
|
||||
return []*fasthttp.HostClient{client}
|
||||
}
|
||||
|
||||
// getDialFunc returns the appropriate fasthttp.DialFunc based on the provided proxy URL scheme.
|
||||
// It supports SOCKS5 ('socks5' or 'socks5h') and HTTP ('http') proxy schemes.
|
||||
// For HTTP proxies, the timeout parameter determines connection timeouts.
|
||||
// Returns an error if the proxy scheme is unsupported.
|
||||
func getDialFunc(proxy *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
||||
var dialer fasthttp.DialFunc
|
||||
|
||||
switch proxy.Scheme {
|
||||
case "socks5", "socks5h":
|
||||
dialer = fasthttpproxy.FasthttpSocksDialerDualStack(proxy.String())
|
||||
case "http":
|
||||
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxy.String(), timeout)
|
||||
default:
|
||||
return nil, errors.New("unsupported proxy scheme")
|
||||
}
|
||||
|
||||
if dialer == nil {
|
||||
return nil, errors.New("internal error: proxy dialer is nil")
|
||||
}
|
||||
|
||||
return dialer, nil
|
||||
}
|
||||
|
||||
// getSharedClientFuncMultiple returns a ClientGeneratorFunc that cycles through a list of fasthttp.HostClient instances.
|
||||
// The function uses a local random number generator to determine the starting index and stop index for cycling through the clients.
|
||||
// The returned function isn't thread-safe and should be used in a single-threaded context.
|
||||
func getSharedClientFuncMultiple(clients []*fasthttp.HostClient, localRand *rand.Rand) ClientGeneratorFunc {
|
||||
return utils.RandomValueCycle(clients, localRand)
|
||||
}
|
||||
|
||||
// getSharedClientFuncSingle returns a ClientGeneratorFunc that always returns the provided fasthttp.HostClient instance.
|
||||
// This can be useful for sharing a single client instance across multiple requests.
|
||||
func getSharedClientFuncSingle(client *fasthttp.HostClient) ClientGeneratorFunc {
|
||||
return func() *fasthttp.HostClient {
|
||||
return client
|
||||
}
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
package requests
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/progress"
|
||||
)
|
||||
|
||||
// streamProgress streams the progress of a task to the console using a progress bar.
|
||||
// It listens for increments on the provided channel and updates the progress bar accordingly.
|
||||
//
|
||||
// The function will stop and mark the progress as errored if the context is cancelled.
|
||||
// It will also stop and mark the progress as done when the total number of increments is reached.
|
||||
func streamProgress(
|
||||
ctx context.Context,
|
||||
wg *sync.WaitGroup,
|
||||
total uint,
|
||||
message string,
|
||||
increase <-chan int64,
|
||||
) {
|
||||
defer wg.Done()
|
||||
pw := progress.NewWriter()
|
||||
pw.SetTrackerPosition(progress.PositionRight)
|
||||
pw.SetStyle(progress.StyleBlocks)
|
||||
pw.SetTrackerLength(40)
|
||||
pw.SetUpdateFrequency(time.Millisecond * 250)
|
||||
if total == 0 {
|
||||
pw.Style().Visibility.Percentage = false
|
||||
}
|
||||
go pw.Render()
|
||||
dodosTracker := progress.Tracker{
|
||||
Message: message,
|
||||
Total: int64(total),
|
||||
}
|
||||
pw.AppendTracker(&dodosTracker)
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
if err := ctx.Err(); err == context.Canceled || err == context.DeadlineExceeded {
|
||||
dodosTracker.MarkAsDone()
|
||||
} else {
|
||||
dodosTracker.MarkAsErrored()
|
||||
}
|
||||
time.Sleep(time.Millisecond * 300)
|
||||
fmt.Printf("\r")
|
||||
return
|
||||
|
||||
case value := <-increase:
|
||||
dodosTracker.Increment(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,341 +0,0 @@
|
||||
package requests
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/config"
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/valyala/fasthttp"
|
||||
)
|
||||
|
||||
type RequestGeneratorFunc func() *fasthttp.Request
|
||||
|
||||
// Request represents an HTTP request to be sent using the fasthttp client.
|
||||
// It isn't thread-safe and should be used by a single goroutine.
|
||||
type Request struct {
|
||||
getClient ClientGeneratorFunc
|
||||
getRequest RequestGeneratorFunc
|
||||
}
|
||||
|
||||
type keyValueGenerator struct {
|
||||
key func() string
|
||||
value func() string
|
||||
}
|
||||
|
||||
// Send sends the HTTP request using the fasthttp client with a specified timeout.
|
||||
// It returns the HTTP response or an error if the request fails or times out.
|
||||
func (r *Request) Send(ctx context.Context, timeout time.Duration) (*fasthttp.Response, error) {
|
||||
client := r.getClient()
|
||||
request := r.getRequest()
|
||||
defer fasthttp.ReleaseRequest(request)
|
||||
|
||||
response := fasthttp.AcquireResponse()
|
||||
ch := make(chan error)
|
||||
go func() {
|
||||
err := client.DoTimeout(request, response, timeout)
|
||||
ch <- err
|
||||
}()
|
||||
select {
|
||||
case err := <-ch:
|
||||
if err != nil {
|
||||
fasthttp.ReleaseResponse(response)
|
||||
return nil, err
|
||||
}
|
||||
return response, nil
|
||||
case <-time.After(timeout):
|
||||
fasthttp.ReleaseResponse(response)
|
||||
return nil, types.ErrTimeout
|
||||
case <-ctx.Done():
|
||||
return nil, types.ErrInterrupt
|
||||
}
|
||||
}
|
||||
|
||||
// newRequest creates a new Request instance based on the provided configuration and clients.
|
||||
// It initializes a random number generator using the current time and a unique identifier (uid).
|
||||
// Depending on the number of clients provided, it sets up a function to select the appropriate client.
|
||||
// It also sets up a function to generate the request based on the provided configuration.
|
||||
func newRequest(
|
||||
requestConfig config.RequestConfig,
|
||||
clients []*fasthttp.HostClient,
|
||||
uid int64,
|
||||
) *Request {
|
||||
localRand := rand.New(rand.NewSource(time.Now().UnixNano() + uid))
|
||||
|
||||
clientsCount := len(clients)
|
||||
if clientsCount < 1 {
|
||||
panic("no clients")
|
||||
}
|
||||
|
||||
getClient := ClientGeneratorFunc(nil)
|
||||
if clientsCount == 1 {
|
||||
getClient = getSharedClientFuncSingle(clients[0])
|
||||
} else {
|
||||
getClient = getSharedClientFuncMultiple(clients, localRand)
|
||||
}
|
||||
|
||||
getRequest := getRequestGeneratorFunc(
|
||||
requestConfig.URL,
|
||||
requestConfig.Params,
|
||||
requestConfig.Headers,
|
||||
requestConfig.Cookies,
|
||||
requestConfig.Method,
|
||||
requestConfig.Body,
|
||||
localRand,
|
||||
)
|
||||
|
||||
requests := &Request{
|
||||
getClient: getClient,
|
||||
getRequest: getRequest,
|
||||
}
|
||||
|
||||
return requests
|
||||
}
|
||||
|
||||
// getRequestGeneratorFunc returns a RequestGeneratorFunc which generates HTTP requests with the specified parameters.
|
||||
// The function uses a local random number generator to select bodies, headers, cookies, and parameters if multiple options are provided.
|
||||
func getRequestGeneratorFunc(
|
||||
URL url.URL,
|
||||
params types.Params,
|
||||
headers types.Headers,
|
||||
cookies types.Cookies,
|
||||
method string,
|
||||
bodies []string,
|
||||
localRand *rand.Rand,
|
||||
) RequestGeneratorFunc {
|
||||
getParams := getKeyValueGeneratorFunc(params, localRand)
|
||||
getHeaders := getKeyValueGeneratorFunc(headers, localRand)
|
||||
getCookies := getKeyValueGeneratorFunc(cookies, localRand)
|
||||
getBody := getBodyValueFunc(bodies, utils.NewFuncMapGenerator(localRand), localRand)
|
||||
|
||||
return func() *fasthttp.Request {
|
||||
body, contentType := getBody()
|
||||
headers := getHeaders()
|
||||
if contentType != "" {
|
||||
headers = append(headers, types.KeyValue[string, string]{
|
||||
Key: "Content-Type",
|
||||
Value: contentType,
|
||||
})
|
||||
}
|
||||
|
||||
return newFasthttpRequest(
|
||||
URL,
|
||||
getParams(),
|
||||
headers,
|
||||
getCookies(),
|
||||
method,
|
||||
body,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// newFasthttpRequest creates a new fasthttp.Request object with the provided parameters.
|
||||
// It sets the request URI, host header, headers, cookies, params, method, and body.
|
||||
func newFasthttpRequest(
|
||||
URL url.URL,
|
||||
params []types.KeyValue[string, string],
|
||||
headers []types.KeyValue[string, string],
|
||||
cookies []types.KeyValue[string, string],
|
||||
method string,
|
||||
body string,
|
||||
) *fasthttp.Request {
|
||||
request := fasthttp.AcquireRequest()
|
||||
request.SetRequestURI(URL.Path)
|
||||
|
||||
// Set the host of the request to the host header
|
||||
// If the host header is not set, the request will fail
|
||||
// If there is host header in the headers, it will be overwritten
|
||||
request.Header.SetHost(URL.Host)
|
||||
setRequestParams(request, params)
|
||||
setRequestHeaders(request, headers)
|
||||
setRequestCookies(request, cookies)
|
||||
setRequestMethod(request, method)
|
||||
setRequestBody(request, body)
|
||||
if URL.Scheme == "https" {
|
||||
request.URI().SetScheme("https")
|
||||
}
|
||||
|
||||
return request
|
||||
}
|
||||
|
||||
// setRequestParams adds the query parameters of the given request based on the provided key-value pairs.
|
||||
func setRequestParams(req *fasthttp.Request, params []types.KeyValue[string, string]) {
|
||||
for _, param := range params {
|
||||
req.URI().QueryArgs().Add(param.Key, param.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// setRequestHeaders adds the headers of the given request with the provided key-value pairs.
|
||||
func setRequestHeaders(req *fasthttp.Request, headers []types.KeyValue[string, string]) {
|
||||
for _, header := range headers {
|
||||
req.Header.Add(header.Key, header.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// setRequestCookies adds the cookies of the given request with the provided key-value pairs.
|
||||
func setRequestCookies(req *fasthttp.Request, cookies []types.KeyValue[string, string]) {
|
||||
for _, cookie := range cookies {
|
||||
req.Header.Add("Cookie", cookie.Key+"="+cookie.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// setRequestMethod sets the HTTP request method for the given request.
|
||||
func setRequestMethod(req *fasthttp.Request, method string) {
|
||||
req.Header.SetMethod(method)
|
||||
}
|
||||
|
||||
// setRequestBody sets the request body of the given fasthttp.Request object.
|
||||
// The body parameter is a string that will be converted to a byte slice and set as the request body.
|
||||
func setRequestBody(req *fasthttp.Request, body string) {
|
||||
req.SetBody([]byte(body))
|
||||
}
|
||||
|
||||
// getKeyValueGeneratorFunc creates a function that generates key-value pairs for HTTP requests.
|
||||
// It takes a slice of key-value pairs where each key maps to a slice of possible values,
|
||||
// and a random number generator.
|
||||
//
|
||||
// If any key has multiple possible values, the function will randomly select one value for each
|
||||
// call (using the provided random number generator). If all keys have at most one value, the
|
||||
// function will always return the same set of key-value pairs for efficiency.
|
||||
func getKeyValueGeneratorFunc[
|
||||
T []types.KeyValue[string, string],
|
||||
](
|
||||
keyValueSlice []types.KeyValue[string, []string],
|
||||
localRand *rand.Rand,
|
||||
) func() T {
|
||||
keyValueGenerators := make([]keyValueGenerator, len(keyValueSlice))
|
||||
|
||||
funcMap := *utils.NewFuncMapGenerator(localRand).GetFuncMap()
|
||||
|
||||
for i, kv := range keyValueSlice {
|
||||
keyValueGenerators[i] = keyValueGenerator{
|
||||
key: getKeyFunc(kv.Key, funcMap),
|
||||
value: getValueFunc(kv.Value, funcMap, localRand),
|
||||
}
|
||||
}
|
||||
|
||||
return func() T {
|
||||
keyValues := make(T, len(keyValueGenerators))
|
||||
for i, keyValue := range keyValueGenerators {
|
||||
keyValues[i] = types.KeyValue[string, string]{
|
||||
Key: keyValue.key(),
|
||||
Value: keyValue.value(),
|
||||
}
|
||||
}
|
||||
return keyValues
|
||||
}
|
||||
}
|
||||
|
||||
// getKeyFunc creates a function that processes a key string through Go's template engine.
|
||||
// It takes a key string and a template.FuncMap containing the available template functions.
|
||||
//
|
||||
// The returned function, when called, will execute the template with the given key and return
|
||||
// the processed string result. If template parsing fails, the returned function will always
|
||||
// return an empty string.
|
||||
//
|
||||
// This enables dynamic generation of keys that can include template directives and functions.
|
||||
func getKeyFunc(key string, funcMap template.FuncMap) func() string {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(key)
|
||||
if err != nil {
|
||||
return func() string { return "" }
|
||||
}
|
||||
|
||||
return func() string {
|
||||
var buf bytes.Buffer
|
||||
_ = t.Execute(&buf, nil)
|
||||
return buf.String()
|
||||
}
|
||||
}
|
||||
|
||||
// getValueFunc creates a function that randomly selects and processes a value from a slice of strings
|
||||
// through Go's template engine.
|
||||
//
|
||||
// Parameters:
|
||||
// - values: A slice of string templates that can contain template directives
|
||||
// - funcMap: A template.FuncMap containing all available template functions
|
||||
// - localRand: A random number generator for consistent randomization
|
||||
//
|
||||
// The returned function, when called, will:
|
||||
// 1. Select a random template from the values slice
|
||||
// 2. Execute the selected template
|
||||
// 3. Return the processed string result
|
||||
//
|
||||
// If a selected template is nil (due to earlier parsing failure), the function will return an empty string.
|
||||
// This enables dynamic generation of values with randomized selection from multiple templates.
|
||||
func getValueFunc(
|
||||
values []string,
|
||||
funcMap template.FuncMap,
|
||||
localRand *rand.Rand,
|
||||
) func() string {
|
||||
templates := make([]*template.Template, len(values))
|
||||
|
||||
for i, value := range values {
|
||||
t, err := template.New("default").Funcs(funcMap).Parse(value)
|
||||
if err != nil {
|
||||
templates[i] = nil
|
||||
}
|
||||
templates[i] = t
|
||||
}
|
||||
|
||||
randomTemplateFunc := utils.RandomValueCycle(templates, localRand)
|
||||
|
||||
return func() string {
|
||||
if tmpl := randomTemplateFunc(); tmpl == nil {
|
||||
return ""
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
_ = tmpl.Execute(&buf, nil)
|
||||
return buf.String()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// getBodyValueFunc creates a function that randomly selects and processes a request body from a slice of templates.
|
||||
// It returns a closure that generates both the body content and the appropriate Content-Type header value.
|
||||
//
|
||||
// Parameters:
|
||||
// - values: A slice of string templates that can contain template directives for request bodies
|
||||
// - funcMapGenerator: Provides template functions and content type information
|
||||
// - localRand: A random number generator for consistent randomization
|
||||
//
|
||||
// The returned function, when called, will:
|
||||
// 1. Select a random body template from the values slice
|
||||
// 2. Execute the selected template with available template functions
|
||||
// 3. Return both the processed body string and the appropriate Content-Type header value
|
||||
//
|
||||
// If the selected template is nil (due to earlier parsing failure), the function will return
|
||||
// empty strings for both the body and Content-Type.
|
||||
//
|
||||
// This enables dynamic generation of request bodies with proper content type headers.
|
||||
func getBodyValueFunc(
|
||||
values []string,
|
||||
funcMapGenerator *utils.FuncMapGenerator,
|
||||
localRand *rand.Rand,
|
||||
) func() (string, string) {
|
||||
templates := make([]*template.Template, len(values))
|
||||
|
||||
for i, value := range values {
|
||||
t, err := template.New("default").Funcs(*funcMapGenerator.GetFuncMap()).Parse(value)
|
||||
if err != nil {
|
||||
templates[i] = nil
|
||||
}
|
||||
templates[i] = t
|
||||
}
|
||||
|
||||
randomTemplateFunc := utils.RandomValueCycle(templates, localRand)
|
||||
|
||||
return func() (string, string) {
|
||||
if tmpl := randomTemplateFunc(); tmpl == nil {
|
||||
return "", ""
|
||||
} else {
|
||||
var buf bytes.Buffer
|
||||
_ = tmpl.Execute(&buf, nil)
|
||||
return buf.String(), funcMapGenerator.GetBodyDataHeader()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,108 +0,0 @@
|
||||
package requests
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/jedib0t/go-pretty/v6/table"
|
||||
)
|
||||
|
||||
type Response struct {
|
||||
Response string
|
||||
Time time.Duration
|
||||
}
|
||||
|
||||
type Responses []*Response
|
||||
|
||||
// Print prints the responses in a tabular format, including information such as
|
||||
// response count, minimum time, maximum time, average time, and latency percentiles.
|
||||
func (responses Responses) Print() {
|
||||
total := struct {
|
||||
Count int
|
||||
Min time.Duration
|
||||
Max time.Duration
|
||||
Sum time.Duration
|
||||
P90 time.Duration
|
||||
P95 time.Duration
|
||||
P99 time.Duration
|
||||
}{
|
||||
Count: len(responses),
|
||||
Min: responses[0].Time,
|
||||
Max: responses[0].Time,
|
||||
}
|
||||
mergedResponses := make(map[string]types.Durations)
|
||||
var allDurations types.Durations
|
||||
|
||||
for _, response := range responses {
|
||||
if response.Time < total.Min {
|
||||
total.Min = response.Time
|
||||
}
|
||||
if response.Time > total.Max {
|
||||
total.Max = response.Time
|
||||
}
|
||||
total.Sum += response.Time
|
||||
|
||||
mergedResponses[response.Response] = append(
|
||||
mergedResponses[response.Response],
|
||||
response.Time,
|
||||
)
|
||||
allDurations = append(allDurations, response.Time)
|
||||
}
|
||||
allDurations.Sort()
|
||||
allDurationsLenAsFloat := float64(len(allDurations) - 1)
|
||||
total.P90 = allDurations[int(0.90*allDurationsLenAsFloat)]
|
||||
total.P95 = allDurations[int(0.95*allDurationsLenAsFloat)]
|
||||
total.P99 = allDurations[int(0.99*allDurationsLenAsFloat)]
|
||||
|
||||
t := table.NewWriter()
|
||||
t.SetOutputMirror(os.Stdout)
|
||||
t.SetStyle(table.StyleLight)
|
||||
t.SetColumnConfigs([]table.ColumnConfig{
|
||||
{Number: 1, WidthMax: 40},
|
||||
})
|
||||
t.AppendHeader(table.Row{
|
||||
"Response",
|
||||
"Count",
|
||||
"Min",
|
||||
"Max",
|
||||
"Average",
|
||||
"P90",
|
||||
"P95",
|
||||
"P99",
|
||||
})
|
||||
|
||||
var roundPrecision int64 = 4
|
||||
for key, durations := range mergedResponses {
|
||||
durations.Sort()
|
||||
durationsLen := len(durations)
|
||||
durationsLenAsFloat := float64(durationsLen - 1)
|
||||
|
||||
t.AppendRow(table.Row{
|
||||
key,
|
||||
durationsLen,
|
||||
utils.DurationRoundBy(*durations.First(), roundPrecision),
|
||||
utils.DurationRoundBy(*durations.Last(), roundPrecision),
|
||||
utils.DurationRoundBy(durations.Avg(), roundPrecision),
|
||||
utils.DurationRoundBy(durations[int(0.90*durationsLenAsFloat)], roundPrecision),
|
||||
utils.DurationRoundBy(durations[int(0.95*durationsLenAsFloat)], roundPrecision),
|
||||
utils.DurationRoundBy(durations[int(0.99*durationsLenAsFloat)], roundPrecision),
|
||||
})
|
||||
t.AppendSeparator()
|
||||
}
|
||||
|
||||
if len(mergedResponses) > 1 {
|
||||
t.AppendRow(table.Row{
|
||||
"Total",
|
||||
total.Count,
|
||||
utils.DurationRoundBy(total.Min, roundPrecision),
|
||||
utils.DurationRoundBy(total.Max, roundPrecision),
|
||||
utils.DurationRoundBy(total.Sum/time.Duration(total.Count), roundPrecision), // Average
|
||||
utils.DurationRoundBy(total.P90, roundPrecision),
|
||||
utils.DurationRoundBy(total.P95, roundPrecision),
|
||||
utils.DurationRoundBy(total.P99, roundPrecision),
|
||||
})
|
||||
}
|
||||
t.Render()
|
||||
}
|
||||
211
requests/run.go
211
requests/run.go
@@ -1,211 +0,0 @@
|
||||
package requests
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/aykhans/dodo/config"
|
||||
"github.com/aykhans/dodo/types"
|
||||
"github.com/aykhans/dodo/utils"
|
||||
"github.com/valyala/fasthttp"
|
||||
)
|
||||
|
||||
// Run executes the main logic for processing requests based on the provided configuration.
|
||||
// It initializes clients based on the request configuration and releases the dodos.
|
||||
// If the context is canceled and no responses are collected, it returns an interrupt error.
|
||||
//
|
||||
// Parameters:
|
||||
// - ctx: The context for managing request lifecycle and cancellation.
|
||||
// - requestConfig: The configuration for the request, including timeout, proxies, and other settings.
|
||||
func Run(ctx context.Context, requestConfig *config.RequestConfig) (Responses, error) {
|
||||
if requestConfig.Duration > 0 {
|
||||
var cancel context.CancelFunc
|
||||
ctx, cancel = context.WithTimeout(ctx, requestConfig.Duration)
|
||||
defer cancel()
|
||||
}
|
||||
|
||||
clients := getClients(
|
||||
ctx,
|
||||
requestConfig.Timeout,
|
||||
requestConfig.Proxies,
|
||||
requestConfig.GetMaxConns(fasthttp.DefaultMaxConnsPerHost),
|
||||
requestConfig.URL,
|
||||
requestConfig.SkipVerify,
|
||||
)
|
||||
if clients == nil {
|
||||
return nil, types.ErrInterrupt
|
||||
}
|
||||
|
||||
responses := releaseDodos(ctx, requestConfig, clients)
|
||||
if ctx.Err() != nil && len(responses) == 0 {
|
||||
return nil, types.ErrInterrupt
|
||||
}
|
||||
|
||||
return responses, nil
|
||||
}
|
||||
|
||||
// releaseDodos sends requests concurrently using multiple dodos (goroutines) and returns the aggregated responses.
|
||||
//
|
||||
// The function performs the following steps:
|
||||
// 1. Initializes wait groups and other necessary variables.
|
||||
// 2. Starts a goroutine to stream progress updates.
|
||||
// 3. Distributes the total request count among the dodos.
|
||||
// 4. Starts a goroutine for each dodo to send requests concurrently.
|
||||
// 5. Waits for all dodos to complete their requests.
|
||||
// 6. Cancels the progress streaming context and waits for the progress goroutine to finish.
|
||||
// 7. Flattens and returns the aggregated responses.
|
||||
func releaseDodos(
|
||||
ctx context.Context,
|
||||
requestConfig *config.RequestConfig,
|
||||
clients []*fasthttp.HostClient,
|
||||
) Responses {
|
||||
var (
|
||||
wg sync.WaitGroup
|
||||
streamWG sync.WaitGroup
|
||||
requestCountPerDodo uint
|
||||
dodosCount = requestConfig.GetValidDodosCountForRequests()
|
||||
responses = make([][]*Response, dodosCount)
|
||||
increase = make(chan int64, requestConfig.RequestCount)
|
||||
)
|
||||
|
||||
wg.Add(int(dodosCount))
|
||||
streamWG.Add(1)
|
||||
streamCtx, streamCtxCancel := context.WithCancel(ctx)
|
||||
|
||||
go streamProgress(streamCtx, &streamWG, requestConfig.RequestCount, "Dodos Working🔥", increase)
|
||||
|
||||
if requestConfig.RequestCount == 0 {
|
||||
for i := range dodosCount {
|
||||
go sendRequest(
|
||||
ctx,
|
||||
newRequest(*requestConfig, clients, int64(i)),
|
||||
requestConfig.Timeout,
|
||||
&responses[i],
|
||||
increase,
|
||||
&wg,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
for i := range dodosCount {
|
||||
if i+1 == dodosCount {
|
||||
requestCountPerDodo = requestConfig.RequestCount - (i * requestConfig.RequestCount / dodosCount)
|
||||
} else {
|
||||
requestCountPerDodo = ((i + 1) * requestConfig.RequestCount / dodosCount) -
|
||||
(i * requestConfig.RequestCount / dodosCount)
|
||||
}
|
||||
|
||||
go sendRequestByCount(
|
||||
ctx,
|
||||
newRequest(*requestConfig, clients, int64(i)),
|
||||
requestConfig.Timeout,
|
||||
requestCountPerDodo,
|
||||
&responses[i],
|
||||
increase,
|
||||
&wg,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
streamCtxCancel()
|
||||
streamWG.Wait()
|
||||
return utils.Flatten(responses)
|
||||
}
|
||||
|
||||
// sendRequestByCount sends a specified number of HTTP requests concurrently with a given timeout.
|
||||
// It appends the responses to the provided responseData slice and sends the count of completed requests
|
||||
// to the increase channel. The function terminates early if the context is canceled or if a custom
|
||||
// interrupt error is encountered.
|
||||
func sendRequestByCount(
|
||||
ctx context.Context,
|
||||
request *Request,
|
||||
timeout time.Duration,
|
||||
requestCount uint,
|
||||
responseData *[]*Response,
|
||||
increase chan<- int64,
|
||||
wg *sync.WaitGroup,
|
||||
) {
|
||||
defer wg.Done()
|
||||
|
||||
for range requestCount {
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
|
||||
func() {
|
||||
startTime := time.Now()
|
||||
response, err := request.Send(ctx, timeout)
|
||||
completedTime := time.Since(startTime)
|
||||
if response != nil {
|
||||
defer fasthttp.ReleaseResponse(response)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == types.ErrInterrupt {
|
||||
return
|
||||
}
|
||||
*responseData = append(*responseData, &Response{
|
||||
Response: err.Error(),
|
||||
Time: completedTime,
|
||||
})
|
||||
increase <- 1
|
||||
return
|
||||
}
|
||||
|
||||
*responseData = append(*responseData, &Response{
|
||||
Response: strconv.Itoa(response.StatusCode()),
|
||||
Time: completedTime,
|
||||
})
|
||||
increase <- 1
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
// sendRequest continuously sends HTTP requests until the context is canceled.
|
||||
// It records the response status code or error message along with the response time,
|
||||
// and signals each completed request through the increase channel.
|
||||
func sendRequest(
|
||||
ctx context.Context,
|
||||
request *Request,
|
||||
timeout time.Duration,
|
||||
responseData *[]*Response,
|
||||
increase chan<- int64,
|
||||
wg *sync.WaitGroup,
|
||||
) {
|
||||
defer wg.Done()
|
||||
|
||||
for {
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
|
||||
func() {
|
||||
startTime := time.Now()
|
||||
response, err := request.Send(ctx, timeout)
|
||||
completedTime := time.Since(startTime)
|
||||
if response != nil {
|
||||
defer fasthttp.ReleaseResponse(response)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == types.ErrInterrupt {
|
||||
return
|
||||
}
|
||||
*responseData = append(*responseData, &Response{
|
||||
Response: err.Error(),
|
||||
Time: completedTime,
|
||||
})
|
||||
increase <- 1
|
||||
return
|
||||
}
|
||||
|
||||
*responseData = append(*responseData, &Response{
|
||||
Response: strconv.Itoa(response.StatusCode()),
|
||||
Time: completedTime,
|
||||
})
|
||||
increase <- 1
|
||||
}()
|
||||
}
|
||||
}
|
||||
@@ -1,94 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type Body []string
|
||||
|
||||
func (body Body) String() string {
|
||||
var buffer bytes.Buffer
|
||||
if len(body) == 0 {
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
if len(body) == 1 {
|
||||
buffer.WriteString(body[0])
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
buffer.WriteString(text.FgBlue.Sprint("Random") + "[\n")
|
||||
|
||||
indent := " "
|
||||
|
||||
displayLimit := 5
|
||||
|
||||
for i, item := range body[:min(len(body), displayLimit)] {
|
||||
if i > 0 {
|
||||
buffer.WriteString(",\n")
|
||||
}
|
||||
|
||||
buffer.WriteString(indent + item)
|
||||
}
|
||||
|
||||
// Add remaining count if there are more items
|
||||
if remainingValues := len(body) - displayLimit; remainingValues > 0 {
|
||||
buffer.WriteString(",\n" + indent + text.FgGreen.Sprintf("+%d bodies", remainingValues))
|
||||
}
|
||||
|
||||
buffer.WriteString("\n]")
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
func (body *Body) UnmarshalJSON(b []byte) error {
|
||||
var data any
|
||||
if err := json.Unmarshal(b, &data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch v := data.(type) {
|
||||
case string:
|
||||
*body = []string{v}
|
||||
case []any:
|
||||
var slice []string
|
||||
for _, item := range v {
|
||||
slice = append(slice, fmt.Sprintf("%v", item))
|
||||
}
|
||||
*body = slice
|
||||
default:
|
||||
return fmt.Errorf("invalid type for Body: %T (should be string or []string)", v)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (body *Body) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var data any
|
||||
if err := unmarshal(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch v := data.(type) {
|
||||
case string:
|
||||
*body = []string{v}
|
||||
case []any:
|
||||
var slice []string
|
||||
for _, item := range v {
|
||||
slice = append(slice, fmt.Sprintf("%v", item))
|
||||
}
|
||||
*body = slice
|
||||
default:
|
||||
return fmt.Errorf("invalid type for Body: %T (should be string or []string)", v)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (body *Body) Set(value string) error {
|
||||
*body = append(*body, value)
|
||||
return nil
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
type FileLocationType int
|
||||
|
||||
const (
|
||||
FileLocationTypeLocal FileLocationType = iota
|
||||
FileLocationTypeRemoteHTTP
|
||||
)
|
||||
|
||||
type ConfigFile string
|
||||
|
||||
func (configFile ConfigFile) String() string {
|
||||
return string(configFile)
|
||||
}
|
||||
|
||||
func (configFile ConfigFile) LocationType() FileLocationType {
|
||||
if strings.HasPrefix(string(configFile), "http://") || strings.HasPrefix(string(configFile), "https://") {
|
||||
return FileLocationTypeRemoteHTTP
|
||||
}
|
||||
return FileLocationTypeLocal
|
||||
}
|
||||
|
||||
func (configFile ConfigFile) Extension() string {
|
||||
i := strings.LastIndex(configFile.String(), ".")
|
||||
if i == -1 {
|
||||
return ""
|
||||
}
|
||||
|
||||
return configFile.String()[i+1:]
|
||||
}
|
||||
139
types/cookies.go
139
types/cookies.go
@@ -1,139 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type Cookies []KeyValue[string, []string]
|
||||
|
||||
func (cookies Cookies) String() string {
|
||||
var buffer bytes.Buffer
|
||||
if len(cookies) == 0 {
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
indent := " "
|
||||
|
||||
displayLimit := 3
|
||||
|
||||
for i, item := range cookies[:min(len(cookies), displayLimit)] {
|
||||
if i > 0 {
|
||||
buffer.WriteString(",\n")
|
||||
}
|
||||
|
||||
if len(item.Value) == 1 {
|
||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
||||
continue
|
||||
}
|
||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
||||
|
||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
||||
if ii == len(item.Value)-1 {
|
||||
buffer.WriteString(indent + v + "\n")
|
||||
} else {
|
||||
buffer.WriteString(indent + v + ",\n")
|
||||
}
|
||||
}
|
||||
|
||||
// Add remaining values count if needed
|
||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
||||
}
|
||||
|
||||
buffer.WriteString("]")
|
||||
}
|
||||
|
||||
// Add remaining key-value pairs count if needed
|
||||
if remainingPairs := len(cookies) - displayLimit; remainingPairs > 0 {
|
||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d cookies", remainingPairs))
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
func (cookies *Cookies) AppendByKey(key, value string) {
|
||||
if item := cookies.GetValue(key); item != nil {
|
||||
*item = append(*item, value)
|
||||
} else {
|
||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
||||
}
|
||||
}
|
||||
|
||||
func (cookies Cookies) GetValue(key string) *[]string {
|
||||
for i := range cookies {
|
||||
if cookies[i].Key == key {
|
||||
return &cookies[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cookies *Cookies) UnmarshalJSON(b []byte) error {
|
||||
var data []map[string]any
|
||||
if err := json.Unmarshal(b, &data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, item := range data {
|
||||
for key, value := range item {
|
||||
switch parsedValue := value.(type) {
|
||||
case string:
|
||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
||||
case []any:
|
||||
parsedStr := make([]string, len(parsedValue))
|
||||
for i, item := range parsedValue {
|
||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
||||
}
|
||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
||||
default:
|
||||
return fmt.Errorf("unsupported type for cookies expected string or []string, got %T", parsedValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cookies *Cookies) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var raw []map[string]any
|
||||
if err := unmarshal(&raw); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, param := range raw {
|
||||
for key, value := range param {
|
||||
switch parsed := value.(type) {
|
||||
case string:
|
||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
||||
case []any:
|
||||
var values []string
|
||||
for _, v := range parsed {
|
||||
if str, ok := v.(string); ok {
|
||||
values = append(values, str)
|
||||
}
|
||||
}
|
||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: values})
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cookies *Cookies) Set(value string) error {
|
||||
parts := strings.SplitN(value, "=", 2)
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
cookies.AppendByKey("", "")
|
||||
case 1:
|
||||
cookies.AppendByKey(parts[0], "")
|
||||
case 2:
|
||||
cookies.AppendByKey(parts[0], parts[1])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,57 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (duration *Duration) UnmarshalJSON(b []byte) error {
|
||||
var v any
|
||||
if err := json.Unmarshal(b, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
duration.Duration = time.Duration(value)
|
||||
return nil
|
||||
case string:
|
||||
var err error
|
||||
duration.Duration, err = time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
}
|
||||
|
||||
func (duration Duration) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(duration.String())
|
||||
}
|
||||
|
||||
func (duration *Duration) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var v any
|
||||
if err := unmarshal(&v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
duration.Duration = time.Duration(value)
|
||||
return nil
|
||||
case string:
|
||||
var err error
|
||||
duration.Duration, err = time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Durations []time.Duration
|
||||
|
||||
func (d Durations) Sort(ascending ...bool) {
|
||||
// If ascending is provided and is false, sort in descending order
|
||||
if len(ascending) > 0 && ascending[0] == false {
|
||||
sort.Slice(d, func(i, j int) bool {
|
||||
return d[i] > d[j]
|
||||
})
|
||||
} else { // Otherwise, sort in ascending order
|
||||
sort.Slice(d, func(i, j int) bool {
|
||||
return d[i] < d[j]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (d Durations) First() *time.Duration {
|
||||
return &d[0]
|
||||
}
|
||||
|
||||
func (d Durations) Last() *time.Duration {
|
||||
return &d[len(d)-1]
|
||||
}
|
||||
|
||||
func (d Durations) Sum() time.Duration {
|
||||
sum := time.Duration(0)
|
||||
for _, duration := range d {
|
||||
sum += duration
|
||||
}
|
||||
return sum
|
||||
}
|
||||
|
||||
func (d Durations) Avg() time.Duration {
|
||||
return d.Sum() / time.Duration(len(d))
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInterrupt = errors.New("interrupted")
|
||||
ErrTimeout = errors.New("timeout")
|
||||
)
|
||||
156
types/headers.go
156
types/headers.go
@@ -1,156 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type Headers []KeyValue[string, []string]
|
||||
|
||||
func (headers Headers) String() string {
|
||||
var buffer bytes.Buffer
|
||||
if len(headers) == 0 {
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
indent := " "
|
||||
|
||||
displayLimit := 3
|
||||
|
||||
for i, item := range headers[:min(len(headers), displayLimit)] {
|
||||
if i > 0 {
|
||||
buffer.WriteString(",\n")
|
||||
}
|
||||
|
||||
if len(item.Value) == 1 {
|
||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
||||
continue
|
||||
}
|
||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
||||
|
||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
||||
if ii == len(item.Value)-1 {
|
||||
buffer.WriteString(indent + v + "\n")
|
||||
} else {
|
||||
buffer.WriteString(indent + v + ",\n")
|
||||
}
|
||||
}
|
||||
|
||||
// Add remaining values count if needed
|
||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
||||
}
|
||||
|
||||
buffer.WriteString("]")
|
||||
}
|
||||
|
||||
// Add remaining key-value pairs count if needed
|
||||
if remainingPairs := len(headers) - displayLimit; remainingPairs > 0 {
|
||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d headers", remainingPairs))
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
func (headers *Headers) AppendByKey(key, value string) {
|
||||
if item := headers.GetValue(key); item != nil {
|
||||
*item = append(*item, value)
|
||||
} else {
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
||||
}
|
||||
}
|
||||
|
||||
func (headers Headers) GetValue(key string) *[]string {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return &headers[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers Headers) Has(key string) bool {
|
||||
for i := range headers {
|
||||
if headers[i].Key == key {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (headers *Headers) UnmarshalJSON(b []byte) error {
|
||||
var data []map[string]any
|
||||
if err := json.Unmarshal(b, &data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, item := range data {
|
||||
for key, value := range item {
|
||||
switch parsedValue := value.(type) {
|
||||
case string:
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
||||
case []any:
|
||||
parsedStr := make([]string, len(parsedValue))
|
||||
for i, item := range parsedValue {
|
||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
||||
}
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
||||
default:
|
||||
return fmt.Errorf("unsupported type for headers expected string or []string, got %T", parsedValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers *Headers) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var raw []map[string]any
|
||||
if err := unmarshal(&raw); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, param := range raw {
|
||||
for key, value := range param {
|
||||
switch parsed := value.(type) {
|
||||
case string:
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
||||
case []any:
|
||||
var values []string
|
||||
for _, v := range parsed {
|
||||
if str, ok := v.(string); ok {
|
||||
values = append(values, str)
|
||||
}
|
||||
}
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: values})
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers *Headers) Set(value string) error {
|
||||
parts := strings.SplitN(value, ":", 2)
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
headers.AppendByKey("", "")
|
||||
case 1:
|
||||
headers.AppendByKey(parts[0], "")
|
||||
case 2:
|
||||
headers.AppendByKey(parts[0], parts[1])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (headers *Headers) SetIfNotExists(key string, value string) bool {
|
||||
if headers.Has(key) {
|
||||
return false
|
||||
}
|
||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
||||
return true
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
package types
|
||||
|
||||
type KeyValue[K comparable, V any] struct {
|
||||
Key K
|
||||
Value V
|
||||
}
|
||||
139
types/params.go
139
types/params.go
@@ -1,139 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type Params []KeyValue[string, []string]
|
||||
|
||||
func (params Params) String() string {
|
||||
var buffer bytes.Buffer
|
||||
if len(params) == 0 {
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
indent := " "
|
||||
|
||||
displayLimit := 3
|
||||
|
||||
for i, item := range params[:min(len(params), displayLimit)] {
|
||||
if i > 0 {
|
||||
buffer.WriteString(",\n")
|
||||
}
|
||||
|
||||
if len(item.Value) == 1 {
|
||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
||||
continue
|
||||
}
|
||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
||||
|
||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
||||
if ii == len(item.Value)-1 {
|
||||
buffer.WriteString(indent + v + "\n")
|
||||
} else {
|
||||
buffer.WriteString(indent + v + ",\n")
|
||||
}
|
||||
}
|
||||
|
||||
// Add remaining values count if needed
|
||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
||||
}
|
||||
|
||||
buffer.WriteString("]")
|
||||
}
|
||||
|
||||
// Add remaining key-value pairs count if needed
|
||||
if remainingPairs := len(params) - displayLimit; remainingPairs > 0 {
|
||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d params", remainingPairs))
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
func (params *Params) AppendByKey(key, value string) {
|
||||
if item := params.GetValue(key); item != nil {
|
||||
*item = append(*item, value)
|
||||
} else {
|
||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
||||
}
|
||||
}
|
||||
|
||||
func (params Params) GetValue(key string) *[]string {
|
||||
for i := range params {
|
||||
if params[i].Key == key {
|
||||
return ¶ms[i].Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (params *Params) UnmarshalJSON(b []byte) error {
|
||||
var data []map[string]any
|
||||
if err := json.Unmarshal(b, &data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, item := range data {
|
||||
for key, value := range item {
|
||||
switch parsedValue := value.(type) {
|
||||
case string:
|
||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
||||
case []any:
|
||||
parsedStr := make([]string, len(parsedValue))
|
||||
for i, item := range parsedValue {
|
||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
||||
}
|
||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
||||
default:
|
||||
return fmt.Errorf("unsupported type for params expected string or []string, got %T", parsedValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (params *Params) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var raw []map[string]any
|
||||
if err := unmarshal(&raw); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, param := range raw {
|
||||
for key, value := range param {
|
||||
switch parsed := value.(type) {
|
||||
case string:
|
||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
||||
case []any:
|
||||
var values []string
|
||||
for _, v := range parsed {
|
||||
if str, ok := v.(string); ok {
|
||||
values = append(values, str)
|
||||
}
|
||||
}
|
||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: values})
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (params *Params) Set(value string) error {
|
||||
parts := strings.SplitN(value, "=", 2)
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
params.AppendByKey("", "")
|
||||
case 1:
|
||||
params.AppendByKey(parts[0], "")
|
||||
case 2:
|
||||
params.AppendByKey(parts[0], parts[1])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
116
types/proxies.go
116
types/proxies.go
@@ -1,116 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/url"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type Proxies []url.URL
|
||||
|
||||
func (proxies Proxies) String() string {
|
||||
var buffer bytes.Buffer
|
||||
if len(proxies) == 0 {
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
if len(proxies) == 1 {
|
||||
buffer.WriteString(proxies[0].String())
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
buffer.WriteString(text.FgBlue.Sprint("Random") + "[\n")
|
||||
|
||||
indent := " "
|
||||
|
||||
displayLimit := 5
|
||||
|
||||
for i, item := range proxies[:min(len(proxies), displayLimit)] {
|
||||
if i > 0 {
|
||||
buffer.WriteString(",\n")
|
||||
}
|
||||
|
||||
buffer.WriteString(indent + item.String())
|
||||
}
|
||||
|
||||
// Add remaining count if there are more items
|
||||
if remainingValues := len(proxies) - displayLimit; remainingValues > 0 {
|
||||
buffer.WriteString(",\n" + indent + text.FgGreen.Sprintf("+%d proxies", remainingValues))
|
||||
}
|
||||
|
||||
buffer.WriteString("\n]")
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
func (proxies *Proxies) UnmarshalJSON(b []byte) error {
|
||||
var data any
|
||||
if err := json.Unmarshal(b, &data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch v := data.(type) {
|
||||
case string:
|
||||
parsed, err := url.Parse(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*proxies = []url.URL{*parsed}
|
||||
case []any:
|
||||
var urls []url.URL
|
||||
for _, item := range v {
|
||||
url, err := url.Parse(item.(string))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
urls = append(urls, *url)
|
||||
}
|
||||
*proxies = urls
|
||||
default:
|
||||
return fmt.Errorf("invalid type for Body: %T (should be URL or []URL)", v)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (proxies *Proxies) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var data any
|
||||
if err := unmarshal(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch v := data.(type) {
|
||||
case string:
|
||||
parsed, err := url.Parse(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*proxies = []url.URL{*parsed}
|
||||
case []any:
|
||||
var urls []url.URL
|
||||
for _, item := range v {
|
||||
url, err := url.Parse(item.(string))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
urls = append(urls, *url)
|
||||
}
|
||||
*proxies = urls
|
||||
default:
|
||||
return fmt.Errorf("invalid type for Body: %T (should be URL or []URL)", v)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (proxies *Proxies) Set(value string) error {
|
||||
parsedURL, err := url.Parse(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*proxies = append(*proxies, *parsedURL)
|
||||
return nil
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type RequestURL struct {
|
||||
url.URL
|
||||
}
|
||||
|
||||
func (requestURL *RequestURL) UnmarshalJSON(data []byte) error {
|
||||
var urlStr string
|
||||
if err := json.Unmarshal(data, &urlStr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
parsedURL, err := url.Parse(urlStr)
|
||||
if err != nil {
|
||||
return errors.New("request URL is invalid")
|
||||
}
|
||||
|
||||
requestURL.URL = *parsedURL
|
||||
return nil
|
||||
}
|
||||
|
||||
func (requestURL *RequestURL) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var urlStr string
|
||||
if err := unmarshal(&urlStr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
parsedURL, err := url.Parse(urlStr)
|
||||
if err != nil {
|
||||
return errors.New("request URL is invalid")
|
||||
}
|
||||
|
||||
requestURL.URL = *parsedURL
|
||||
return nil
|
||||
}
|
||||
|
||||
func (requestURL RequestURL) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(requestURL.URL.String())
|
||||
}
|
||||
|
||||
func (requestURL RequestURL) String() string {
|
||||
return requestURL.URL.String()
|
||||
}
|
||||
|
||||
func (requestURL *RequestURL) Set(value string) error {
|
||||
parsedURL, err := url.Parse(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
requestURL.URL = *parsedURL
|
||||
return nil
|
||||
}
|
||||
@@ -1,57 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Timeout struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (timeout *Timeout) UnmarshalJSON(b []byte) error {
|
||||
var v any
|
||||
if err := json.Unmarshal(b, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
timeout.Duration = time.Duration(value)
|
||||
return nil
|
||||
case string:
|
||||
var err error
|
||||
timeout.Duration, err = time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
}
|
||||
|
||||
func (timeout Timeout) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(timeout.String())
|
||||
}
|
||||
|
||||
func (timeout *Timeout) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var v any
|
||||
if err := unmarshal(&v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
timeout.Duration = time.Duration(value)
|
||||
return nil
|
||||
case string:
|
||||
var err error
|
||||
timeout.Duration, err = time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
package utils
|
||||
|
||||
func IsNilOrZero[T comparable](value *T) bool {
|
||||
if value == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
var zero T
|
||||
return *value == zero
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
package utils
|
||||
|
||||
func ToPtr[T any](value T) *T {
|
||||
return &value
|
||||
}
|
||||
21
utils/int.go
21
utils/int.go
@@ -1,21 +0,0 @@
|
||||
package utils
|
||||
|
||||
type Number interface {
|
||||
int | int8 | int16 | int32 | int64
|
||||
}
|
||||
|
||||
func NumLen[T Number](n T) T {
|
||||
if n < 0 {
|
||||
n = -n
|
||||
}
|
||||
if n == 0 {
|
||||
return 1
|
||||
}
|
||||
|
||||
var count T = 0
|
||||
for n > 0 {
|
||||
n /= 10
|
||||
count++
|
||||
}
|
||||
return count
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
func PrintErr(err error) {
|
||||
fmt.Fprintln(os.Stderr, text.FgRed.Sprint(err.Error()))
|
||||
}
|
||||
|
||||
func PrintErrAndExit(err error) {
|
||||
if err != nil {
|
||||
PrintErr(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func PrintAndExit(message string) {
|
||||
fmt.Println(message)
|
||||
os.Exit(0)
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
package utils
|
||||
|
||||
import "math/rand"
|
||||
|
||||
func Flatten[T any](nested [][]*T) []*T {
|
||||
flattened := make([]*T, 0)
|
||||
for _, n := range nested {
|
||||
flattened = append(flattened, n...)
|
||||
}
|
||||
return flattened
|
||||
}
|
||||
|
||||
// RandomValueCycle returns a function that cycles through the provided values in a pseudo-random order.
|
||||
// Each value in the input slice will be returned before any value is repeated.
|
||||
// If the input slice is empty, the returned function will always return the zero value of type T.
|
||||
// If the input slice contains only one element, that element is always returned.
|
||||
// This function is not thread-safe and should not be called concurrently.
|
||||
func RandomValueCycle[T any](values []T, localRand *rand.Rand) func() T {
|
||||
switch valuesLen := len(values); valuesLen {
|
||||
case 0:
|
||||
var zero T
|
||||
return func() T { return zero }
|
||||
case 1:
|
||||
return func() T { return values[0] }
|
||||
default:
|
||||
currentIndex := localRand.Intn(valuesLen)
|
||||
stopIndex := currentIndex
|
||||
return func() T {
|
||||
value := values[currentIndex]
|
||||
currentIndex++
|
||||
if currentIndex == valuesLen {
|
||||
currentIndex = 0
|
||||
}
|
||||
if currentIndex == stopIndex {
|
||||
currentIndex = localRand.Intn(valuesLen)
|
||||
stopIndex = currentIndex
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,479 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"math/rand"
|
||||
"mime/multipart"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/brianvoe/gofakeit/v7"
|
||||
)
|
||||
|
||||
type FuncMapGenerator struct {
|
||||
bodyDataHeader string
|
||||
localFaker *gofakeit.Faker
|
||||
funcMap *template.FuncMap
|
||||
}
|
||||
|
||||
func NewFuncMapGenerator(localRand *rand.Rand) *FuncMapGenerator {
|
||||
f := &FuncMapGenerator{
|
||||
localFaker: gofakeit.NewFaker(localRand, false),
|
||||
}
|
||||
f.funcMap = f.newFuncMap()
|
||||
|
||||
return f
|
||||
}
|
||||
|
||||
func (g *FuncMapGenerator) GetBodyDataHeader() string {
|
||||
tempHeader := g.bodyDataHeader
|
||||
g.bodyDataHeader = ""
|
||||
return tempHeader
|
||||
}
|
||||
|
||||
func (g *FuncMapGenerator) GetFuncMap() *template.FuncMap {
|
||||
return g.funcMap
|
||||
}
|
||||
|
||||
// NewFuncMap creates a template.FuncMap populated with string manipulation functions
|
||||
// and data generation functions from gofakeit.
|
||||
//
|
||||
// It takes a random number generator that is used to initialize a localized faker
|
||||
// instance, ensuring that random data generation is deterministic within a request context.
|
||||
//
|
||||
// All functions are prefixed to avoid naming conflicts:
|
||||
// - String functions: "strings_*"
|
||||
// - Dict functions: "dict_*"
|
||||
// - Body functions: "body_*"
|
||||
// - Data generation functions: "fakeit_*"
|
||||
func (g *FuncMapGenerator) newFuncMap() *template.FuncMap {
|
||||
return &template.FuncMap{
|
||||
// Strings
|
||||
"strings_ToUpper": strings.ToUpper,
|
||||
"strings_ToLower": strings.ToLower,
|
||||
"strings_RemoveSpaces": func(s string) string { return strings.ReplaceAll(s, " ", "") },
|
||||
"strings_Replace": strings.Replace,
|
||||
"strings_ToDate": func(dateString string) time.Time {
|
||||
date, err := time.Parse("2006-01-02", dateString)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return date
|
||||
},
|
||||
"strings_First": func(s string, n int) string {
|
||||
if n >= len(s) {
|
||||
return s
|
||||
}
|
||||
return s[:n]
|
||||
},
|
||||
"strings_Last": func(s string, n int) string {
|
||||
if n >= len(s) {
|
||||
return s
|
||||
}
|
||||
return s[len(s)-n:]
|
||||
},
|
||||
"strings_Truncate": func(s string, n int) string {
|
||||
if n >= len(s) {
|
||||
return s
|
||||
}
|
||||
return s[:n] + "..."
|
||||
},
|
||||
"strings_TrimPrefix": strings.TrimPrefix,
|
||||
"strings_TrimSuffix": strings.TrimSuffix,
|
||||
"strings_Join": func(sep string, values ...string) string {
|
||||
return strings.Join(values, sep)
|
||||
},
|
||||
|
||||
// Dict
|
||||
"dict_Str": func(values ...string) map[string]string {
|
||||
dict := make(map[string]string)
|
||||
for i := 0; i < len(values); i += 2 {
|
||||
if i+1 < len(values) {
|
||||
key := values[i]
|
||||
value := values[i+1]
|
||||
dict[key] = value
|
||||
}
|
||||
}
|
||||
return dict
|
||||
},
|
||||
|
||||
// Slice
|
||||
"slice_Str": func(values ...string) []string { return values },
|
||||
"slice_Int": func(values ...int) []int { return values },
|
||||
"slice_Uint": func(values ...uint) []uint { return values },
|
||||
|
||||
// Body
|
||||
"body_FormData": func(kv map[string]string) string {
|
||||
var data bytes.Buffer
|
||||
writer := multipart.NewWriter(&data)
|
||||
|
||||
for k, v := range kv {
|
||||
_ = writer.WriteField(k, v)
|
||||
}
|
||||
|
||||
_ = writer.Close()
|
||||
g.bodyDataHeader = writer.FormDataContentType()
|
||||
|
||||
return data.String()
|
||||
},
|
||||
|
||||
// FakeIt / Product
|
||||
"fakeit_ProductName": g.localFaker.ProductName,
|
||||
"fakeit_ProductDescription": g.localFaker.ProductDescription,
|
||||
"fakeit_ProductCategory": g.localFaker.ProductCategory,
|
||||
"fakeit_ProductFeature": g.localFaker.ProductFeature,
|
||||
"fakeit_ProductMaterial": g.localFaker.ProductMaterial,
|
||||
"fakeit_ProductUPC": g.localFaker.ProductUPC,
|
||||
"fakeit_ProductAudience": g.localFaker.ProductAudience,
|
||||
"fakeit_ProductDimension": g.localFaker.ProductDimension,
|
||||
"fakeit_ProductUseCase": g.localFaker.ProductUseCase,
|
||||
"fakeit_ProductBenefit": g.localFaker.ProductBenefit,
|
||||
"fakeit_ProductSuffix": g.localFaker.ProductSuffix,
|
||||
|
||||
// FakeIt / Person
|
||||
"fakeit_Name": g.localFaker.Name,
|
||||
"fakeit_NamePrefix": g.localFaker.NamePrefix,
|
||||
"fakeit_NameSuffix": g.localFaker.NameSuffix,
|
||||
"fakeit_FirstName": g.localFaker.FirstName,
|
||||
"fakeit_MiddleName": g.localFaker.MiddleName,
|
||||
"fakeit_LastName": g.localFaker.LastName,
|
||||
"fakeit_Gender": g.localFaker.Gender,
|
||||
"fakeit_SSN": g.localFaker.SSN,
|
||||
"fakeit_Hobby": g.localFaker.Hobby,
|
||||
"fakeit_Email": g.localFaker.Email,
|
||||
"fakeit_Phone": g.localFaker.Phone,
|
||||
"fakeit_PhoneFormatted": g.localFaker.PhoneFormatted,
|
||||
|
||||
// FakeIt / Auth
|
||||
"fakeit_Username": g.localFaker.Username,
|
||||
"fakeit_Password": g.localFaker.Password,
|
||||
|
||||
// FakeIt / Address
|
||||
"fakeit_City": g.localFaker.City,
|
||||
"fakeit_Country": g.localFaker.Country,
|
||||
"fakeit_CountryAbr": g.localFaker.CountryAbr,
|
||||
"fakeit_State": g.localFaker.State,
|
||||
"fakeit_StateAbr": g.localFaker.StateAbr,
|
||||
"fakeit_Street": g.localFaker.Street,
|
||||
"fakeit_StreetName": g.localFaker.StreetName,
|
||||
"fakeit_StreetNumber": g.localFaker.StreetNumber,
|
||||
"fakeit_StreetPrefix": g.localFaker.StreetPrefix,
|
||||
"fakeit_StreetSuffix": g.localFaker.StreetSuffix,
|
||||
"fakeit_Zip": g.localFaker.Zip,
|
||||
"fakeit_Latitude": g.localFaker.Latitude,
|
||||
"fakeit_LatitudeInRange": func(min, max float64) float64 {
|
||||
value, err := g.localFaker.LatitudeInRange(min, max)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
"fakeit_Longitude": g.localFaker.Longitude,
|
||||
"fakeit_LongitudeInRange": func(min, max float64) float64 {
|
||||
value, err := g.localFaker.LongitudeInRange(min, max)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
|
||||
// FakeIt / Game
|
||||
"fakeit_Gamertag": g.localFaker.Gamertag,
|
||||
|
||||
// FakeIt / Beer
|
||||
"fakeit_BeerAlcohol": g.localFaker.BeerAlcohol,
|
||||
"fakeit_BeerBlg": g.localFaker.BeerBlg,
|
||||
"fakeit_BeerHop": g.localFaker.BeerHop,
|
||||
"fakeit_BeerIbu": g.localFaker.BeerIbu,
|
||||
"fakeit_BeerMalt": g.localFaker.BeerMalt,
|
||||
"fakeit_BeerName": g.localFaker.BeerName,
|
||||
"fakeit_BeerStyle": g.localFaker.BeerStyle,
|
||||
"fakeit_BeerYeast": g.localFaker.BeerYeast,
|
||||
|
||||
// FakeIt / Car
|
||||
"fakeit_CarMaker": g.localFaker.CarMaker,
|
||||
"fakeit_CarModel": g.localFaker.CarModel,
|
||||
"fakeit_CarType": g.localFaker.CarType,
|
||||
"fakeit_CarFuelType": g.localFaker.CarFuelType,
|
||||
"fakeit_CarTransmissionType": g.localFaker.CarTransmissionType,
|
||||
|
||||
// FakeIt / Words
|
||||
"fakeit_Noun": g.localFaker.Noun,
|
||||
"fakeit_NounCommon": g.localFaker.NounCommon,
|
||||
"fakeit_NounConcrete": g.localFaker.NounConcrete,
|
||||
"fakeit_NounAbstract": g.localFaker.NounAbstract,
|
||||
"fakeit_NounCollectivePeople": g.localFaker.NounCollectivePeople,
|
||||
"fakeit_NounCollectiveAnimal": g.localFaker.NounCollectiveAnimal,
|
||||
"fakeit_NounCollectiveThing": g.localFaker.NounCollectiveThing,
|
||||
"fakeit_NounCountable": g.localFaker.NounCountable,
|
||||
"fakeit_NounUncountable": g.localFaker.NounUncountable,
|
||||
"fakeit_Verb": g.localFaker.Verb,
|
||||
"fakeit_VerbAction": g.localFaker.VerbAction,
|
||||
"fakeit_VerbLinking": g.localFaker.VerbLinking,
|
||||
"fakeit_VerbHelping": g.localFaker.VerbHelping,
|
||||
"fakeit_Adverb": g.localFaker.Adverb,
|
||||
"fakeit_AdverbManner": g.localFaker.AdverbManner,
|
||||
"fakeit_AdverbDegree": g.localFaker.AdverbDegree,
|
||||
"fakeit_AdverbPlace": g.localFaker.AdverbPlace,
|
||||
"fakeit_AdverbTimeDefinite": g.localFaker.AdverbTimeDefinite,
|
||||
"fakeit_AdverbTimeIndefinite": g.localFaker.AdverbTimeIndefinite,
|
||||
"fakeit_AdverbFrequencyDefinite": g.localFaker.AdverbFrequencyDefinite,
|
||||
"fakeit_AdverbFrequencyIndefinite": g.localFaker.AdverbFrequencyIndefinite,
|
||||
"fakeit_Preposition": g.localFaker.Preposition,
|
||||
"fakeit_PrepositionSimple": g.localFaker.PrepositionSimple,
|
||||
"fakeit_PrepositionDouble": g.localFaker.PrepositionDouble,
|
||||
"fakeit_PrepositionCompound": g.localFaker.PrepositionCompound,
|
||||
"fakeit_Adjective": g.localFaker.Adjective,
|
||||
"fakeit_AdjectiveDescriptive": g.localFaker.AdjectiveDescriptive,
|
||||
"fakeit_AdjectiveQuantitative": g.localFaker.AdjectiveQuantitative,
|
||||
"fakeit_AdjectiveProper": g.localFaker.AdjectiveProper,
|
||||
"fakeit_AdjectiveDemonstrative": g.localFaker.AdjectiveDemonstrative,
|
||||
"fakeit_AdjectivePossessive": g.localFaker.AdjectivePossessive,
|
||||
"fakeit_AdjectiveInterrogative": g.localFaker.AdjectiveInterrogative,
|
||||
"fakeit_AdjectiveIndefinite": g.localFaker.AdjectiveIndefinite,
|
||||
"fakeit_Pronoun": g.localFaker.Pronoun,
|
||||
"fakeit_PronounPersonal": g.localFaker.PronounPersonal,
|
||||
"fakeit_PronounObject": g.localFaker.PronounObject,
|
||||
"fakeit_PronounPossessive": g.localFaker.PronounPossessive,
|
||||
"fakeit_PronounReflective": g.localFaker.PronounReflective,
|
||||
"fakeit_PronounDemonstrative": g.localFaker.PronounDemonstrative,
|
||||
"fakeit_PronounInterrogative": g.localFaker.PronounInterrogative,
|
||||
"fakeit_PronounRelative": g.localFaker.PronounRelative,
|
||||
"fakeit_Connective": g.localFaker.Connective,
|
||||
"fakeit_ConnectiveTime": g.localFaker.ConnectiveTime,
|
||||
"fakeit_ConnectiveComparative": g.localFaker.ConnectiveComparative,
|
||||
"fakeit_ConnectiveComplaint": g.localFaker.ConnectiveComplaint,
|
||||
"fakeit_ConnectiveListing": g.localFaker.ConnectiveListing,
|
||||
"fakeit_ConnectiveCasual": g.localFaker.ConnectiveCasual,
|
||||
"fakeit_ConnectiveExamplify": g.localFaker.ConnectiveExamplify,
|
||||
"fakeit_Word": g.localFaker.Word,
|
||||
"fakeit_Sentence": g.localFaker.Sentence,
|
||||
"fakeit_Paragraph": g.localFaker.Paragraph,
|
||||
"fakeit_LoremIpsumWord": g.localFaker.LoremIpsumWord,
|
||||
"fakeit_LoremIpsumSentence": g.localFaker.LoremIpsumSentence,
|
||||
"fakeit_LoremIpsumParagraph": g.localFaker.LoremIpsumParagraph,
|
||||
"fakeit_Question": g.localFaker.Question,
|
||||
"fakeit_Quote": g.localFaker.Quote,
|
||||
"fakeit_Phrase": g.localFaker.Phrase,
|
||||
|
||||
// FakeIt / Foods
|
||||
"fakeit_Fruit": g.localFaker.Fruit,
|
||||
"fakeit_Vegetable": g.localFaker.Vegetable,
|
||||
"fakeit_Breakfast": g.localFaker.Breakfast,
|
||||
"fakeit_Lunch": g.localFaker.Lunch,
|
||||
"fakeit_Dinner": g.localFaker.Dinner,
|
||||
"fakeit_Snack": g.localFaker.Snack,
|
||||
"fakeit_Dessert": g.localFaker.Dessert,
|
||||
|
||||
// FakeIt / Misc
|
||||
"fakeit_Bool": g.localFaker.Bool,
|
||||
"fakeit_UUID": g.localFaker.UUID,
|
||||
"fakeit_FlipACoin": g.localFaker.FlipACoin,
|
||||
|
||||
// FakeIt / Colors
|
||||
"fakeit_Color": g.localFaker.Color,
|
||||
"fakeit_HexColor": g.localFaker.HexColor,
|
||||
"fakeit_RGBColor": g.localFaker.RGBColor,
|
||||
"fakeit_SafeColor": g.localFaker.SafeColor,
|
||||
"fakeit_NiceColors": g.localFaker.NiceColors,
|
||||
|
||||
// FakeIt / Internet
|
||||
"fakeit_URL": g.localFaker.URL,
|
||||
"fakeit_DomainName": g.localFaker.DomainName,
|
||||
"fakeit_DomainSuffix": g.localFaker.DomainSuffix,
|
||||
"fakeit_IPv4Address": g.localFaker.IPv4Address,
|
||||
"fakeit_IPv6Address": g.localFaker.IPv6Address,
|
||||
"fakeit_MacAddress": g.localFaker.MacAddress,
|
||||
"fakeit_HTTPStatusCode": g.localFaker.HTTPStatusCode,
|
||||
"fakeit_HTTPStatusCodeSimple": g.localFaker.HTTPStatusCodeSimple,
|
||||
"fakeit_LogLevel": g.localFaker.LogLevel,
|
||||
"fakeit_HTTPMethod": g.localFaker.HTTPMethod,
|
||||
"fakeit_HTTPVersion": g.localFaker.HTTPVersion,
|
||||
"fakeit_UserAgent": g.localFaker.UserAgent,
|
||||
"fakeit_ChromeUserAgent": g.localFaker.ChromeUserAgent,
|
||||
"fakeit_FirefoxUserAgent": g.localFaker.FirefoxUserAgent,
|
||||
"fakeit_OperaUserAgent": g.localFaker.OperaUserAgent,
|
||||
"fakeit_SafariUserAgent": g.localFaker.SafariUserAgent,
|
||||
|
||||
// FakeIt / HTML
|
||||
"fakeit_InputName": g.localFaker.InputName,
|
||||
|
||||
// FakeIt / Date/Time
|
||||
"fakeit_Date": g.localFaker.Date,
|
||||
"fakeit_PastDate": g.localFaker.PastDate,
|
||||
"fakeit_FutureDate": g.localFaker.FutureDate,
|
||||
"fakeit_DateRange": g.localFaker.DateRange,
|
||||
"fakeit_NanoSecond": g.localFaker.NanoSecond,
|
||||
"fakeit_Second": g.localFaker.Second,
|
||||
"fakeit_Minute": g.localFaker.Minute,
|
||||
"fakeit_Hour": g.localFaker.Hour,
|
||||
"fakeit_Month": g.localFaker.Month,
|
||||
"fakeit_MonthString": g.localFaker.MonthString,
|
||||
"fakeit_Day": g.localFaker.Day,
|
||||
"fakeit_WeekDay": g.localFaker.WeekDay,
|
||||
"fakeit_Year": g.localFaker.Year,
|
||||
"fakeit_TimeZone": g.localFaker.TimeZone,
|
||||
"fakeit_TimeZoneAbv": g.localFaker.TimeZoneAbv,
|
||||
"fakeit_TimeZoneFull": g.localFaker.TimeZoneFull,
|
||||
"fakeit_TimeZoneOffset": g.localFaker.TimeZoneOffset,
|
||||
"fakeit_TimeZoneRegion": g.localFaker.TimeZoneRegion,
|
||||
|
||||
// FakeIt / Payment
|
||||
"fakeit_Price": g.localFaker.Price,
|
||||
"fakeit_CreditCardCvv": g.localFaker.CreditCardCvv,
|
||||
"fakeit_CreditCardExp": g.localFaker.CreditCardExp,
|
||||
"fakeit_CreditCardNumber": g.localFaker.CreditCardNumber,
|
||||
"fakeit_CreditCardType": g.localFaker.CreditCardType,
|
||||
"fakeit_CurrencyLong": g.localFaker.CurrencyLong,
|
||||
"fakeit_CurrencyShort": g.localFaker.CurrencyShort,
|
||||
"fakeit_AchRouting": g.localFaker.AchRouting,
|
||||
"fakeit_AchAccount": g.localFaker.AchAccount,
|
||||
"fakeit_BitcoinAddress": g.localFaker.BitcoinAddress,
|
||||
"fakeit_BitcoinPrivateKey": g.localFaker.BitcoinPrivateKey,
|
||||
|
||||
// FakeIt / Finance
|
||||
"fakeit_Cusip": g.localFaker.Cusip,
|
||||
"fakeit_Isin": g.localFaker.Isin,
|
||||
|
||||
// FakeIt / Company
|
||||
"fakeit_BS": g.localFaker.BS,
|
||||
"fakeit_Blurb": g.localFaker.Blurb,
|
||||
"fakeit_BuzzWord": g.localFaker.BuzzWord,
|
||||
"fakeit_Company": g.localFaker.Company,
|
||||
"fakeit_CompanySuffix": g.localFaker.CompanySuffix,
|
||||
"fakeit_JobDescriptor": g.localFaker.JobDescriptor,
|
||||
"fakeit_JobLevel": g.localFaker.JobLevel,
|
||||
"fakeit_JobTitle": g.localFaker.JobTitle,
|
||||
"fakeit_Slogan": g.localFaker.Slogan,
|
||||
|
||||
// FakeIt / Hacker
|
||||
"fakeit_HackerAbbreviation": g.localFaker.HackerAbbreviation,
|
||||
"fakeit_HackerAdjective": g.localFaker.HackerAdjective,
|
||||
"fakeit_HackerNoun": g.localFaker.HackerNoun,
|
||||
"fakeit_HackerPhrase": g.localFaker.HackerPhrase,
|
||||
"fakeit_HackerVerb": g.localFaker.HackerVerb,
|
||||
|
||||
// FakeIt / Hipster
|
||||
"fakeit_HipsterWord": g.localFaker.HipsterWord,
|
||||
"fakeit_HipsterSentence": g.localFaker.HipsterSentence,
|
||||
"fakeit_HipsterParagraph": g.localFaker.HipsterParagraph,
|
||||
|
||||
// FakeIt / App
|
||||
"fakeit_AppName": g.localFaker.AppName,
|
||||
"fakeit_AppVersion": g.localFaker.AppVersion,
|
||||
"fakeit_AppAuthor": g.localFaker.AppAuthor,
|
||||
|
||||
// FakeIt / Animal
|
||||
"fakeit_PetName": g.localFaker.PetName,
|
||||
"fakeit_Animal": g.localFaker.Animal,
|
||||
"fakeit_AnimalType": g.localFaker.AnimalType,
|
||||
"fakeit_FarmAnimal": g.localFaker.FarmAnimal,
|
||||
"fakeit_Cat": g.localFaker.Cat,
|
||||
"fakeit_Dog": g.localFaker.Dog,
|
||||
"fakeit_Bird": g.localFaker.Bird,
|
||||
|
||||
// FakeIt / Emoji
|
||||
"fakeit_Emoji": g.localFaker.Emoji,
|
||||
"fakeit_EmojiDescription": g.localFaker.EmojiDescription,
|
||||
"fakeit_EmojiCategory": g.localFaker.EmojiCategory,
|
||||
"fakeit_EmojiAlias": g.localFaker.EmojiAlias,
|
||||
"fakeit_EmojiTag": g.localFaker.EmojiTag,
|
||||
|
||||
// FakeIt / Language
|
||||
"fakeit_Language": g.localFaker.Language,
|
||||
"fakeit_LanguageAbbreviation": g.localFaker.LanguageAbbreviation,
|
||||
"fakeit_ProgrammingLanguage": g.localFaker.ProgrammingLanguage,
|
||||
|
||||
// FakeIt / Number
|
||||
"fakeit_Number": g.localFaker.Number,
|
||||
"fakeit_Int": g.localFaker.Int,
|
||||
"fakeit_IntN": g.localFaker.IntN,
|
||||
"fakeit_IntRange": g.localFaker.IntRange,
|
||||
"fakeit_RandomInt": g.localFaker.RandomInt,
|
||||
"fakeit_Int8": g.localFaker.Int8,
|
||||
"fakeit_Int16": g.localFaker.Int16,
|
||||
"fakeit_Int32": g.localFaker.Int32,
|
||||
"fakeit_Int64": g.localFaker.Int64,
|
||||
"fakeit_Uint": g.localFaker.Uint,
|
||||
"fakeit_UintN": g.localFaker.UintN,
|
||||
"fakeit_UintRange": g.localFaker.UintRange,
|
||||
"fakeit_RandomUint": g.localFaker.RandomUint,
|
||||
"fakeit_Uint8": g.localFaker.Uint8,
|
||||
"fakeit_Uint16": g.localFaker.Uint16,
|
||||
"fakeit_Uint32": g.localFaker.Uint32,
|
||||
"fakeit_Uint64": g.localFaker.Uint64,
|
||||
"fakeit_Float32": g.localFaker.Float32,
|
||||
"fakeit_Float32Range": g.localFaker.Float32Range,
|
||||
"fakeit_Float64": g.localFaker.Float64,
|
||||
"fakeit_Float64Range": g.localFaker.Float64Range,
|
||||
"fakeit_HexUint": g.localFaker.HexUint,
|
||||
|
||||
// FakeIt / String
|
||||
"fakeit_Digit": g.localFaker.Digit,
|
||||
"fakeit_DigitN": g.localFaker.DigitN,
|
||||
"fakeit_Letter": g.localFaker.Letter,
|
||||
"fakeit_LetterN": g.localFaker.LetterN,
|
||||
"fakeit_LetterNN": func(min, max uint) string {
|
||||
return g.localFaker.LetterN(g.localFaker.UintRange(min, max))
|
||||
},
|
||||
"fakeit_Lexify": g.localFaker.Lexify,
|
||||
"fakeit_Numerify": g.localFaker.Numerify,
|
||||
"fakeit_RandomString": func(values ...string) string {
|
||||
return g.localFaker.RandomString(values)
|
||||
},
|
||||
|
||||
// FakeIt / Celebrity
|
||||
"fakeit_CelebrityActor": g.localFaker.CelebrityActor,
|
||||
"fakeit_CelebrityBusiness": g.localFaker.CelebrityBusiness,
|
||||
"fakeit_CelebritySport": g.localFaker.CelebritySport,
|
||||
|
||||
// FakeIt / Minecraft
|
||||
"fakeit_MinecraftOre": g.localFaker.MinecraftOre,
|
||||
"fakeit_MinecraftWood": g.localFaker.MinecraftWood,
|
||||
"fakeit_MinecraftArmorTier": g.localFaker.MinecraftArmorTier,
|
||||
"fakeit_MinecraftArmorPart": g.localFaker.MinecraftArmorPart,
|
||||
"fakeit_MinecraftWeapon": g.localFaker.MinecraftWeapon,
|
||||
"fakeit_MinecraftTool": g.localFaker.MinecraftTool,
|
||||
"fakeit_MinecraftDye": g.localFaker.MinecraftDye,
|
||||
"fakeit_MinecraftFood": g.localFaker.MinecraftFood,
|
||||
"fakeit_MinecraftAnimal": g.localFaker.MinecraftAnimal,
|
||||
"fakeit_MinecraftVillagerJob": g.localFaker.MinecraftVillagerJob,
|
||||
"fakeit_MinecraftVillagerStation": g.localFaker.MinecraftVillagerStation,
|
||||
"fakeit_MinecraftVillagerLevel": g.localFaker.MinecraftVillagerLevel,
|
||||
"fakeit_MinecraftMobPassive": g.localFaker.MinecraftMobPassive,
|
||||
"fakeit_MinecraftMobNeutral": g.localFaker.MinecraftMobNeutral,
|
||||
"fakeit_MinecraftMobHostile": g.localFaker.MinecraftMobHostile,
|
||||
"fakeit_MinecraftMobBoss": g.localFaker.MinecraftMobBoss,
|
||||
"fakeit_MinecraftBiome": g.localFaker.MinecraftBiome,
|
||||
"fakeit_MinecraftWeather": g.localFaker.MinecraftWeather,
|
||||
|
||||
// FakeIt / Book
|
||||
"fakeit_BookTitle": g.localFaker.BookTitle,
|
||||
"fakeit_BookAuthor": g.localFaker.BookAuthor,
|
||||
"fakeit_BookGenre": g.localFaker.BookGenre,
|
||||
|
||||
// FakeIt / Movie
|
||||
"fakeit_MovieName": g.localFaker.MovieName,
|
||||
"fakeit_MovieGenre": g.localFaker.MovieGenre,
|
||||
|
||||
// FakeIt / Error
|
||||
"fakeit_Error": g.localFaker.Error,
|
||||
"fakeit_ErrorDatabase": g.localFaker.ErrorDatabase,
|
||||
"fakeit_ErrorGRPC": g.localFaker.ErrorGRPC,
|
||||
"fakeit_ErrorHTTP": g.localFaker.ErrorHTTP,
|
||||
"fakeit_ErrorHTTPClient": g.localFaker.ErrorHTTPClient,
|
||||
"fakeit_ErrorHTTPServer": g.localFaker.ErrorHTTPServer,
|
||||
"fakeit_ErrorRuntime": g.localFaker.ErrorRuntime,
|
||||
|
||||
// FakeIt / School
|
||||
"fakeit_School": g.localFaker.School,
|
||||
|
||||
// FakeIt / Song
|
||||
"fakeit_SongName": g.localFaker.SongName,
|
||||
"fakeit_SongArtist": g.localFaker.SongArtist,
|
||||
"fakeit_SongGenre": g.localFaker.SongGenre,
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user