Mirror of https://github.com/juanfont/headscale.git, synced 2025-06-05 01:20:21 +02:00
* types/node: add helper funcs for node tags
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* types/node: add DebugString method for node (sketched after the commit message)
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* policy/v2: add String func to AutoApprover interface
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* policy/v2: simplify, use slices.Contains (the pattern is sketched after the commit message)
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* policy/v2: debug, use nodes.DebugString
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* policy/v1: fix potential nil pointer in NodeCanApproveRoute (the guard pattern is sketched after the commit message)
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* policy/v1: slices.Contains
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration/tsic: fix diff in login commands
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: fix webauth running with wrong scenario
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: move common oidc opts to func
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: require node count, more verbose
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* auth: remove ineffective route approval
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* .github/workflows: fmt
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration/tsic: add id func
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: remove call that might be nil
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: test autoapprovers against web/authkey x group/tag/user
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: unique network id per scenario
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* Revert "integration: move common oidc opts to func"
This reverts commit 7e9d165d4a.
* remove cmd
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: clean docker images between runs in ci
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration: run autoapprove test against different policy modes (a table-driven sketch follows the test file below)
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* integration/tsic: append, not overwrite, extra login args
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
* .github/workflows: remove polv2
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
---------
Signed-off-by: Kristoffer Dalby <kristoffer@tailscale.com>
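The node-tag helper and slices.Contains bullets above describe replacing hand-rolled membership loops with the standard library helper introduced in Go 1.21. Below is a minimal, self-contained sketch of that pattern; HasTag and tagApproves are illustrative names, not headscale's actual policy code.

package main

import (
	"fmt"
	"slices"
)

// HasTag reports whether a node's tag list contains the given tag.
// With slices.Contains this is a one-liner instead of a manual loop.
func HasTag(nodeTags []string, tag string) bool {
	return slices.Contains(nodeTags, tag)
}

// tagApproves reports whether any of the node's tags appears in the
// list of approver tags.
func tagApproves(nodeTags, approverTags []string) bool {
	for _, tag := range nodeTags {
		if slices.Contains(approverTags, tag) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(HasTag([]string{"tag:router"}, "tag:router"))                            // true
	fmt.Println(tagApproves([]string{"tag:client"}, []string{"tag:router", "tag:exit"})) // false
}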
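The DebugString bullets describe adding a human-readable dump of a node for debug output. A minimal sketch of the idea, assuming an invented Node struct with only a few fields (the real type is much richer):

package main

import (
	"fmt"
	"strings"
)

// Node is a stripped-down stand-in for the real type; only the fields
// used by the sketch are included.
type Node struct {
	ID       uint64
	Hostname string
	Tags     []string
}

// DebugString renders a compact, multi-line description of the node
// that is convenient to drop into debug log output.
func (n *Node) DebugString() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "Node(%d):\n", n.ID)
	fmt.Fprintf(&sb, "\tHostname: %s\n", n.Hostname)
	fmt.Fprintf(&sb, "\tTags: %s\n", strings.Join(n.Tags, ", "))
	return sb.String()
}

func main() {
	n := &Node{ID: 1, Hostname: "router-1", Tags: []string{"tag:router"}}
	fmt.Print(n.DebugString())
}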
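The "fix potential nil pointer in NodeCanApproveRoute" bullet points at a guard-before-dereference fix. The sketch below only shows the shape of that defensive check; Node, User, and canApproveRoute are stand-ins, not the v1 policy implementation.

package main

import "fmt"

// Hypothetical stand-ins for the real policy types.
type User struct {
	Name string
}

type Node struct {
	User *User // may be nil for a node that has not fully registered
}

// canApproveRoute shows the defensive pattern: bail out early when the
// node or its user is nil instead of dereferencing and panicking.
func canApproveRoute(node *Node, approvers []string) bool {
	if node == nil || node.User == nil {
		return false
	}
	for _, approver := range approvers {
		if approver == node.User.Name {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(canApproveRoute(nil, []string{"alice"}))                               // false, no panic
	fmt.Println(canApproveRoute(&Node{User: &User{Name: "alice"}}, []string{"alice"})) // true
}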
156 lines · 3.6 KiB · Go
package integration

import (
	"testing"

	"github.com/juanfont/headscale/integration/dockertestutil"
	"github.com/juanfont/headscale/integration/tsic"
)

// This file is intended to "test the test framework"; by proxy it will also test
// some Headscale/Tailscale stuff, but mostly in very simple ways.

// IntegrationSkip skips the calling test unless it is running inside the
// Docker-based integration environment and the -short flag is not set.
func IntegrationSkip(t *testing.T) {
	t.Helper()

	if !dockertestutil.IsRunningInContainer() {
		t.Skip("not running in docker, skipping")
	}

	if testing.Short() {
		t.Skip("skipping integration tests due to short flag")
	}
}

// If subtests are parallel, then they will start before setup is run.
// This might mean we approach setup slightly wrong, but for now, ignore
// the linter.
//nolint:tparallel
func TestHeadscale(t *testing.T) {
	IntegrationSkip(t)
	t.Parallel()

	var err error

	user := "test-space"

	scenario, err := NewScenario(ScenarioSpec{})
	assertNoErr(t, err)
	defer scenario.ShutdownAssertNoPanics(t)

	t.Run("start-headscale", func(t *testing.T) {
		headscale, err := scenario.Headscale()
		if err != nil {
			t.Fatalf("failed to create headscale: %s", err)
		}

		err = headscale.WaitForRunning()
		if err != nil {
			t.Fatalf("headscale failed to become ready: %s", err)
		}
	})

	t.Run("create-user", func(t *testing.T) {
		err := scenario.CreateUser(user)
		if err != nil {
			t.Fatalf("failed to create user: %s", err)
		}

		if _, ok := scenario.users[user]; !ok {
			t.Fatalf("user is not in scenario")
		}
	})

	t.Run("create-auth-key", func(t *testing.T) {
		_, err := scenario.CreatePreAuthKey(user, true, false)
		if err != nil {
			t.Fatalf("failed to create preauthkey: %s", err)
		}
	})
}

// If subtests are parallel, then they will start before setup is run.
// This might mean we approach setup slightly wrong, but for now, ignore
// the linter.
//nolint:tparallel
func TestTailscaleNodesJoiningHeadcale(t *testing.T) {
	IntegrationSkip(t)
	t.Parallel()

	var err error

	user := "join-node-test"

	count := 1

	scenario, err := NewScenario(ScenarioSpec{})
	assertNoErr(t, err)
	defer scenario.ShutdownAssertNoPanics(t)

	t.Run("start-headscale", func(t *testing.T) {
		headscale, err := scenario.Headscale()
		if err != nil {
			t.Fatalf("failed to create headscale: %s", err)
		}

		err = headscale.WaitForRunning()
		if err != nil {
			t.Fatalf("headscale failed to become ready: %s", err)
		}
	})

	t.Run("create-user", func(t *testing.T) {
		err := scenario.CreateUser(user)
		if err != nil {
			t.Fatalf("failed to create user: %s", err)
		}

		if _, ok := scenario.users[user]; !ok {
			t.Fatalf("user is not in scenario")
		}
	})

	t.Run("create-tailscale", func(t *testing.T) {
		err := scenario.CreateTailscaleNodesInUser(
			user,
			"unstable",
			count,
			tsic.WithNetwork(scenario.networks[scenario.testDefaultNetwork]),
		)
		if err != nil {
			t.Fatalf("failed to add tailscale nodes: %s", err)
		}

		if clients := len(scenario.users[user].Clients); clients != count {
			t.Fatalf("wrong number of tailscale clients: %d != %d", clients, count)
		}
	})

	t.Run("join-headscale", func(t *testing.T) {
		key, err := scenario.CreatePreAuthKey(user, true, false)
		if err != nil {
			t.Fatalf("failed to create preauthkey: %s", err)
		}

		headscale, err := scenario.Headscale()
		if err != nil {
			t.Fatalf("failed to create headscale: %s", err)
		}

		err = scenario.RunTailscaleUp(
			user,
			headscale.GetEndpoint(),
			key.GetKey(),
		)
		if err != nil {
			t.Fatalf("failed to login: %s", err)
		}
	})

	t.Run("get-ips", func(t *testing.T) {
		ips, err := scenario.GetIPs(user)
		if err != nil {
			t.Fatalf("failed to get tailscale ips: %s", err)
		}

		// Each client is expected to have both an IPv4 and an IPv6 address.
		if len(ips) != count*2 {
			t.Fatalf("got the wrong number of tailscale ips, %d != %d", len(ips), count*2)
		}
	})
}
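The commit message mentions exercising auto-approvers across web/authkey logins, group/tag/user approvers, and different policy modes. One way such a matrix can be laid out is as a table-driven test; this is a sketch only, and runAutoApproverScenarioSketch is a placeholder rather than a real helper in this package.

func TestAutoApproverMatrixSketch(t *testing.T) {
	IntegrationSkip(t)

	tests := []struct {
		name       string
		login      string // "authkey" or "web"
		approver   string // "group", "tag", or "user"
		policyMode string // "file" or "database"
	}{
		{"authkey-group-file", "authkey", "group", "file"},
		{"authkey-tag-database", "authkey", "tag", "database"},
		{"web-user-file", "web", "user", "file"},
		// ... remaining combinations
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			runAutoApproverScenarioSketch(t, tt.login, tt.approver, tt.policyMode)
		})
	}
}

// runAutoApproverScenarioSketch is a placeholder: a real implementation would
// build a Scenario, log clients in with the chosen method, load the policy in
// the chosen mode, and assert that advertised routes were auto-approved.
func runAutoApproverScenarioSketch(t *testing.T, login, approver, policyMode string) {
	t.Helper()
	t.Logf("login=%s approver=%s policyMode=%s", login, approver, policyMode)
}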