Test events streaming, fixes #12079
Signed-off-by: Antonio Murdaca <me@runcom.ninja>
parent 90be2a66a8
commit d1c4439b5a
1 changed file with 100 additions and 0 deletions
@@ -1,6 +1,7 @@
 package main
 
 import (
+	"bufio"
 	"fmt"
 	"os/exec"
 	"regexp"
@@ -411,3 +412,102 @@ func checkEvents(t *testing.T, events []string) {
 	}
 
 }
+
+func TestEventsStreaming(t *testing.T) {
+	start := daemonTime(t).Unix()
+
+	finish := make(chan struct{})
+	defer close(finish)
+	id := make(chan string)
+	eventCreate := make(chan struct{})
+	eventStart := make(chan struct{})
+	eventDie := make(chan struct{})
+	eventDestroy := make(chan struct{})
+
+	go func() {
+		eventsCmd := exec.Command(dockerBinary, "events", "--since", string(start))
+		stdout, err := eventsCmd.StdoutPipe()
+		if err != nil {
+			t.Fatal(err)
+		}
+		err = eventsCmd.Start()
+		if err != nil {
+			t.Fatalf("failed to start 'docker events': %s", err)
+		}
+
+		go func() {
+			<-finish
+			eventsCmd.Process.Kill()
+		}()
+
+		containerID := <-id
+
+		matchCreate := regexp.MustCompile(containerID + `: \(from busybox:latest\) create$`)
+		matchStart := regexp.MustCompile(containerID + `: \(from busybox:latest\) start$`)
+		matchDie := regexp.MustCompile(containerID + `: \(from busybox:latest\) die$`)
+		matchDestroy := regexp.MustCompile(containerID + `: \(from busybox:latest\) destroy$`)
+
+		scanner := bufio.NewScanner(stdout)
+		for scanner.Scan() {
+			switch {
+			case matchCreate.MatchString(scanner.Text()):
+				close(eventCreate)
+			case matchStart.MatchString(scanner.Text()):
+				close(eventStart)
+			case matchDie.MatchString(scanner.Text()):
+				close(eventDie)
+			case matchDestroy.MatchString(scanner.Text()):
+				close(eventDestroy)
+			}
+		}
+
+		err = eventsCmd.Wait()
+		if err != nil && !IsKilled(err) {
+			t.Fatalf("docker events had bad exit status: %s", err)
+		}
+	}()
+
+	runCmd := exec.Command(dockerBinary, "run", "-d", "busybox", "true")
+	out, _, err := runCommandWithOutput(runCmd)
+	if err != nil {
+		t.Fatal(out, err)
+	}
+	cleanedContainerID := strings.TrimSpace(out)
+	id <- cleanedContainerID
+
+	select {
+	case <-time.After(30 * time.Second):
+		t.Fatal("failed to observe container create in timely fashion")
+	case <-eventCreate:
+		// ignore, done
+	}
+
+	select {
+	case <-time.After(30 * time.Second):
+		t.Fatal("failed to observe container start in timely fashion")
+	case <-eventStart:
+		// ignore, done
+	}
+
+	select {
+	case <-time.After(30 * time.Second):
+		t.Fatal("failed to observe container die in timely fashion")
+	case <-eventDie:
+		// ignore, done
+	}
+
+	rmCmd := exec.Command(dockerBinary, "rm", cleanedContainerID)
+	out, _, err = runCommandWithOutput(rmCmd)
+	if err != nil {
+		t.Fatal(out, err)
+	}
+
+	select {
+	case <-time.After(30 * time.Second):
+		t.Fatal("failed to observe container destroy in timely fashion")
+	case <-eventDestroy:
+		// ignore, done
+	}
+
+	logDone("events - streamed to stdout")
+}
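For illustration only (not part of the commit): a minimal, self-contained sketch of the pattern the test relies on. A goroutine streams a long-running command's stdout through bufio.Scanner and closes a channel when a regexp matches, while the caller waits on that channel with a timeout and then kills the process. The shell command, the "expected marker" string, and the 10-second timeout below are placeholders, not values from the test.

// Illustrative sketch, not part of the commit: the same stream-and-match
// pattern, with a placeholder shell command standing in for "docker events".
package main

import (
	"bufio"
	"fmt"
	"os/exec"
	"regexp"
	"time"
)

func main() {
	matched := make(chan struct{})

	// Placeholder long-running command; the test runs "docker events --since ...".
	cmd := exec.Command("sh", "-c", "echo starting; echo expected marker; sleep 30")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		panic(err)
	}
	if err := cmd.Start(); err != nil {
		panic(err)
	}

	// Placeholder pattern; the test matches lines such as
	// "<containerID>: (from busybox:latest) create".
	re := regexp.MustCompile(`expected marker$`)

	// Scan the command's output line by line and signal the first match by
	// closing the channel, as the test does for each event type.
	go func() {
		scanner := bufio.NewScanner(stdout)
		for scanner.Scan() {
			if re.MatchString(scanner.Text()) {
				close(matched)
				return
			}
		}
	}()

	// Wait for the expected line, but never block forever.
	select {
	case <-matched:
		fmt.Println("observed expected output")
	case <-time.After(10 * time.Second):
		fmt.Println("timed out waiting for expected output")
	}

	// Tear down the streaming process, mirroring the finish/Kill pair in the test.
	cmd.Process.Kill()
	_ = cmd.Wait() // exits non-zero after Kill; the test tolerates this via its IsKilled helper
}

Closing a channel, rather than sending on it, lets the waiting side observe the event without the scanning goroutine ever blocking, and the deferred close(finish) in the test is intended to ensure the background "docker events" process is killed even when an assertion fails early.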