Implement result listing from CLI

Change the schema to include ASN and Network Name in the results table
Arturo Filastò 2018-05-03 18:40:52 +02:00
parent 0c5b6aa37c
commit ecf3370f53
9 changed files with 323 additions and 21 deletions

View File

@@ -16,6 +16,9 @@ CREATE TABLE `results` (
`runtime` REAL,
`summary` JSON,
`done` TINYINT(1),
`country` VARCHAR(2),
`asn` VARCHAR(16),
`network_name` VARCHAR(255),
`data_usage_up` INTEGER,
`data_usage_down` INTEGER
);

View File

@@ -130,20 +130,20 @@ func bindataDataDefaultconfigjson() (*asset, error) {
}
var _bindataDataMigrations1createmsmtresultssql = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x93\x31\xef\xda\x30\x10\xc5\xf7\x7c\x8a\x1b\x41\x2d\x03\x95\xe8\xc2" +
"\x64\x92\x6b\x9b\x36\x38\xc8\x71\xaa\x32\x25\x56\x63\x90\xd5\xc4\x89\x1c\x5b\xa8\xdf\xbe\x32\x24\x14\x68\xa0\xeb" +
"\x7f\x7d\xbf\xbb\x67\x9f\xdf\x79\xb1\x80\x77\x8d\x3a\x1a\x61\x25\x44\xed\x49\x07\xb7\x42\x66\x85\x95\x8d\xd4\x76" +
"\x23\x8f\x4a\x07\x41\xc4\xd2\x1d\x70\xb2\x49\x10\x4a\x23\x7b\x57\xdb\xbe\x5c\xdf\xa9\x8d\x14\xbd\x33\xe7\x1e\x8f" +
"\xa6\xdd\x50\x57\xf7\x24\xef\x5e\x1e\x1b\x32\x24\x1c\x1f\x0f\x86\x59\x00\x00\x50\xaa\xaa\x84\x98\x72\xfc\x8c\x0c" +
"\x76\x2c\xde\x12\xb6\x87\x6f\xb8\x07\x92\xf3\x34\xa6\x21\xc3\x2d\x52\xfe\xfe\x52\xab\x45\x23\x4b\xf8\x4e\x58\xf8" +
"\x85\xb0\xd9\x87\xd5\x6a\x3e\x80\xde\x0a\x63\x0b\xab\x3c\x8e\x08\x47\x1e\x6f\x71\x40\xc6\xe9\x8b\xce\x90\x24\x63" +
"\xb9\x6b\x1a\x61\x7e\x97\xf0\x35\x4b\xe9\xa0\x55\xad\x96\x25\xf0\x98\xee\x63\xca\x67\xcb\xd1\xb9\x12\x56\x14\xae" +
"\x17\x47\x59\xb8\xee\x7a\xd3\x7f\x61\xd5\x9e\xf4\x15\x07\xf3\xf5\xe3\xd8\x77\x2f\xfb\xd6\x66\x57\xdd\xa4\xb3\xe8" +
"\xf5\x5f\x7d\xf9\x71\x94\x7f\xb6\x4e\x5b\xef\x70\x6d\x19\x89\x96\xf6\xd4\x9a\x5f\xc5\xab\xbb\x5a\xff\xca\xf8\x63" +
"\x1c\xeb\x20\x54\xed\xcc\x74\xb5\xeb\xea\x56\x54\xc5\xff\x4b\x64\x35\x91\x9c\x91\x5d\x6b\x6c\x71\x50\xf5\x74\xeb" +
"\xc0\x7d\x06\x13\x54\xe9\xce\xd9\x27\x7d\x7e\x81\x8b\xdb\xec\x18\x7e\x42\x86\x34\xc4\xec\x76\xbf\x7d\xbc\x73\x48" +
"\x29\x44\x98\x20\x47\xc8\x90\x03\xcd\x93\xc4\x4b\xf9\xce\x47\x05\x21\xc9\x42\x12\xe1\x79\x5f\x9e\xfe\xb5\x3f\x01" +
"\x00\x00\xff\xff\x29\xbd\x69\x4f\xe4\x03\x00\x00")
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x93\xcf\x8e\x9b\x30\x10\xc6\xef\x3c\xc5\x1c\x13\xb5\x7b\xd8\x4a\xdb" +
"\xcb\x9e\xbc\x30\x6d\x69\x89\x89\x8c\xa9\x9a\x13\xb6\x8a\x37\xb2\x0a\x06\x19\x5b\x51\xdf\xbe\x72\x02\x69\x92\x42" +
"\x73\xdd\xeb\xf7\x9b\x3f\xf6\x7c\x33\x0f\x0f\xf0\xae\xd5\x7b\x2b\x9d\x82\xa4\x3b\x98\xe8\x52\x28\x9c\x74\xaa\x55" +
"\xc6\xbd\xa8\xbd\x36\x51\x94\xb0\x7c\x0b\x9c\xbc\x64\x08\xc2\xaa\xc1\x37\x6e\x10\xcf\x57\x6a\xab\xe4\xe0\xed\x31" +
"\x27\xa0\xf9\x6a\x68\xea\x6b\x52\xf6\xff\x6d\x1b\x33\x24\x1c\x6f\x1b\xc3\x2a\x02\x00\x10\xba\x16\x90\x52\x8e\x9f" +
"\x91\xc1\x96\xa5\x1b\xc2\x76\xf0\x0d\x77\x40\x4a\x9e\xa7\x34\x66\xb8\x41\xca\xdf\x9f\x62\x8d\x6c\x95\x80\xef\x84" +
"\xc5\x5f\x08\x5b\x7d\x78\x7a\x5a\x8f\x60\x70\xd2\xba\xca\xe9\x80\x13\xc2\x91\xa7\x1b\x1c\x91\xf5\xe6\xa4\x33\x24" +
"\xd9\x14\xee\xdb\x56\xda\xdf\x02\xbe\x16\x39\x1d\xb5\xba\x33\x4a\x00\x4f\xe9\x2e\xa5\x7c\xf5\x38\x55\xfe\xd9\x79" +
"\xe3\x42\xe8\xb9\xeb\x44\xe4\x60\xfe\xaa\x8f\x1f\x27\xd9\x28\x77\xe8\xec\xaf\x6a\xf1\xad\xb5\x74\xb2\xf2\x83\xdc" +
"\xab\xca\xf7\xe7\xbf\xff\x0b\xeb\xee\x60\xce\x38\x5a\x3f\xdf\x0e\xf2\xca\xab\xb7\x36\x4d\xdd\xcf\x56\x5e\x98\xd9" +
"\xf2\x90\xef\x4e\x73\x08\xcb\x26\x80\xe3\x8f\xe9\x5b\xaf\x52\x37\xde\xce\x47\xfb\xbe\xe9\x64\x5d\xdd\x0f\x51\xf5" +
"\xcc\x2e\x58\xd5\x77\xd6\x55\xaf\xba\x99\x4f\x1d\x79\xf0\x60\x86\x6a\xd3\x7b\xb7\x90\x17\x4e\xa2\xba\xf4\x8e\xe1" +
"\x27\x64\x48\x63\x2c\x2e\x2f\x26\xd8\xbb\x86\x9c\x42\x82\x19\x72\x84\x02\x39\xd0\x32\xcb\x82\x54\x6e\x83\x55\x10" +
"\x93\x22\x26\x09\x1e\xf7\x65\xf1\x7a\xff\x04\x00\x00\xff\xff\xdf\xf0\xa4\xca\x36\x04\x00\x00")
func bindataDataMigrations1createmsmtresultssqlBytes() ([]byte, error) {
return bindataRead(
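
The hex blob above is the gzip-compressed migration SQL that go-bindata embeds into the binary (the leading \x1f\x8b bytes are the gzip magic number), presumably decompressed by bindataRead at runtime. As a rough, self-contained sketch of what decoding such an embedded asset involves, assuming nothing beyond the standard library and that the input is plain gzip:

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
	"log"
	"os"
)

// decodeAsset gunzips a go-bindata style embedded asset and returns the
// original file contents.
func decodeAsset(blob []byte) ([]byte, error) {
	gz, err := gzip.NewReader(bytes.NewReader(blob))
	if err != nil {
		return nil, err
	}
	defer gz.Close()
	return ioutil.ReadAll(gz)
}

func main() {
	// Pass any gzip-compressed file on the command line, for example a dump
	// of the embedded migration bytes.
	blob, err := ioutil.ReadFile(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	sql, err := decodeAsset(blob)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(sql))
}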

View File

@@ -4,13 +4,59 @@ import (
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/internal/output"
)
func init() {
cmd := root.Command("list", "List measurements")
cmd := root.Command("list", "List results")
cmd.Action(func(_ *kingpin.ParseContext) error {
log.Info("Listing")
ctx, err := root.Init()
if err != nil {
log.WithError(err).Error("failed to initialize root context")
return err
}
doneResults, incompleteResults, err := database.ListResults(ctx.DB)
if err != nil {
log.WithError(err).Error("failed to list results")
return err
}
log.Info("Results")
for idx, result := range doneResults {
output.ResultItem(output.ResultItemData{
ID: result.ID,
Index: idx,
TotalCount: len(doneResults),
Name: result.Name,
StartTime: result.StartTime,
NetworkName: result.NetworkName,
Country: result.Country,
ASN: result.ASN,
Summary: result.Summary,
Done: result.Done,
DataUsageUp: result.DataUsageUp,
DataUsageDown: result.DataUsageDown,
})
}
log.Info("Incomplete results")
for idx, result := range incompleteResults {
output.ResultItem(output.ResultItemData{
ID: result.ID,
Index: idx,
TotalCount: len(incompleteResults),
Name: result.Name,
StartTime: result.StartTime,
NetworkName: result.NetworkName,
Country: result.Country,
ASN: result.ASN,
Summary: result.Summary,
Done: result.Done,
DataUsageUp: result.DataUsageUp,
DataUsageDown: result.DataUsageDown,
})
}
return nil
})
}

View File

@@ -15,7 +15,7 @@ var Cmd = kingpin.New("ooni", "")
// Command is syntax sugar for defining sub-commands
var Command = Cmd.Command
// Init should be called by all subcommands that care to have an ooni.OONI instance
// Init should be called by all subcommands that care to have an ooni.Context instance
var Init func() (*ooni.Context, error)
func init() {

View File

@@ -40,8 +40,11 @@ func init() {
}
result, err := database.CreateResult(ctx.DB, ctx.Home, database.Result{
Name: *nettestGroup,
StartTime: time.Now().UTC(),
Name: *nettestGroup,
StartTime: time.Now().UTC(),
Country: ctx.Location.CountryCode,
NetworkName: ctx.Location.NetworkName,
ASN: fmt.Sprintf("%d", ctx.Location.ASN),
})
if err != nil {
log.Errorf("DB result error: %s", err)

View File

@@ -190,6 +190,9 @@ type Result struct {
ID int64 `db:"id"`
Name string `db:"name"`
StartTime time.Time `db:"start_time"`
Country string `db:"country"`
ASN string `db:"asn"`
NetworkName string `db:"network_name"`
Runtime float64 `db:"runtime"` // Runtime is expressed in fractional seconds
Summary string `db:"summary"` // XXX this should be JSON
Done bool `db:"done"`
@@ -198,6 +201,65 @@ type Result struct {
MeasurementDir string `db:"measurement_dir"`
}
// ListResults returns the lists of done and incomplete results
func ListResults(db *sqlx.DB) ([]*Result, []*Result, error) {
doneResults := []*Result{}
incompleteResults := []*Result{}
rows, err := db.Query(`SELECT id, name,
start_time, runtime,
network_name, country,
asn,
summary, done
FROM results
WHERE done = 1
ORDER BY start_time;`)
if err != nil {
return doneResults, incompleteResults, errors.Wrap(err, "failed to get the list of done results")
}
for rows.Next() {
result := Result{}
err = rows.Scan(&result.ID, &result.Name,
&result.StartTime, &result.Runtime,
&result.NetworkName, &result.Country,
&result.ASN,
&result.Summary, &result.Done,
//&result.DataUsageUp, &result.DataUsageDown)
)
if err != nil {
log.WithError(err).Error("failed to fetch a row")
continue
}
doneResults = append(doneResults, &result)
}
rows, err = db.Query(`SELECT
id, name,
start_time,
network_name, country,
asn
FROM results
WHERE done != 1
ORDER BY start_time;`)
if err != nil {
return doneResults, incompleteResults, errors.Wrap(err, "failed to get the list of incomplete results")
}
for rows.Next() {
result := Result{Done: false}
err = rows.Scan(&result.ID, &result.Name, &result.StartTime,
&result.NetworkName, &result.Country,
&result.ASN)
if err != nil {
log.WithError(err).Error("failed to fetch a row")
continue
}
incompleteResults = append(incompleteResults, &result)
}
return doneResults, incompleteResults, nil
}
// MakeSummaryMap returns a mapping of test names to summaries for the given
// result
func MakeSummaryMap(db *sqlx.DB, r *Result) (SummaryMap, error) {
@@ -258,8 +320,8 @@ func CreateResult(db *sqlx.DB, homePath string, r Result) (*Result, error) {
}
r.MeasurementDir = p
res, err := db.NamedExec(`INSERT INTO results
(name, start_time)
VALUES (:name,:start_time)`,
(name, start_time, country, network_name, asn)
VALUES (:name,:start_time,:country,:network_name,:asn)`,
r)
if err != nil {
return nil, errors.Wrap(err, "creating result")
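
A note on the query shape: ListResults above deliberately runs two queries, one for completed results and one for those still marked as not done. A self-contained sketch of that two-bucket pattern against a throwaway sqlite schema (the table, driver and sample values here are illustrative assumptions, not the project's real migrations):

package main

import (
	"fmt"
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3"
)

// result mirrors a few columns of the results table; scratch schema only.
type result struct {
	ID   int64  `db:"id"`
	Name string `db:"name"`
	ASN  string `db:"asn"`
	Done bool   `db:"done"`
}

func main() {
	db, err := sqlx.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	db.MustExec(`CREATE TABLE results (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		name VARCHAR(255),
		asn VARCHAR(16),
		done TINYINT(1))`)
	db.MustExec(`INSERT INTO results (name, asn, done) VALUES
		('websites', '30722', 1), ('performance', '30722', 0)`)

	// Same split as database.ListResults: done results first, then the rest.
	var done, incomplete []result
	if err := db.Select(&done, `SELECT id, name, asn, done FROM results WHERE done = 1`); err != nil {
		log.Fatal(err)
	}
	if err := db.Select(&incomplete, `SELECT id, name, asn, done FROM results WHERE done != 1`); err != nil {
		log.Fatal(err)
	}
	fmt.Println("done:", done, "incomplete:", incomplete)
}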

View File

@@ -68,6 +68,8 @@ func (h *Handler) TypedLog(t string, e *log.Entry) error {
fmt.Fprintf(h.Writer, "%.1f%% [%s]: %s", e.Fields.Get("percentage").(float64)*100, e.Fields.Get("key"), e.Message)
fmt.Fprintln(h.Writer)
return nil
case "result_item":
return logResultItem(h.Writer, e.Fields)
default:
return h.DefaultLog(e)
}

View File

@@ -0,0 +1,147 @@
package cli
import (
"encoding/json"
"fmt"
"io"
"strings"
"time"
"github.com/apex/log"
)
// RightPad pads str with spaces up to the given length; it assumes that
// len(str) is not greater than length.
func RightPad(str string, length int) string {
return str + strings.Repeat(" ", length-len(str))
}
// XXX Copy-pasta from nettest/groups
// PerformanceSummary is the result summary for a performance test
type PerformanceSummary struct {
Upload int64
Download int64
Ping float64
Bitrate int64
}
// MiddleboxSummary is the summary for the middlebox tests
type MiddleboxSummary struct {
Detected bool
}
// IMSummary is the summary for the im tests
type IMSummary struct {
Tested uint
Blocked uint
}
// WebsitesSummary is the summary for the websites test
type WebsitesSummary struct {
Tested uint
Blocked uint
}
func formatSpeed(speed int64) string {
if speed < 1000 {
return fmt.Sprintf("%d Kbit/s", speed)
} else if speed < 1000*1000 {
return fmt.Sprintf("%.2f Mbit/s", float32(speed)/1000)
} else if speed < 1000*1000*1000 {
return fmt.Sprintf("%.2f Gbit/s", float32(speed)/(1000*1000))
}
// WTF, you crazy?
return fmt.Sprintf("%.2f Tbit/s", float32(speed)/(1000*1000*1000))
}
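// Note: given the branches above, the value passed to formatSpeed is
// expressed in Kbit/s. A few worked examples under that assumption
// (computed by hand, not taken from the test suite):
//   formatSpeed(512)     -> "512 Kbit/s"
//   formatSpeed(2500)    -> "2.50 Mbit/s"
//   formatSpeed(3400000) -> "3.40 Gbit/s"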
var summarizers = map[string]func(string) []string{
"websites": func(ss string) []string {
var summary WebsitesSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
return []string{
fmt.Sprintf("%d tested", summary.Tested),
fmt.Sprintf("%d blocked", summary.Blocked),
"",
}
},
"performance": func(ss string) []string {
var summary PerformanceSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
return []string{
fmt.Sprintf("Download: %s", formatSpeed(summary.Download)),
fmt.Sprintf("Upload: %s", formatSpeed(summary.Upload)),
fmt.Sprintf("Ping: %.2fms", summary.Ping),
}
},
"im": func(ss string) []string {
var summary IMSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
return []string{
fmt.Sprintf("%d tested", summary.Tested),
fmt.Sprintf("%d blocked", summary.Blocked),
"",
}
},
"middlebox": func(ss string) []string {
var summary MiddleboxSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
return []string{
fmt.Sprintf("Detected: %v", summary.Detected),
"",
"",
}
},
}
func makeSummary(name string, ss string) []string {
return summarizers[name](ss)
}
func logResultItem(w io.Writer, f log.Fields) error {
colWidth := 24
rID := f.Get("id").(int64)
name := f.Get("name").(string)
startTime := f.Get("start_time").(time.Time)
networkName := f.Get("network_name").(string)
asn := fmt.Sprintf("AS %s", f.Get("asn").(string))
//runtime := f.Get("runtime").(float64)
//dataUsageUp := f.Get("dataUsageUp").(int64)
//dataUsageDown := f.Get("dataUsageDown").(int64)
index := f.Get("index").(int)
totalCount := f.Get("total_count").(int)
if index == 0 {
fmt.Fprintf(w, "┏"+strings.Repeat("━", colWidth*2+2)+"┓\n")
} else {
fmt.Fprintf(w, "┢"+strings.Repeat("━", colWidth*2+2)+"┪\n")
}
firstRow := RightPad(fmt.Sprintf("#%d - %s", rID, startTime.Format(time.RFC822)), colWidth*2)
fmt.Fprintf(w, "┃ "+firstRow+" ┃\n")
fmt.Fprintf(w, "┡"+strings.Repeat("━", colWidth*2+2)+"┩\n")
summary := makeSummary(name, f.Get("summary").(string))
fmt.Fprintf(w, fmt.Sprintf("│ %s %s│\n",
RightPad(name, colWidth),
RightPad(summary[0], colWidth)))
fmt.Fprintf(w, fmt.Sprintf("│ %s %s│\n",
RightPad(networkName, colWidth),
RightPad(summary[1], colWidth)))
fmt.Fprintf(w, fmt.Sprintf("│ %s %s│\n",
RightPad(asn, colWidth),
RightPad(summary[2], colWidth)))
if index == totalCount-1 {
fmt.Fprintf(w, "└┬──────────────┬──────────────┬──────────────┬")
fmt.Fprintf(w, strings.Repeat("─", colWidth*2-44))
fmt.Fprintf(w, "┘\n")
}
return nil
}

View File

@@ -1,6 +1,8 @@
package output
import (
"time"
"github.com/apex/log"
)
@@ -12,3 +14,40 @@ func Progress(key string, perc float64, msg string) {
"percentage": perc,
}).Info(msg)
}
// ResultItemData is the metadata about a result
type ResultItemData struct {
ID int64
Name string
StartTime time.Time
Summary string
Runtime float64
Country string
NetworkName string
ASN string
Done bool
DataUsageDown int64
DataUsageUp int64
Index int
TotalCount int
}
// ResultItem logs a result_item type event
func ResultItem(result ResultItemData) {
log.WithFields(log.Fields{
"type": "result_item",
"id": result.ID,
"name": result.Name,
"start_time": result.StartTime,
"summary": result.Summary,
"country": result.Country,
"network_name": result.NetworkName,
"asn": result.ASN,
"runtime": result.Runtime,
"done": result.Done,
"data_usage_down": result.DataUsageDown,
"data_usage_up": result.DataUsageUp,
"index": result.Index,
"total_count": result.TotalCount,
}).Info("result item")
}
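
For context on how these fields reach the terminal: output.ResultItem only emits a structured log entry with "type" set to "result_item"; the CLI log handler shown earlier switches on that field and hands the entry to logResultItem. A minimal, self-contained sketch of that dispatch pattern with apex/log (the handler and the printed line are simplified stand-ins for the real Handler.TypedLog, with made-up sample values):

package main

import (
	"fmt"
	"os"

	"github.com/apex/log"
)

// typedHandler routes entries based on their "type" field, mirroring how the
// CLI handler forwards "result_item" entries to logResultItem.
type typedHandler struct{}

func (h *typedHandler) HandleLog(e *log.Entry) error {
	if t, ok := e.Fields.Get("type").(string); ok && t == "result_item" {
		fmt.Fprintf(os.Stdout, "result #%v on %v (AS %v)\n",
			e.Fields.Get("id"), e.Fields.Get("network_name"), e.Fields.Get("asn"))
		return nil
	}
	fmt.Fprintln(os.Stdout, e.Message)
	return nil
}

func main() {
	log.SetHandler(&typedHandler{})
	log.WithFields(log.Fields{
		"type":         "result_item",
		"id":           int64(1),
		"network_name": "Example Network",
		"asn":          "30722",
	}).Info("result item")
}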