Merge pull request #16 from ooni/refactor/schema

Update the schema for the measurements and results tables
This commit is contained in:
Arturo Filastò 2018-09-18 13:47:10 +02:00 committed by GitHub
commit 5a3c5e94f8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
53 changed files with 2213 additions and 901 deletions

View File

@ -1,11 +1,13 @@
# Due to MK dependencies only macOS development is supported
os: osx
language: go
go:
- 1.9.x
- 1.x
install:
- go get -u github.com/golang/dep/...
- make install-dev-deps
- dep ensure
- make update-mk-libs
- make download-mk-libs-macos
script:
- make test-internal
- make build

30
Gopkg.lock generated
View File

@ -46,15 +46,6 @@
packages = ["."]
revision = "ed7bcb39ff10f39ab08e317ce16df282845852fa"
[[projects]]
branch = "master"
name = "github.com/jmoiron/sqlx"
packages = [
".",
"reflectx"
]
revision = "2aeb6a910c2b94f2d5eb53d9895d80e27264ec41"
[[projects]]
name = "github.com/mattn/go-colorable"
packages = ["."]
@ -77,7 +68,7 @@
branch = "master"
name = "github.com/measurement-kit/go-measurement-kit"
packages = ["."]
revision = "4fe2e61c300930aedc10713557b6e05f29631fc0"
revision = "18bd1b84e534aacc7292c22da5c900b6d7bff41b"
[[projects]]
branch = "master"
@ -146,9 +137,26 @@
revision = "c87af80f3cc5036b55b83d77171e156791085e2e"
version = "v1.7.1"
[[projects]]
name = "upper.io/db.v3"
packages = [
".",
"internal/cache",
"internal/cache/hashstructure",
"internal/immutable",
"internal/sqladapter",
"internal/sqladapter/compat",
"internal/sqladapter/exql",
"lib/reflectx",
"lib/sqlbuilder",
"sqlite"
]
revision = "199d13d76c7cfba05ea0327375056fdabc8bea80"
version = "v3.5.4"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "b2f5c39222a1fb405e3f48d2ae3b4758757fe708e12dbd23743c19135e225579"
inputs-digest = "45f2fee1cf7e2abceb2500715c0f158ad17d51b52f8200cb1bb51c369b15402f"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -30,10 +30,6 @@ required = ["github.com/shuLhan/go-bindata/go-bindata"]
name = "github.com/alecthomas/kingpin"
version = "2.2.6"
[[constraint]]
branch = "master"
name = "github.com/aybabtme/rgbterm"
[prune]
go-tests = true
unused-packages = true
@ -65,3 +61,7 @@ required = ["github.com/shuLhan/go-bindata/go-bindata"]
[[constraint]]
branch = "master"
name = "github.com/getsentry/raven-go"
[[constraint]]
name = "upper.io/db.v3"
version = "3.5.4"

View File

@ -1,5 +1,10 @@
GO ?= go
install-dev-deps:
@$(GO) get -u github.com/golang/dep/...
@$(GO) get golang.org/x/tools/cmd/cover
@$(GO) get github.com/mattn/goveralls
build:
@echo "Building dist/ooni"
@$(GO) build -i -o dist/ooni cmd/ooni/main.go
@ -9,14 +14,21 @@ build-windows:
@echo "Building dist/ooni.exe"
CC=x86_64-w64-mingw32-gcc GOOS=windows GOARCH=amd64 CGO_ENABLED=1 go build -o dist/ooni.exe -x cmd/ooni/main.go
update-mk-libs:
download-mk-libs:
@echo "updating mk-libs"
@cd vendor/github.com/measurement-kit/go-measurement-kit && curl -L -o master.zip https://github.com/measurement-kit/golang-prebuilt/archive/master.zip && unzip master.zip && mv golang-prebuilt-master libs && rm master.zip # This is a hack to workaround: https://github.com/golang/dep/issues/1240
.PHONY: update-mk-libs
@cd vendor/github.com/measurement-kit/go-measurement-kit && ./download-libs.sh
download-mk-libs-macos:
@echo "updating mk-libs"
@cd vendor/github.com/measurement-kit/go-measurement-kit && ./download-libs.sh macos
.PHONY: update-mk-libs-macos
bindata:
@$(GO) run vendor/github.com/shuLhan/go-bindata/go-bindata/*.go \
-nometadata \
-o internal/bindata/bindata.go -pkg bindata \
data/...;
test-internal:
@$(GO) test -v ./internal/...
.PHONY: bindata

View File

@ -32,7 +32,7 @@ On macOS you can build a windows and macOS ooni binary.
This can be done by running:
```
make update-mk-libs
make download-mk-libs
```
This will download the prebuilt measurement-kit binaries.
@ -86,3 +86,9 @@ It should now be possible to build ooni by running:
```
make build
```
To run internal tests do:
```
make test-internal
```

View File

@ -2,28 +2,27 @@ package main
import (
// commands
"github.com/apex/log"
"github.com/apex/log"
_ "github.com/ooni/probe-cli/internal/cli/geoip"
_ "github.com/ooni/probe-cli/internal/cli/info"
_ "github.com/ooni/probe-cli/internal/cli/list"
_ "github.com/ooni/probe-cli/internal/cli/nettest"
_ "github.com/ooni/probe-cli/internal/cli/onboard"
_ "github.com/ooni/probe-cli/internal/cli/reset"
_ "github.com/ooni/probe-cli/internal/cli/rm"
_ "github.com/ooni/probe-cli/internal/cli/run"
_ "github.com/ooni/probe-cli/internal/cli/show"
_ "github.com/ooni/probe-cli/internal/cli/upload"
_ "github.com/ooni/probe-cli/internal/cli/version"
"github.com/ooni/probe-cli/internal/crashreport"
"github.com/ooni/probe-cli/internal/cli/app"
"github.com/ooni/probe-cli/internal/crashreport"
)
func main() {
crashreport.CapturePanicAndWait(func() {
err := app.Run()
if err == nil {
return
err, _ := crashreport.CapturePanic(app.Run, nil)
if err != nil {
log.WithError(err.(error)).Error("panic in app.Run")
crashreport.Wait()
}
log.WithError(err).Fatal("main exit")
}, nil)
}

View File

@ -13,7 +13,7 @@ func TestParseConfig(t *testing.T) {
if len(config.NettestGroups.Middlebox.EnabledTests) < 0 {
t.Error("at least one middlebox test should be enabled")
}
if config.Advanced.IncludeCountry == false {
if config.Sharing.IncludeCountry == false {
t.Error("country should be included")
}
}

View File

@ -112,13 +112,13 @@ type Notifications struct {
type Sharing struct {
IncludeIP bool `json:"include_ip"`
IncludeASN bool `json:"include_asn"`
IncludeCountry bool `json:"include_country"`
IncludeGPS bool `json:"include_gps"`
UploadResults bool `json:"upload_results"`
}
// Advanced settings
type Advanced struct {
IncludeCountry bool `json:"include_country"`
UseDomainFronting bool `json:"use_domain_fronting"`
SendCrashReports bool `json:"send_crash_reports"`
}

View File

@ -5,6 +5,7 @@
"auto_update": true,
"sharing": {
"include_ip": false,
"include_country": true,
"include_asn": true,
"include_gps": true,
"upload_results": true
@ -56,7 +57,6 @@
}
},
"advanced": {
"include_country": true,
"use_domain_fronting": false,
"send_crash_reports": true
}

View File

@ -7,6 +7,7 @@
"sharing": {
"include_ip": false,
"include_asn": true,
"include_country": true,
"include_gps": true,
"upload_results": true
},
@ -57,7 +58,6 @@
}
},
"advanced": {
"include_country": true,
"use_domain_fronting": false,
"send_crash_reports": true
}

View File

@ -3,44 +3,166 @@
DROP TABLE `results`;
DROP TABLE `measurements`;
DROP TABLE `urls`;
DROP TABLE `networks`;
-- +migrate StatementEnd
-- +migrate Up
-- +migrate StatementBegin
CREATE TABLE `urls` (
`url_id` INTEGER PRIMARY KEY AUTOINCREMENT,
`url` VARCHAR(255) NOT NULL, -- XXX is this long enough?
`category_code` VARCHAR(5) NOT NULL, -- The citizenlab category code for the
-- site. We use the string NONE to denote
-- no known category code.
`url_country_code` VARCHAR(2) NOT NULL -- The two letter country code which this
-- URL belongs to
);
-- We create a separate table for networks for 2 reasons:
-- 1. For some of the views where need the total number of measured networks,
-- it's going to be much more efficient to just lookup the count of rows in this
-- table.
-- 2. (most important) We want to avoid duplicating a bunch of information that
-- is going to be common to several networks the user is on.
-- Example:
-- We may wish to add to this table the location from of the probe from the GPS
-- or add support for allowing the user to "correct" a misclassified measurement
-- or distinguishing between wifi and mobile.
CREATE TABLE `networks` (
`network_id` INTEGER PRIMARY KEY AUTOINCREMENT,
`network_name` VARCHAR(255) NOT NULL, -- String name representing the network_name which by default is populated based
-- on the ASN.
-- We use a separate key to reference the rows in
-- this tables, because we may wish to "enrich"
-- this with more data in the future.
`network_type` VARCHAR(16) NOT NULL, -- One of wifi, mobile
`ip` VARCHAR(40) NOT NULL, -- Stores a string representation of an ipv4 or ipv6 address.
-- The longest ip is an ipv6 address like:
-- 0000:0000:0000:0000:0000:0000:0000:0000,
-- which is 39 chars.
`asn` INT(4) NOT NULL,
`network_country_code` VARCHAR(2) NOT NULL -- The two letter country code
);
CREATE TABLE `results` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`name` VARCHAR(255),
`start_time` DATETIME,
`runtime` REAL,
`summary` JSON,
`done` TINYINT(1),
`country` VARCHAR(2),
`asn` VARCHAR(16),
`network_name` VARCHAR(255),
`data_usage_up` INTEGER,
`data_usage_down` INTEGER
`result_id` INTEGER PRIMARY KEY AUTOINCREMENT,
-- This can be one of "websites", "im", "performance", "middlebox".
`test_group_name` VARCHAR(16) NOT NULL,
-- We use a different start_time and runtime, because we want to also have
-- data to measure the overhead of creating a report and other factors that
-- go into the test.
-- That is to say: `SUM(runtime) FROM measurements` will always be <=
-- `runtime FROM results` (most times <)
`result_start_time` DATETIME NOT NULL,
`result_runtime` REAL,
-- Used to indicate if the user has seen this result
`result_is_viewed` TINYINT(1) NOT NULL,
-- This is a flag used to indicate if the result is done or is currently running.
`result_is_done` TINYINT(1) NOT NULL,
`result_data_usage_up` REAL NOT NULL,
`result_data_usage_down` REAL NOT NULL,
-- It's probably reasonable to set the maximum length to 260 as this is the
-- maximum length of file paths on windows.
`measurement_dir` VARCHAR(260) NOT NULL,
`network_id` INTEGER NOT NULL,
CONSTRAINT `fk_network_id`
FOREIGN KEY(`network_id`)
REFERENCES `networks`(`network_id`)
);
CREATE TABLE `measurements` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`name` VARCHAR(255),
`start_time` DATETIME,
`runtime` REAL,
`summary` JSON,
`ip` VARCHAR(255),
`asn` VARCHAR(16),
`country` VARCHAR(2),
`network_name` VARCHAR(255),
`state` TEXT,
`failure` VARCHAR(255),
`upload_failure` VARCHAR(255),
`uploaded` TINYINT(1),
`report_file` VARCHAR(255),
`report_id` VARCHAR(255),
`input` VARCHAR(255),
`result_id` INTEGER REFERENCES `results` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
);
`measurement_id` INTEGER PRIMARY KEY AUTOINCREMENT,
-- This can be one of:
-- facebook_messenger
-- telegram
-- whatsapp
-- http_header_field_manipulation
-- http_invalid_request_line
-- dash
-- ndt
`test_name` VARCHAR(64) NOT NULL,
`measurement_start_time` DATETIME NOT NULL,
`measurement_runtime` REAL NOT NULL,
-- Note for golang: we used to have state be one of `done` and `active`, so
-- this is equivalent to done being true or false.
-- `state` TEXT,
`measurement_is_done` TINYINT(1) NOT NULL,
-- The reason to have a dedicated is_uploaded flag, instead of just using
-- is_upload_failed, is that we may not have uploaded the measurement due
-- to a setting.
`measurement_is_uploaded` TINYINT(1) NOT NULL,
-- This is the measurement failed to run and the user should be offerred to
-- re-run it.
`measurement_is_failed` TINYINT(1) NOT NULL,
`measurement_failure_msg` VARCHAR(255),
`measurement_is_upload_failed` TINYINT(1) NOT NULL,
`measurement_upload_failure_msg` VARCHAR(255),
-- Is used to indicate that this particular measurement has been re-run and
-- therefore the UI can take this into account to either hide it from the
-- result view or at the very least disable the ability to re-run it.
-- XXX do we also want to have a reference to the re-run measurement?
`measurement_is_rerun` TINYINT(1) NOT NULL,
-- This is the server-side report_id returned by the collector. By using
-- report_id & input, you can query the api to fetch this measurement.
-- Ex.
-- GET https://api.ooni.io/api/v1/measurements?input=$INPUT&report_id=$REPORT_ID
-- Extract the first item from the `result[]` list and then fetch:
-- `measurement_url` to get the JSON of this measurement row.
-- These two values (`report_id`, `input`) are useful to fetch a
-- measurement that has already been processed by the pipeline, to
-- implement cleanup of already uploaded measurements.
`report_id` VARCHAR(255), -- This can be NULL when no report file has been
-- created.
`url_id` INTEGER,
-- This is not yet a feature of the collector, but we are planning to add
-- this at some point in the near future.
-- See: https://github.com/ooni/pipeline/blob/master/docs/ooni-uuid.md &
-- https://github.com/ooni/pipeline/issues/48
`collector_measurement_id` INT(64),
-- This indicates in the case of a websites test, that a site is likely
-- blocked, or for an IM test if the IM tests says the app is likely
-- blocked, or if a middlebox was detected.
-- You can `JOIN` a `COUNT()` of this value in the results view to get a count of
-- blocked sites or blocked IM apps
`is_anomaly` TINYINT(1),
-- This is an opaque JSON structure, where we store some of the test_keys
-- we need for the measurement details views and some result views (ex. the
-- upload/download speed of NDT, the reason for blocking of a site,
-- etc.)
`test_keys` JSON NOT NULL,
-- The cross table reference to JOIN the two tables together.
`result_id` INTEGER NOT NULL,
-- This is a variable used internally to track the path to the on-disk
-- measurements.json. It may make sense to write one file per entry by
-- hooking MK and preventing it from writing to a file on disk which may
-- have many measurements per file.
`report_file_path` VARCHAR(260) NOT NULL,
CONSTRAINT `fk_result_id`
FOREIGN KEY (`result_id`)
REFERENCES `results`(`result_id`)
ON DELETE CASCADE, -- If we delete a result we also want
-- all the measurements to be deleted as well.
FOREIGN KEY (`url_id`) REFERENCES `urls`(`url_id`)
);
-- +migrate StatementEnd

View File

@ -80,26 +80,26 @@ func (fi bindataFileInfo) Sys() interface{} {
}
var _bindataDataDefaultconfigjson = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x94\x41\x6f\xdb\x3c\x0c\x86\xef\xf9\x15\x82\xce\x75\x53\xe0\xbb\xe5" +
"\xf8\xdd\x76\xd8\x3a\x60\xbb\x15\x85\x20\x5b\xb4\x4d\x4c\x26\x35\x91\x4e\x16\x0c\xfd\xef\x83\xdc\x24\x56\xda\xae" +
"\xeb\xd1\xef\x4b\x53\xe2\x43\x52\xbf\x37\xc6\x58\x67\x77\xc6\x7e\x1f\x51\x0c\x8a\x39\xf2\x9c\xcd\xfd\xfd\x97\x4f" +
"\xe6\x6b\xe6\x16\x4c\xc7\xd4\xe3\x60\x7a\x8c\x70\x6b\xbe\x01\x98\x51\x35\xc9\x6e\xbb\x65\x26\xbc\x45\xde\x8e\x10" +
"\xd3\x36\x95\xd8\xa6\x8b\x68\x7a\xce\xa6\x48\xf6\x66\x49\xbd\x87\x2c\xc8\x64\x77\xe6\xee\x59\x40\xea\x39\x4f\x10" +
"\x5c\xc7\x24\x40\x6a\x77\xa6\xf7\x51\xe0\xe4\x8a\x6b\x41\xbd\xdd\x19\xcd\xf3\xb3\xe6\x67\x65\x37\xa7\xe0\x15\x6a" +
"\x59\x46\x9f\x91\x06\xbb\x33\xa5\x06\x63\x2c\x52\x17\xe7\x00\x0e\x53\x9d\xb2\x32\xbc\x50\x95\xa0\x32\x86\x24\xd7" +
"\xc6\x9c\x22\xfb\xe0\x32\xc8\x1c\xf5\xec\x6d\x8c\x79\x5a\x4e\x26\x56\xec\xb1\xf3\x8a\x4c\xb2\x9e\x0f\xe4\xdb\x08" +
"\xe1\x3a\xd3\x12\x7b\x74\x4c\x4e\x41\xd4\x75\x3c\xa5\x08\xfa\x0c\xe4\xcd\x30\x82\x83\x9c\xef\x7f\x39\xb1\x20\x98" +
"\xbc\x42\x58\xb2\x5c\x55\xbd\x9e\x5a\x97\x7c\x52\x97\xf0\x92\xee\x61\x91\x8d\xb1\x07\x68\x9b\x8e\x89\xa0\x53\xdc" +
"\xa3\x1e\xed\xcd\xd9\xe9\x7d\x07\x2d\xf3\x8f\x66\x02\x11\xa0\x01\xf2\xea\x1d\x46\xaf\xe2\x53\x5a\x15\x85\x08\x43" +
"\xf6\xd3\xaa\x04\x2f\xe3\xfa\x45\x41\xd7\x8f\x32\x31\x0d\xd2\xde\x47\x0c\x4d\x86\x9f\x33\x88\x36\x11\x09\x5e\x84" +
"\x8c\xe0\x03\xe4\xa6\x47\x88\xa1\x99\x3c\x61\x9a\xe3\x42\xd9\x2e\x61\x8f\xa7\xe2\x26\x26\x1d\xe3\xd1\xf9\x18\xf9" +
"\xe0\xa9\x2b\x63\x61\xff\xbb\xbb\xfb\xfc\xbf\xbd\x10\x5b\x68\x0b\x68\x81\x55\xf5\xe8\x00\xad\xa0\xc2\xaa\x54\xac" +
"\x3a\xaf\x30\x70\xc6\xc5\x7d\x78\x5c\xec\xa7\xcb\xa4\x88\x7a\x52\x57\xd8\xf8\xa1\x6e\xc0\x3b\xb0\xdf\x87\xfa\x16" +
"\xd6\x1a\xec\x49\xba\xbe\x47\x82\x5c\xb6\xe7\x54\xf4\x47\x6e\x50\x1a\x71\x4e\x55\x77\xc7\x09\xe4\x3d\xe4\x82\xae" +
"\x4c\x97\x7d\xc3\x73\x89\xb3\xbe\x0e\x28\x8d\xfe\xeb\xdf\x95\x79\xfd\xfb\x55\x19\x13\x86\x10\xa1\xe5\x5f\x1f\x2c" +
"\xe2\xdf\x03\xf4\xc1\x11\xba\xf0\x5c\x57\x2b\xec\x0b\xcd\xf0\xfa\x1d\xe9\x78\x26\xcd\xc7\x17\x2f\x83\x80\x0b\x3c" +
"\x79\x24\xd7\x67\xa6\xd3\x2e\xd6\xab\x27\x40\xc1\x75\xb9\x70\xc8\x50\x10\xd4\xef\xc7\xe6\x69\xf3\x27\x00\x00\xff" +
"\xff\x42\x02\xc0\xed\x72\x05\x00\x00")
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x94\x41\x6f\xdb\x3e\x0c\xc5\xef\xf9\x14\x82\xce\x75\x53\xe0\x7f\xcb" +
"\xf1\x7f\xdb\x61\xeb\x80\xed\x56\x14\x82\x6c\xd1\x36\x31\x99\xd4\x44\x3a\x59\x30\xf4\xbb\x0f\x72\x93\x58\xd9\xb2" +
"\xae\x47\xbf\x47\xd3\x7a\x3f\x52\xfe\xb9\x31\xc6\x3a\xbb\x33\xf6\xeb\x88\x62\x50\xcc\x91\xe7\x6c\x1e\x1f\x3f\x7d" +
"\x30\x9f\x33\xb7\x60\x3a\xa6\x1e\x07\xd3\x63\x84\x7b\xf3\x05\xc0\x8c\xaa\x49\x76\xdb\x2d\x33\xe1\x3d\xf2\x76\x84" +
"\x98\xb6\xa9\xd4\x36\x5d\x44\xd3\x73\x36\x45\xb2\x77\x4b\xeb\x3d\x64\x41\x26\xbb\x33\x0f\xaf\x02\x52\xcf\x79\x82" +
"\xe0\x3a\x26\x01\x52\xbb\x33\xbd\x8f\x02\x27\x57\x5c\x0b\xea\xed\xce\x68\x9e\x5f\x35\x3f\x2b\xbb\x39\x05\xaf\x50" +
"\xcb\x32\xfa\x8c\x34\xd8\x9d\x29\x19\x8c\xb1\x48\x5d\x9c\x03\x38\x4c\x75\xcb\xca\xf0\x42\x55\x83\xca\xe8\x78\x26" +
"\xcd\xc7\xdb\xe6\x90\xe4\xda\x98\x53\x64\x1f\x5c\x06\x99\xa3\x9e\xbd\x8d\x31\x2f\xcb\xb1\x88\x15\x7b\xec\xbc\x22" +
"\x93\xac\x87\x03\xf2\x6d\x84\x70\xdd\x69\xa9\x3d\x3a\x26\xa7\x20\xea\x3a\x9e\x52\x04\x7d\xa5\x75\xb3\x8c\xe0\x20" +
"\xe7\x70\x97\x2f\x16\x3e\x93\x57\x08\x4b\x97\x2b\x24\xeb\x57\x6b\x1e\x27\x75\x29\x2f\xed\x9e\x16\xd9\x18\x7b\x80" +
"\xb6\xe9\x98\x08\x3a\xc5\x3d\xea\xd1\xde\x9d\x9d\xde\x77\xd0\x32\x7f\x6b\x26\x10\x01\x1a\x20\xaf\xde\x61\xf4\x2a" +
"\x3e\xa5\x55\x51\x88\x30\x64\x3f\xad\x4a\xf0\x32\xae\x4f\x14\x74\x7d\x28\xeb\xd4\x20\xed\x7d\xc4\xd0\x64\xf8\x3e" +
"\x83\x68\x13\x91\xe0\xb7\x92\x11\x7c\x80\xdc\xf4\x08\x31\x34\x93\x27\x4c\x73\x5c\x28\xdb\xa5\xec\xf9\x14\x6e\x62" +
"\xd2\x31\x1e\x9d\x8f\x91\x0f\x9e\xba\xb2\x33\xf6\xbf\x87\x87\x8f\xff\xdb\x0b\xb1\x85\xb6\x80\x16\x58\xd5\x8c\x0e" +
"\xd0\x0a\x2a\xac\x4a\xc5\xaa\xf3\x0a\x03\x67\x5c\xdc\xa7\xe7\xc5\x7e\xb9\x6c\x8a\xa8\x27\x75\x85\x8d\x1f\xea\x01" +
"\xbc\x01\xfb\x6d\xa8\xb7\xb0\xd6\x60\x4f\xd2\xf5\x39\x12\xe4\x72\xb5\x4e\xa1\xdf\x73\x82\x32\x88\x73\xab\x7a\x3a" +
"\x4e\x20\xef\x21\x17\x74\x65\xbb\xec\x0d\xcf\x25\xce\xfa\x67\x41\x19\xf4\x5f\xdf\xae\xcc\xeb\xd7\xaf\x62\x4c\x18" +
"\x42\x84\x96\x7f\xbc\x33\xc4\xbf\x17\xe8\x9d\x2b\x74\xe1\xb9\x5e\xad\xb0\x2f\x34\xc3\xba\x23\xb3\x80\x0b\x3c\x79" +
"\x24\xd7\x67\xa6\xd3\x75\xab\x6f\x97\x00\x05\xd7\xe5\x12\x35\x43\x49\x59\xff\x22\x36\x2f\x9b\x5f\x01\x00\x00\xff" +
"\xff\x96\xe8\x5a\xf8\x72\x05\x00\x00")
func bindataDataDefaultconfigjsonBytes() ([]byte, error) {
return bindataRead(
@ -130,20 +130,102 @@ func bindataDataDefaultconfigjson() (*asset, error) {
}
var _bindataDataMigrations1createmsmtresultssql = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x93\xcf\x8e\x9b\x30\x10\xc6\xef\x3c\xc5\x1c\x13\xb5\x7b\xd8\x4a\xdb" +
"\xcb\x9e\xbc\x30\x6d\x69\x89\x89\x8c\xa9\x9a\x13\xb6\x8a\x37\xb2\x0a\x06\x19\x5b\x51\xdf\xbe\x72\x02\x69\x92\x42" +
"\x73\xdd\xeb\xf7\x9b\x3f\xf6\x7c\x33\x0f\x0f\xf0\xae\xd5\x7b\x2b\x9d\x82\xa4\x3b\x98\xe8\x52\x28\x9c\x74\xaa\x55" +
"\xc6\xbd\xa8\xbd\x36\x51\x94\xb0\x7c\x0b\x9c\xbc\x64\x08\xc2\xaa\xc1\x37\x6e\x10\xcf\x57\x6a\xab\xe4\xe0\xed\x31" +
"\x27\xa0\xf9\x6a\x68\xea\x6b\x52\xf6\xff\x6d\x1b\x33\x24\x1c\x6f\x1b\xc3\x2a\x02\x00\x10\xba\x16\x90\x52\x8e\x9f" +
"\x91\xc1\x96\xa5\x1b\xc2\x76\xf0\x0d\x77\x40\x4a\x9e\xa7\x34\x66\xb8\x41\xca\xdf\x9f\x62\x8d\x6c\x95\x80\xef\x84" +
"\xc5\x5f\x08\x5b\x7d\x78\x7a\x5a\x8f\x60\x70\xd2\xba\xca\xe9\x80\x13\xc2\x91\xa7\x1b\x1c\x91\xf5\xe6\xa4\x33\x24" +
"\xd9\x14\xee\xdb\x56\xda\xdf\x02\xbe\x16\x39\x1d\xb5\xba\x33\x4a\x00\x4f\xe9\x2e\xa5\x7c\xf5\x38\x55\xfe\xd9\x79" +
"\xe3\x42\xe8\xb9\xeb\x44\xe4\x60\xfe\xaa\x8f\x1f\x27\xd9\x28\x77\xe8\xec\xaf\x6a\xf1\xad\xb5\x74\xb2\xf2\x83\xdc" +
"\xab\xca\xf7\xe7\xbf\xff\x0b\xeb\xee\x60\xce\x38\x5a\x3f\xdf\x0e\xf2\xca\xab\xb7\x36\x4d\xdd\xcf\x56\x5e\x98\xd9" +
"\xf2\x90\xef\x4e\x73\x08\xcb\x26\x80\xe3\x8f\xe9\x5b\xaf\x52\x37\xde\xce\x47\xfb\xbe\xe9\x64\x5d\xdd\x0f\x51\xf5" +
"\xcc\x2e\x58\xd5\x77\xd6\x55\xaf\xba\x99\x4f\x1d\x79\xf0\x60\x86\x6a\xd3\x7b\xb7\x90\x17\x4e\xa2\xba\xf4\x8e\xe1" +
"\x27\x64\x48\x63\x2c\x2e\x2f\x26\xd8\xbb\x86\x9c\x42\x82\x19\x72\x84\x02\x39\xd0\x32\xcb\x82\x54\x6e\x83\x55\x10" +
"\x93\x22\x26\x09\x1e\xf7\x65\xf1\x7a\xff\x04\x00\x00\xff\xff\xdf\xf0\xa4\xca\x36\x04\x00\x00")
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xa4\x59\x6d\x73\xdb\x36\x12\xfe\xee\x5f\xb1\xe3\xe9\xf4\xec\x39\x49\x76" +
"\x72\x69\xe6\xce\xd7\x4e\xc7\xb5\x99\x9c\xda\x58\xca\xc8\xf2\x35\x99\x9b\x1b\x11\x22\x97\x12\x2a\x10\x60\xf0\x22" +
"\x46\xf7\xeb\x6f\x16\x00\x29\x52\x56\x1c\x67\xda\x0f\xa9\x48\x02\x8b\x7d\x7d\xf6\x59\x78\x38\x84\xbf\x96\x7c\xa5" +
"\x99\x45\xb8\x55\xb5\x3c\xe9\xbe\xb8\xb7\xcc\x62\x89\xd2\xfe\x82\x2b\x2e\x4f\x4e\x6e\x67\xd3\xf7\x30\xbf\xfe\xe5" +
"\x5d\x02\xa9\x46\xe3\x84\x35\xe9\x3f\x7b\x6f\x4b\x64\xc6\x69\xbf\xe7\xf0\x93\xd3\xe2\xf0\x95\x44\x5b\x2b\xbd\xa1" +
"\xd7\xc7\xcf\x4d\x64\xde\xff\xf2\x50\x3d\xa9\xe0\xcd\x2c\xb9\x9e\x27\xbd\x13\xe1\xec\x04\xfc\xcf\x05\xcf\x53\x18" +
"\x4f\xe6\xc9\xdb\x64\x06\xef\x67\xe3\xbb\xeb\xd9\x47\xf8\x2d\xf9\x08\xd7\x0f\xf3\xe9\x78\x72\x33\x4b\xee\x92\xc9" +
"\x7c\x10\x57\xa7\xf0\xef\xeb\xd9\xcd\xbf\xae\x67\x67\x2f\x7f\xf8\xe1\x1c\x26\xd3\x39\x4c\x1e\xde\xbd\x1b\xc0\x70" +
"\x08\x1f\x3e\x7c\x00\x6e\xc0\xae\xb9\x01\xa1\xe4\x0a\x50\x2a\xb7\x5a\xff\x4c\x5b\x33\x66\x71\xa5\xf4\x6e\x91\xa9" +
"\x1c\xf7\x42\x0e\x45\xcc\xd7\x08\x19\xb7\xfc\x7f\x28\x05\x5b\x42\xb3\x0b\x68\x17\x14\x4a\x83\x5d\xe3\x09\x3c\xef" +
"\xbf\xe1\x10\x0c\xb7\x38\x82\xdf\x11\x9c\x41\xda\x0a\xc6\x6a\x2e\x57\x30\x99\x4e\x12\xb0\x0a\x72\x94\xca\x7e\x8b" +
"\x40\xa9\x60\x23\x55\x2d\xfb\x9a\x8d\x4e\x1a\x5f\x66\xca\x49\xfb\xc8\xca\x97\x7b\x2b\x1b\x23\x6d\xad\x40\xa0\xb5" +
"\xa8\x21\xee\x09\x36\xd6\x6b\x9e\xad\xbd\x0b\x9f\xa7\xd5\x70\x08\x0f\xb3\x77\xb0\x44\x72\xb8\x01\xab\x4e\xce\x43" +
"\xd2\xfc\x8e\x90\x69\xa4\x64\x60\x60\xb0\x62\x3e\x2f\x2c\x5b\x8a\xe0\xc7\x26\xc5\xfc\xc3\x4b\xd0\xc8\x8c\x92\xe6" +
"\x8a\x76\xbe\x18\xc1\x1b\xa5\xc1\xa8\x12\x41\x15\xde\x6d\x5b\x8e\xb5\x81\x7a\x8d\x1a\x41\x22\xe6\xfe\xa5\x55\x96" +
"\x09\x90\xae\x5c\xa2\xa6\x85\x31\xc7\xf3\x56\xf6\x80\xa4\x71\xfb\x17\x03\x2b\x45\x5e\xb7\x0a\x96\x08\xa5\xcb\xd6" +
"\x50\x2a\x8d\x80\x45\xc1\x33\x8e\xd2\xd2\x97\x3f\x9c\xb1\x20\x94\xda\xb8\xca\x4b\xf7\x5e\x21\xb1\x5a\xd5\x06\xb8" +
"\x0c\x3e\x19\x0e\x83\x0d\x23\xfa\xf5\x72\x04\x67\xa5\x32\x16\x78\x59\x29\x6d\x99\xb4\xe7\x64\x76\xcd\x82\x44\xb6" +
"\x55\x3c\x87\xdc\x55\x82\x67\xcc\x92\x02\x0c\x96\x4e\x66\x6b\x92\xca\x65\xa1\x74\xc9\x2c\x57\x24\x99\x59\xaf\x6a" +
"\x5f\xd1\x4c\x95\x25\x7d\x55\x60\x70\x8b\x9a\x6c\x6d\x9c\x46\x0a\x3a\x83\x9a\xb6\x28\xe9\x95\x49\x3e\xb3\xb2\x12" +
"\x78\x15\x7d\x5f\xb2\x1d\xd4\xdc\xac\xbd\x22\x79\x4e\xff\xf3\x75\x11\x22\x40\xfb\x85\xca\xc2\xf1\x85\x56\x65\xe3" +
"\xe8\x4a\xab\x25\x86\x37\xf4\xf8\xf6\xfd\x3d\xc9\x53\xda\xcb\x30\xae\x22\x3b\x7d\xc8\x98\x10\xaa\xf6\xba\x36\xaa" +
"\x58\x05\xa7\x99\xd2\x1a\x33\x7b\x0a\x0c\x4a\x6e\x32\xc1\x8c\xe1\x05\xc7\x1c\x3a\xf8\x13\x05\xe6\xdc\x90\x4f\x1c" +
"\x37\x6b\x12\xb3\x44\x5b\x23\x4a\xa8\x79\xc1\x81\xc9\x1c\x4a\xb5\xe4\xe4\xe7\x3e\x74\xb4\xc8\x14\xe0\x23\x3e\x7e" +
"\x03\x84\x34\x3b\x24\x2b\xf1\x29\x2c\xb9\x0f\x85\x4a\xcb\x40\x63\xa5\xd1\xa0\xb4\x8d\xbd\x5d\x21\xb1\x62\x96\x3b" +
"\xc8\xb1\x60\x4e\x58\x8a\x49\xa5\x2a\x27\x98\xc5\x1c\x96\xcc\x60\xfe\xb5\x52\x22\x8f\x48\x2f\xf9\xfa\x7e\x32\x7a" +
"\xc6\xea\x88\x28\x9d\xca\xda\xe0\x8e\x22\xa0\xb1\x40\x8d\x32\x0b\x21\x8e\xa9\xfb\x0c\x81\xfb\xdc\x30\x03\x58\x62" +
"\xc6\x48\x7c\xdd\x4f\xa3\x53\x94\x9a\x67\xeb\xd3\xe7\x8a\xab\xb9\x8d\x85\x96\x33\xcb\x42\x09\x21\x14\xce\x3a\x8d" +
"\xa3\x6e\x2c\xec\xae\xea\xc4\xe2\xc5\xeb\x83\x50\x4c\xa5\xc7\x01\xca\x8c\x41\x4c\x0b\x8f\x77\xbc\xda\x6f\x7a\x75" +
"\xd9\xdd\x14\x02\xa8\x34\x1a\x72\x51\x88\x64\x1b\xc4\x90\xf4\xaa\x00\x26\x81\x57\xdb\x57\x94\x8c\xbc\xda\xbe\xa6" +
"\x14\xd7\x68\xcc\x73\xfc\x3f\xf7\xf5\x23\x57\x48\xc5\x5f\x51\xc4\x83\xb0\x56\x08\x08\xbe\xc1\xab\x67\x48\xba\xbc" +
"\xbc\xbc\xbc\xfa\xfa\x3f\x83\x67\x88\x0a\x89\xc8\x0d\xfc\xed\x1f\x90\xad\x99\xf6\x96\xa4\xcc\x48\x5f\x1b\x67\xaf" +
"\x3a\x1e\xea\x7a\xff\xcf\xb6\x0c\x0f\xf8\xfd\x2a\x6d\x38\x88\x2f\x52\x68\x9e\x9f\x5f\xa5\xd1\xc7\xdc\x40\xc6\x24" +
"\x41\xa1\x0a\x29\x70\x5a\xe3\x92\x5a\xaa\x39\x1d\xc0\x29\x2f\xe9\xdf\x0a\xb5\x07\x52\x99\x21\x3d\x96\x3c\xcf\x05" +
"\x2e\xd5\xe7\xd3\x10\xc6\xd4\xa2\xb1\x8b\x95\x56\xae\x3a\x28\xf9\x5e\x9a\x35\x67\xb6\x75\x95\xf3\xc2\x17\x92\x05" +
"\x63\x99\xb6\x0b\xcb\x4b\xf4\xb0\xa4\x9d\xa4\xdf\xbd\x22\x69\x01\x5f\x18\x05\x6b\xb6\xc5\x46\x9c\xcf\x7b\xab\x1a" +
"\xf4\xf3\xf9\xaf\xb6\xa8\xd7\xc8\x72\xb2\xc7\x37\xc8\xd0\x18\x34\x7a\x68\xa5\x23\x94\x5d\xa3\x86\x82\x65\x56\x69" +
"\x13\x9a\x43\x94\xb7\x52\xc0\xa5\x47\x72\x04\x32\x6c\xb4\xf7\x15\xf3\xb8\x43\xbd\x82\xed\xae\x20\xbd\x7f\xb8\x3b" +
"\x8b\xaa\x9e\xc3\x9b\xd9\xf4\x0e\x7a\x0c\x10\x6a\x2e\x04\x30\x51\xb3\x9d\x21\xff\xfe\xf8\x53\x23\x29\x8d\xbb\xc2" +
"\xa6\x7d\x20\x7d\x9f\xa3\x0f\x06\x7e\x3c\xef\x45\x75\xef\xa0\x14\x6e\xaf\xe7\xc9\x7c\x7c\x97\x1c\x78\xb6\x59\x1a" +
"\x65\xa7\x30\x4b\xae\xdf\x0d\x4e\x9a\x33\x1f\x0c\xfa\x06\xc5\x65\x4e\x9d\x12\x81\x17\xfb\xb6\xb2\x66\x06\x0c\x75" +
"\x06\x8f\x29\x41\x50\x3f\xab\xcc\x82\xe8\x01\xe6\x29\xcc\xc7\x93\x8f\x94\xea\x2f\xba\xa1\xed\xe5\x13\x55\x2a\x14" +
"\x82\xad\x48\xf8\xd1\x43\x83\x54\x5a\x98\xfb\xac\xf3\x3d\x36\x73\x9a\x92\x41\xec\x28\xfe\x92\xcb\xd5\xe8\x50\x05" +
"\x5a\xfc\x05\x05\xba\x2b\x29\x23\x16\xce\xb0\x15\x2e\x5c\x15\xfc\xf0\xf5\x95\xb9\xaa\xe5\xd1\xb5\xc3\x21\x8c\x89" +
"\xde\x50\xd7\x66\x4b\xd2\xce\xd3\xa8\xd0\xe2\x89\x36\x58\x6f\x52\xc9\x3e\xf3\xd2\x95\x20\x50\xae\xac\x87\xf2\x97" +
"\xaf\x2f\x81\x45\xa6\xec\x19\x73\x9b\xb2\x07\x6b\x55\x01\x05\x17\x08\x15\xb3\x6b\xa2\x1a\x50\x73\x99\xab\x3a\x82" +
"\x64\x77\xac\x58\xe4\x5c\x77\xe0\xe3\xf5\xe5\xa3\x18\x1c\xed\xd6\x7d\x83\x6e\xa6\x93\xfb\xf9\xec\x7a\x3c\x99\x43" +
"\x5a\x6c\x16\x9d\x0d\x11\xff\xde\x4c\x67\xc9\xf8\xed\x84\x60\xe3\xac\x2b\xef\x3c\x7e\x9f\x25\x6f\x92\x59\x32\xb9" +
"\x49\xee\x3b\x5c\xe1\x60\xe5\x63\xbc\xea\xd7\xc6\xd9\x63\xdb\xfe\x2c\x72\x5d\x35\x9f\x0a\x96\xe1\x52\xa9\xcd\xa2" +
"\x44\x63\x50\xae\x50\x37\x5f\x2c\x0a\x5c\x69\x56\x9e\xb4\x68\xce\xac\x61\x55\xd5\x3c\xaf\xad\xad\x16\x04\x1c\xa8" +
"\x17\x05\x47\x91\x2f\x4a\x26\xb9\xa7\x19\x5c\xc9\xde\x2a\x2e\xb7\x4c\xf0\x7c\xa1\xf1\x93\x23\xf8\x13\x5c\x76\x20" +
"\xc9\xac\x9b\xdf\x32\xb7\x1d\x90\xec\xc3\xe3\xeb\x57\x8f\x52\xb8\xeb\x90\xe7\x14\x7d\x77\x7d\xaf\xf2\x8f\x14\xe7" +
"\x44\xd9\x30\x17\xac\x94\x60\x72\x75\x45\xb0\xda\x54\x28\x21\x2a\xc1\xb0\xc5\x4e\x2b\x48\x43\xc1\x11\x5e\xa6\x2c" +
"\xb3\x7c\x8b\xe9\x00\x8c\x3a\xe9\x12\x10\x6e\x00\x3f\x39\xbe\x65\x22\x72\x7c\x5f\xd1\x4b\xf4\x34\x4e\x3b\x5f\xdc" +
"\x05\x13\x06\x5b\x1c\x4d\xfd\x31\x29\xcc\x93\x0f\xf3\x23\x56\x7c\xbd\xce\x63\xab\x0c\x75\xd8\x2a\xcf\x20\xc7\x80" +
"\x32\x39\x70\xb3\x70\x95\x50\x2c\xc7\xdc\x03\xd1\x00\xb8\x34\x36\x36\x04\x3f\x84\x38\xc3\xe5\xaa\x91\xd6\x2e\x5f" +
"\x14\x8c\x0b\xcc\x07\xa1\x5e\x99\x6d\xd8\x99\x54\x36\x1c\xd2\x4a\xf5\x25\xbf\xd7\x1a\x72\xd7\x46\x9f\x9a\x14\xc1" +
"\x82\xdd\x43\xd8\x81\x7d\x8d\x94\x67\x82\xe9\xe1\x59\x41\x49\xcf\x44\x9d\xf4\xd1\x69\x81\xdc\xac\x95\x13\xb9\x0f" +
"\x21\xf5\x56\xed\x97\x35\xf2\x34\x0e\x69\x03\xb7\xc7\xb5\x0a\x62\x9f\xc2\xd7\xee\x06\x5a\xed\x34\x2e\x4a\xb3\xea" +
"\x53\xfc\x06\x88\x8e\xda\xfc\x8d\x87\x74\x36\x3d\x75\x16\x41\xb4\x79\xdc\x6c\x7c\x04\x7d\x92\x56\x4c\x5b\x9e\x39" +
"\xc1\x74\xcf\x91\xd4\xf6\x96\xd4\xf6\xa2\x67\x98\xcc\xf7\xb9\x8d\x1a\x0b\x15\xf9\xc4\xc3\xd8\x43\x8d\x65\x1b\x8c" +
"\x59\x4f\x0c\x81\x65\x61\x7e\xb5\x0a\x90\x7b\x3e\xb1\xe6\x39\x02\xb7\xed\x6c\xb7\xf7\xbc\xef\x77\xd4\x42\xfd\x9c" +
"\x17\x5a\xc6\x16\xf5\x0e\x04\x32\x63\x69\x50\x6b\x67\x46\xb6\xe4\x82\xdb\x38\x69\xf4\x22\x16\xaf\x5f\x72\x45\x79" +
"\xe9\x89\x50\xc3\x8a\x62\x05\x74\x26\x13\x15\x1b\xad\x17\xd0\x31\xfa\xe7\xa3\xd1\xd1\xa8\x9d\xfc\x86\x74\x34\xa8" +
"\xb7\xa8\x87\x86\xec\x0d\xac\x6a\xc1\x73\xd0\x68\x9d\x96\x34\x90\xed\xe2\x78\x2f\x04\x12\xc3\x1a\xc1\x2f\xbb\x7e" +
"\xc9\xed\x37\x7d\x0f\x5c\x56\xce\x0e\x60\xa7\x9c\xf7\xf2\x27\x47\x7e\xf1\x9e\xa8\x38\x19\x52\xa0\x8d\xd7\x25\x5d" +
"\x43\x5a\x97\x24\x9f\xdb\x9f\x6f\x93\xb9\x47\x67\x73\x75\x71\xc1\x2a\x3e\x52\x4a\xf2\x11\x57\xf4\xfb\x62\xfb\xe2" +
"\xa2\xdb\x82\x7e\xf6\xa7\xfe\xf4\xdd\x78\xf2\xfe\x61\xfe\x7d\xab\xce\x4f\xdf\xcd\x92\xf7\xd3\xd9\x7c\x31\xbe\xdd" +
"\xcb\xb7\x9a\x65\x21\x64\x05\xd7\x34\x8d\x58\x2c\xf7\xf3\x7b\x24\x13\xff\xf9\x6f\x0a\x82\x1b\xdb\x14\xa4\x0c\x7a" +
"\xb7\x5d\xa9\x9f\xd8\x5a\xa4\x64\xda\x2a\xb2\x87\x5f\xef\xa7\x93\x70\x3d\xd0\x37\x92\xa6\xcb\x0e\x01\x45\x13\x26" +
"\x84\x2d\x13\x0e\x0d\x9c\xa5\xad\xde\xe9\x00\x52\x6f\x51\x7a\x0e\x4c\x7b\x34\x28\x9c\xd8\x7b\x8f\xb5\xdc\xa3\x23" +
"\xdc\x17\x08\x15\x01\x13\x1a\x59\xbe\x0b\xc5\x50\x69\x95\x51\xe3\x6c\xc3\x58\xf1\x0a\xa9\xbd\x0d\x3a\x58\xc2\xcb" +
"\x4a\x04\x21\x99\x40\x26\x5d\xe5\x87\xbd\x28\xa6\x45\xc9\xae\xc3\x5b\x36\xd7\x68\xdc\xaf\xe4\xc3\x9e\xee\x87\xa2" +
"\x9a\xdc\x28\x55\x43\xdc\x3d\x4b\x6a\x8a\xf6\x2b\xc3\xda\x70\x18\xaf\xc6\xf2\x51\x04\xa4\x83\x6b\xd0\xc7\x89\x4d" +
"\x28\xbf\x43\x4b\xe4\x15\x19\x0d\xd0\xcd\x85\x4d\x9b\xc7\x03\x58\x3a\xdf\x14\xc8\xc5\x95\x60\x9e\xa6\xc6\xdb\x9f" +
"\x5e\x57\x64\x36\x5c\xad\x55\x8a\x4b\xdb\x4c\xe5\x12\x99\xee\x8c\xe6\x61\x82\x46\xbc\x6a\x53\x76\xc5\xed\xda\x2d" +
"\x47\x99\x2a\x2f\x28\x73\x2f\x1a\xc7\x5f\x2c\x85\x5a\x5e\x94\xcc\x58\xd4\x17\xb9\xca\x8c\xff\x3c\x74\x8e\xe7\xa3" +
"\x32\x87\xef\xbb\xc4\xe4\x49\x39\xdc\x18\x87\xe6\xe2\xd5\xdf\x83\x47\x5a\xbb\x16\x47\x78\x18\xb1\x93\x43\x1f\x45" +
"\x64\x35\x8d\x45\x19\x33\xde\x49\x0c\x9a\xa1\xd1\x8f\x4c\x83\x90\x59\xcc\x5f\xcd\x92\x67\x69\x50\x17\xbb\x46\xd6" +
"\x52\xa8\x6c\x43\x5d\x96\xa8\x01\xc1\xa1\x84\xf1\x9d\xdf\xd8\xcc\x07\xf1\xd1\xd0\xa0\x65\x22\x14\x54\x4f\x0b\xe2" +
"\x85\xbf\x0f\x8b\x93\x29\xd4\xcc\x40\x8e\x16\x33\x9f\x00\x71\xfd\xc7\x88\x30\xe9\xaf\xd3\xf1\x24\x05\x06\xe9\xcd" +
"\xf4\x61\x32\x3f\x3b\x4f\xdb\xda\xf3\x95\xd5\x98\x17\x27\xb3\x80\xdb\xb1\x5a\x59\x7b\x69\x79\xa0\x05\x04\xfb\x95" +
"\x6e\x5f\x8c\xef\x48\xed\x70\xc7\x9b\x72\xb3\x60\x52\x95\x4c\xec\xba\x30\x7b\x64\x72\x92\xa0\x2a\xf6\xc9\x45\x4c" +
"\x30\x56\xbb\x8c\x32\x66\x10\x6f\x66\x6b\xa2\x69\xd4\x97\xba\x57\xb7\x9e\x5b\x6e\x70\x67\x5a\x62\x1b\xaf\x70\xe3" +
"\x6d\x7a\x9f\xaa\xa0\x65\x5c\x98\x78\xdf\x4b\x68\xe5\x45\x75\x7a\x94\x81\x33\xfc\x3c\xea\x36\xb0\x50\xd1\x17\x34" +
"\x24\xd1\x0f\x30\x15\x49\x57\x05\x4c\x6e\xe7\x83\xe8\x2b\xcf\xc6\x8a\xc6\x7e\x2a\x0c\x9f\x19\xe4\x96\x96\xb7\xa1" +
"\xcd\x46\xe7\x1d\x3e\x4c\x3a\xa7\xc1\xd2\x63\x0d\x07\x21\xd3\xca\x34\x17\xaa\xbd\xee\x46\x21\x0c\xb6\xd7\x2a\x5e" +
"\xaa\x81\x55\x2b\xa4\x3e\x3c\xfa\xe2\x8d\x48\xe7\x90\xc7\x23\xeb\x96\x69\xee\x0f\xf2\x4c\x82\x4b\x8b\x5a\x32\x21" +
"\x7c\x27\xa6\x16\xb0\x09\x68\xc8\xc2\x78\xe7\xef\x19\xe4\x30\xe7\x66\x73\x04\x5b\xcd\xe8\x0f\xa3\xe4\x08\xc6\xd6" +
"\x13\xc8\x92\x98\x83\x41\x69\xbc\xee\xb5\xa6\xba\x20\x9e\x1c\xe6\x3e\xd4\x80\xfe\xaa\x67\xd9\xa6\xf6\x5a\x29\xef" +
"\xc2\xbb\xdf\x7c\x84\x2a\x8d\xdb\x78\x37\xda\xd0\x0b\x12\xd2\xa0\x4f\x90\xa3\x24\xf1\x88\x4d\xbc\xa3\x2a\xd9\x5e" +
"\x18\xb1\x83\x92\xc9\x5d\x4f\x43\x7f\x6e\xe1\xef\x7f\xbb\xc8\x4c\x6f\x16\x64\xe4\xd3\x83\xe6\xc1\x14\xb9\xf7\xf5" +
"\xe3\x21\xd2\xf7\xa9\xe6\xf3\xb1\x21\xb2\xb9\x01\x39\xb6\x6e\x3a\x81\xdb\xe4\x5d\x32\x4f\xe0\xe6\xfa\xfe\xe6\xfa" +
"\x36\xf1\x9d\x62\x5c\x50\x8a\xe7\x28\xd0\x06\xde\xe3\x73\xb7\xcb\x8a\xbe\xdc\x1e\x86\x43\x60\x42\x1c\x96\x85\x89" +
"\x7f\x10\x08\x32\x73\x9a\xdc\x6b\x14\x22\xf8\xa6\x6f\x4c\x6c\x24\xe7\x3d\x1b\xfc\xdf\xdb\xf6\xdf\x68\x00\xfe\xe2" +
"\x5f\xf5\xfe\x1f\x00\x00\xff\xff\x38\xc6\x64\x22\x78\x1c\x00\x00")
func bindataDataMigrations1createmsmtresultssqlBytes() ([]byte, error) {
return bindataRead(

View File

@ -3,13 +3,18 @@ package app
import (
"os"
"github.com/apex/log"
ooni "github.com/ooni/probe-cli"
"github.com/ooni/probe-cli/internal/cli/root"
)
// Run the app. This is the main app entry point
func Run() error {
func Run() {
root.Cmd.Version(ooni.Version)
_, err := root.Cmd.Parse(os.Args[1:])
return err
if err != nil {
log.WithError(err).Error("failure in main command")
os.Exit(2)
}
return
}

View File

@ -10,8 +10,18 @@ func init() {
cmd := root.Command("info", "Display information about OONI Probe")
cmd.Action(func(_ *kingpin.ParseContext) error {
log.Info("Info")
log.Error("this function is not implemented")
ctx, err := root.Init()
if err != nil {
log.Errorf("%s", err)
return err
}
log.WithFields(log.Fields{
"path": ctx.Home,
}).Info("Home")
log.WithFields(log.Fields{
"path": ctx.TempDir,
}).Info("TempDir")
return nil
})
}

View File

@ -1,8 +1,6 @@
package list
import (
"fmt"
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
@ -27,9 +25,45 @@ func init() {
log.WithError(err).Error("failed to list measurements")
return err
}
for idx, msmt := range measurements {
fmt.Printf("%d: %v\n", idx, msmt)
msmtSummary := output.MeasurementSummaryData{
TotalCount: 0,
AnomalyCount: 0,
DataUsageUp: 0.0,
DataUsageDown: 0.0,
TotalRuntime: 0,
ASN: 0,
NetworkName: "",
NetworkCountryCode: "ZZ",
}
isFirst := true
isLast := false
for idx, msmt := range measurements {
if idx > 0 {
isFirst = false
}
if idx == len(measurements)-1 {
isLast = true
}
// We assume that since these are summary level information the first
// item will contain the information necessary.
if isFirst {
msmtSummary.TotalRuntime = msmt.Result.Runtime
msmtSummary.DataUsageUp = msmt.DataUsageUp
msmtSummary.DataUsageDown = msmt.DataUsageDown
msmtSummary.NetworkName = msmt.NetworkName
msmtSummary.NetworkCountryCode = msmt.Network.CountryCode
msmtSummary.ASN = msmt.ASN
msmtSummary.StartTime = msmt.Measurement.StartTime
}
if msmt.IsAnomaly.Bool == true {
msmtSummary.AnomalyCount++
}
msmtSummary.TotalCount++
output.MeasurementItem(msmt, isFirst, isLast)
}
output.MeasurementSummary(msmtSummary)
} else {
doneResults, incompleteResults, err := database.ListResults(ctx.DB)
if err != nil {
@ -42,41 +76,53 @@ func init() {
}
for idx, result := range incompleteResults {
output.ResultItem(output.ResultItemData{
ID: result.ID,
ID: result.Result.ID,
Index: idx,
TotalCount: len(incompleteResults),
Name: result.Name,
Name: result.TestGroupName,
StartTime: result.StartTime,
NetworkName: result.NetworkName,
Country: result.Country,
ASN: result.ASN,
Summary: result.Summary,
Done: result.Done,
NetworkName: result.Network.NetworkName,
Country: result.Network.CountryCode,
ASN: result.Network.ASN,
MeasurementCount: 0,
MeasurementAnomalyCount: 0,
TestKeys: "{}", // FIXME this used to be Summary we probably need to use a list now
Done: result.IsDone,
DataUsageUp: result.DataUsageUp,
DataUsageDown: result.DataUsageDown,
})
}
resultSummary := output.ResultSummaryData{}
netCount := make(map[string]int)
netCount := make(map[uint]int)
output.SectionTitle("Results")
for idx, result := range doneResults {
totalCount, anmlyCount, err := database.GetMeasurementCounts(ctx.DB, result.Result.ID)
if err != nil {
log.WithError(err).Error("failed to list measurement counts")
}
testKeys, err := database.GetResultTestKeys(ctx.DB, result.Result.ID)
if err != nil {
log.WithError(err).Error("failed to get testKeys")
}
output.ResultItem(output.ResultItemData{
ID: result.ID,
ID: result.Result.ID,
Index: idx,
TotalCount: len(doneResults),
Name: result.Name,
Name: result.TestGroupName,
StartTime: result.StartTime,
NetworkName: result.NetworkName,
Country: result.Country,
ASN: result.ASN,
Summary: result.Summary,
Done: result.Done,
NetworkName: result.Network.NetworkName,
Country: result.Network.CountryCode,
ASN: result.Network.ASN,
TestKeys: testKeys,
MeasurementCount: totalCount,
MeasurementAnomalyCount: anmlyCount,
Done: result.IsDone,
DataUsageUp: result.DataUsageUp,
DataUsageDown: result.DataUsageDown,
})
resultSummary.TotalTests++
netCount[result.ASN]++
netCount[result.Network.ASN]++
resultSummary.TotalDataUsageUp += result.DataUsageUp
resultSummary.TotalDataUsageDown += result.DataUsageDown
}

View File

@ -1,17 +0,0 @@
package nettest
import (
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
)
func init() {
cmd := root.Command("nettest", "Run a specific nettest")
cmd.Action(func(_ *kingpin.ParseContext) error {
log.Info("Nettest")
log.Error("this function is not implemented")
return nil
})
}

View File

@ -1,6 +1,8 @@
package onboard
import (
"errors"
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
@ -29,6 +31,9 @@ func init() {
}
return nil
}
if ctx.IsBatch == true {
return errors.New("cannot do onboarding in batch mode")
}
return onboard.Onboarding(ctx.Config)
})

View File

@ -0,0 +1,30 @@
package reset
import (
"os"
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
)
func init() {
	cmd := root.Command("reset", "Cleanup an old or experimental installation")
	force := cmd.Flag("force", "Force deleting the OONI Home").Bool()
	cmd.Action(func(_ *kingpin.ParseContext) error {
		ctx, err := root.Init()
		if err != nil {
			log.Errorf("%s", err)
			return err
		}
		if *force {
			// BUG fix: previously the os.RemoveAll error was discarded and
			// we logged "Deleted" even when deletion failed.
			if err := os.RemoveAll(ctx.Home); err != nil {
				log.WithError(err).Errorf("failed to delete %s", ctx.Home)
				return err
			}
			log.Infof("Deleted %s", ctx.Home)
		} else {
			log.Infof("Run with --force to delete %s", ctx.Home)
		}
		return nil
	})
}

51
internal/cli/rm/rm.go Normal file
View File

@ -0,0 +1,51 @@
package rm
import (
"errors"
"fmt"
"github.com/alecthomas/kingpin"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/cli/root"
"github.com/ooni/probe-cli/internal/database"
survey "gopkg.in/AlecAivazis/survey.v1"
db "upper.io/db.v3"
)
func init() {
	cmd := root.Command("rm", "Delete a result")
	yes := cmd.Flag("yes", "Skip interactive prompt").Bool()
	resultID := cmd.Arg("id", "the id of the result to delete").Int64()
	cmd.Action(func(_ *kingpin.ParseContext) error {
		ctx, err := root.Init()
		if err != nil {
			log.Errorf("%s", err)
			return err
		}
		// doDelete factors out the delete + not-found mapping previously
		// duplicated between the --yes and interactive paths.
		doDelete := func() error {
			err := database.DeleteResult(ctx.DB, *resultID)
			if err == db.ErrNoMoreRows {
				return errors.New("result not found")
			}
			return err
		}
		if *yes {
			return doDelete()
		}
		answer := ""
		confirm := &survey.Select{
			Message: fmt.Sprintf("Are you sure you wish to delete the result #%d", *resultID),
			Options: []string{"true", "false"},
			Default: "false",
		}
		// BUG fix: the AskOne error used to be ignored, so an aborted
		// prompt left answer == "" and the result was deleted anyway.
		if err := survey.AskOne(confirm, &answer, nil); err != nil {
			return err
		}
		if answer != "true" {
			return errors.New("canceled by user")
		}
		return doDelete()
	})
}

View File

@ -48,6 +48,9 @@ func init() {
if err != nil {
return nil, err
}
if *isBatch {
ctx.IsBatch = true
}
return ctx, nil
}

View File

@ -3,9 +3,7 @@ package run
import (
"errors"
"fmt"
"path/filepath"
"strings"
"time"
"github.com/alecthomas/kingpin"
"github.com/apex/log"
@ -14,7 +12,6 @@ import (
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/nettests"
"github.com/ooni/probe-cli/nettests/groups"
"github.com/ooni/probe-cli/utils"
)
func init() {
@ -29,6 +26,8 @@ func init() {
fmt.Sprintf("the nettest group to run. Supported tests are: %s",
strings.Join(nettestGroupNames, ", "))).Required().String()
noCollector := cmd.Flag("no-collector", "Disable uploading measurements to a collector").Bool()
cmd.Action(func(_ *kingpin.ParseContext) error {
log.Infof("Starting %s", *nettestGroup)
ctx, err := root.Init()
@ -42,6 +41,10 @@ func init() {
return err
}
if *noCollector == true {
ctx.Config.Sharing.UploadResults = false
}
group, ok := groups.NettestGroups[*nettestGroup]
if !ok {
log.Errorf("No test group named %s", *nettestGroup)
@ -55,13 +58,13 @@ func init() {
return err
}
result, err := database.CreateResult(ctx.DB, ctx.Home, database.Result{
Name: *nettestGroup,
StartTime: time.Now().UTC(),
Country: ctx.Location.CountryCode,
NetworkName: ctx.Location.NetworkName,
ASN: fmt.Sprintf("%d", ctx.Location.ASN),
})
network, err := database.CreateNetwork(ctx.DB, ctx.Location)
if err != nil {
log.WithError(err).Error("Failed to create the network row")
return nil
}
result, err := database.CreateResult(ctx.DB, ctx.Home, *nettestGroup, network.ID)
if err != nil {
log.Errorf("DB result error: %s", err)
return err
@ -69,17 +72,14 @@ func init() {
for _, nt := range group.Nettests {
log.Debugf("Running test %T", nt)
msmtPath := filepath.Join(ctx.TempDir,
fmt.Sprintf("msmt-%T-%s.jsonl", nt,
time.Now().UTC().Format(utils.ResultTimestamp)))
ctl := nettests.NewController(nt, ctx, result, msmtPath)
ctl := nettests.NewController(nt, ctx, result)
if err = nt.Run(ctl); err != nil {
log.WithError(err).Errorf("Failed to run %s", group.Label)
return err
}
}
if err = result.Finished(ctx.DB, group.Summary); err != nil {
if err = result.Finished(ctx.DB); err != nil {
return err
}
return nil

View File

@ -1,6 +1,7 @@
package crashreport
import (
"github.com/apex/log"
"github.com/getsentry/raven-go"
)
@ -8,13 +9,15 @@ import (
// crash reporting logic a no-op.
var Disabled = false
var client *raven.Client
// CapturePanic is a wrapper around raven.CapturePanic that becomes a noop if
// `Disabled` is set to true.
func CapturePanic(f func(), tags map[string]string) (interface{}, string) {
if Disabled == true {
return nil, ""
}
return raven.CapturePanic(f, tags)
return client.CapturePanic(f, tags)
}
// CapturePanicAndWait is a wrapper around raven.CapturePanicAndWait that becomes a noop if
@ -23,7 +26,7 @@ func CapturePanicAndWait(f func(), tags map[string]string) (interface{}, string)
if Disabled == true {
return nil, ""
}
return raven.CapturePanicAndWait(f, tags)
return client.CapturePanicAndWait(f, tags)
}
// CaptureError is a wrapper around raven.CaptureError
@ -31,7 +34,7 @@ func CaptureError(err error, tags map[string]string) string {
if Disabled == true {
return ""
}
return raven.CaptureError(err, tags)
return client.CaptureError(err, tags)
}
// CaptureErrorAndWait is a wrapper around raven.CaptureErrorAndWait
@ -39,9 +42,21 @@ func CaptureErrorAndWait(err error, tags map[string]string) string {
if Disabled == true {
return ""
}
return raven.CaptureErrorAndWait(err, tags)
return client.CaptureErrorAndWait(err, tags)
}
// Wait will block on sending messages to the sentry server.
// It is a no-op when crash reporting is Disabled.
func Wait() {
	if Disabled == false {
		log.Info("sending exception backtrace")
		client.Wait()
	}
}
// init eagerly creates the raven client used by the wrapper functions
// in this package.
func init() {
	var err error
	client, err = raven.NewClient("https://cb4510e090f64382ac371040c19b2258:8448daeebfa643c289ef398f8645980b@sentry.io/1234954", nil)
	if err != nil {
		log.WithError(err).Error("failed to create a raven client")
		// BUG fix: with a nil client every wrapper (CapturePanic, Wait,
		// ...) would dereference nil; fall back to disabling reporting.
		Disabled = true
	}
}

View File

@ -0,0 +1,284 @@
package database
import (
"database/sql"
"encoding/json"
"os"
"reflect"
"time"
"github.com/apex/log"
"github.com/ooni/probe-cli/utils"
"github.com/pkg/errors"
db "upper.io/db.v3"
"upper.io/db.v3/lib/sqlbuilder"
)
// ListMeasurements given a result ID
func ListMeasurements(sess sqlbuilder.Database, resultID int64) ([]MeasurementURLNetwork, error) {
measurements := []MeasurementURLNetwork{}
req := sess.Select(
db.Raw("networks.*"),
db.Raw("urls.*"),
db.Raw("measurements.*"),
db.Raw("results.*"),
).From("results").
Join("measurements").On("results.result_id = measurements.result_id").
Join("networks").On("results.network_id = networks.network_id").
LeftJoin("urls").On("urls.url_id = measurements.url_id").
OrderBy("measurements.measurement_start_time").
Where("results.result_id = ?", resultID)
if err := req.All(&measurements); err != nil {
log.Errorf("failed to run query %s: %v", req.String(), err)
return measurements, err
}
return measurements, nil
}
// GetResultTestKeys returns the serialized performance test keys for
// the measurements of the given result. Web connectivity results carry
// no summary-level keys, so we stop early for them.
func GetResultTestKeys(sess sqlbuilder.Database, resultID int64) (string, error) {
	rows := sess.Collection("measurements").Find("result_id", resultID)
	defer rows.Close()
	var (
		m    Measurement
		keys PerformanceTestKeys
	)
	for rows.Next(&m) {
		if m.TestName == "web_connectivity" {
			break
		}
		// We only really care about performance keys
		if m.TestName != "ndt" && m.TestName != "dash" {
			continue
		}
		if err := json.Unmarshal([]byte(m.TestKeys), &keys); err != nil {
			log.WithError(err).Error("failed to parse testKeys")
			return "{}", err
		}
	}
	serialized, err := json.Marshal(keys)
	if err != nil {
		log.WithError(err).Error("failed to serialize testKeys")
		return "{}", err
	}
	return string(serialized), nil
}
// GetMeasurementCounts returns the total and anomalous measurement
// counts for the result with the given ID.
func GetMeasurementCounts(sess sqlbuilder.Database, resultID int64) (uint64, uint64, error) {
	col := sess.Collection("measurements")
	// XXX these two queries can be done with a single query
	total, err := col.Find("result_id", resultID).Count()
	if err != nil {
		log.WithError(err).Error("failed to get total count")
		return total, 0, err
	}
	anomalous, err := col.Find("result_id", resultID).
		And(db.Cond{"is_anomaly": true}).Count()
	if err != nil {
		log.WithError(err).Error("failed to get anmly count")
		return total, anomalous, err
	}
	log.Debugf("counts: %d, %d, %d", resultID, total, anomalous)
	return total, anomalous, err
}
// ListResults returns the done and the incomplete results (in that
// order), each joined with its network row and ordered by start time.
func ListResults(sess sqlbuilder.Database) ([]ResultNetwork, []ResultNetwork, error) {
	doneResults := []ResultNetwork{}
	incompleteResults := []ResultNetwork{}
	req := sess.Select(
		db.Raw("networks.*"),
		db.Raw("results.*"),
	).From("results").
		Join("networks").On("results.network_id = networks.network_id").
		OrderBy("results.result_start_time")
	if err := req.Where("result_is_done = true").All(&doneResults); err != nil {
		return doneResults, incompleteResults, errors.Wrap(err, "failed to get result done list")
	}
	// BUG fix: this error used to be wrapped with the copy-pasted
	// "done list" message, making the two failures indistinguishable.
	if err := req.Where("result_is_done = false").All(&incompleteResults); err != nil {
		return doneResults, incompleteResults, errors.Wrap(err, "failed to get result incomplete list")
	}
	return doneResults, incompleteResults, nil
}
// DeleteResult will delete a particular result and the relative measurement on
// disk. It returns db.ErrNoMoreRows unchanged when the result does not
// exist, so callers can detect the "not found" case.
func DeleteResult(sess sqlbuilder.Database, resultID int64) error {
	var result Result
	res := sess.Collection("results").Find("result_id", resultID)
	if err := res.One(&result); err != nil {
		if err == db.ErrNoMoreRows {
			return err
		}
		log.WithError(err).Error("error in obtaining the result")
		return err
	}
	if err := res.Delete(); err != nil {
		// BUG fix: the message used to claim we failed to delete the
		// result *directory*, but this deletes the database row.
		log.WithError(err).Error("failed to delete the result row")
		return err
	}
	// Best-effort removal of the measurements stored on disk.
	os.RemoveAll(result.MeasurementDir)
	return nil
}
// CreateMeasurement writes the measurement to the database a returns a pointer
// to the Measurement
func CreateMeasurement(sess sqlbuilder.Database, reportID sql.NullString, testName string, resultID int64, reportFilePath string, urlID sql.NullInt64) (*Measurement, error) {
	// A measurement starts out pending: not done, not failed.
	m := Measurement{
		ReportID:       reportID,
		TestName:       testName,
		ResultID:       resultID,
		ReportFilePath: reportFilePath,
		URLID:          urlID,
		IsFailed:       false,
		IsDone:         false,
		// XXX Do we want to have this be part of something else?
		StartTime: time.Now().UTC(),
		TestKeys:  "",
	}
	inserted, err := sess.Collection("measurements").Insert(m)
	if err != nil {
		return nil, errors.Wrap(err, "creating measurement")
	}
	m.ID = inserted.(int64)
	return &m, nil
}
// CreateResult writes the Result to the database a returns a pointer
// to the Result
func CreateResult(sess sqlbuilder.Database, homePath string, testGroupName string, networkID int64) (*Result, error) {
	now := time.Now().UTC()
	// The on-disk directory that will hold this result's measurements.
	dir, err := utils.MakeResultsDir(homePath, testGroupName, now)
	if err != nil {
		return nil, err
	}
	res := Result{
		TestGroupName:  testGroupName,
		StartTime:      now,
		NetworkID:      networkID,
		MeasurementDir: dir,
	}
	log.Debugf("Creating result %v", res)
	inserted, err := sess.Collection("results").Insert(res)
	if err != nil {
		return nil, errors.Wrap(err, "creating result")
	}
	res.ID = inserted.(int64)
	return &res, nil
}
// CreateNetwork will create a new network in the network table based on
// the given location and return it with its new ID set.
func CreateNetwork(sess sqlbuilder.Database, location *utils.LocationInfo) (*Network, error) {
	network := Network{
		ASN:         location.ASN,
		CountryCode: location.CountryCode,
		NetworkName: location.NetworkName,
		// On desktop we consider it to always be wifi
		NetworkType: "wifi",
		IP:          location.IP,
	}
	newID, err := sess.Collection("networks").Insert(network)
	if err != nil {
		// Wrap for context, consistent with CreateMeasurement and
		// CreateResult which both wrap their insert errors.
		return nil, errors.Wrap(err, "creating network")
	}
	network.ID = newID.(int64)
	return &network, nil
}
// CreateOrUpdateURL will create a new URL entry to the urls table if it doesn't
// exists, otherwise it will update the category code of the one already in
// there. Returns the ID of the (possibly new) url row.
func CreateOrUpdateURL(sess sqlbuilder.Database, urlStr string, categoryCode string, countryCode string) (int64, error) {
	var url URL
	tx, err := sess.NewTx(nil)
	if err != nil {
		log.WithError(err).Error("failed to create transaction")
		return 0, err
	}
	res := tx.Collection("urls").Find(
		db.Cond{"url": urlStr, "url_country_code": countryCode},
	)
	err = res.One(&url)
	if err == db.ErrNoMoreRows {
		// No such URL for this country yet: insert a fresh row.
		url = URL{
			URL:          sql.NullString{String: urlStr, Valid: true},
			CategoryCode: sql.NullString{String: categoryCode, Valid: true},
			CountryCode:  sql.NullString{String: countryCode, Valid: true},
		}
		newID, insErr := tx.Collection("urls").Insert(url)
		if insErr != nil {
			log.Error("Failed to insert into the URLs table")
			// BUG fix: the transaction used to be leaked on this path.
			tx.Rollback()
			return 0, insErr
		}
		url.ID = sql.NullInt64{Int64: newID.(int64), Valid: true}
	} else if err != nil {
		log.WithError(err).Error("Failed to get single result")
		// BUG fix: the transaction used to be leaked on this path.
		tx.Rollback()
		return 0, err
	} else {
		// Row exists: refresh its category code.
		url.CategoryCode = sql.NullString{String: categoryCode, Valid: true}
		// BUG fix: the Update error used to be silently discarded.
		if updErr := res.Update(url); updErr != nil {
			log.WithError(updErr).Error("Failed to update the URLs table")
			tx.Rollback()
			return 0, updErr
		}
	}
	err = tx.Commit()
	if err != nil {
		log.WithError(err).Error("Failed to write to the URL table")
		return 0, err
	}
	log.Debugf("returning url %d", url.ID.Int64)
	return url.ID.Int64, nil
}
// AddTestKeys writes the serialized test keys and the is_anomaly flag
// to the measurement row and updates msmt in place.
func AddTestKeys(sess sqlbuilder.Database, msmt *Measurement, tk interface{}) error {
	var (
		isAnomaly      bool
		isAnomalyValid bool
	)
	tkBytes, err := json.Marshal(tk)
	if err != nil {
		log.WithError(err).Error("failed to serialize summary")
		// BUG fix: we used to fall through and persist an empty
		// test_keys string on serialization failure.
		return errors.Wrap(err, "serializing test keys")
	}
	// This is necessary so that we can extract from the the opaque testKeys just
	// the IsAnomaly field of bool type.
	// Maybe generics are not so bad after-all, heh golang?
	// Indirect + Kind guard: FieldByName panics on non-struct values
	// (e.g. if a caller ever passes a pointer or a map).
	tkValue := reflect.Indirect(reflect.ValueOf(tk))
	if tkValue.Kind() == reflect.Struct {
		isAnomalyValue := tkValue.FieldByName("IsAnomaly")
		if isAnomalyValue.IsValid() && isAnomalyValue.Kind() == reflect.Bool {
			isAnomaly = isAnomalyValue.Bool()
			isAnomalyValid = true
		}
	}
	msmt.TestKeys = string(tkBytes)
	msmt.IsAnomaly = sql.NullBool{Bool: isAnomaly, Valid: isAnomalyValid}
	err = sess.Collection("measurements").Find("measurement_id", msmt.ID).Update(msmt)
	if err != nil {
		log.WithError(err).Error("failed to update measurement")
		return errors.Wrap(err, "updating measurement")
	}
	return nil
}

View File

@ -0,0 +1,273 @@
package database
import (
"database/sql"
"encoding/json"
"io/ioutil"
"os"
"testing"
"github.com/ooni/probe-cli/utils"
db "upper.io/db.v3"
)
// TestMeasurementWorkflow exercises the network -> result -> measurement
// creation flow against a temporary sqlite database and checks that the
// listing queries see the inserted rows.
func TestMeasurementWorkflow(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "dbtest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpfile.Name())
	tmpdir, err := ioutil.TempDir("", "oonitest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(tmpdir)
	sess, err := Connect(tmpfile.Name())
	if err != nil {
		t.Fatal(err)
	}
	location := utils.LocationInfo{
		ASN:         0,
		CountryCode: "IT",
		NetworkName: "Unknown",
	}
	network, err := CreateNetwork(sess, &location)
	if err != nil {
		t.Fatal(err)
	}
	result, err := CreateResult(sess, tmpdir, "websites", network.ID)
	if err != nil {
		t.Fatal(err)
	}
	reportID := sql.NullString{String: "", Valid: false}
	testName := "antani"
	resultID := result.ID
	reportFilePath := tmpdir
	urlID := sql.NullInt64{Int64: 0, Valid: false}
	m1, err := CreateMeasurement(sess, reportID, testName, resultID, reportFilePath, urlID)
	if err != nil {
		t.Fatal(err)
	}
	// Read the row back and check it references the same result.
	var m2 Measurement
	err = sess.Collection("measurements").Find("measurement_id", m1.ID).One(&m2)
	if err != nil {
		t.Fatal(err)
	}
	if m2.ResultID != m1.ResultID {
		t.Error("result_id mismatch")
	}
	done, incomplete, err := ListResults(sess)
	if err != nil {
		t.Fatal(err)
	}
	if len(incomplete) != 1 {
		t.Error("there should be 1 incomplete measurement")
	}
	if len(done) != 0 {
		t.Error("there should be 0 done measurements")
	}
	msmts, err := ListMeasurements(sess, resultID)
	if err != nil {
		t.Fatal(err)
	}
	// BUG fix: guard the index so a broken JOIN fails the test cleanly
	// instead of panicking with an index-out-of-range.
	if len(msmts) == 0 {
		t.Fatal("expected at least one measurement")
	}
	if msmts[0].Network.NetworkType != "wifi" {
		t.Error("network_type should be wifi")
	}
}
// TestDeleteResult checks that deleting a result removes the result row,
// cascades to its measurements, and that deleting a missing result
// yields db.ErrNoMoreRows.
func TestDeleteResult(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "dbtest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpfile.Name())
	tmpdir, err := ioutil.TempDir("", "oonitest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(tmpdir)
	sess, err := Connect(tmpfile.Name())
	if err != nil {
		t.Fatal(err)
	}
	location := utils.LocationInfo{
		ASN:         0,
		CountryCode: "IT",
		NetworkName: "Unknown",
	}
	network, err := CreateNetwork(sess, &location)
	if err != nil {
		t.Fatal(err)
	}
	result, err := CreateResult(sess, tmpdir, "websites", network.ID)
	if err != nil {
		t.Fatal(err)
	}
	// Create one measurement attached to the result so we can verify it
	// disappears together with the result.
	reportID := sql.NullString{String: "", Valid: false}
	testName := "antani"
	resultID := result.ID
	reportFilePath := tmpdir
	urlID := sql.NullInt64{Int64: 0, Valid: false}
	m1, err := CreateMeasurement(sess, reportID, testName, resultID, reportFilePath, urlID)
	if err != nil {
		t.Fatal(err)
	}
	var m2 Measurement
	err = sess.Collection("measurements").Find("measurement_id", m1.ID).One(&m2)
	if err != nil {
		t.Fatal(err)
	}
	if m2.ResultID != m1.ResultID {
		t.Error("result_id mismatch")
	}
	err = DeleteResult(sess, resultID)
	if err != nil {
		t.Fatal(err)
	}
	// Both tables must now be empty — measurements are expected to be
	// removed along with their parent result.
	totalResults, err := sess.Collection("results").Find().Count()
	if err != nil {
		t.Fatal(err)
	}
	totalMeasurements, err := sess.Collection("measurements").Find().Count()
	if err != nil {
		t.Fatal(err)
	}
	if totalResults != 0 {
		t.Fatal("results should be zero")
	}
	if totalMeasurements != 0 {
		t.Fatal("measurements should be zero")
	}
	// Deleting a non-existent ID must surface db.ErrNoMoreRows.
	err = DeleteResult(sess, 20)
	if err != db.ErrNoMoreRows {
		t.Fatal(err)
	}
}
// TestNetworkCreate checks that two distinct locations can both be
// inserted into the networks table.
func TestNetworkCreate(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "dbtest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpfile.Name())
	sess, err := Connect(tmpfile.Name())
	if err != nil {
		t.Fatal(err)
	}
	locations := []utils.LocationInfo{
		{ASN: 2, CountryCode: "IT", NetworkName: "Antaninet"},
		{ASN: 3, CountryCode: "IT", NetworkName: "Fufnet"},
	}
	for _, loc := range locations {
		loc := loc
		if _, err := CreateNetwork(sess, &loc); err != nil {
			t.Fatal(err)
		}
	}
}
// TestURLCreation checks the upsert semantics of CreateOrUpdateURL:
// re-inserting the same URL (even with a different category code) must
// return the existing row ID, while a different URL gets a new ID.
func TestURLCreation(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "dbtest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpfile.Name())
	sess, err := Connect(tmpfile.Name())
	if err != nil {
		t.Fatal(err)
	}
	// Same URL twice with different category codes ("GMB" then "SRCH").
	newID1, err := CreateOrUpdateURL(sess, "https://google.com", "GMB", "XX")
	if err != nil {
		t.Fatal(err)
	}
	newID2, err := CreateOrUpdateURL(sess, "https://google.com", "SRCH", "XX")
	if err != nil {
		t.Fatal(err)
	}
	// A different URL, also inserted twice with different codes.
	newID3, err := CreateOrUpdateURL(sess, "https://facebook.com", "GRP", "XX")
	if err != nil {
		t.Fatal(err)
	}
	newID4, err := CreateOrUpdateURL(sess, "https://facebook.com", "GMP", "XX")
	if err != nil {
		t.Fatal(err)
	}
	// Re-insert the first URL a third time.
	newID5, err := CreateOrUpdateURL(sess, "https://google.com", "SRCH", "XX")
	if err != nil {
		t.Fatal(err)
	}
	if newID2 != newID1 {
		t.Error("inserting the same URL with different category code should produce the same result")
	}
	if newID3 == newID1 {
		t.Error("inserting different URL should produce different ids")
	}
	if newID4 != newID3 {
		t.Error("inserting the same URL with different category code should produce the same result")
	}
	if newID5 != newID1 {
		t.Error("the ID of google should still be the same")
	}
}
// TestPerformanceTestKeys verifies that NDT and DASH test keys can be
// merged by unmarshaling both into the same PerformanceTestKeys value.
func TestPerformanceTestKeys(t *testing.T) {
	var keys PerformanceTestKeys
	if err := json.Unmarshal([]byte(`{"download":100.0,"upload":20.0,"ping":2.2}`), &keys); err != nil {
		t.Fatal("failed to parse ndtS")
	}
	if err := json.Unmarshal([]byte(`{"median_bitrate":102.0}`), &keys); err != nil {
		t.Fatal("failed to parse dashS")
	}
	if keys.Bitrate != 102.0 {
		t.Fatalf("error Bitrate %f", keys.Bitrate)
	}
	if keys.Download != 100.0 {
		t.Fatalf("error Download %f", keys.Download)
	}
}

View File

@ -1,22 +1,24 @@
package database
import (
"database/sql"
"github.com/apex/log"
"github.com/jmoiron/sqlx"
_ "github.com/mattn/go-sqlite3" // this is needed to load the sqlite3 driver
"github.com/ooni/probe-cli/internal/bindata"
migrate "github.com/rubenv/sql-migrate"
"upper.io/db.v3/lib/sqlbuilder"
"upper.io/db.v3/sqlite"
)
// RunMigrations runs the database migrations
func RunMigrations(db *sqlx.DB) error {
func RunMigrations(db *sql.DB) error {
log.Debugf("running migrations")
migrations := &migrate.AssetMigrationSource{
Asset: bindata.Asset,
AssetDir: bindata.AssetDir,
Dir: "data/migrations",
}
n, err := migrate.Exec(db.DB, "sqlite3", migrations, migrate.Up)
n, err := migrate.Exec(db, "sqlite3", migrations, migrate.Up)
if err != nil {
return err
}
@ -25,15 +27,21 @@ func RunMigrations(db *sqlx.DB) error {
}
// Connect to the database
func Connect(path string) (db *sqlx.DB, err error) {
db, err = sqlx.Connect("sqlite3", path)
func Connect(path string) (db sqlbuilder.Database, err error) {
settings := sqlite.ConnectionURL{
Database: path,
Options: map[string]string{"_foreign_keys": "1"},
}
sess, err := sqlite.Open(settings)
if err != nil {
return
log.WithError(err).Error("failed to open the DB")
return nil, err
}
err = RunMigrations(db)
err = RunMigrations(sess.Driver().(*sql.DB))
if err != nil {
db = nil
log.WithError(err).Error("failed to run DB migration")
return nil, err
}
return
return sess, err
}

View File

@ -0,0 +1,32 @@
package database
import (
"io/ioutil"
"os"
"testing"
"github.com/apex/log"
)
// TestConnect checks that Connect opens a sqlite database at the given
// path and that the migrations created at least one table.
func TestConnect(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "dbtest")
	if err != nil {
		t.Error(err)
	}
	defer os.Remove(tmpfile.Name())
	// NOTE(review): t.Error does not stop the test, so a failed Connect
	// leaves sess nil and the call below panics — t.Fatal would be safer.
	sess, err := Connect(tmpfile.Name())
	if err != nil {
		t.Error(err)
	}
	colls, err := sess.Collections()
	if err != nil {
		t.Error(err)
	}
	// NOTE(review): log.Fatal exits the whole test binary and skips the
	// deferred cleanup; t.Fatal is the conventional choice here.
	if len(colls) < 1 {
		log.Fatal("missing tables")
	}
}

View File

@ -1,102 +1,113 @@
package database
import (
"database/sql"
"os"
"path/filepath"
"time"
"github.com/apex/log"
"github.com/jmoiron/sqlx"
"github.com/ooni/probe-cli/nettests/summary"
"github.com/ooni/probe-cli/utils"
"github.com/pkg/errors"
"upper.io/db.v3/lib/sqlbuilder"
)
// UpdateOne will run the specified update query and check that it only affected one row
func UpdateOne(db *sqlx.DB, query string, arg interface{}) error {
res, err := db.NamedExec(query, arg)
if err != nil {
return errors.Wrap(err, "updating table")
}
count, err := res.RowsAffected()
if err != nil {
return errors.Wrap(err, "updating table")
}
if count != 1 {
return errors.New("inconsistent update count")
}
return nil
// ResultNetwork is used to represent the structure made from the JOIN
// between the results and networks tables.
type ResultNetwork struct {
Result `db:",inline"`
Network `db:",inline"`
}
// ListMeasurements given a result ID
func ListMeasurements(db *sqlx.DB, resultID int64) ([]*Measurement, error) {
measurements := []*Measurement{}
rows, err := db.Query(`SELECT id, name,
start_time, runtime,
country,
asn,
summary,
input
FROM measurements
WHERE result_id = ?
ORDER BY start_time;`, resultID)
if err != nil {
return measurements, errors.Wrap(err, "failed to get measurement list")
// MeasurementURLNetwork is used for the JOIN between Measurement and URL
type MeasurementURLNetwork struct {
Measurement `db:",inline"`
Network `db:",inline"`
Result `db:",inline"`
URL `db:",inline"`
}
for rows.Next() {
msmt := Measurement{}
err = rows.Scan(&msmt.ID, &msmt.Name,
&msmt.StartTime, &msmt.Runtime,
&msmt.CountryCode,
&msmt.ASN,
&msmt.Summary, &msmt.Input,
//&result.DataUsageUp, &result.DataUsageDown)
)
if err != nil {
log.WithError(err).Error("failed to fetch a row")
continue
}
measurements = append(measurements, &msmt)
// Network represents a network tested by the user
// (one row of the networks table).
type Network struct {
	ID          int64  `db:"network_id,omitempty"` // omitted on insert; set from the new row ID
	NetworkName string `db:"network_name"`         // human-readable network name
	NetworkType string `db:"network_type"`         // e.g. "wifi" (see CreateNetwork)
	IP          string `db:"ip"`                   // probe IP address
	ASN         uint   `db:"asn"`                  // autonomous system number
	CountryCode string `db:"network_country_code"` // two-letter country code
}
return measurements, nil
// URL represents URLs from the testing lists
// (one row of the urls table); all columns are nullable.
type URL struct {
	ID           sql.NullInt64  `db:"url_id,omitempty"`  // omitted on insert; set from the new row ID
	URL          sql.NullString `db:"url"`               // the URL itself
	CategoryCode sql.NullString `db:"category_code"`     // category code (e.g. "GMB", "SRCH")
	CountryCode  sql.NullString `db:"url_country_code"`  // country the listing applies to
}
// Measurement model
type Measurement struct {
ID int64 `db:"id"`
Name string `db:"name"`
StartTime time.Time `db:"start_time"`
Runtime float64 `db:"runtime"` // Fractional number of seconds
Summary string `db:"summary"` // XXX this should be JSON
ASN string `db:"asn"`
IP string `db:"ip"`
CountryCode string `db:"country"`
State string `db:"state"`
Failure string `db:"failure"`
UploadFailure string `db:"upload_failure"`
Uploaded bool `db:"uploaded"`
ReportFilePath string `db:"report_file"`
ReportID string `db:"report_id"`
Input string `db:"input"`
ID int64 `db:"measurement_id,omitempty"`
TestName string `db:"test_name"`
StartTime time.Time `db:"measurement_start_time"`
Runtime float64 `db:"measurement_runtime"` // Fractional number of seconds
IsDone bool `db:"measurement_is_done"`
IsUploaded bool `db:"measurement_is_uploaded"`
IsFailed bool `db:"measurement_is_failed"`
FailureMsg sql.NullString `db:"measurement_failure_msg,omitempty"`
IsUploadFailed bool `db:"measurement_is_upload_failed"`
UploadFailureMsg sql.NullString `db:"measurement_upload_failure_msg,omitempty"`
IsRerun bool `db:"measurement_is_rerun"`
ReportID sql.NullString `db:"report_id,omitempty"`
URLID sql.NullInt64 `db:"url_id,omitempty"` // Used to reference URL
MeasurementID sql.NullInt64 `db:"collector_measurement_id,omitempty"`
IsAnomaly sql.NullBool `db:"is_anomaly,omitempty"`
// FIXME we likely want to support JSON. See: https://github.com/upper/db/issues/462
TestKeys string `db:"test_keys"`
ResultID int64 `db:"result_id"`
ReportFilePath string `db:"report_file_path"`
}
// SetGeoIPInfo for the Measurement
func (m *Measurement) SetGeoIPInfo() error {
// Result model (one row of the results table); a result groups the
// measurements of one test-group run.
type Result struct {
	ID             int64     `db:"result_id,omitempty"` // omitted on insert; set from the new row ID
	TestGroupName  string    `db:"test_group_name"`     // e.g. "websites"
	StartTime      time.Time `db:"result_start_time"`
	NetworkID      int64     `db:"network_id"` // Used to include a Network
	Runtime        float64   `db:"result_runtime"` // Runtime is expressed in fractional seconds
	IsViewed       bool      `db:"result_is_viewed"`
	IsDone         bool      `db:"result_is_done"`
	DataUsageUp    float64   `db:"result_data_usage_up"`
	DataUsageDown  float64   `db:"result_data_usage_down"`
	MeasurementDir string    `db:"measurement_dir"` // on-disk dir holding measurements; removed by DeleteResult
}
// PerformanceTestKeys is the result summary for a performance test.
// Fields are merged from the "ndt" (upload/download/ping) and "dash"
// (median_bitrate) test keys — see GetResultTestKeys.
type PerformanceTestKeys struct {
	Upload   float64 `json:"upload"`
	Download float64 `json:"download"`
	Ping     float64 `json:"ping"`
	Bitrate  float64 `json:"median_bitrate"`
}
// Finished marks the result as done and sets the runtime
// (in fractional seconds since StartTime). Calling it twice is an error.
func (r *Result) Finished(sess sqlbuilder.Database) error {
	if r.IsDone || r.Runtime != 0 {
		return errors.New("Result is already finished")
	}
	r.IsDone = true
	r.Runtime = time.Now().UTC().Sub(r.StartTime).Seconds()
	if err := sess.Collection("results").Find("result_id", r.ID).Update(r); err != nil {
		return errors.Wrap(err, "updating finished result")
	}
	return nil
}
// Failed writes the error string to the measurement
func (m *Measurement) Failed(db *sqlx.DB, failure string) error {
m.Failure = failure
err := UpdateOne(db, `UPDATE measurements
SET failure = :failure, state = :state
WHERE id = :id`, m)
func (m *Measurement) Failed(sess sqlbuilder.Database, failure string) error {
m.FailureMsg = sql.NullString{String: failure, Valid: true}
m.IsFailed = true
err := sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
@ -104,14 +115,12 @@ func (m *Measurement) Failed(db *sqlx.DB, failure string) error {
}
// Done marks the measurement as completed
func (m *Measurement) Done(db *sqlx.DB) error {
func (m *Measurement) Done(sess sqlbuilder.Database) error {
runtime := time.Now().UTC().Sub(m.StartTime)
m.Runtime = runtime.Seconds()
m.State = "done"
m.IsDone = true
err := UpdateOne(db, `UPDATE measurements
SET state = :state, runtime = :runtime
WHERE id = :id`, m)
err := sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
@ -119,13 +128,11 @@ func (m *Measurement) Done(db *sqlx.DB) error {
}
// UploadFailed writes the error string for the upload failure to the measurement
func (m *Measurement) UploadFailed(db *sqlx.DB, failure string) error {
m.UploadFailure = failure
m.Uploaded = false
func (m *Measurement) UploadFailed(sess sqlbuilder.Database, failure string) error {
m.UploadFailureMsg = sql.NullString{String: failure, Valid: true}
m.IsUploaded = false
err := UpdateOne(db, `UPDATE measurements
SET upload_failure = :upload_failure
WHERE id = :id`, m)
err := sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
@ -133,25 +140,10 @@ func (m *Measurement) UploadFailed(db *sqlx.DB, failure string) error {
}
// UploadSucceeded writes the error string for the upload failure to the measurement
func (m *Measurement) UploadSucceeded(db *sqlx.DB) error {
m.Uploaded = true
func (m *Measurement) UploadSucceeded(sess sqlbuilder.Database) error {
m.IsUploaded = true
err := UpdateOne(db, `UPDATE measurements
SET uploaded = :uploaded
WHERE id = :id`, m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
return nil
}
// WriteSummary writes the summary to the measurement
func (m *Measurement) WriteSummary(db *sqlx.DB, summary string) error {
m.Summary = summary
err := UpdateOne(db, `UPDATE measurements
SET summary = :summary
WHERE id = :id`, m)
err := sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
@ -159,7 +151,7 @@ func (m *Measurement) WriteSummary(db *sqlx.DB, summary string) error {
}
// AddToResult adds a measurement to a result
func (m *Measurement) AddToResult(db *sqlx.DB, result *Result) error {
func (m *Measurement) AddToResult(sess sqlbuilder.Database, result *Result) error {
var err error
m.ResultID = result.ID
@ -176,191 +168,9 @@ func (m *Measurement) AddToResult(db *sqlx.DB, result *Result) error {
}
m.ReportFilePath = finalPath
err = UpdateOne(db, `UPDATE measurements
SET result_id = :result_id, report_file = :report_file
WHERE id = :id`, m)
err = sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
return nil
}
// CreateMeasurement writes the measurement to the database and returns a
// pointer to the inserted Measurement with its ID populated.
//
// The measurement is stored in the "active" state, with StartTime set to
// the current UTC time and i recorded as the measurement input.
func CreateMeasurement(db *sqlx.DB, m Measurement, i string) (*Measurement, error) {
	// XXX Do we want to have this be part of something else?
	m.StartTime = time.Now().UTC()
	m.Input = i
	m.State = "active"

	res, err := db.NamedExec(`INSERT INTO measurements
		(name, start_time,
			asn, ip, country,
			state, failure, report_file,
			report_id, input,
			result_id)
		VALUES (:name,:start_time,
			:asn,:ip,:country,
			:state,:failure,:report_file,
			:report_id,:input,
			:result_id)`,
		m)
	if err != nil {
		return nil, errors.Wrap(err, "creating measurement")
	}
	// Recover the primary key assigned by the database for the new row.
	id, err := res.LastInsertId()
	if err != nil {
		return nil, errors.Wrap(err, "creating measurement")
	}
	m.ID = id
	return &m, nil
}
// Result models a row of the "results" table: one run of a nettest group
// together with the network it ran on and aggregate usage counters.
type Result struct {
	ID             int64     `db:"id"`
	Name           string    `db:"name"`
	StartTime      time.Time `db:"start_time"`
	Country        string    `db:"country"`
	ASN            string    `db:"asn"`
	NetworkName    string    `db:"network_name"`
	Runtime        float64   `db:"runtime"` // Runtime is expressed in fractional seconds
	Summary        string    `db:"summary"` // XXX this should be JSON
	Done           bool      `db:"done"`
	DataUsageUp    int64     `db:"data_usage_up"`
	DataUsageDown  int64     `db:"data_usage_down"`
	MeasurementDir string    `db:"measurement_dir"` // on-disk directory holding the measurements
}
// ListResults returns two lists of results, both ordered by start time:
// the completed ones (done = 1) and the incomplete ones.
//
// Rows that fail to scan are logged and skipped instead of aborting the
// whole listing; query or iteration failures return the rows collected so
// far together with a wrapped error.
func ListResults(db *sqlx.DB) ([]*Result, []*Result, error) {
	doneResults := []*Result{}
	incompleteResults := []*Result{}

	rows, err := db.Query(`SELECT id, name,
		start_time, runtime,
		network_name, country,
		asn,
		summary, done
		FROM results
		WHERE done = 1
		ORDER BY start_time;`)
	if err != nil {
		return doneResults, incompleteResults, errors.Wrap(err, "failed to get result done list")
	}
	for rows.Next() {
		result := Result{}
		err = rows.Scan(&result.ID, &result.Name,
			&result.StartTime, &result.Runtime,
			&result.NetworkName, &result.Country,
			&result.ASN,
			&result.Summary, &result.Done,
		)
		if err != nil {
			log.WithError(err).Error("failed to fetch a row")
			continue
		}
		doneResults = append(doneResults, &result)
	}
	// Surface iteration errors that rows.Next() swallows, and close the
	// first result set before issuing the second query so the underlying
	// connection can be reused.
	iterErr := rows.Err()
	rows.Close()
	if iterErr != nil {
		return doneResults, incompleteResults, errors.Wrap(iterErr, "failed to get result done list")
	}

	rows, err = db.Query(`SELECT
		id, name,
		start_time,
		network_name, country,
		asn
		FROM results
		WHERE done != 1
		ORDER BY start_time;`)
	if err != nil {
		return doneResults, incompleteResults, errors.Wrap(err, "failed to get result done list")
	}
	defer rows.Close()
	for rows.Next() {
		result := Result{Done: false}
		err = rows.Scan(&result.ID, &result.Name, &result.StartTime,
			&result.NetworkName, &result.Country,
			&result.ASN)
		if err != nil {
			log.WithError(err).Error("failed to fetch a row")
			continue
		}
		incompleteResults = append(incompleteResults, &result)
	}
	if err = rows.Err(); err != nil {
		return doneResults, incompleteResults, errors.Wrap(err, "failed to get result done list")
	}
	return doneResults, incompleteResults, nil
}
// MakeSummaryMap returns a mapping from test name to the list of that
// test's per-measurement summary strings for the given result.
func MakeSummaryMap(db *sqlx.DB, r *Result) (summary.SummaryMap, error) {
	summaryMap := summary.SummaryMap{}

	msmts := []Measurement{}
	// XXX maybe we only want to select some of the columns
	err := db.Select(&msmts, "SELECT name, summary FROM measurements WHERE result_id = $1", r.ID)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get measurements")
	}

	for _, msmt := range msmts {
		// Group summaries by test name, appending when the test already
		// has at least one entry.
		val, ok := summaryMap[msmt.Name]
		if ok {
			summaryMap[msmt.Name] = append(val, msmt.Summary)
		} else {
			summaryMap[msmt.Name] = []string{msmt.Summary}
		}
	}
	return summaryMap, nil
}
// Finished marks the result as done, computes its runtime and aggregated
// summary, and persists them to the database.
//
// makeSummary receives the map of per-test summaries produced by
// MakeSummaryMap and returns the aggregated summary string stored on the
// result. It is an error to finish a result twice.
func (r *Result) Finished(db *sqlx.DB, makeSummary summary.ResultSummaryFunc) error {
	if r.Done == true || r.Runtime != 0 {
		return errors.New("Result is already finished")
	}
	// Runtime is stored as fractional seconds elapsed since StartTime.
	r.Runtime = time.Now().UTC().Sub(r.StartTime).Seconds()
	r.Done = true
	// XXX add in here functionality to compute the summary
	summaryMap, err := MakeSummaryMap(db, r)
	if err != nil {
		return err
	}

	r.Summary, err = makeSummary(summaryMap)
	if err != nil {
		return err
	}

	err = UpdateOne(db, `UPDATE results
		SET done = :done, runtime = :runtime, summary = :summary
		WHERE id = :id`, r)
	if err != nil {
		return errors.Wrap(err, "updating finished result")
	}
	return nil
}
// CreateResult writes the Result to the database and returns a pointer to
// the inserted Result with its ID populated.
//
// It also creates the on-disk directory where this result's measurements
// will be stored and records its path in MeasurementDir.
func CreateResult(db *sqlx.DB, homePath string, r Result) (*Result, error) {
	log.Debugf("Creating result %v", r)

	p, err := utils.MakeResultsDir(homePath, r.Name, r.StartTime)
	if err != nil {
		return nil, err
	}
	r.MeasurementDir = p
	res, err := db.NamedExec(`INSERT INTO results
		(name, start_time, country, network_name, asn)
		VALUES (:name,:start_time,:country,:network_name,:asn)`,
		r)
	if err != nil {
		return nil, errors.Wrap(err, "creating result")
	}
	// Recover the primary key assigned by the database for the new row.
	id, err := res.LastInsertId()
	if err != nil {
		return nil, errors.Wrap(err, "creating result")
	}
	r.ID = id
	return &r, nil
}

View File

@ -15,7 +15,7 @@ import (
)
// Default handler outputting to stderr.
var Default = New(os.Stderr)
var Default = New(os.Stdout)
// start time.
var start = time.Now()
@ -105,13 +105,19 @@ func logTable(w io.Writer, f log.Fields) error {
func (h *Handler) TypedLog(t string, e *log.Entry) error {
switch t {
case "progress":
var err error
s := fmt.Sprintf("%.2f%%: %-25s", e.Fields.Get("percentage").(float64)*100, e.Message)
fmt.Fprintf(h.Writer, s)
perc := e.Fields.Get("percentage").(float64) * 100
s := fmt.Sprintf(" %s %-25s",
bold.Sprintf("%.2f%%", perc),
e.Message)
fmt.Fprint(h.Writer, s)
fmt.Fprintln(h.Writer)
return err
return nil
case "table":
return logTable(h.Writer, e.Fields)
case "measurement_item":
return logMeasurementItem(h.Writer, e.Fields)
case "measurement_summary":
return logMeasurementSummary(h.Writer, e.Fields)
case "result_item":
return logResultItem(h.Writer, e.Fields)
case "result_summary":
@ -134,10 +140,10 @@ func (h *Handler) DefaultLog(e *log.Entry) error {
if name == "source" {
continue
}
s += fmt.Sprintf(" %s=%s", color.Sprint(name), e.Fields.Get(name))
s += fmt.Sprintf(" %s=%v", color.Sprint(name), e.Fields.Get(name))
}
fmt.Fprintf(h.Writer, s)
fmt.Fprint(h.Writer, s)
fmt.Fprintln(h.Writer)
return nil

View File

@ -0,0 +1,131 @@
package cli
import (
"bytes"
"encoding/json"
"fmt"
"io"
"strings"
"time"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/util"
)
// statusIcon maps a boolean status to the glyph used by the CLI views:
// a check mark when ok is true, a cross mark otherwise.
func statusIcon(ok bool) string {
	icon := "❌"
	if ok {
		icon = "✓"
	}
	return icon
}
// logTestKeys pretty-prints the JSON testKeys string inside the
// measurement box, indenting the JSON and framing every line between the
// box borders.
//
// It returns an error when testKeys is not valid JSON.
func logTestKeys(w io.Writer, testKeys string) error {
	colWidth := 24

	var out bytes.Buffer
	if err := json.Indent(&out, []byte(testKeys), "", " "); err != nil {
		return err
	}

	testKeysLines := strings.Split(out.String(), "\n")
	if len(testKeysLines) > 1 {
		// Drop the standalone brace lines emitted by json.Indent and
		// re-attach the braces to the first and last content lines.
		testKeysLines = testKeysLines[1 : len(testKeysLines)-1]
		testKeysLines[0] = "{" + testKeysLines[0][1:]
		testKeysLines[len(testKeysLines)-1] = testKeysLines[len(testKeysLines)-1] + "}"
	}
	for _, line := range testKeysLines {
		// Pass the JSON line as an argument, never as the format string,
		// so '%' characters in the payload cannot be parsed as verbs.
		fmt.Fprintf(w, "│ %s │\n", util.RightPad(line, colWidth*2))
	}
	return nil
}
// logMeasurementItem renders one measurement row of the measurements box:
// the measurement ID, an optional URL line, the test name with its status
// flags and, when present, the pretty-printed test keys.
//
// is_first/is_last select the opening, separator, or closing border.
func logMeasurementItem(w io.Writer, f log.Fields) error {
	colWidth := 24

	rID := f.Get("id").(int64)
	testName := f.Get("test_name").(string)
	// We currently don't use these fields in the view
	//testGroupName := f.Get("test_group_name").(string)
	//networkName := f.Get("network_name").(string)
	//asn := fmt.Sprintf("AS%d (%s)", f.Get("asn").(uint), f.Get("network_country_code").(string))

	testKeys := f.Get("test_keys").(string)

	isAnomaly := f.Get("is_anomaly").(bool)
	isFailed := f.Get("is_failed").(bool)
	isUploaded := f.Get("is_uploaded").(bool)
	url := f.Get("url").(string)
	urlCategoryCode := f.Get("url_category_code").(string)

	isFirst := f.Get("is_first").(bool)
	isLast := f.Get("is_last").(bool)
	// The first item opens the box; later items draw a separator that
	// joins onto the previous row. Fprint avoids treating the built
	// border string as a printf format.
	if isFirst {
		fmt.Fprint(w, "┏"+strings.Repeat("━", colWidth*2+2)+"┓\n")
	} else {
		fmt.Fprint(w, "┢"+strings.Repeat("━", colWidth*2+2)+"┪\n")
	}

	anomalyStr := fmt.Sprintf("ok: %s", statusIcon(!isAnomaly))
	uploadStr := fmt.Sprintf("uploaded: %s", statusIcon(isUploaded))
	failureStr := fmt.Sprintf("success: %s", statusIcon(!isFailed))

	// Always pass user-controlled text (URLs, test names) as Fprintf
	// arguments rather than as the format string, so stray '%' characters
	// are printed verbatim instead of being parsed as verbs.
	fmt.Fprintf(w, "│ %s │\n",
		util.RightPad(
			fmt.Sprintf("#%d", rID), colWidth*2))
	if url != "" {
		fmt.Fprintf(w, "│ %s │\n",
			util.RightPad(
				fmt.Sprintf("%s (%s)", url, urlCategoryCode), colWidth*2))
	}
	fmt.Fprintf(w, "│ %s %s│\n",
		util.RightPad(testName, colWidth),
		util.RightPad(anomalyStr, colWidth))
	fmt.Fprintf(w, "│ %s %s│\n",
		util.RightPad(failureStr, colWidth),
		util.RightPad(uploadStr, colWidth))

	if testKeys != "" {
		if err := logTestKeys(w, testKeys); err != nil {
			return err
		}
	}
	if isLast {
		fmt.Fprint(w, "└┬────────────────────────────────────────────────┬┘\n")
	}
	return nil
}
// logMeasurementSummary renders the summary footer that closes the
// measurements box: the start time, the network the run was performed on,
// and the aggregate runtime / anomaly / data-usage counters.
func logMeasurementSummary(w io.Writer, f log.Fields) error {
	colWidth := 12

	totalCount := f.Get("total_count").(int64)
	anomalyCount := f.Get("anomaly_count").(int64)
	totalRuntime := f.Get("total_runtime").(float64)
	dataUp := f.Get("data_usage_up").(float64)
	dataDown := f.Get("data_usage_down").(float64)

	startTime := f.Get("start_time").(time.Time)
	asn := f.Get("asn").(uint)
	countryCode := f.Get("network_country_code").(string)
	networkName := f.Get("network_name").(string)

	// First row: when the run started.
	fmt.Fprintf(w, " │ %s │\n",
		util.RightPad(startTime.Format(time.RFC822), (colWidth+3)*3),
	)
	// Second row: ASN, network name and country code.
	fmt.Fprintf(w, " │ %s │\n",
		util.RightPad(fmt.Sprintf("AS%d, %s (%s)", asn, networkName, countryCode), (colWidth+3)*3),
	)

	// Third row: total runtime, anomalous/total measurements, data usage.
	fmt.Fprintf(w, " │ %s %s %s │\n",
		util.RightPad(fmt.Sprintf("%.2fs", totalRuntime), colWidth),
		util.RightPad(fmt.Sprintf("%d/%d anmls", anomalyCount, totalCount), colWidth),
		util.RightPad(fmt.Sprintf("⬆ %s ⬇ %s", formatSize(dataUp), formatSize(dataDown)), colWidth+4))
	fmt.Fprintf(w, " └────────────────────────────────────────────────┘\n")

	return nil
}

View File

@ -8,13 +8,13 @@ import (
"time"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/internal/util"
"github.com/ooni/probe-cli/nettests/summary"
)
func formatSpeed(speed int64) string {
func formatSpeed(speed float64) string {
if speed < 1000 {
return fmt.Sprintf("%d Kbit/s", speed)
return fmt.Sprintf("%.2f Kbit/s", speed)
} else if speed < 1000*1000 {
return fmt.Sprintf("%.2f Mbit/s", float32(speed)/1000)
} else if speed < 1000*1000*1000 {
@ -24,55 +24,55 @@ func formatSpeed(speed int64) string {
return fmt.Sprintf("%.2f Tbit/s", float32(speed)/(1000*1000*1000))
}
var summarizers = map[string]func(string) []string{
"websites": func(ss string) []string {
var summary summary.WebsitesSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
var summarizers = map[string]func(uint64, uint64, string) []string{
"websites": func(totalCount uint64, anomalyCount uint64, ss string) []string {
return []string{
fmt.Sprintf("%d tested", summary.Tested),
fmt.Sprintf("%d blocked", summary.Blocked),
fmt.Sprintf("%d tested", totalCount),
fmt.Sprintf("%d blocked", anomalyCount),
"",
}
},
"performance": func(ss string) []string {
var summary summary.PerformanceSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
"performance": func(totalCount uint64, anomalyCount uint64, ss string) []string {
var tk database.PerformanceTestKeys
if err := json.Unmarshal([]byte(ss), &tk); err != nil {
return nil
}
return []string{
fmt.Sprintf("Download: %s", formatSpeed(summary.Download)),
fmt.Sprintf("Upload: %s", formatSpeed(summary.Upload)),
fmt.Sprintf("Ping: %.2fms", summary.Ping),
fmt.Sprintf("Download: %s", formatSpeed(tk.Download)),
fmt.Sprintf("Upload: %s", formatSpeed(tk.Upload)),
fmt.Sprintf("Ping: %.2fms", tk.Ping),
}
},
"im": func(ss string) []string {
var summary summary.IMSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
"im": func(totalCount uint64, anomalyCount uint64, ss string) []string {
return []string{
fmt.Sprintf("%d tested", summary.Tested),
fmt.Sprintf("%d blocked", summary.Blocked),
fmt.Sprintf("%d tested", totalCount),
fmt.Sprintf("%d blocked", anomalyCount),
"",
}
},
"middlebox": func(ss string) []string {
var summary summary.MiddleboxSummary
if err := json.Unmarshal([]byte(ss), &summary); err != nil {
return nil
}
"middlebox": func(totalCount uint64, anomalyCount uint64, ss string) []string {
return []string{
fmt.Sprintf("Detected: %v", summary.Detected),
fmt.Sprintf("Detected: %v", anomalyCount > 0),
"",
"",
}
},
}
func makeSummary(name string, ss string) []string {
return summarizers[name](ss)
func makeSummary(name string, totalCount uint64, anomalyCount uint64, ss string) []string {
return summarizers[name](totalCount, anomalyCount, ss)
}
func logResultItem(w io.Writer, f log.Fields) error {
@ -80,9 +80,10 @@ func logResultItem(w io.Writer, f log.Fields) error {
rID := f.Get("id").(int64)
name := f.Get("name").(string)
isDone := f.Get("is_done").(bool)
startTime := f.Get("start_time").(time.Time)
networkName := f.Get("network_name").(string)
asn := fmt.Sprintf("AS %s", f.Get("asn").(string))
asn := fmt.Sprintf("AS%d (%s)", f.Get("asn").(uint), f.Get("network_country_code").(string))
//runtime := f.Get("runtime").(float64)
//dataUsageUp := f.Get("dataUsageUp").(int64)
//dataUsageDown := f.Get("dataUsageDown").(int64)
@ -98,7 +99,10 @@ func logResultItem(w io.Writer, f log.Fields) error {
fmt.Fprintf(w, "┃ "+firstRow+" ┃\n")
fmt.Fprintf(w, "┡"+strings.Repeat("━", colWidth*2+2)+"┩\n")
summary := makeSummary(name, f.Get("summary").(string))
summary := makeSummary(name,
f.Get("measurement_count").(uint64),
f.Get("measurement_anomaly_count").(uint64),
f.Get("test_keys").(string))
fmt.Fprintf(w, fmt.Sprintf("│ %s %s│\n",
util.RightPad(name, colWidth),
@ -111,9 +115,12 @@ func logResultItem(w io.Writer, f log.Fields) error {
util.RightPad(summary[2], colWidth)))
if index == totalCount-1 {
fmt.Fprintf(w, "└┬──────────────┬──────────────┬──────────────┬")
fmt.Fprintf(w, strings.Repeat("─", colWidth*2-44))
fmt.Fprintf(w, "┘\n")
if isDone == true {
fmt.Fprintf(w, "└┬──────────────┬──────────────┬──────────────────┬┘\n")
} else {
// We want the incomplete section to not have a footer
fmt.Fprintf(w, "└──────────────────────────────────────────────────┘\n")
}
}
return nil
}
@ -122,8 +129,8 @@ func logResultSummary(w io.Writer, f log.Fields) error {
networks := f.Get("total_networks").(int64)
tests := f.Get("total_tests").(int64)
dataUp := f.Get("total_data_usage_up").(int64)
dataDown := f.Get("total_data_usage_down").(int64)
dataUp := f.Get("total_data_usage_up").(float64)
dataDown := f.Get("total_data_usage_down").(float64)
if tests == 0 {
fmt.Fprintf(w, "No results\n")
fmt.Fprintf(w, "Try running:\n")
@ -134,8 +141,8 @@ func logResultSummary(w io.Writer, f log.Fields) error {
fmt.Fprintf(w, " │ %s │ %s │ %s │\n",
util.RightPad(fmt.Sprintf("%d tests", tests), 12),
util.RightPad(fmt.Sprintf("%d nets", networks), 12),
util.RightPad(fmt.Sprintf("%d ⬆ %d ⬇", dataUp, dataDown), 12))
fmt.Fprintf(w, " └──────────────┴──────────────┴──────────────┘\n")
util.RightPad(fmt.Sprintf("⬆ %s ⬇ %s", formatSize(dataUp), formatSize(dataDown)), 16))
fmt.Fprintf(w, " └──────────────┴──────────────┴──────────────────┘\n")
return nil
}

View File

@ -126,7 +126,7 @@ func Onboarding(config *config.Config) error {
config.Lock()
config.InformedConsent = true
config.Advanced.IncludeCountry = settings.IncludeCountry
config.Sharing.IncludeCountry = settings.IncludeCountry
config.Advanced.SendCrashReports = settings.SendCrashReports
config.Sharing.IncludeIP = settings.IncludeIP
config.Sharing.IncludeASN = settings.IncludeNetwork

View File

@ -7,6 +7,7 @@ import (
"time"
"github.com/apex/log"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/internal/util"
)
@ -19,19 +20,78 @@ func Progress(key string, perc float64, msg string) {
}).Info(msg)
}
// MeasurementSummaryData carries the aggregate counters and network
// metadata emitted as a "measurement_summary" event for the CLI view.
type MeasurementSummaryData struct {
	TotalRuntime       float64   // total runtime, rendered as seconds
	TotalCount         int64     // number of measurements
	AnomalyCount       int64     // number of anomalous measurements
	DataUsageUp        float64   // data uploaded during the run
	DataUsageDown      float64   // data downloaded during the run
	ASN                uint      // autonomous system number of the network
	NetworkName        string    // human-readable network name
	NetworkCountryCode string    // two-letter country code of the network
	StartTime          time.Time // when the run started
}
// MeasurementSummary emits a "measurement_summary" structured log event
// carrying the aggregate counters and network metadata from msmt, which
// the CLI handler renders as the summary footer.
func MeasurementSummary(msmt MeasurementSummaryData) {
	fields := log.Fields{
		"type":                 "measurement_summary",
		"start_time":           msmt.StartTime,
		"total_runtime":        msmt.TotalRuntime,
		"total_count":          msmt.TotalCount,
		"anomaly_count":        msmt.AnomalyCount,
		"data_usage_up":        msmt.DataUsageUp,
		"data_usage_down":      msmt.DataUsageDown,
		"asn":                  msmt.ASN,
		"network_name":         msmt.NetworkName,
		"network_country_code": msmt.NetworkCountryCode,
	}
	log.WithFields(fields).Info("measurement summary")
}
// MeasurementItem logs a "measurement_item" structured event carrying all
// the fields the CLI handler needs to render one measurement row.
//
// isFirst and isLast tell the renderer whether to draw the opening or the
// closing border of the measurements box around this item.
func MeasurementItem(msmt database.MeasurementURLNetwork, isFirst bool, isLast bool) {
	log.WithFields(log.Fields{
		"type":                 "measurement_item",
		"is_first":             isFirst,
		"is_last":              isLast,
		"id":                   msmt.Measurement.ID,
		"test_name":            msmt.TestName,
		"test_group_name":      msmt.Result.TestGroupName,
		"start_time":           msmt.Measurement.StartTime,
		"test_keys":            msmt.TestKeys,
		"network_country_code": msmt.Network.CountryCode,
		"network_name":         msmt.Network.NetworkName,
		"asn":                  msmt.Network.ASN,
		"runtime":              msmt.Measurement.Runtime,
		"url":                  msmt.URL.URL.String,
		"url_category_code":    msmt.URL.CategoryCode.String,
		"url_country_code":     msmt.URL.CountryCode.String,
		"is_anomaly":           msmt.IsAnomaly.Bool,
		"is_uploaded":          msmt.IsUploaded,
		"is_upload_failed":     msmt.IsUploadFailed,
		"upload_failure_msg":   msmt.UploadFailureMsg.String,
		"is_failed":            msmt.IsFailed,
		"failure_msg":          msmt.FailureMsg.String,
		"is_done":              msmt.Measurement.IsDone,
		"report_file_path":     msmt.ReportFilePath,
	}).Info("measurement")
}
// ResultItemData is the metadata about a result
type ResultItemData struct {
ID int64
Name string
StartTime time.Time
Summary string
TestKeys string
MeasurementCount uint64
MeasurementAnomalyCount uint64
Runtime float64
Country string
NetworkName string
ASN string
ASN uint
Done bool
DataUsageDown int64
DataUsageUp int64
DataUsageDown float64
DataUsageUp float64
Index int
TotalCount int
}
@ -43,12 +103,14 @@ func ResultItem(result ResultItemData) {
"id": result.ID,
"name": result.Name,
"start_time": result.StartTime,
"summary": result.Summary,
"country": result.Country,
"test_keys": result.TestKeys,
"measurement_count": result.MeasurementCount,
"measurement_anomaly_count": result.MeasurementAnomalyCount,
"network_country_code": result.Country,
"network_name": result.NetworkName,
"asn": result.ASN,
"runtime": result.Runtime,
"done": result.Done,
"is_done": result.Done,
"data_usage_down": result.DataUsageDown,
"data_usage_up": result.DataUsageUp,
"index": result.Index,
@ -58,8 +120,8 @@ func ResultItem(result ResultItemData) {
type ResultSummaryData struct {
TotalTests int64
TotalDataUsageUp int64
TotalDataUsageDown int64
TotalDataUsageUp float64
TotalDataUsageDown float64
TotalNetworks int64
}

View File

@ -38,7 +38,11 @@ func EscapeAwareRuneCountInString(s string) int {
}
// RightPad pads str with spaces on the right up to length, where the
// current width is computed by EscapeAwareRuneCountInString. Strings
// already wider than length are returned unchanged, preventing the
// negative repeat count that would make strings.Repeat panic.
func RightPad(str string, length int) string {
	c := length - EscapeAwareRuneCountInString(str)
	if c < 0 {
		c = 0
	}
	return str + strings.Repeat(" ", c)
}
// WrapString wraps the given string within lim width in characters.

View File

@ -1,14 +1,10 @@
package groups
import (
"encoding/json"
"github.com/apex/log"
"github.com/ooni/probe-cli/nettests"
"github.com/ooni/probe-cli/nettests/im"
"github.com/ooni/probe-cli/nettests/middlebox"
"github.com/ooni/probe-cli/nettests/performance"
"github.com/ooni/probe-cli/nettests/summary"
"github.com/ooni/probe-cli/nettests/websites"
)
@ -16,7 +12,6 @@ import (
type NettestGroup struct {
Label string
Nettests []nettests.Nettest
Summary summary.ResultSummaryFunc
}
// NettestGroups that can be run by the user
@ -26,35 +21,6 @@ var NettestGroups = map[string]NettestGroup{
Nettests: []nettests.Nettest{
websites.WebConnectivity{},
},
Summary: func(m summary.SummaryMap) (string, error) {
if err := summary.CheckRequiredKeys([]string{"WebConnectivity"}, m); err != nil {
log.WithError(err).Error("missing keys")
return "", err
}
// XXX to generate this I need to create the summary map as a list
var summary summary.WebsitesSummary
summary.Tested = 0
summary.Blocked = 0
for _, msmtSummaryStr := range m["WebConnectivity"] {
var wcSummary websites.WebConnectivitySummary
err := json.Unmarshal([]byte(msmtSummaryStr), &wcSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal WebConnectivity summary")
return "", err
}
if wcSummary.Blocked {
summary.Blocked++
}
summary.Tested++
}
summaryBytes, err := json.Marshal(summary)
if err != nil {
return "", err
}
return string(summaryBytes), nil
},
},
"performance": NettestGroup{
Label: "Performance",
@ -62,38 +28,6 @@ var NettestGroups = map[string]NettestGroup{
performance.Dash{},
performance.NDT{},
},
Summary: func(m summary.SummaryMap) (string, error) {
if err := summary.CheckRequiredKeys([]string{"Dash", "Ndt"}, m); err != nil {
log.WithError(err).Error("missing keys")
return "", err
}
var (
err error
ndtSummary performance.NDTSummary
dashSummary performance.DashSummary
summary summary.PerformanceSummary
)
err = json.Unmarshal([]byte(m["Dash"][0]), &dashSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal Dash summary")
return "", err
}
err = json.Unmarshal([]byte(m["Ndt"][0]), &ndtSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal NDT summary")
return "", err
}
summary.Bitrate = dashSummary.Bitrate
summary.Download = ndtSummary.Download
summary.Upload = ndtSummary.Upload
summary.Ping = ndtSummary.AvgRTT
summaryBytes, err := json.Marshal(summary)
if err != nil {
return "", err
}
return string(summaryBytes), nil
},
},
"middlebox": NettestGroup{
Label: "Middleboxes",
@ -101,35 +35,6 @@ var NettestGroups = map[string]NettestGroup{
middlebox.HTTPInvalidRequestLine{},
middlebox.HTTPHeaderFieldManipulation{},
},
Summary: func(m summary.SummaryMap) (string, error) {
if err := summary.CheckRequiredKeys([]string{"HttpInvalidRequestLine", "HttpHeaderFieldManipulation"}, m); err != nil {
log.WithError(err).Error("missing keys")
return "", err
}
var (
err error
hhfmSummary middlebox.HTTPHeaderFieldManipulationSummary
hirlSummary middlebox.HTTPInvalidRequestLineSummary
summary summary.MiddleboxSummary
)
err = json.Unmarshal([]byte(m["HttpHeaderFieldManipulation"][0]), &hhfmSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal hhfm summary")
return "", err
}
err = json.Unmarshal([]byte(m["HttpInvalidRequestLine"][0]), &hirlSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal hirl summary")
return "", err
}
summary.Detected = hirlSummary.Tampering == true || hhfmSummary.Tampering == true
summaryBytes, err := json.Marshal(summary)
if err != nil {
return "", err
}
return string(summaryBytes), nil
},
},
"im": NettestGroup{
Label: "Instant Messaging",
@ -138,52 +43,5 @@ var NettestGroups = map[string]NettestGroup{
im.Telegram{},
im.WhatsApp{},
},
Summary: func(m summary.SummaryMap) (string, error) {
if err := summary.CheckRequiredKeys([]string{"Whatsapp", "Telegram", "FacebookMessenger"}, m); err != nil {
log.WithError(err).Error("missing keys")
return "", err
}
var (
err error
waSummary im.WhatsAppSummary
tgSummary im.TelegramSummary
fbSummary im.FacebookMessengerSummary
summary summary.IMSummary
)
err = json.Unmarshal([]byte(m["Whatsapp"][0]), &waSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal whatsapp summary")
return "", err
}
err = json.Unmarshal([]byte(m["Telegram"][0]), &tgSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal telegram summary")
return "", err
}
err = json.Unmarshal([]byte(m["FacebookMessenger"][0]), &fbSummary)
if err != nil {
log.WithError(err).Error("failed to unmarshal facebook summary")
return "", err
}
// XXX it could actually be that some are not tested when the
// configuration is changed.
summary.Tested = 3
summary.Blocked = 0
if fbSummary.Blocked == true {
summary.Blocked++
}
if tgSummary.Blocked == true {
summary.Blocked++
}
if waSummary.Blocked == true {
summary.Blocked++
}
summaryBytes, err := json.Marshal(summary)
if err != nil {
return "", err
}
return string(summaryBytes), nil
},
},
}

View File

@ -16,15 +16,15 @@ func (h FacebookMessenger) Run(ctl *nettests.Controller) error {
return mknt.Run()
}
// FacebookMessengerSummary for the test
type FacebookMessengerSummary struct {
DNSBlocking bool
TCPBlocking bool
Blocked bool
// FacebookMessengerTestKeys contains the view-relevant test keys for the
// Facebook Messenger test.
type FacebookMessengerTestKeys struct {
	DNSBlocking bool `json:"facebook_dns_blocking"`
	TCPBlocking bool `json:"facebook_tcp_blocking"`
	// IsAnomaly is true when either DNS or TCP blocking was detected;
	// it is never serialized to JSON.
	IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (h FacebookMessenger) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (h FacebookMessenger) GetTestKeys(tk map[string]interface{}) interface{} {
var (
dnsBlocking bool
tcpBlocking bool
@ -41,10 +41,10 @@ func (h FacebookMessenger) Summary(tk map[string]interface{}) interface{} {
tcpBlocking = tk["facebook_tcp_blocking"].(bool)
}
return FacebookMessengerSummary{
return FacebookMessengerTestKeys{
DNSBlocking: dnsBlocking,
TCPBlocking: tcpBlocking,
Blocked: dnsBlocking || tcpBlocking,
IsAnomaly: dnsBlocking || tcpBlocking,
}
}

View File

@ -16,16 +16,16 @@ func (h Telegram) Run(ctl *nettests.Controller) error {
return mknt.Run()
}
// TelegramSummary for the test
type TelegramSummary struct {
HTTPBlocking bool
TCPBlocking bool
WebBlocking bool
Blocked bool
// TelegramTestKeys contains the view-relevant test keys for the Telegram
// test.
type TelegramTestKeys struct {
	HTTPBlocking bool `json:"telegram_http_blocking"`
	TCPBlocking  bool `json:"telegram_tcp_blocking"`
	WebBlocking  bool `json:"telegram_web_blocking"`
	// IsAnomaly is true when any form of blocking was detected; it is
	// never serialized to JSON.
	IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (h Telegram) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (h Telegram) GetTestKeys(tk map[string]interface{}) interface{} {
var (
tcpBlocking bool
httpBlocking bool
@ -48,11 +48,11 @@ func (h Telegram) Summary(tk map[string]interface{}) interface{} {
webBlocking = tk["telegram_web_status"].(string) == "blocked"
}
return TelegramSummary{
return TelegramTestKeys{
TCPBlocking: tcpBlocking,
HTTPBlocking: httpBlocking,
WebBlocking: webBlocking,
Blocked: webBlocking || httpBlocking || tcpBlocking,
IsAnomaly: webBlocking || httpBlocking || tcpBlocking,
}
}

View File

@ -16,16 +16,16 @@ func (h WhatsApp) Run(ctl *nettests.Controller) error {
return mknt.Run()
}
// WhatsAppSummary for the test
type WhatsAppSummary struct {
RegistrationServerBlocking bool
WebBlocking bool
EndpointsBlocking bool
Blocked bool
// WhatsAppTestKeys contains the view-relevant test keys for the WhatsApp
// test.
type WhatsAppTestKeys struct {
	RegistrationServerBlocking bool `json:"registration_server_blocking"`
	WebBlocking                bool `json:"whatsapp_web_blocking"`
	EndpointsBlocking          bool `json:"whatsapp_endpoints_blocking"`
	// IsAnomaly is true when any form of blocking was detected; it is
	// never serialized to JSON.
	IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (h WhatsApp) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (h WhatsApp) GetTestKeys(tk map[string]interface{}) interface{} {
var (
webBlocking bool
registrationBlocking bool
@ -46,11 +46,11 @@ func (h WhatsApp) Summary(tk map[string]interface{}) interface{} {
webBlocking = computeBlocking("whatsapp_web_status")
endpointsBlocking = computeBlocking("whatsapp_endpoints_status")
return WhatsAppSummary{
return WhatsAppTestKeys{
RegistrationServerBlocking: registrationBlocking,
WebBlocking: webBlocking,
EndpointsBlocking: endpointsBlocking,
Blocked: registrationBlocking || webBlocking || endpointsBlocking,
IsAnomaly: registrationBlocking || webBlocking || endpointsBlocking,
}
}

View File

@ -16,13 +16,13 @@ func (h HTTPHeaderFieldManipulation) Run(ctl *nettests.Controller) error {
return mknt.Run()
}
// HTTPHeaderFieldManipulationSummary for the test
type HTTPHeaderFieldManipulationSummary struct {
Tampering bool
// HTTPHeaderFieldManipulationTestKeys contains the view-relevant test keys
// for the HTTP header field manipulation test. IsAnomaly reports detected
// tampering and is never serialized to JSON.
type HTTPHeaderFieldManipulationTestKeys struct {
	IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (h HTTPHeaderFieldManipulation) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys returns a projection of the tests keys needed for the views
func (h HTTPHeaderFieldManipulation) GetTestKeys(tk map[string]interface{}) interface{} {
tampering := false
for _, v := range tk["tampering"].(map[string]interface{}) {
t, ok := v.(bool)
@ -32,8 +32,8 @@ func (h HTTPHeaderFieldManipulation) Summary(tk map[string]interface{}) interfac
}
}
return HTTPHeaderFieldManipulationSummary{
Tampering: tampering,
return HTTPHeaderFieldManipulationTestKeys{
IsAnomaly: tampering,
}
}

View File

@ -16,17 +16,17 @@ func (h HTTPInvalidRequestLine) Run(ctl *nettests.Controller) error {
return mknt.Run()
}
// HTTPInvalidRequestLineSummary for the test
type HTTPInvalidRequestLineSummary struct {
Tampering bool
// HTTPInvalidRequestLineTestKeys contains the view-relevant test keys for
// the HTTP invalid request line test. IsAnomaly reports detected
// tampering and is never serialized to JSON.
type HTTPInvalidRequestLineTestKeys struct {
	IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (h HTTPInvalidRequestLine) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (h HTTPInvalidRequestLine) GetTestKeys(tk map[string]interface{}) interface{} {
tampering := tk["tampering"].(bool)
return HTTPInvalidRequestLineSummary{
Tampering: tampering,
return HTTPInvalidRequestLineTestKeys{
IsAnomaly: tampering,
}
}

View File

@ -1,29 +1,36 @@
package nettests
import (
"database/sql"
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
"github.com/apex/log"
"github.com/fatih/color"
"github.com/measurement-kit/go-measurement-kit"
ooni "github.com/ooni/probe-cli"
"github.com/ooni/probe-cli/internal/crashreport"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/internal/output"
"github.com/ooni/probe-cli/utils"
"github.com/ooni/probe-cli/utils/strcase"
)
// Nettest interface. Every Nettest should implement this.
type Nettest interface {
Run(*Controller) error
Summary(map[string]interface{}) interface{}
GetTestKeys(map[string]interface{}) interface{}
LogSummary(string) error
}
// NewController creates a nettest controller
func NewController(nt Nettest, ctx *ooni.Context, res *database.Result, msmtPath string) *Controller {
func NewController(nt Nettest, ctx *ooni.Context, res *database.Result) *Controller {
msmtPath := filepath.Join(ctx.TempDir,
fmt.Sprintf("msmt-%T-%s.jsonl", nt,
time.Now().UTC().Format(utils.ResultTimestamp)))
return &Controller{
Ctx: ctx,
nt: nt,
@ -40,6 +47,7 @@ type Controller struct {
nt Nettest
msmts map[int64]*database.Measurement
msmtPath string // XXX maybe we can drop this and just use a temporary file
inputIdxMap map[int64]int64 // Used to map mk idx to database id
}
func getCaBundlePath() string {
@ -50,6 +58,11 @@ func getCaBundlePath() string {
return "/etc/ssl/cert.pem"
}
func (c *Controller) SetInputIdxMap(inputIdxMap map[int64]int64) error {
c.inputIdxMap = inputIdxMap
return nil
}
// Init should be called once to initialise the nettest
func (c *Controller) Init(nt *mk.Nettest) error {
log.Debugf("Init: %v", nt)
@ -57,70 +70,31 @@ func (c *Controller) Init(nt *mk.Nettest) error {
c.msmts = make(map[int64]*database.Measurement)
msmtTemplate := database.Measurement{
ASN: "",
IP: "",
CountryCode: "",
ReportID: "",
Name: nt.Name,
ResultID: c.res.ID,
ReportFilePath: c.msmtPath,
}
// This is to workaround homedirs having UTF-8 characters in them.
// See: https://github.com/measurement-kit/measurement-kit/issues/1635
// These values are shared by every measurement
reportID := sql.NullString{String: "", Valid: false}
testName := strcase.ToSnake(nt.Name)
resultID := c.res.ID
reportFilePath := c.msmtPath
geoIPCountryPath := filepath.Join(utils.GeoIPDir(c.Ctx.Home), "GeoIP.dat")
geoIPASNPath := filepath.Join(utils.GeoIPDir(c.Ctx.Home), "GeoIPASNum.dat")
caBundlePath := getCaBundlePath()
msmtPath := c.msmtPath
userHome, err := utils.GetOONIHome()
if err != nil {
log.WithError(err).Error("failed to figure out the homedir")
return err
}
// Get the parent of it
userHome = filepath.Dir(userHome)
relPath, err := filepath.Rel(userHome, caBundlePath)
if err != nil {
log.WithError(err).Error("caBundlePath is not relative to the users home")
} else {
caBundlePath = relPath
}
relPath, err = filepath.Rel(userHome, geoIPASNPath)
if err != nil {
log.WithError(err).Error("geoIPASNPath is not relative to the users home")
} else {
geoIPASNPath = relPath
}
relPath, err = filepath.Rel(userHome, geoIPCountryPath)
if err != nil {
log.WithError(err).Error("geoIPCountryPath is not relative to the users home")
} else {
geoIPCountryPath = relPath
}
log.Debugf("Chdir to: %s", userHome)
if err := os.Chdir(userHome); err != nil {
log.WithError(err).Errorf("failed to chdir to %s", userHome)
return err
}
log.Debugf("OutputPath: %s", msmtPath)
nt.Options = mk.NettestOptions{
IncludeIP: c.Ctx.Config.Sharing.IncludeIP,
IncludeASN: c.Ctx.Config.Sharing.IncludeASN,
IncludeCountry: c.Ctx.Config.Advanced.IncludeCountry,
LogLevel: "INFO",
IncludeCountry: c.Ctx.Config.Sharing.IncludeCountry,
LogLevel: "DEBUG",
ProbeCC: c.Ctx.Location.CountryCode,
ProbeASN: fmt.Sprintf("AS%d", c.Ctx.Location.ASN),
ProbeIP: c.Ctx.Location.IP,
DisableReportFile: false,
DisableCollector: false,
SoftwareName: "ooniprobe",
DisableCollector: !c.Ctx.Config.Sharing.UploadResults,
RandomizeInput: false, // It's important to disable input randomization to ensure the URLs are written in sync to the DB
SoftwareName: "ooniprobe-desktop",
SoftwareVersion: ooni.Version,
OutputPath: msmtPath,
@ -132,18 +106,16 @@ func (c *Controller) Init(nt *mk.Nettest) error {
log.Debugf("GeoIPCountryPath: %s", nt.Options.GeoIPCountryPath)
nt.On("log", func(e mk.Event) {
log.Debugf(color.RedString(e.Key))
level := e.Value.LogLevel
msg := e.Value.Message
switch level {
case "ERROR":
log.Error(msg)
log.Errorf("%v: %s", color.RedString("mklog"), msg)
case "INFO":
log.Info(msg)
log.Infof("%v: %s", color.BlueString("mklog"), msg)
default:
log.Debug(msg)
log.Debugf("%v: %s", color.WhiteString("mklog"), msg)
}
})
@ -159,22 +131,21 @@ func (c *Controller) Init(nt *mk.Nettest) error {
nt.On("status.report_created", func(e mk.Event) {
log.Debugf("%s", e.Key)
msmtTemplate.ReportID = e.Value.ReportID
reportID = sql.NullString{String: e.Value.ReportID, Valid: true}
})
nt.On("status.geoip_lookup", func(e mk.Event) {
log.Debugf(color.RedString(e.Key))
msmtTemplate.ASN = e.Value.ProbeASN
msmtTemplate.IP = e.Value.ProbeIP
msmtTemplate.CountryCode = e.Value.ProbeCC
})
nt.On("status.measurement_start", func(e mk.Event) {
log.Debugf(color.RedString(e.Key))
idx := e.Value.Idx
msmt, err := database.CreateMeasurement(c.Ctx.DB, msmtTemplate, e.Value.Input)
urlID := sql.NullInt64{Int64: 0, Valid: false}
if c.inputIdxMap != nil {
urlID = sql.NullInt64{Int64: c.inputIdxMap[idx], Valid: true}
}
msmt, err := database.CreateMeasurement(c.Ctx.DB, reportID, testName, resultID, reportFilePath, urlID)
if err != nil {
log.WithError(err).Error("Failed to create measurement")
return
@ -242,9 +213,12 @@ func (c *Controller) Init(nt *mk.Nettest) error {
nt.On("status.measurement_submission", func(e mk.Event) {
log.Debugf(color.RedString(e.Key))
// XXX maybe this should change once MK is aligned with the spec
if c.Ctx.Config.Sharing.UploadResults == true {
if err := c.msmts[e.Value.Idx].UploadSucceeded(c.Ctx.DB); err != nil {
log.WithError(err).Error("failed to mark msmt as uploaded")
}
}
})
nt.On("status.measurement_done", func(e mk.Event) {
@ -258,19 +232,30 @@ func (c *Controller) Init(nt *mk.Nettest) error {
nt.On("measurement", func(e mk.Event) {
log.Debugf("status.end")
crashreport.CapturePanicAndWait(func() {
c.OnEntry(e.Value.Idx, e.Value.JSONStr)
}, nil)
})
nt.On("status.end", func(e mk.Event) {
log.Debugf("status.end")
for idx, msmt := range c.msmts {
log.Debugf("adding msmt#%d to result", idx)
if err := msmt.AddToResult(c.Ctx.DB, c.res); err != nil {
log.WithError(err).Error("failed to add to result")
}
}
if e.Value.Failure != "" {
log.Errorf("Failure in status.end: %s", e.Value.Failure)
}
c.res.DataUsageDown += e.Value.DownloadedKB
c.res.DataUsageUp += e.Value.UploadedKB
})
log.Debugf("Registered all the handlers")
return nil
}
@ -292,14 +277,17 @@ func (c *Controller) OnEntry(idx int64, jsonStr string) {
log.Debugf("OnEntry")
var entry Entry
json.Unmarshal([]byte(jsonStr), &entry)
summary := c.nt.Summary(entry.TestKeys)
summaryBytes, err := json.Marshal(summary)
if err != nil {
log.WithError(err).Error("failed to serialize summary")
if err := json.Unmarshal([]byte(jsonStr), &entry); err != nil {
log.WithError(err).Error("failed to parse onEntry")
return
}
tk := c.nt.GetTestKeys(entry.TestKeys)
log.Debugf("Fetching: %s %v", idx, c.msmts[idx])
c.msmts[idx].WriteSummary(c.Ctx.DB, string(summaryBytes))
err := database.AddTestKeys(c.Ctx.DB, c.msmts[idx], tk)
if err != nil {
log.WithError(err).Error("failed to add test keys to summary")
}
}
// MKStart is the interface for the mk.Nettest Start() function

View File

@ -16,19 +16,21 @@ func (d Dash) Run(ctl *nettests.Controller) error {
return dash.Run()
}
// DashSummary for the test
// DashTestKeys for the test
// TODO: process 'receiver_data' to provide an array of performance for a chart.
type DashSummary struct {
Latency float64
Bitrate int64
Delay float64
type DashTestKeys struct {
Latency float64 `json:"connect_latency"`
Bitrate int64 `json:"median_bitrate"`
Delay float64 `json:"min_playout_delay"`
IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (d Dash) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (d Dash) GetTestKeys(tk map[string]interface{}) interface{} {
simple := tk["simple"].(map[string]interface{})
return DashSummary{
return DashTestKeys{
IsAnomaly: false,
Latency: simple["connect_latency"].(float64),
Bitrate: int64(simple["median_bitrate"].(float64)),
Delay: simple["min_playout_delay"].(float64),

View File

@ -16,26 +16,27 @@ func (n NDT) Run(ctl *nettests.Controller) error {
return nt.Run()
}
// NDTSummary for the test
type NDTSummary struct {
Upload int64
Download int64
Ping int64
MaxRTT float64
AvgRTT float64
MinRTT float64
MSS int64
OutOfOrder int64
PacketLoss float64
Timeouts int64
// NDTTestKeys for the test
type NDTTestKeys struct {
Upload int64 `json:"upload"`
Download int64 `json:"download"`
Ping int64 `json:"ping"`
MaxRTT float64 `json:"max_rtt"`
AvgRTT float64 `json:"avg_rtt"`
MinRTT float64 `json:"min_rtt"`
MSS int64 `json:"mss"`
OutOfOrder int64 `json:"out_of_order"`
PacketLoss float64 `json:"packet_loss"`
Timeouts int64 `json:"timeouts"`
IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (n NDT) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (n NDT) GetTestKeys(tk map[string]interface{}) interface{} {
simple := tk["simple"].(map[string]interface{})
advanced := tk["advanced"].(map[string]interface{})
return NDTSummary{
return NDTTestKeys{
Upload: int64(simple["upload"].(float64)),
Download: int64(simple["download"].(float64)),
Ping: int64(simple["ping"].(float64)),

View File

@ -1,44 +0,0 @@
package summary
import "fmt"
// ResultSummaryFunc is the function used to generate result summaries
type ResultSummaryFunc func(SummaryMap) (string, error)
// SummaryMap contains a mapping from test name to serialized summary for it
type SummaryMap map[string][]string
// PerformanceSummary is the result summary for a performance test
type PerformanceSummary struct {
Upload int64
Download int64
Ping float64
Bitrate int64
}
// MiddleboxSummary is the summary for the middlebox tests
type MiddleboxSummary struct {
Detected bool
}
// IMSummary is the summary for the im tests
type IMSummary struct {
Tested uint
Blocked uint
}
// WebsitesSummary is the summary for the websites test
type WebsitesSummary struct {
Tested uint
Blocked uint
}
func CheckRequiredKeys(rk []string, m SummaryMap) error {
for _, key := range rk {
if _, ok := m[key]; ok {
continue
}
return fmt.Errorf("missing SummaryMap key '%s'", key)
}
return nil
}

View File

@ -6,7 +6,9 @@ import (
"io/ioutil"
"net/http"
"github.com/apex/log"
"github.com/measurement-kit/go-measurement-kit"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/nettests"
"github.com/pkg/errors"
)
@ -14,6 +16,7 @@ import (
// URLInfo contains the URL and the citizenlab category code for that URL
type URLInfo struct {
URL string `json:"url"`
CountryCode string `json:"country_code"`
CategoryCode string `json:"category_code"`
}
@ -24,11 +27,13 @@ type URLResponse struct {
const orchestrateBaseURL = "https://events.proteus.test.ooni.io"
func lookupURLs(ctl *nettests.Controller) ([]string, error) {
func lookupURLs(ctl *nettests.Controller) ([]string, map[int64]int64, error) {
var (
parsed = new(URLResponse)
urls []string
)
urlIDMap := make(map[int64]int64)
log.Debug("Looking up URLs")
// XXX pass in the configuration for category codes
reqURL := fmt.Sprintf("%s/api/v1/urls?probe_cc=%s",
orchestrateBaseURL,
@ -36,22 +41,29 @@ func lookupURLs(ctl *nettests.Controller) ([]string, error) {
resp, err := http.Get(reqURL)
if err != nil {
return urls, errors.Wrap(err, "failed to perform request")
return urls, urlIDMap, errors.Wrap(err, "failed to perform request")
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return urls, errors.Wrap(err, "failed to read response body")
return urls, urlIDMap, errors.Wrap(err, "failed to read response body")
}
err = json.Unmarshal([]byte(body), &parsed)
if err != nil {
return urls, errors.Wrap(err, "failed to parse json")
return urls, urlIDMap, errors.Wrap(err, "failed to parse json")
}
for _, url := range parsed.Results {
for idx, url := range parsed.Results {
log.Debugf("Going over URL %d", idx)
urlID, err := database.CreateOrUpdateURL(ctl.Ctx.DB, url.URL, url.CategoryCode, url.CountryCode)
if err != nil {
log.Error("failed to add to the URL table")
}
log.Debugf("Mapped URL %s to idx %d and urlID %d", url.URL, idx, urlID)
urlIDMap[int64(idx)] = urlID
urls = append(urls, url.URL)
}
return urls, nil
return urls, urlIDMap, nil
}
// WebConnectivity test implementation
@ -63,24 +75,25 @@ func (n WebConnectivity) Run(ctl *nettests.Controller) error {
nt := mk.NewNettest("WebConnectivity")
ctl.Init(nt)
urls, err := lookupURLs(ctl)
urls, urlIDMap, err := lookupURLs(ctl)
if err != nil {
return err
}
ctl.SetInputIdxMap(urlIDMap)
nt.Options.Inputs = urls
return nt.Run()
}
// WebConnectivitySummary for the test
type WebConnectivitySummary struct {
Accessible bool
Blocking string
Blocked bool
// WebConnectivityTestKeys for the test
type WebConnectivityTestKeys struct {
Accessible bool `json:"accessible"`
Blocking string `json:"blocking"`
IsAnomaly bool `json:"-"`
}
// Summary generates a summary for a test run
func (n WebConnectivity) Summary(tk map[string]interface{}) interface{} {
// GetTestKeys generates a summary for a test run
func (n WebConnectivity) GetTestKeys(tk map[string]interface{}) interface{} {
var (
blocked bool
blocking string
@ -107,10 +120,10 @@ func (n WebConnectivity) Summary(tk map[string]interface{}) interface{} {
accessible = tk["accessible"].(bool)
}
return WebConnectivitySummary{
return WebConnectivityTestKeys{
Accessible: accessible,
Blocking: blocking,
Blocked: blocked,
IsAnomaly: blocked,
}
}

10
ooni.go
View File

@ -6,7 +6,6 @@ import (
"path"
"github.com/apex/log"
"github.com/jmoiron/sqlx"
"github.com/ooni/probe-cli/config"
"github.com/ooni/probe-cli/internal/bindata"
"github.com/ooni/probe-cli/internal/database"
@ -14,15 +13,17 @@ import (
"github.com/ooni/probe-cli/internal/onboard"
"github.com/ooni/probe-cli/utils"
"github.com/pkg/errors"
"upper.io/db.v3/lib/sqlbuilder"
)
const Version = "3.0.0-dev.0"
const Version = "3.0.0-dev.1"
// Context for OONI Probe
type Context struct {
Config *config.Config
DB *sqlx.DB
DB sqlbuilder.Database
Location *utils.LocationInfo
IsBatch bool
Home string
TempDir string
@ -60,6 +61,9 @@ func (c *Context) LocationLookup() error {
// config option is set to false
func (c *Context) MaybeOnboarding() error {
if c.Config.InformedConsent == false {
if c.IsBatch == true {
return errors.New("cannot run onboarding in batch mode")
}
if err := onboard.Onboarding(c.Config); err != nil {
return errors.Wrap(err, "onboarding")
}

27
ooni_test.go Normal file
View File

@ -0,0 +1,27 @@
package ooni
import (
"io/ioutil"
"os"
"path"
"testing"
)
// TestInit checks that initializing a Context against a fresh, temporary
// OONI home directory succeeds and creates the configuration file there.
func TestInit(t *testing.T) {
	ooniHome, err := ioutil.TempDir("", "oonihome")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(ooniHome)
	// NOTE(review): the first NewContext argument looks like a config file
	// path (empty meaning "use the default") — confirm against ooni.go.
	ctx := NewContext("", ooniHome)
	if err := ctx.Init(); err != nil {
		t.Error(err)
		t.Fatal("failed to init the context")
	}
	// Init is expected to have written config.json inside the OONI home.
	configPath := path.Join(ooniHome, "config.json")
	if _, err := os.Stat(configPath); os.IsNotExist(err) {
		t.Fatal("config file was not created")
	}
}

22
utils/strcase/LICENSE Normal file
View File

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Ian Coleman
Copyright (c) 2018 Ma_124, <github.com/Ma124>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, Subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or Substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

23
utils/strcase/README.md Normal file
View File

@ -0,0 +1,23 @@
# strcase
[![Godoc Reference](https://godoc.org/github.com/iancoleman/strcase?status.svg)](http://godoc.org/github.com/iancoleman/strcase)
[![Build Status](https://travis-ci.org/iancoleman/strcase.svg)](https://travis-ci.org/iancoleman/strcase)
[![Coverage](http://gocover.io/_badge/github.com/iancoleman/strcase?0)](http://gocover.io/github.com/iancoleman/strcase)
strcase is a Go package for converting string case to [snake case](https://en.wikipedia.org/wiki/Snake_case) or [camel case](https://en.wikipedia.org/wiki/CamelCase).
## Example
```go
s := "AnyKind of_string"
```
| Function | Result |
|-----------------------------------|----------------------|
| `ToSnake(s)` | `any_kind_of_string` |
| `ToScreamingSnake(s)` | `ANY_KIND_OF_STRING` |
| `ToKebab(s)` | `any-kind-of-string` |
| `ToScreamingKebab(s)` | `ANY-KIND-OF-STRING` |
| `ToDelimited(s, '.')` | `any.kind.of.string` |
| `ToScreamingDelimited(s, '.')` | `ANY.KIND.OF.STRING` |
| `ToCamel(s)` | `AnyKindOfString` |
| `ToLowerCamel(s)` | `anyKindOfString` |

75
utils/strcase/camel.go Normal file
View File

@ -0,0 +1,75 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ian Coleman
* Copyright (c) 2018 Ma_124, <github.com/Ma124>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, Subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or Substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package strcase
import (
"strings"
)
// toCamelInitCase converts s to CamelCase (initCase true) or to the
// continuation form used by lowerCamelCase (initCase false).
//
// Only ASCII letters and digits are kept. '_', ' ' and '-' act as word
// separators: they are dropped and the next lowercase letter is
// upper-cased. Any other rune is discarded.
func toCamelInitCase(s string, initCase bool) string {
	s = addWordBoundariesToNumbers(s)
	s = strings.Trim(s, " ")
	// strings.Builder avoids the O(n^2) cost of repeated string
	// concatenation that the previous implementation paid per character.
	var b strings.Builder
	b.Grow(len(s))
	capNext := initCase
	for _, v := range s {
		switch {
		case v >= 'A' && v <= 'Z', v >= '0' && v <= '9':
			// Uppercase letters and digits pass through unchanged.
			b.WriteRune(v)
		case v >= 'a' && v <= 'z':
			if capNext {
				b.WriteRune(v - 'a' + 'A')
			} else {
				b.WriteRune(v)
			}
		}
		// A separator flags the next lowercase letter for capitalization;
		// any other character resets the flag.
		capNext = v == '_' || v == ' ' || v == '-'
	}
	return b.String()
}
// ToCamel converts a string to CamelCase.
func ToCamel(s string) string {
	return toCamelInitCase(s, true)
}
// ToLowerCamel converts a string to lowerCamelCase.
func ToLowerCamel(s string) string {
	if len(s) == 0 {
		return s
	}
	// Lower-case a leading ASCII uppercase letter before delegating.
	if head := s[0]; head >= 'A' && head <= 'Z' {
		s = string(rune(head+'a'-'A')) + s[1:]
	}
	return toCamelInitCase(s, false)
}

View File

@ -0,0 +1,68 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ian Coleman
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, Subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or Substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package strcase
import (
"testing"
)
// TestToCamel checks ToCamel over a table of representative inputs,
// covering underscores, spaces, hyphens, embedded numbers and the
// empty string.
func TestToCamel(t *testing.T) {
	cases := []struct{ in, want string }{
		{"test_case", "TestCase"},
		{"test", "Test"},
		{"TestCase", "TestCase"},
		{" test case ", "TestCase"},
		{"", ""},
		{"many_many_words", "ManyManyWords"},
		{"AnyKind of_string", "AnyKindOfString"},
		{"odd-fix", "OddFix"},
		{"numbers2And55with000", "Numbers2And55With000"},
	}
	for _, c := range cases {
		if got := ToCamel(c.in); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}
// TestToLowerCamel checks ToLowerCamel over a table of representative
// inputs, including a leading uppercase letter and mixed separators.
func TestToLowerCamel(t *testing.T) {
	cases := []struct{ in, want string }{
		{"foo-bar", "fooBar"},
		{"TestCase", "testCase"},
		{"", ""},
		{"AnyKind of_string", "anyKindOfString"},
	}
	for _, c := range cases {
		if got := ToLowerCamel(c.in); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}

38
utils/strcase/numbers.go Normal file
View File

@ -0,0 +1,38 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ian Coleman
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, Subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or Substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package strcase
import (
"regexp"
)
// numberSequence matches a letter, a run of digits and an optional trailing
// letter, so word boundaries can be inserted around embedded numbers.
var numberSequence = regexp.MustCompile(`([a-zA-Z])(\d+)([a-zA-Z]?)`)

// numberReplacement re-emits the three captured groups separated by spaces.
var numberReplacement = []byte(`$1 $2 $3`)

// addWordBoundariesToNumbers inserts spaces around digit runs embedded in
// words, e.g. "a1b" -> "a 1 b".
func addWordBoundariesToNumbers(s string) string {
	return string(numberSequence.ReplaceAll([]byte(s), numberReplacement))
}

94
utils/strcase/snake.go Normal file
View File

@ -0,0 +1,94 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ian Coleman
* Copyright (c) 2018 Ma_124, <github.com/Ma124>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, Subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or Substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// Package strcase converts strings to snake_case or CamelCase
package strcase
import (
"strings"
)
// ToSnake converts a string to snake_case.
func ToSnake(s string) string {
	return ToDelimited(s, '_')
}

// ToScreamingSnake converts a string to SCREAMING_SNAKE_CASE.
func ToScreamingSnake(s string) string {
	return ToScreamingDelimited(s, '_', true)
}

// ToKebab converts a string to kebab-case.
func ToKebab(s string) string {
	return ToDelimited(s, '-')
}

// ToScreamingKebab converts a string to SCREAMING-KEBAB-CASE.
func ToScreamingKebab(s string) string {
	return ToScreamingDelimited(s, '-', true)
}

// ToDelimited converts a string to delimited.snake.case
// (in this case `del = '.'`).
func ToDelimited(s string, del uint8) string {
	return ToScreamingDelimited(s, del, false)
}
// ToScreamingDelimited converts a string to SCREAMING.DELIMITED.SNAKE.CASE
// (in this case `del = '.'; screaming = true`) or delimited.snake.case
// (in this case `del = '.'; screaming = false`).
func ToScreamingDelimited(s string, del uint8, screaming bool) string {
	s = addWordBoundariesToNumbers(s)
	s = strings.Trim(s, " ")
	n := ""
	// NOTE: i is a byte index (range over string); the n[len(n)-1] check
	// below compares the last *byte* already emitted against del, which
	// also prevents emitting two delimiters in a row.
	for i, v := range s {
		// treat acronyms as words, eg for JSONData -> JSON is a whole word
		nextCaseIsChanged := false
		if i+1 < len(s) {
			next := s[i+1]
			if (v >= 'A' && v <= 'Z' && next >= 'a' && next <= 'z') || (v >= 'a' && v <= 'z' && next >= 'A' && next <= 'Z') {
				nextCaseIsChanged = true
			}
		}
		if i > 0 && n[len(n)-1] != del && nextCaseIsChanged {
			// add underscore if next letter case type is changed:
			// before an Upper->lower boundary the delimiter goes *before*
			// the letter; after a lower->Upper boundary it goes *after*
			// (this is what turns "AAAbbb" into "aa_abbb").
			if v >= 'A' && v <= 'Z' {
				n += string(del) + string(v)
			} else if v >= 'a' && v <= 'z' {
				n += string(v) + string(del)
			}
		} else if v == ' ' || v == '_' || v == '-' {
			// replace spaces/underscores/hyphens with delimiters
			n += string(del)
		} else {
			n = n + string(v)
		}
	}
	// NOTE(review): repeated string concatenation above is O(n^2); fine for
	// short identifiers, but a strings.Builder would be preferable if this
	// is ever used on long inputs.
	if screaming {
		n = strings.ToUpper(n)
	} else {
		n = strings.ToLower(n)
	}
	return n
}

147
utils/strcase/snake_test.go Normal file
View File

@ -0,0 +1,147 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ian Coleman
* Copyright (c) 2018 Ma_124, <github.com/Ma124>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, Subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or Substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package strcase
import (
"testing"
)
// TestToSnake checks ToSnake over a table of representative inputs,
// including acronyms, embedded numbers, surrounding spaces and mixed
// separators.
func TestToSnake(t *testing.T) {
	cases := []struct{ in, want string }{
		{"testCase", "test_case"},
		{"TestCase", "test_case"},
		{"Test Case", "test_case"},
		{" Test Case", "test_case"},
		{"Test Case ", "test_case"},
		{" Test Case ", "test_case"},
		{"test", "test"},
		{"test_case", "test_case"},
		{"Test", "test"},
		{"", ""},
		{"ManyManyWords", "many_many_words"},
		{"manyManyWords", "many_many_words"},
		{"AnyKind of_string", "any_kind_of_string"},
		{"numbers2and55with000", "numbers_2_and_55_with_000"},
		{"JSONData", "json_data"},
		{"userID", "user_id"},
		{"AAAbbb", "aa_abbb"},
	}
	for _, c := range cases {
		if got := ToSnake(c.in); got != c.want {
			t.Error("'" + c.in + "'('" + got + "' != '" + c.want + "')")
		}
	}
}
// TestToDelimited checks ToDelimited with '@' as delimiter over the same
// inputs exercised by TestToSnake, plus a hyphenated one.
func TestToDelimited(t *testing.T) {
	cases := []struct{ in, want string }{
		{"testCase", "test@case"},
		{"TestCase", "test@case"},
		{"Test Case", "test@case"},
		{" Test Case", "test@case"},
		{"Test Case ", "test@case"},
		{" Test Case ", "test@case"},
		{"test", "test"},
		{"test_case", "test@case"},
		{"Test", "test"},
		{"", ""},
		{"ManyManyWords", "many@many@words"},
		{"manyManyWords", "many@many@words"},
		{"AnyKind of_string", "any@kind@of@string"},
		{"numbers2and55with000", "numbers@2@and@55@with@000"},
		{"JSONData", "json@data"},
		{"userID", "user@id"},
		{"AAAbbb", "aa@abbb"},
		{"test-case", "test@case"},
	}
	for _, c := range cases {
		if got := ToDelimited(c.in, '@'); got != c.want {
			t.Error("'" + c.in + "' ('" + got + "' != '" + c.want + "')")
		}
	}
}
// TestToScreamingSnake checks the SCREAMING_SNAKE_CASE conversion.
func TestToScreamingSnake(t *testing.T) {
	for _, c := range []struct{ in, want string }{
		{"testCase", "TEST_CASE"},
	} {
		if got := ToScreamingSnake(c.in); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}
// TestToKebab checks the kebab-case conversion.
func TestToKebab(t *testing.T) {
	for _, c := range []struct{ in, want string }{
		{"testCase", "test-case"},
	} {
		if got := ToKebab(c.in); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}
// TestToScreamingKebab checks the SCREAMING-KEBAB-CASE conversion.
func TestToScreamingKebab(t *testing.T) {
	for _, c := range []struct{ in, want string }{
		{"testCase", "TEST-CASE"},
	} {
		if got := ToScreamingKebab(c.in); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}
// TestToScreamingDelimited checks the screaming form with a custom
// '.' delimiter.
func TestToScreamingDelimited(t *testing.T) {
	for _, c := range []struct{ in, want string }{
		{"testCase", "TEST.CASE"},
	} {
		if got := ToScreamingDelimited(c.in, '.', true); got != c.want {
			t.Error("'" + got + "' != '" + c.want + "'")
		}
	}
}