Merge pull request #102 from ooni/independent-file

One measurement, One file
This commit is contained in:
Arturo Filastò 2020-01-31 10:47:49 +01:00 committed by GitHub
commit a2637be74d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 321 additions and 211 deletions

View File

@ -4,6 +4,21 @@ set -e
buildtags=""
ldflags="-s -w"
if [ "$1" = "bindata" ]; then
GO_BINDATA_V=$(go-bindata -version | grep go-bin | cut -d ' ' -f2)
if [ "$GO_BINDATA_V" = "3.2.0" ]; then
echo "Updating bindata"
go-bindata -nometadata -o internal/bindata/bindata.go -pkg bindata data/...
echo "DONE"
exit 0
else
echo "Wrong go-bindata-version"
echo "Please install go-bindata with:"
echo " go get -u github.com/shuLhan/go-bindata/..."
exit 1
fi
fi
if [ "$1" = "windows" ]; then
set -x
CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ \

View File

@ -0,0 +1,164 @@
-- Migration 2: switch from one report file shared by a whole result to one
-- JSON file per measurement (adds `measurement_file_path`, relaxes
-- `report_file_path` to nullable). Managed by sql-migrate: the `+migrate`
-- directives delimit the Up and Down steps.

-- +migrate Down
-- +migrate StatementBegin
-- Disable FK enforcement while the table is swapped out from under its
-- constraints; re-enabled at the end of the statement block.
PRAGMA foreign_keys=off;
-- SQLite cannot drop a column, so downgrade by stashing the current table,
-- re-creating the pre-migration schema (no `measurement_file_path`,
-- `report_file_path` NOT NULL again) and copying the surviving columns back.
ALTER TABLE `measurements` RENAME TO `_measurements_new`;
CREATE TABLE `measurements` (
`measurement_id` INTEGER PRIMARY KEY AUTOINCREMENT,
`test_name` VARCHAR(64) NOT NULL,
`measurement_start_time` DATETIME NOT NULL,
`measurement_runtime` REAL NOT NULL,
`measurement_is_done` TINYINT(1) NOT NULL,
`measurement_is_uploaded` TINYINT(1) NOT NULL,
`measurement_is_failed` TINYINT(1) NOT NULL,
`measurement_failure_msg` VARCHAR(255),
`measurement_is_upload_failed` TINYINT(1) NOT NULL,
`measurement_upload_failure_msg` VARCHAR(255),
`measurement_is_rerun` TINYINT(1) NOT NULL,
`report_id` VARCHAR(255),
`url_id` INTEGER,
`collector_measurement_id` INT(64),
`is_anomaly` TINYINT(1),
`test_keys` JSON NOT NULL,
`result_id` INTEGER NOT NULL,
`report_file_path` VARCHAR(260) NOT NULL,
CONSTRAINT `fk_result_id`
FOREIGN KEY (`result_id`)
REFERENCES `results`(`result_id`)
ON DELETE CASCADE,
FOREIGN KEY (`url_id`) REFERENCES `urls`(`url_id`)
);
-- NOTE(review): the Up schema below makes `report_file_path` nullable; any row
-- holding NULL there would violate the NOT NULL constraint on downgrade.
INSERT INTO measurements (
`measurement_id`,
`test_name`,
`measurement_start_time`,
`measurement_runtime`,
`measurement_is_done`,
`measurement_is_uploaded`,
`measurement_is_failed`,
`measurement_failure_msg`,
`measurement_is_upload_failed`,
`measurement_upload_failure_msg`,
`measurement_is_rerun`,
`report_id`,
`url_id`,
`collector_measurement_id`,
`is_anomaly`,
`test_keys`,
`result_id`,
`report_file_path`
)
SELECT `measurement_id`,
`test_name`,
`measurement_start_time`,
`measurement_runtime`,
`measurement_is_done`,
`measurement_is_uploaded`,
`measurement_is_failed`,
`measurement_failure_msg`,
`measurement_is_upload_failed`,
`measurement_upload_failure_msg`,
`measurement_is_rerun`,
`report_id`,
`url_id`,
`collector_measurement_id`,
`is_anomaly`,
`test_keys`,
`result_id`,
`report_file_path`
FROM _measurements_new;
DROP TABLE _measurements_new;
PRAGMA foreign_keys=on;
-- +migrate StatementEnd
-- +migrate Up
-- +migrate StatementBegin
-- Disable FK enforcement while the table is swapped out; re-enabled below.
PRAGMA foreign_keys=off;
-- SQLite3's ALTER TABLE cannot change an existing column's constraints (here:
-- dropping NOT NULL from `report_file_path`), so we need
-- to re-create the table and copy the data over.
ALTER TABLE `measurements` RENAME TO `_measurements_old`;
CREATE TABLE `measurements` (
`measurement_id` INTEGER PRIMARY KEY AUTOINCREMENT,
`test_name` VARCHAR(64) NOT NULL,
`measurement_start_time` DATETIME NOT NULL,
`measurement_runtime` REAL NOT NULL,
`measurement_is_done` TINYINT(1) NOT NULL,
`measurement_is_uploaded` TINYINT(1) NOT NULL,
`measurement_is_failed` TINYINT(1) NOT NULL,
`measurement_failure_msg` VARCHAR(255),
`measurement_is_upload_failed` TINYINT(1) NOT NULL,
`measurement_upload_failure_msg` VARCHAR(255),
`measurement_is_rerun` TINYINT(1) NOT NULL,
`report_id` VARCHAR(255),
`url_id` INTEGER,
`collector_measurement_id` INT(64),
`is_anomaly` TINYINT(1),
`test_keys` JSON NOT NULL,
`result_id` INTEGER NOT NULL,
`report_file_path` VARCHAR(260),
`measurement_file_path` TEXT,
CONSTRAINT `fk_result_id`
FOREIGN KEY (`result_id`)
REFERENCES `results`(`result_id`)
ON DELETE CASCADE,
FOREIGN KEY (`url_id`) REFERENCES `urls`(`url_id`)
);
-- Copy every pre-existing row; `measurement_file_path` is filled with NULL
-- because measurements recorded before this migration have no per-measurement
-- file on disk.
INSERT INTO measurements (
`measurement_id`,
`test_name`,
`measurement_start_time`,
`measurement_runtime`,
`measurement_is_done`,
`measurement_is_uploaded`,
`measurement_is_failed`,
`measurement_failure_msg`,
`measurement_is_upload_failed`,
`measurement_upload_failure_msg`,
`measurement_is_rerun`,
`report_id`,
`url_id`,
`collector_measurement_id`,
`is_anomaly`,
`test_keys`,
`result_id`,
`report_file_path`,
`measurement_file_path`
)
SELECT `measurement_id`,
`test_name`,
`measurement_start_time`,
`measurement_runtime`,
`measurement_is_done`,
`measurement_is_uploaded`,
`measurement_is_failed`,
`measurement_failure_msg`,
`measurement_is_upload_failed`,
`measurement_upload_failure_msg`,
`measurement_is_rerun`,
`report_id`,
`url_id`,
`collector_measurement_id`,
`is_anomaly`,
`test_keys`,
`result_id`,
`report_file_path`,
NULL
FROM _measurements_old;
DROP TABLE _measurements_old;
PRAGMA foreign_keys=on;
-- +migrate StatementEnd

View File

@ -2,10 +2,10 @@
// sources:
// data/default-config.json
// data/migrations/1_create_msmt_results.sql
// data/migrations/2_single_msmt_file.sql
package bindata
import (
"bytes"
"compress/gzip"
@ -38,7 +38,6 @@ func bindataRead(data []byte, name string) ([]byte, error) {
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
@ -79,58 +78,37 @@ func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataDataDefaultconfigjson = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x94\xc1\x6e\xdb\x30\x0c\x86\xef\x79\x0a\x41\xe7\xba\x29\xb0\x5b\x8e" +
"\xbb\xed\xb0\x75\xc0\x76\x2b\x0a\x41\x96\x68\x9b\x98\x4c\x7a\x12\x95\x2c\x18\xfa\xee\x83\x5c\x27\x56\xba\xac\xcb" +
"\xd1\xff\x4f\x53\xe4\x47\x4a\xbf\x37\x4a\x69\xa3\x77\x4a\x7f\x1f\x30\x29\x4c\xea\xc8\x39\xaa\xc7\xc7\x2f\x9f\xd4" +
"\xd7\xc8\x2d\x28\xc7\xd4\x61\xaf\x3a\x0c\x70\xaf\xbe\x01\xa8\x41\x64\x4a\xbb\xed\x96\x99\xf0\x1e\x79\x3b\x40\x98" +
"\xb6\x53\x89\x6d\x5c\x40\xd5\x71\x54\x45\xd2\x77\x73\xea\x3d\xc4\x84\x4c\x7a\xa7\x1e\x5e\x05\xa4\x8e\xe3\x08\xde" +
"\x38\xa6\x04\x24\x7a\xa7\x3a\x1b\x12\x2c\x6e\x32\x2d\x88\xd5\x3b\x25\x31\xbf\x6a\x36\x0b\x9b\x3c\x79\x2b\x50\xcb" +
"\x69\xb0\x11\xa9\xd7\x3b\x55\x7a\x50\x4a\x23\xb9\x90\x3d\x18\x9c\xea\x94\x95\x61\x13\x55\x09\x2a\xc3\x71\x26\x89" +
"\xc7\xeb\x66\x3f\xa5\x4b\x23\x4f\x81\xad\x37\x11\x52\x0e\x72\xf2\x36\x4a\xbd\xcc\x65\x11\x0b\x76\xe8\xac\x20\x53" +
"\x5a\x8b\x03\xb2\x6d\x00\x7f\x99\x69\x8e\x3d\x1a\x26\x23\x90\xc4\x38\x1e\xa7\x00\xf2\x4a\xeb\x6a\x18\xc1\x21\x9d" +
"\x9a\x3b\x9f\x58\xf8\x8c\x56\xc0\xcf\x59\x2e\x90\xac\xa7\xd6\x3c\x16\x75\x0e\x2f\xe9\x9e\x66\x59\x29\x7d\x80\xb6" +
"\x71\x4c\x04\x4e\x70\x8f\x72\xd4\x77\x27\xa7\xb3\x0e\x5a\xe6\x1f\xcd\x08\x29\x01\xf5\x10\x57\xef\x30\x58\x49\x76" +
"\x9a\x56\x45\x20\x40\x1f\xed\xb8\x2a\xde\xa6\x61\xfd\x22\x2f\xeb\x47\x59\xa7\x06\x69\x6f\x03\xfa\x26\xc2\xcf\x0c" +
"\x49\x9a\x80\x04\x6f\x42\x06\xb0\x1e\x62\xd3\x21\x04\xdf\x8c\x96\x70\xca\x61\xa6\xac\xe7\xb0\xe7\xa5\xb9\x91\x49" +
"\x86\x70\x34\x36\x04\x3e\x58\x72\x65\x67\xf4\x87\x87\x87\xcf\x1f\xf5\x99\xd8\x4c\x3b\x81\x14\x58\xd5\x8c\x0e\xd0" +
"\x26\x14\x58\x95\x8a\x95\xb3\x02\x3d\x47\x9c\xdd\xa7\xe7\xd9\x7e\x39\x6f\x4a\x12\x4b\x62\x0a\x1b\xdb\xd7\x03\x78" +
"\x07\xf6\xfb\x50\xaf\x61\xad\xc1\x2e\xd2\x65\x1d\x13\xc4\x72\xb5\x96\xa6\x6f\xa9\xa0\x0c\xe2\x94\xaa\x9e\x8e\x49" +
"\x10\xf7\x10\x0b\xba\xb2\x5d\xfa\x8a\x67\x26\x8e\xf2\x77\x40\x19\xf4\x3f\xff\xae\xcc\xcb\xdf\x2f\xda\x18\xd1\xfb" +
"\x00\x2d\xff\xba\xb1\x89\xff\x2f\xd0\x8d\x2b\x74\xe6\xb9\x5e\x2d\xbf\x2f\x34\xfd\xba\x23\x39\x81\xf1\x3c\x5a\x24" +
"\xd3\x45\xa6\xe5\xba\xd5\xb7\x2b\x01\x79\xe3\x62\x69\x35\x42\xe9\xf2\xcd\xf3\xe1\x38\x04\x70\xc2\xd1\xe4\x18\x0a" +
"\x81\xa5\x4c\xdd\x72\x26\x07\x67\xf9\xf4\xcc\x2e\xf2\xfd\xf2\xdc\xce\x5b\xbc\x79\xd9\xfc\x09\x00\x00\xff\xff\xa1" +
"\x4d\x40\xc5\xb9\x05\x00\x00")
var _dataDefaultConfigJson = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x91\x41\x4f\xc3\x30\x0c\x85\xef\xfd\x15\x56\xce\xb0\xc2\xb5\xff\x80" +
"\x0b\x43\x82\x7b\x94\x25\xee\x6a\x29\xb3\x2b\x3b\x19\x9a\xd0\xfe\x3b\x4a\x57\x46\xe1\xfa\x3e\xdb\xef\x59\xef\xab" +
"\x03\x70\xde\x0d\xe0\x3e\x26\x32\x20\x83\x8b\x54\x85\xfd\xfe\xf5\x05\xde\x54\x0e\x08\x51\x78\xa4\x23\x8c\x94\x71" +
"\x07\xef\x88\x30\x95\x32\xdb\xd0\xf7\x22\x4c\x3b\x92\x7e\xc2\x3c\xf7\x73\x9b\x7d\x8c\x99\x60\x14\x85\x26\xb9\x87" +
"\xe5\xf4\x19\xd5\x48\xd8\x0d\xf0\x7c\x13\x88\x47\xd1\x13\x26\x1f\x85\x0d\xb9\xb8\x01\xc6\x90\x0d\x17\x6a\x53\x50" +
"\xe2\xa3\x1b\xa0\x05\x03\x70\xc4\x31\xd7\x84\x9e\xe6\xed\xdc\x06\x04\x6b\xb7\x8b\xd6\xff\x20\x4a\xe5\xa2\x97\xbf" +
"\xb0\xce\x59\x42\xf2\x8a\x56\x73\xb1\x95\x75\x00\xd7\xc5\x9d\xb1\x14\xb4\x45\x5f\xed\x3f\xf1\x60\x54\xd0\x7c\xd5" +
"\xec\x33\x9d\xa8\xc5\x7d\xba\x2f\x84\x74\x0e\x1c\x31\xfd\x2e\x54\x43\x9f\xe4\x14\x88\xfd\xa8\xc2\xe5\xf6\xcc\x36" +
"\xb8\x21\x27\x1f\x35\xd8\xe4\x15\x67\xd1\x7b\x8c\x95\x47\xc9\x19\x63\x11\x6d\x9e\xad\x18\xb7\x82\x83\x54\x8e\x78" +
"\x97\x7f\x6a\x58\xe5\xdd\x5a\x87\x6b\xe1\xba\x6b\xf7\x1d\x00\x00\xff\xff\x8e\xc0\xab\xe6\xd9\x01\x00\x00")
func bindataDataDefaultconfigjsonBytes() ([]byte, error) {
func dataDefaultConfigJsonBytes() ([]byte, error) {
return bindataRead(
_bindataDataDefaultconfigjson,
_dataDefaultConfigJson,
"data/default-config.json",
)
}
func bindataDataDefaultconfigjson() (*asset, error) {
bytes, err := bindataDataDefaultconfigjsonBytes()
func dataDefaultConfigJson() (*asset, error) {
bytes, err := dataDefaultConfigJsonBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "data/default-config.json",
size: 0,
md5checksum: "",
mode: os.FileMode(0),
modTime: time.Unix(0, 0),
}
info := bindataFileInfo{name: "data/default-config.json", size: 0, md5checksum: "", mode: os.FileMode(0), modTime: time.Unix(0, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
var _bindataDataMigrations1createmsmtresultssql = []byte(
var _dataMigrations1_create_msmt_resultsSql = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xa4\x59\x6d\x73\xdb\x36\x12\xfe\xee\x5f\xb1\xe3\xe9\xf4\xec\x39\x49\x76" +
"\x72\x69\xe6\xce\xd7\x4e\xc7\xb5\x99\x9c\xda\x58\xca\xc8\xf2\x35\x99\x9b\x1b\x11\x22\x97\x12\x2a\x10\x60\xf0\x22" +
"\x46\xf7\xeb\x6f\x16\x00\x29\x52\x56\x1c\x67\xda\x0f\xa9\x48\x02\x8b\x7d\x7d\xf6\x59\x78\x38\x84\xbf\x96\x7c\xa5" +
@ -228,40 +206,73 @@ var _bindataDataMigrations1createmsmtresultssql = []byte(
"\x7f\x10\x08\x32\x73\x9a\xdc\x6b\x14\x22\xf8\xa6\x6f\x4c\x6c\x24\xe7\x3d\x1b\xfc\xdf\xdb\xf6\xdf\x68\x00\xfe\xe2" +
"\x5f\xf5\xfe\x1f\x00\x00\xff\xff\x38\xc6\x64\x22\x78\x1c\x00\x00")
func bindataDataMigrations1createmsmtresultssqlBytes() ([]byte, error) {
func dataMigrations1_create_msmt_resultsSqlBytes() ([]byte, error) {
return bindataRead(
_bindataDataMigrations1createmsmtresultssql,
_dataMigrations1_create_msmt_resultsSql,
"data/migrations/1_create_msmt_results.sql",
)
}
func bindataDataMigrations1createmsmtresultssql() (*asset, error) {
bytes, err := bindataDataMigrations1createmsmtresultssqlBytes()
func dataMigrations1_create_msmt_resultsSql() (*asset, error) {
bytes, err := dataMigrations1_create_msmt_resultsSqlBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "data/migrations/1_create_msmt_results.sql",
size: 0,
md5checksum: "",
mode: os.FileMode(0),
modTime: time.Unix(0, 0),
}
info := bindataFileInfo{name: "data/migrations/1_create_msmt_results.sql", size: 0, md5checksum: "", mode: os.FileMode(0), modTime: time.Unix(0, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
var _dataMigrations2_single_msmt_fileSql = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x56\x4d\x6f\xdb\x38\x10\xbd\xf3\x57\xcc\xd1\xc6\x2a\x8b\xdd\xb6\xc9" +
"\xc5\xe8\x81\x91\x99\x54\xad\x4c\xa5\x14\x53\x34\x27\x89\xb5\x68\x47\x88\x4c\x0a\x24\xd5\x20\xff\xbe\x90\x3f\x6a" +
"\xda\x96\x8d\x3a\x28\x8a\x1e\x74\x9d\x8f\xc7\x21\xe7\x0d\xe7\x5d\x5c\xc0\x3f\x8b\x72\x6e\x84\x93\x30\xd6\xcf\x0a" +
"\xf9\x86\xd4\x09\x27\x17\x52\xb9\x6b\x39\x2f\x15\x42\x77\x0c\xdf\x4e\x30\xcc\xb4\x91\xe5\x5c\x65\x4f\xf2\xc5\xbe" +
"\xd7\xb3\xd9\x08\xe1\x98\x13\x06\x1c\x5f\xc7\x04\xf2\x85\x14\xb6\x31\xcb\x3c\x9b\x03\x23\x14\x4f\x08\xf0\x04\xf2" +
"\xcc\xf7\x64\x4a\x3e\xe7\x23\x84\x42\x46\x30\x27\xdd\xb9\x03\x04\x00\x3b\xc6\xac\x2c\x72\x88\x28\x27\xb7\x84\xc1" +
"\x1d\x8b\x26\x98\x3d\xc0\x27\xf2\x00\xf8\x9e\x27\x11\x0d\x19\x99\x10\xca\x83\x55\x9e\x93\xd6\x65\x4a\x2c\x64\x0e" +
"\x5f\x30\x0b\x3f\x60\x36\xb8\x7a\x37\x04\x9a\x70\xa0\xf7\x71\x1c\x1c\xa2\x5b\x27\x8c\xcb\x5c\xd9\xa6\x8c\x31\x27" +
"\x3c\x9a\x90\x53\xf1\xa6\x51\xab\x60\x46\x70\xec\x05\x76\xd4\x6d\xb3\x42\x2b\x99\x03\x8f\xe8\x43\x44\xf9\xe0\xff" +
"\x93\x85\x94\x36\x6b\xea\x4a\x8b\x42\x16\x67\xa4\xcc\x44\x59\xfd\x7a\x42\x1b\xdd\x18\x99\x2d\xec\x7c\xfb\x40\x6f" +
"\x2e\x2f\x87\xa7\xea\x39\xf3\x0c\x2f\xe9\x8c\xa3\x8c\x34\x8d\x3a\x75\x84\x91\xb5\x36\x2b\x32\x74\xa0\x35\xa6\xf2" +
"\x79\xb2\xb6\x4e\x75\x55\xc9\xa9\xd3\x26\xeb\xe0\x53\x4b\x8c\x75\x5c\x69\x33\xa1\xf4\x42\x54\x2f\x7e\x05\x3e\xa5" +
"\x5a\xde\xe7\xf0\x31\x4d\xe8\x61\x5d\xb6\xa9\x76\x49\xda\x5d\xf9\xac\xac\x64\x56\x0b\xf7\xe8\x5d\xe0\xea\xbf\xfd" +
"\x7b\x86\x09\x4d\x39\xc3\x11\xe5\x90\xcf\x9e\xb2\x2d\xfa\xd2\x0b\x70\x93\x30\x12\xdd\xd2\xe5\x00\x0c\xbc\xc3\x87" +
"\x6b\x3f\x23\x37\x84\x11\x1a\x92\x74\x53\x9a\xcd\xbb\xe2\x12\x0a\x63\x12\x13\x4e\x20\xc4\x69\x88\xc7\x64\x75\xfc" +
"\x2e\xfc\xfa\x55\x87\x3b\xa8\x8d\xa9\x5a\xc8\x8d\x0f\x0d\x47\x08\x45\x34\x25\x8c\xb7\x0f\x90\x80\x3f\xcf\x30\x40" +
"\xfb\xa3\x1c\x20\x6f\x48\x03\x74\x6c\x16\xf7\x3c\x9b\xa9\xdb\x33\x6f\x46\xec\xd0\xfc\x73\x94\x0e\x5d\x6b\x3a\xef" +
"\x39\x7c\xbe\x1e\x83\x3b\x92\xda\xc1\xf8\x43\x84\x15\xc1\x03\xe4\x11\x39\x40\x9b\x57\x0c\xd0\x71\xae\x06\xc8\xe7" +
"\xe7\xe6\xf9\x96\x84\x5c\xa2\x6d\x3a\xbb\x85\xde\x32\x0d\xb5\xed\x4e\x49\x4c\x42\x7e\xf0\xa7\xf6\x8d\xf8\x93\x8d" +
"\x00\xb8\x61\xc9\x04\x0e\xf6\xe1\x08\xa1\x31\x4b\xee\xd6\xcb\xb0\xcb\xdd\xb9\x81\xd5\x08\x75\xaf\x6d\xa2\x8a\x5d" +
"\xcf\x7d\xfd\xba\xfd\xde\x66\xa5\x9f\xe3\xd2\xc9\xb7\x50\x68\x69\x41\x69\x07\xb6\xa9\xdb\x8b\x81\x28\x8a\x52\xcd" +
"\x61\xaa\xab\x66\xa1\x2c\x68\x03\x85\xd1\x75\xbd\xb2\x29\xeb\x8c\x28\x95\xb3\x01\x58\x0d\xcf\x12\x94\x94\x45\x0b" +
"\xe7\x34\x18\x79\x31\x35\xb2\x2d\xc4\x3d\x4a\x70\xe2\x5b\x25\x41\xa8\x02\xa6\xba\x7e\x59\x9a\x0a\xe1\x04\xe8\xef" +
"\xd2\xfc\x8b\x5e\x25\x31\x74\x55\xf4\x12\xa3\x97\x18\xbd\xc4\xf0\x24\x46\x57\x97\xb6\x91\x9c\x7c\xe5\xbd\xf6\xe8" +
"\x57\xde\xef\x5e\x79\xfb\x77\xec\x45\xc9\xdf\xd7\xa1\xf6\x0b\xe9\x96\x26\xba\x2a\x4e\x49\x93\x95\xfb\x7c\x69\xf2" +
"\x23\x00\x00\xff\xff\xca\xeb\xb6\x24\x7c\x10\x00\x00")
func dataMigrations2_single_msmt_fileSqlBytes() ([]byte, error) {
return bindataRead(
_dataMigrations2_single_msmt_fileSql,
"data/migrations/2_single_msmt_file.sql",
)
}
func dataMigrations2_single_msmt_fileSql() (*asset, error) {
bytes, err := dataMigrations2_single_msmt_fileSqlBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{name: "data/migrations/2_single_msmt_file.sql", size: 0, md5checksum: "", mode: os.FileMode(0), modTime: time.Unix(0, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
@ -274,11 +285,9 @@ func Asset(name string) ([]byte, error) {
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
@ -288,10 +297,9 @@ func MustAsset(name string) []byte {
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
// It returns an error if the asset could not be found or
// could not be loaded.
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
@ -304,10 +312,8 @@ func AssetInfo(name string) (os.FileInfo, error) {
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
@ -316,15 +322,13 @@ func AssetNames() []string {
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"data/default-config.json": bindataDataDefaultconfigjson,
"data/migrations/1_create_msmt_results.sql": bindataDataMigrations1createmsmtresultssql,
"data/default-config.json": dataDefaultConfigJson,
"data/migrations/1_create_msmt_results.sql": dataMigrations1_create_msmt_resultsSql,
"data/migrations/2_single_msmt_file.sql": dataMigrations2_single_msmt_fileSql,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
@ -338,7 +342,6 @@ var _bindata = map[string]func() (*asset, error){
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
@ -347,20 +350,12 @@ func AssetDir(name string) ([]string, error) {
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
@ -369,17 +364,17 @@ func AssetDir(name string) ([]string, error) {
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"data": {Func: nil, Children: map[string]*bintree{
"default-config.json": {Func: bindataDataDefaultconfigjson, Children: map[string]*bintree{}},
"migrations": {Func: nil, Children: map[string]*bintree{
"1_create_msmt_results.sql": {Func: bindataDataMigrations1createmsmtresultssql, Children: map[string]*bintree{}},
var _bintree = &bintree{nil, map[string]*bintree{
"data": {nil, map[string]*bintree{
"default-config.json": {dataDefaultConfigJson, map[string]*bintree{}},
"migrations": {nil, map[string]*bintree{
"1_create_msmt_results.sql": {dataMigrations1_create_msmt_resultsSql, map[string]*bintree{}},
"2_single_msmt_file.sql": {dataMigrations2_single_msmt_fileSql, map[string]*bintree{}},
}},
}},
}}

View File

@ -1,12 +1,12 @@
package database
import (
"bufio"
"database/sql"
"encoding/json"
"io"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"time"
@ -55,11 +55,15 @@ func GetMeasurementJSON(sess sqlbuilder.Database, measurementID int64) (map[stri
log.Errorf("failed to run query %s: %v", req.String(), err)
return nil, err
}
reportFilePath := measurement.Measurement.ReportFilePath
// If the url->url is NULL then we are dealing with a single entry
// measurement and all we have to do is read the file and return it.
if measurement.URL.URL.Valid == false {
b, err := ioutil.ReadFile(reportFilePath)
// MeasurementFilePath might be NULL because the measurement is from a
// 3.0.0-beta install, which predates per-measurement files
if measurement.Measurement.MeasurementFilePath.Valid == false {
log.Error("invalid measurement_file_path")
log.Error("backup your OONI_HOME and run `ooniprobe reset`")
return nil, errors.New("cannot access measurement file")
}
measurementFilePath := measurement.Measurement.MeasurementFilePath.String
b, err := ioutil.ReadFile(measurementFilePath)
if err != nil {
return nil, err
}
@ -68,32 +72,6 @@ func GetMeasurementJSON(sess sqlbuilder.Database, measurementID int64) (map[stri
}
return msmtJSON, nil
}
// When the URL is a string then we need to seek until we reach the
// measurement line in the file that matches the target input
url := measurement.URL.URL.String
file, err := os.Open(reportFilePath)
if err != nil {
return nil, err
}
defer file.Close()
reader := bufio.NewReader(file)
for {
line, err := reader.ReadString('\n')
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
if err := json.Unmarshal([]byte(line), &msmtJSON); err != nil {
return nil, err
}
if msmtJSON["input"].(string) == url {
return msmtJSON, nil
}
}
return nil, errors.New("Could not find measurement")
}
// GetResultTestKeys returns a list of TestKeys for a given result
func GetResultTestKeys(sess sqlbuilder.Database, resultID int64) (string, error) {
@ -197,12 +175,13 @@ func DeleteResult(sess sqlbuilder.Database, resultID int64) error {
// CreateMeasurement writes the measurement to the database and returns a
// pointer to the Measurement
func CreateMeasurement(sess sqlbuilder.Database, reportID sql.NullString, testName string, resultID int64, reportFilePath string, urlID sql.NullInt64) (*Measurement, error) {
func CreateMeasurement(sess sqlbuilder.Database, reportID sql.NullString, testName string, measurementDir string, idx int, resultID int64, urlID sql.NullInt64) (*Measurement, error) {
msmtFilePath := filepath.Join(measurementDir, fmt.Sprintf("msmt-%d.json", idx))
msmt := Measurement{
ReportID: reportID,
TestName: testName,
ResultID: resultID,
ReportFilePath: reportFilePath,
MeasurementFilePath: sql.NullString{String: msmtFilePath, Valid: true},
URLID: urlID,
IsFailed: false,
IsDone: false,

View File

@ -79,10 +79,10 @@ func TestMeasurementWorkflow(t *testing.T) {
reportID := sql.NullString{String: "", Valid: false}
testName := "antani"
resultID := result.ID
reportFilePath := tmpdir
msmtFilePath := tmpdir
urlID := sql.NullInt64{Int64: 0, Valid: false}
m1, err := CreateMeasurement(sess, reportID, testName, resultID, reportFilePath, urlID)
m1, err := CreateMeasurement(sess, reportID, testName, msmtFilePath, 0, resultID, urlID)
if err != nil {
t.Fatal(err)
}
@ -153,10 +153,10 @@ func TestDeleteResult(t *testing.T) {
reportID := sql.NullString{String: "", Valid: false}
testName := "antani"
resultID := result.ID
reportFilePath := tmpdir
msmtFilePath := tmpdir
urlID := sql.NullInt64{Int64: 0, Valid: false}
m1, err := CreateMeasurement(sess, reportID, testName, resultID, reportFilePath, urlID)
m1, err := CreateMeasurement(sess, reportID, testName, msmtFilePath, 0, resultID, urlID)
if err != nil {
t.Fatal(err)
}

View File

@ -2,11 +2,8 @@ package database
import (
"database/sql"
"os"
"path/filepath"
"time"
"github.com/ooni/probe-cli/utils/shutil"
"github.com/pkg/errors"
"upper.io/db.v3/lib/sqlbuilder"
)
@ -64,7 +61,8 @@ type Measurement struct {
// FIXME we likely want to support JSON. See: https://github.com/upper/db/issues/462
TestKeys string `db:"test_keys"`
ResultID int64 `db:"result_id"`
ReportFilePath string `db:"report_file_path"`
ReportFilePath sql.NullString `db:"report_file_path,omitempty"`
MeasurementFilePath sql.NullString `db:"measurement_file_path,omitempty"`
}
// Result model
@ -150,32 +148,3 @@ func (m *Measurement) UploadSucceeded(sess sqlbuilder.Database) error {
}
return nil
}
// AddToResult adds a measurement to a result
func (m *Measurement) AddToResult(sess sqlbuilder.Database, result *Result) error {
var err error
m.ResultID = result.ID
finalPath := filepath.Join(result.MeasurementDir,
filepath.Base(m.ReportFilePath))
// If the finalPath already exists, it means it has already been moved there.
// This happens in multi input reports
if _, err = os.Stat(finalPath); os.IsNotExist(err) {
err := shutil.CopyFile(m.ReportFilePath, finalPath, false)
if err != nil {
return errors.Wrap(err, "copying report file")
}
err = os.Remove(m.ReportFilePath)
if err != nil {
return errors.Wrap(err, "deleting report file")
}
}
m.ReportFilePath = finalPath
err = sess.Collection("measurements").Find("measurement_id", m.ID).Update(m)
if err != nil {
return errors.Wrap(err, "updating measurement")
}
return nil
}

View File

@ -123,7 +123,7 @@ func logResultItem(w io.Writer, f log.Fields) error {
if index == totalCount-1 {
if isDone == true {
fmt.Fprintf(w, "└┬──────────────┬─────────────┬──────────────────┬┘\n")
fmt.Fprintf(w, "└┬──────────────┬───────────────────────────────┬┘\n")
} else {
// We want the incomplete section to not have a footer
fmt.Fprintf(w, "└──────────────────────────────────────────────────┘\n")
@ -144,12 +144,12 @@ func logResultSummary(w io.Writer, f log.Fields) error {
fmt.Fprintf(w, " ooni run websites\n")
return nil
}
// └┬──────────────┬─────────────┬──────────────┬
// └┬──────────────┬───────────────────────────┬
fmt.Fprintf(w, " │ %s │ %s │ %s │\n",
utils.RightPad(fmt.Sprintf("%d tests", tests), 12),
utils.RightPad(fmt.Sprintf("%d nets", networks), 12),
utils.RightPad(fmt.Sprintf("%d nets", networks), 11),
utils.RightPad(fmt.Sprintf("⬆ %s ⬇ %s", formatSize(dataUp), formatSize(dataDown)), 16))
fmt.Fprintf(w, " └──────────────┴─────────────┴──────────────────┘\n")
fmt.Fprintf(w, " └──────────────┴───────────────────────────────┘\n")
return nil
}

View File

@ -82,7 +82,8 @@ func MeasurementItem(msmt database.MeasurementURLNetwork, isFirst bool, isLast b
"is_failed": msmt.IsFailed,
"failure_msg": msmt.FailureMsg.String,
"is_done": msmt.Measurement.IsDone,
"report_file_path": msmt.ReportFilePath,
"report_file_path": msmt.ReportFilePath.String,
"measurement_file_path": msmt.MeasurementFilePath.String,
}).Info("measurement")
}

View File

@ -3,7 +3,6 @@ package nettests
import (
"database/sql"
"fmt"
"path/filepath"
"time"
"github.com/apex/log"
@ -11,7 +10,6 @@ import (
ooni "github.com/ooni/probe-cli"
"github.com/ooni/probe-cli/internal/database"
"github.com/ooni/probe-cli/internal/output"
"github.com/ooni/probe-cli/utils"
engine "github.com/ooni/probe-engine"
"github.com/pkg/errors"
)
@ -25,14 +23,10 @@ type Nettest interface {
// NewController creates a nettest controller
func NewController(nt Nettest, ctx *ooni.Context, res *database.Result) *Controller {
msmtPath := filepath.Join(ctx.TempDir,
fmt.Sprintf("msmt-%T-%s.jsonl", nt,
time.Now().UTC().Format(utils.ResultTimestamp)))
return &Controller{
Ctx: ctx,
nt: nt,
res: res,
msmtPath: msmtPath,
}
}
@ -46,7 +40,6 @@ type Controller struct {
ntIndex int
ntStartTime time.Time // used to calculate the eta
msmts map[int64]*database.Measurement
msmtPath string // XXX maybe we can drop this and just use a temporary file
inputIdxMap map[int64]int64 // Used to map mk idx to database id
// numInputs is the total number of inputs
@ -91,7 +84,6 @@ func (c *Controller) Run(builder *engine.ExperimentBuilder, inputs []string) err
log.Debug(color.RedString("status.queued"))
log.Debug(color.RedString("status.started"))
log.Debugf("OutputPath: %s", c.msmtPath)
if c.Ctx.Config.Sharing.UploadResults {
if err := exp.OpenReport(); err != nil {
@ -118,8 +110,9 @@ func (c *Controller) Run(builder *engine.ExperimentBuilder, inputs []string) err
if c.inputIdxMap != nil {
urlID = sql.NullInt64{Int64: c.inputIdxMap[idx64], Valid: true}
}
msmt, err := database.CreateMeasurement(
c.Ctx.DB, reportID, exp.Name(), resultID, c.msmtPath, urlID,
c.Ctx.DB, reportID, exp.Name(), c.res.MeasurementDir, idx, resultID, urlID,
)
if err != nil {
return errors.Wrap(err, "failed to create measurement")
@ -149,7 +142,7 @@ func (c *Controller) Run(builder *engine.ExperimentBuilder, inputs []string) err
}
}
if err := exp.SaveMeasurement(measurement, c.msmtPath); err != nil {
if err := exp.SaveMeasurement(measurement, msmt.MeasurementFilePath.String); err != nil {
return errors.Wrap(err, "failed to save measurement on disk")
}
if err := c.msmts[idx64].Done(c.Ctx.DB); err != nil {
@ -178,12 +171,6 @@ func (c *Controller) Run(builder *engine.ExperimentBuilder, inputs []string) err
}
log.Debugf("status.end")
for idx, msmt := range c.msmts {
log.Debugf("adding msmt#%d to result", idx)
if err := msmt.AddToResult(c.Ctx.DB, c.res); err != nil {
return errors.Wrap(err, "failed to add to result")
}
}
return nil
}