ooni-probe-cli/internal/engine/ooapi/caching.go
Simone Basso 55bdebe8b2
engine/ooapi: autogenerated API with login and caching (#234)
* internal/engine/ooapi: auto-generated API client

* feat: introduce the callers abstraction

* feat: implement API caching on disk

* feat: implement cloneWithToken when we require login

* feat: implement login

* fix: do not cache all APIs

* feat: start making space for more tests

* feat: implement caching policy

* feat: write tests for caching layer

* feat: add integration tests and fix some minor issues

* feat: write much more unit tests

* feat: add some more easy unit tests

* feat: add tests that use a local server

While there, make sure many fields we care about are OK.

* doc: write basic documentation

* fix: tweak sentence

* doc: improve ooapi documentation

* doc(ooapi): other documentation improvements

* fix(ooapi): remove caching for most APIs

We discussed this topic yesterday with @FedericoCeratto. The only
place where we want LRU caching is MeasurementMeta (see the wiring
sketch after this commit log).

* feat(ooapi): improve handling of errors during login

This was also discussed yesterday with @FedericoCeratto.

* fix(swaggerdiff_test.go): temporarily disable

Before I work on this, I need to tend to other tasks.

* fix(ootest): add one more test case

We're going towards 100% coverage of this package, as it ought to be.

* feat(ooapi): test cases for when the probe clock is off

* fix(ooapi): change test to have 100% unittest coverage

* feat: sync server and client APIs definition

Companion PR: https://github.com/ooni/api/pull/218

* fix(ooapi): start testing again against API

* fix(ooapi): only generate each file once

* chore: set version to 3.7.0-alpha

While there, make sure we don't always skip a currently failing
riseupvpn test, and slightly clarify the readme.

* fix(kvstore): less scoped error message
2021-03-04 11:51:07 +01:00
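As mentioned in the commit log, MeasurementMeta is the only API that gets a cache. A minimal wiring sketch follows, using names as defined in this package; the store, ctx, and reportID values are assumptions for illustration, and the zero-value MeasurementMetaAPI relies on the package defaults (also an assumption here), so this is a sketch rather than the definitive setup:

	// Sketch: put the cache in front of the generated caller.
	// store is any KVStore implementation (assumed, not shown here).
	meta := &MeasurementMetaCache{
		API:     &MeasurementMetaAPI{}, // the generated caller this cache wraps
		KVStore: store,
	}
	resp, err := meta.Call(ctx, &apimodel.MeasurementMetaRequest{ReportID: reportID})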


// Code generated by go generate; DO NOT EDIT.
// 2021-02-26 15:45:51.194159684 +0100 CET m=+0.000175181

package ooapi

//go:generate go run ./internal/generator -file caching.go

import (
	"context"
	"reflect"

	"github.com/ooni/probe-cli/v3/internal/engine/ooapi/apimodel"
)

// MeasurementMetaCache implements caching for MeasurementMetaAPI.
type MeasurementMetaCache struct {
	API      MeasurementMetaCaller // mandatory
	GobCodec GobCodec              // optional
	KVStore  KVStore               // mandatory
}

// cacheEntryForMeasurementMeta is a single cached request/response pair.
type cacheEntryForMeasurementMeta struct {
	Req  *apimodel.MeasurementMetaRequest
	Resp *apimodel.MeasurementMetaResponse
}

// Call calls the API and implements caching.
func (c *MeasurementMetaCache) Call(ctx context.Context, req *apimodel.MeasurementMetaRequest) (*apimodel.MeasurementMetaResponse, error) {
	if resp, _ := c.readcache(req); resp != nil {
		return resp, nil
	}
	resp, err := c.API.Call(ctx, req)
	if err != nil {
		return nil, err
	}
	if err := c.writecache(req, resp); err != nil {
		return nil, err
	}
	return resp, nil
}

// gobCodec returns the configured GobCodec or the default one.
func (c *MeasurementMetaCache) gobCodec() GobCodec {
	if c.GobCodec != nil {
		return c.GobCodec
	}
	return &defaultGobCodec{}
}

// getcache reads and decodes the cache entries from the key-value store.
func (c *MeasurementMetaCache) getcache() ([]cacheEntryForMeasurementMeta, error) {
	data, err := c.KVStore.Get("MeasurementMeta.cache")
	if err != nil {
		return nil, err
	}
	var out []cacheEntryForMeasurementMeta
	if err := c.gobCodec().Decode(data, &out); err != nil {
		return nil, err
	}
	return out, nil
}

// setcache encodes the cache entries and writes them into the key-value store.
func (c *MeasurementMetaCache) setcache(in []cacheEntryForMeasurementMeta) error {
	data, err := c.gobCodec().Encode(in)
	if err != nil {
		return err
	}
	return c.KVStore.Set("MeasurementMeta.cache", data)
}

// readcache returns the cached response for req, if any.
func (c *MeasurementMetaCache) readcache(req *apimodel.MeasurementMetaRequest) (*apimodel.MeasurementMetaResponse, error) {
	cache, err := c.getcache()
	if err != nil {
		return nil, err
	}
	for _, cur := range cache {
		if reflect.DeepEqual(req, cur.Req) {
			return cur.Resp, nil
		}
	}
	return nil, errCacheNotFound
}

// writecache stores the req/resp pair as the newest cache entry and
// prunes the oldest entries so the cache stays reasonably small.
func (c *MeasurementMetaCache) writecache(req *apimodel.MeasurementMetaRequest, resp *apimodel.MeasurementMetaResponse) error {
	cache, _ := c.getcache()
	out := []cacheEntryForMeasurementMeta{{Req: req, Resp: resp}}
	const toomany = 64
	for idx, cur := range cache {
		if reflect.DeepEqual(req, cur.Req) {
			continue // we already updated the cache
		}
		if idx > toomany {
			break // drop the oldest entries
		}
		out = append(out, cur)
	}
	return c.setcache(out)
}

var _ MeasurementMetaCaller = &MeasurementMetaCache{}
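
For completeness, a hedged usage sketch of the cache above. The in-memory key-value store and the fake caller are invented for this example and are not part of the package; only MeasurementMetaCache, the KVStore and MeasurementMetaCaller interfaces, and the apimodel types come from the code above, and the ReportID field name is assumed from the apimodel package. The sketch shows that a second Call with an equal request is served from the key-value store rather than from the underlying caller.

	// Usage sketch (as if in package ooapi, plus the "errors" and "fmt" imports).
	// memKVStore and fakeCaller exist only for this example.

	type memKVStore struct{ m map[string][]byte }

	func (s *memKVStore) Get(key string) ([]byte, error) {
		if value, found := s.m[key]; found {
			return value, nil
		}
		return nil, errors.New("memKVStore: no such key")
	}

	func (s *memKVStore) Set(key string, value []byte) error {
		s.m[key] = value
		return nil
	}

	type fakeCaller struct {
		calls int
		resp  *apimodel.MeasurementMetaResponse
	}

	func (fc *fakeCaller) Call(ctx context.Context, req *apimodel.MeasurementMetaRequest) (*apimodel.MeasurementMetaResponse, error) {
		fc.calls++ // count how often the underlying API is used
		return fc.resp, nil
	}

	func sketchCacheUsage() {
		fc := &fakeCaller{resp: &apimodel.MeasurementMetaResponse{}}
		cache := &MeasurementMetaCache{
			API:     fc,
			KVStore: &memKVStore{m: map[string][]byte{}},
		}
		req := &apimodel.MeasurementMetaRequest{ReportID: "EXAMPLE_REPORT_ID"}
		ctx := context.Background()
		if _, err := cache.Call(ctx, req); err != nil { // first call: cache miss
			panic(err)
		}
		if _, err := cache.Call(ctx, req); err != nil { // second call: cache hit
			panic(err)
		}
		fmt.Printf("underlying API calls: %d\n", fc.calls) // prints: underlying API calls: 1
	}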