From 52cae8e12eb12f005b4e3cfb96ade0a476adcf9c Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 25 Jun 2019 09:36:12 +0100 Subject: [PATCH 01/70] have vent use burrow's logging Signed-off-by: Gregory Hill --- cmd/burrow/commands/vent.go | 16 ++- logging/logger.go | 3 +- vent/config/config.go | 1 - vent/logger/logger.go | 65 --------- vent/service/consumer.go | 45 +++---- vent/service/consumer_test.go | 8 +- vent/service/rowbuilder.go | 12 +- vent/service/rowbuilder_test.go | 4 +- vent/service/server.go | 12 +- vent/service/server_test.go | 5 +- vent/sqldb/adapters/db_adapter.go | 3 +- vent/sqldb/adapters/postgres_adapter.go | 20 +-- vent/sqldb/adapters/sqlite_adapter.go | 8 +- .../sqldb/adapters/sqlite_adapter_nosqlite.go | 6 +- vent/sqldb/sqldb.go | 126 +++++++++--------- vent/sqldb/sqldb_postgres_test.go | 6 +- vent/sqldb/utils.go | 62 ++++----- vent/test/db.go | 16 +-- vent/types/sql_utils.go | 4 +- 19 files changed, 179 insertions(+), 243 deletions(-) delete mode 100644 vent/logger/logger.go diff --git a/cmd/burrow/commands/vent.go b/cmd/burrow/commands/vent.go index c463ca6f4..5dda1038f 100644 --- a/cmd/burrow/commands/vent.go +++ b/cmd/burrow/commands/vent.go @@ -8,10 +8,11 @@ import ( "syscall" "time" + "github.com/hyperledger/burrow/logging/lifecycle" + "github.com/hyperledger/burrow/config/source" "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" "github.com/hyperledger/burrow/vent/service" "github.com/hyperledger/burrow/vent/sqldb" "github.com/hyperledger/burrow/vent/sqlsol" @@ -68,7 +69,12 @@ func Vent(output Output) func(cmd *cli.Cmd) { "[--blocks] [--txs] [--grpc-addr] [--http-addr] [--log-level] [--announce-every=]" cmd.Action = func() { - log := logger.NewLogger(cfg.LogLevel) + log, err := lifecycle.NewStdErrLogger() + if err != nil { + output.Fatalf("failed to load logger: %v", err) + } + + log = log.With("service", "vent") consumer := service.NewConsumer(cfg, log, make(chan types.EventData)) server := service.NewServer(cfg, log, consumer) @@ -155,11 +161,15 @@ func Vent(output Output) func(cmd *cli.Cmd) { } cmd.Action = func() { + log, err := lifecycle.NewStdErrLogger() + if err != nil { + output.Fatalf("failed to load logger: %v", err) + } db, err := sqldb.NewSQLDB(types.SQLConnection{ DBAdapter: *dbOpts.adapter, DBURL: *dbOpts.url, DBSchema: *dbOpts.schema, - Log: logger.NewLogger("debug"), + Log: log.With("service", "vent"), }) if err != nil { output.Fatalf("Could not connect to SQL DB: %v", err) diff --git a/logging/logger.go b/logging/logger.go index a88b8499a..024aa17b4 100644 --- a/logging/logger.go +++ b/logging/logger.go @@ -47,6 +47,7 @@ func NewLogger(outputLogger log.Logger) *Logger { // long will start dropping log lines by using a ring buffer. swapLogger := new(log.SwapLogger) swapLogger.Swap(outputLogger) + return &Logger{ Output: swapLogger, // logging contexts @@ -129,7 +130,7 @@ func (l *Logger) SwapOutput(infoLogger log.Logger) { l.Output.Swap(infoLogger) } -// Record structured Info lo`g line with a message +// Record structured Info log line with a message func (l *Logger) InfoMsg(message string, keyvals ...interface{}) error { return Msg(l.Info, message, keyvals...) 
} diff --git a/vent/config/config.go b/vent/config/config.go index dc92fd79f..f99b967d5 100644 --- a/vent/config/config.go +++ b/vent/config/config.go @@ -4,7 +4,6 @@ import ( "time" "github.com/hyperledger/burrow/vent/sqlsol" - "github.com/hyperledger/burrow/vent/types" ) diff --git a/vent/logger/logger.go b/vent/logger/logger.go deleted file mode 100644 index a7a7b1cf1..000000000 --- a/vent/logger/logger.go +++ /dev/null @@ -1,65 +0,0 @@ -package logger - -import ( - "os" - - kitlog "github.com/go-kit/kit/log" - kitlevel "github.com/go-kit/kit/log/level" -) - -// Logger wraps a go-kit logger -type Logger struct { - Log kitlog.Logger -} - -// NewLogger creates a new logger based on the given level -func NewLogger(level string) *Logger { - log := kitlog.NewJSONLogger(kitlog.NewSyncWriter(os.Stdout)) - switch level { - case "error": - log = kitlevel.NewFilter(log, kitlevel.AllowError()) // only error logs - case "warn": - log = kitlevel.NewFilter(log, kitlevel.AllowWarn()) // warn + error logs - case "info": - log = kitlevel.NewFilter(log, kitlevel.AllowInfo()) // info + warn + error logs - case "debug": - log = kitlevel.NewFilter(log, kitlevel.AllowDebug()) // all logs - default: - log = kitlevel.NewFilter(log, kitlevel.AllowNone()) // no logs - } - - log = kitlog.With(log, "service", "vent") - log = kitlog.With(log, "ts", kitlog.DefaultTimestampUTC) - log = kitlog.With(log, "caller", kitlog.Caller(4)) - - return &Logger{ - Log: log, - } -} - -// NewLoggerFromKitlog creates a logger from a go-kit logger -func NewLoggerFromKitlog(log kitlog.Logger) *Logger { - return &Logger{ - Log: log, - } -} - -// Error prints an error log -func (l *Logger) Error(args ...interface{}) { - kitlevel.Error(l.Log).Log(args...) -} - -// Warn prints a warning log -func (l *Logger) Warn(args ...interface{}) { - kitlevel.Warn(l.Log).Log(args...) -} - -// Info prints an information log -func (l *Logger) Info(args ...interface{}) { - kitlevel.Info(l.Log).Log(args...) -} - -// Debug prints a debug log -func (l *Logger) Debug(args ...interface{}) { - kitlevel.Debug(l.Log).Log(args...) -} diff --git a/vent/service/consumer.go b/vent/service/consumer.go index abed5a7a2..d1282929b 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -8,13 +8,12 @@ import ( "github.com/hyperledger/burrow/rpc" - "github.com/hyperledger/burrow/execution/exec" - "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/rpc/rpcevents" "github.com/hyperledger/burrow/rpc/rpcquery" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" "github.com/hyperledger/burrow/vent/sqldb" "github.com/hyperledger/burrow/vent/sqlsol" "github.com/hyperledger/burrow/vent/types" @@ -26,7 +25,7 @@ import ( // Consumer contains basic configuration for consumer to run type Consumer struct { Config *config.VentConfig - Log *logger.Logger + Log *logging.Logger Closing bool DB *sqldb.SQLDB GRPCConnection *grpc.ClientConn @@ -44,7 +43,7 @@ type Status struct { // NewConsumer constructs a new consumer configuration. 
// The event channel will be passed a collection of rows generated from all of the events in a single block // It will be closed by the consumer when it is finished -func NewConsumer(cfg *config.VentConfig, log *logger.Logger, eventChannel chan types.EventData) *Consumer { +func NewConsumer(cfg *config.VentConfig, log *logging.Logger, eventChannel chan types.EventData) *Consumer { return &Consumer{ Config: cfg, Log: log, @@ -59,7 +58,7 @@ func NewConsumer(cfg *config.VentConfig, log *logger.Logger, eventChannel chan t func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stream bool) error { var err error - c.Log.Info("msg", "Connecting to Burrow gRPC server") + c.Log.InfoMsg("Connecting to Burrow gRPC server") c.GRPCConnection, err = grpc.Dial(c.Config.GRPCAddr, grpc.WithInsecure()) if err != nil { @@ -76,11 +75,11 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre } if len(projection.EventSpec) == 0 { - c.Log.Info("msg", "No events specifications found") + c.Log.InfoMsg("No events specifications found") return nil } - c.Log.Info("msg", "Connecting to SQL database") + c.Log.InfoMsg("Connecting to SQL database") connection := types.SQLConnection{ DBAdapter: c.Config.DBAdapter, @@ -100,7 +99,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre return fmt.Errorf("could not clean tables after ChainID change: %v", err) } - c.Log.Info("msg", "Synchronizing config and database projection structures") + c.Log.InfoMsg("Synchronizing config and database projection structures") err = c.DB.SynchronizeDB(c.Burrow.ChainID, projection.Tables) if err != nil { @@ -119,7 +118,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre }() go c.announceEvery(doneCh) - c.Log.Info("msg", "Getting last processed block number from SQL log table") + c.Log.InfoMsg("Getting last processed block number from SQL log table") // NOTE [Silas]: I am preserving the comment below that dates from the early days of Vent. 
I have looked at the // bosmarmot git history and I cannot see why the original author thought that it was the case that there was @@ -166,16 +165,16 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre // get blocks - c.Log.Debug("msg", "Waiting for blocks...") + c.Log.TraceMsg("Waiting for blocks...") err = rpcevents.ConsumeBlockExecutions(stream, c.makeBlockConsumer(projection, abiSpec, eventCh)) if err != nil { if err == io.EOF { - c.Log.Info("msg", "EOF stream received...") + c.Log.InfoMsg("EOF stream received...") } else { if c.Closing { - c.Log.Debug("msg", "GRPC connection closed") + c.Log.TraceMsg("GRPC connection closed") } else { errCh <- errors.Wrapf(err, "Error receiving blocks") return @@ -190,7 +189,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre case blk := <-eventCh: err := c.commitBlock(projection, blk) if err != nil { - c.Log.Info("msg", "error committing block", "err", err) + c.Log.InfoMsg("error committing block", "err", err) return err } @@ -200,12 +199,12 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre // Select possible error case err := <-errCh: - c.Log.Info("msg", "finished with error", "err", err) + c.Log.InfoMsg("finished with error", "err", err) return err // Or fallback to success default: - c.Log.Info("msg", "finished successfully") + c.Log.InfoMsg("finished successfully") return nil } } @@ -227,7 +226,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi c.Status.LastProcessedHeight = fromBlock }() - c.Log.Debug("msg", "Block received", "height", blockExecution.Height, "num_txs", len(blockExecution.TxExecutions)) + c.Log.TraceMsg("Block received", "height", blockExecution.Height, "num_txs", len(blockExecution.TxExecutions)) // create a fresh new structure to store block data at this height blockData := sqlsol.NewBlockData(fromBlock) @@ -243,7 +242,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi // get transactions for a given block for _, txe := range blockExecution.TxExecutions { - c.Log.Debug("msg", "Getting transaction", "TxHash", txe.TxHash, "num_events", len(txe.Events)) + c.Log.TraceMsg("Getting transaction", "TxHash", txe.TxHash, "num_events", len(txe.Events)) if c.Config.SpecOpt&sqlsol.Tx > 0 { txRawData, err := buildTxData(txe) @@ -282,7 +281,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi // there's a matching filter, add data to the rows if qry.Matches(taggedEvent) { - c.Log.Info("msg", fmt.Sprintf("Matched event header: %v", event.Header), + c.Log.InfoMsg(fmt.Sprintf("Matched event header: %v", event.Header), "filter", eventClass.Filter) // unpack, decode & build event data @@ -305,7 +304,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi // gets block data to upsert blk := blockData.Data - c.Log.Info("msg", fmt.Sprintf("Upserting rows in SQL tables %v", blk), "block", fromBlock) + c.Log.InfoMsg(fmt.Sprintf("Upserting rows in SQL tables %v", blk), "block", fromBlock) eventCh <- blk } @@ -356,7 +355,7 @@ func (c *Consumer) Health() error { // Shutdown gracefully shuts down the events consumer func (c *Consumer) Shutdown() { - c.Log.Info("msg", "Shutting down vent consumer...") + c.Log.InfoMsg("Shutting down vent consumer...") c.Closing = true c.GRPCConnection.Close() } @@ -364,7 +363,7 @@ func (c *Consumer) Shutdown() { func (c *Consumer) updateStatus(qcli rpcquery.QueryClient) { stat, err := 
qcli.Status(context.Background(), &rpcquery.StatusParam{}) if err != nil { - c.Log.Error("msg", "could not get blockchain status", "err", err) + c.Log.InfoMsg("could not get blockchain status", "err", err) return } c.Status.Burrow = stat @@ -398,7 +397,7 @@ func (c *Consumer) announceEvery(doneCh <-chan struct{}) { select { case <-ticker.C: c.updateStatus(qcli) - c.Log.Info(c.statusMessage()...) + c.Log.InfoMsg("Announcement", c.statusMessage()...) case <-doneCh: ticker.Stop() return diff --git a/vent/service/consumer_test.go b/vent/service/consumer_test.go index dbdcf6443..93ccf75ca 100644 --- a/vent/service/consumer_test.go +++ b/vent/service/consumer_test.go @@ -10,11 +10,10 @@ import ( "time" "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/rpc/rpctransact" - "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/rpc/rpctransact" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" "github.com/hyperledger/burrow/vent/service" "github.com/hyperledger/burrow/vent/sqlsol" "github.com/hyperledger/burrow/vent/test" @@ -194,9 +193,8 @@ func newConsumer(t *testing.T, cfg *config.VentConfig) *service.Consumer { cfg.AbiFileOrDirs = []string{path.Join(testDir, "EventsTest.abi")} cfg.SpecOpt = sqlsol.BlockTx - log := logger.NewLogger(cfg.LogLevel) ch := make(chan types.EventData, 100) - return service.NewConsumer(cfg, log, ch) + return service.NewConsumer(cfg, logging.NewNoopLogger(), ch) } // Run consumer to listen to events diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index 72b8a255d..3c0b726a4 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -9,7 +9,7 @@ import ( "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/execution/exec" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/sqlsol" "github.com/hyperledger/burrow/vent/types" "github.com/pkg/errors" @@ -17,7 +17,7 @@ import ( // buildEventData builds event data from transactions func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiSpec *abi.AbiSpec, - l *logger.Logger) (types.EventDataRow, error) { + l *logging.Logger) (types.EventDataRow, error) { // a fresh new row to store column/value data row := make(map[string]interface{}) @@ -32,7 +32,7 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, return types.EventDataRow{}, errors.Wrapf(err, "Error decoding event (filter: %s)", eventClass.Filter) } - l.Info("msg", fmt.Sprintf("Unpacked data: %v", decodedData), "eventName", decodedData[types.EventNameLabel]) + l.InfoMsg(fmt.Sprintf("Unpacked data: %v", decodedData), "eventName", decodedData[types.EventNameLabel]) rowAction := types.ActionUpsert @@ -59,7 +59,7 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, } row[column.Name] = value } else { - l.Debug("msg", "could not get column", "err", err) + l.TraceMsg("could not get column", "err", err) } } @@ -137,10 +137,10 @@ func buildTxData(txe *exec.TxExecution) (types.EventDataRow, error) { }, nil } -func sanitiseBytesForString(bs []byte, l *logger.Logger) string { +func sanitiseBytesForString(bs []byte, l *logging.Logger) string { str, err := UTF8StringFromBytes(bs) if err != nil { - l.Error("msg", "buildEventData() received invalid bytes for utf8 string - proceeding with sanitised 
version", + l.InfoMsg("buildEventData() received invalid bytes for utf8 string - proceeding with sanitised version", "err", err) } // The only null bytes in utf8 are for the null code point/character so this is fine in general diff --git a/vent/service/rowbuilder_test.go b/vent/service/rowbuilder_test.go index 14ef1c32a..f7cb7e78b 100644 --- a/vent/service/rowbuilder_test.go +++ b/vent/service/rowbuilder_test.go @@ -5,7 +5,7 @@ import ( "fmt" "testing" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/stretchr/testify/assert" hex "github.com/tmthrgd/go-hex" ) @@ -30,7 +30,7 @@ func TestUTF8StringFromBytes(t *testing.T) { func TestSanitiseBytesForString(t *testing.T) { goodString := "Cliente - Doc. identificación" badString := BadStringToHexFunction(goodString) - str := sanitiseBytesForString([]byte(badString), logger.NewLogger("error")) + str := sanitiseBytesForString([]byte(badString), logging.NewNoopLogger()) assert.Equal(t, "Cliente - Doc. identificaci�n", str) } diff --git a/vent/service/server.go b/vent/service/server.go index a9a0ea1f7..2bc41bc45 100644 --- a/vent/service/server.go +++ b/vent/service/server.go @@ -4,21 +4,21 @@ import ( "context" "net/http" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" ) // Server exposes HTTP endpoints for the service type Server struct { Config *config.VentConfig - Log *logger.Logger + Log *logging.Logger Consumer *Consumer mux *http.ServeMux stopCh chan bool } // NewServer returns a new HTTP server -func NewServer(cfg *config.VentConfig, log *logger.Logger, consumer *Consumer) *Server { +func NewServer(cfg *config.VentConfig, log *logging.Logger, consumer *Consumer) *Server { // setup handlers mux := http.NewServeMux() @@ -35,20 +35,20 @@ func NewServer(cfg *config.VentConfig, log *logger.Logger, consumer *Consumer) * // Run starts the HTTP server func (s *Server) Run() { - s.Log.Info("msg", "Starting HTTP Server") + s.Log.InfoMsg("Starting HTTP Server") // start http server httpServer := &http.Server{Addr: s.Config.HTTPAddr, Handler: s} go func() { - s.Log.Info("msg", "HTTP Server listening", "address", s.Config.HTTPAddr) + s.Log.InfoMsg("HTTP Server listening", "address", s.Config.HTTPAddr) httpServer.ListenAndServe() }() // wait for stop signal <-s.stopCh - s.Log.Info("msg", "Shutting down HTTP Server...") + s.Log.InfoMsg("Shutting down HTTP Server...") httpServer.Shutdown(context.Background()) } diff --git a/vent/service/server_test.go b/vent/service/server_test.go index 4c914ea23..d7ee91b86 100644 --- a/vent/service/server_test.go +++ b/vent/service/server_test.go @@ -14,8 +14,8 @@ import ( "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" "github.com/hyperledger/burrow/vent/service" "github.com/hyperledger/burrow/vent/sqlsol" "github.com/hyperledger/burrow/vent/test" @@ -41,9 +41,8 @@ func TestServer(t *testing.T) { cfg.AbiFileOrDirs = []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/EventsTest.abi"} cfg.GRPCAddr = kern.GRPCListenAddress().String() - log := logger.NewLogger(cfg.LogLevel) + log := logging.NewNoopLogger() consumer := service.NewConsumer(cfg, log, make(chan types.EventData)) - projection, err := sqlsol.SpecLoader(cfg.SpecFileOrDirs, 
sqlsol.None) abiSpec, err := abi.LoadPath(cfg.AbiFileOrDirs...) diff --git a/vent/sqldb/adapters/db_adapter.go b/vent/sqldb/adapters/db_adapter.go index 057174974..b366f04f1 100644 --- a/vent/sqldb/adapters/db_adapter.go +++ b/vent/sqldb/adapters/db_adapter.go @@ -4,9 +4,8 @@ import ( "fmt" "strings" - "github.com/jmoiron/sqlx" - "github.com/hyperledger/burrow/vent/types" + "github.com/jmoiron/sqlx" ) // DBAdapter implements database dependent interface diff --git a/vent/sqldb/adapters/postgres_adapter.go b/vent/sqldb/adapters/postgres_adapter.go index a0e99a6f7..a2a8065ae 100644 --- a/vent/sqldb/adapters/postgres_adapter.go +++ b/vent/sqldb/adapters/postgres_adapter.go @@ -6,7 +6,7 @@ import ( "github.com/lib/pq" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/types" "github.com/jmoiron/sqlx" "github.com/prometheus/common/log" @@ -29,13 +29,13 @@ var pgDataTypes = map[types.SQLColumnType]string{ type PostgresAdapter struct { Schema string types.SQLNames - Log *logger.Logger + Log *logging.Logger } var _ DBAdapter = &PostgresAdapter{} // NewPostgresAdapter constructs a new db adapter -func NewPostgresAdapter(schema string, sqlNames types.SQLNames, log *logger.Logger) *PostgresAdapter { +func NewPostgresAdapter(schema string, sqlNames types.SQLNames, log *logging.Logger) *PostgresAdapter { return &PostgresAdapter{ Schema: schema, SQLNames: sqlNames, @@ -67,28 +67,28 @@ func (pa *PostgresAdapter) Open(dbURL string) (*sqlx.DB, error) { return db, nil } -func ensureSchema(db sqlx.Ext, schema string, log *logger.Logger) error { +func ensureSchema(db sqlx.Ext, schema string, log *logging.Logger) error { query := Cleanf(`SELECT EXISTS (SELECT 1 FROM pg_catalog.pg_namespace n WHERE n.nspname = '%s');`, schema) - log.Info("msg", "FIND SCHEMA", "query", query) + log.InfoMsg("FIND SCHEMA", "query", query) var found bool if err := db.QueryRowx(query).Scan(&found); err == nil { if !found { - log.Warn("msg", "Schema not found") + log.InfoMsg("Schema not found") } - log.Info("msg", "Creating schema") + log.InfoMsg("Creating schema") query = Cleanf("CREATE SCHEMA %s;", schema) - log.Info("msg", "CREATE SCHEMA", "query", query) + log.InfoMsg("CREATE SCHEMA", "query", query) if _, err = db.Exec(query); err != nil { if errorEquals(err, types.SQLErrorTypeDuplicatedSchema) { - log.Warn("msg", "Duplicated schema") + log.InfoMsg("Duplicated schema") return nil } } } else { - log.Info("msg", "Error searching schema", "err", err) + log.InfoMsg("Error searching schema", "err", err) return err } return nil diff --git a/vent/sqldb/adapters/sqlite_adapter.go b/vent/sqldb/adapters/sqlite_adapter.go index 807f9f987..5a56137d4 100644 --- a/vent/sqldb/adapters/sqlite_adapter.go +++ b/vent/sqldb/adapters/sqlite_adapter.go @@ -7,7 +7,7 @@ import ( "fmt" "strings" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/types" "github.com/jmoiron/sqlx" sqlite3 "github.com/mattn/go-sqlite3" @@ -29,13 +29,13 @@ var sqliteDataTypes = map[types.SQLColumnType]string{ // SQLiteAdapter implements DBAdapter for SQLiteDB type SQLiteAdapter struct { types.SQLNames - Log *logger.Logger + Log *logging.Logger } var _ DBAdapter = &SQLiteAdapter{} // NewSQLiteAdapter constructs a new db adapter -func NewSQLiteAdapter(sqlNames types.SQLNames, log *logger.Logger) *SQLiteAdapter { +func NewSQLiteAdapter(sqlNames types.SQLNames, log *logging.Logger) *SQLiteAdapter { return &SQLiteAdapter{ SQLNames: 
sqlNames, Log: log, @@ -45,7 +45,7 @@ func NewSQLiteAdapter(sqlNames types.SQLNames, log *logger.Logger) *SQLiteAdapte func (sla *SQLiteAdapter) Open(dbURL string) (*sqlx.DB, error) { db, err := sqlx.Open("sqlite3", dbURL) if err != nil { - sla.Log.Info("msg", "Error creating database connection", "err", err) + sla.Log.InfoMsg("Error creating database connection", "err", err) return nil, err } return db, nil diff --git a/vent/sqldb/adapters/sqlite_adapter_nosqlite.go b/vent/sqldb/adapters/sqlite_adapter_nosqlite.go index c46edc699..3568ff036 100644 --- a/vent/sqldb/adapters/sqlite_adapter_nosqlite.go +++ b/vent/sqldb/adapters/sqlite_adapter_nosqlite.go @@ -6,19 +6,19 @@ package adapters import ( "fmt" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/types" "github.com/jmoiron/sqlx" ) // This is a no-op version of SQLiteAdapter type SQLiteAdapter struct { - Log *logger.Logger + Log *logging.Logger } var _ DBAdapter = &SQLiteAdapter{} -func NewSQLiteAdapter(names types.SQLNames, log *logger.Logger) *SQLiteAdapter { +func NewSQLiteAdapter(names types.SQLNames, log *logging.Logger) *SQLiteAdapter { panic(fmt.Errorf("vent has been built without sqlite support. To use the sqlite DBAdapter build with the 'sqlite' build tag enabled")) } diff --git a/vent/sqldb/sqldb.go b/vent/sqldb/sqldb.go index 43eb7849d..db45a3058 100644 --- a/vent/sqldb/sqldb.go +++ b/vent/sqldb/sqldb.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/hyperledger/burrow/vent/logger" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/sqldb/adapters" "github.com/hyperledger/burrow/vent/types" "github.com/jmoiron/sqlx" @@ -20,7 +20,7 @@ type SQLDB struct { Schema string Queries Queries types.SQLNames - Log *logger.Logger + Log *logging.Logger } // NewSQLDB delegates work to a specific database adapter implementation, @@ -45,12 +45,12 @@ func NewSQLDB(connection types.SQLConnection) (*SQLDB, error) { var err error db.DB, err = db.DBAdapter.Open(connection.DBURL) if err != nil { - db.Log.Info("msg", "Error opening database connection", "err", err) + db.Log.InfoMsg("Error opening database connection", "err", err) return nil, err } if err = db.Ping(); err != nil { - db.Log.Info("msg", "Error database not available", "err", err) + db.Log.InfoMsg("Error database not available", "err", err) return nil, err } @@ -60,7 +60,7 @@ func NewSQLDB(connection types.SQLConnection) (*SQLDB, error) { // Initialise the system and chain tables in case this is the first run - is idempotent though will drop tables // if ChainID has changed func (db *SQLDB) Init(chainID, burrowVersion string) error { - db.Log.Info("msg", "Initializing DB") + db.Log.InfoMsg("Initializing DB") // Create dictionary and log tables sysTables := db.getSysTablesDefinition() @@ -68,7 +68,7 @@ func (db *SQLDB) Init(chainID, burrowVersion string) error { // IMPORTANT: DO NOT CHANGE TABLE CREATION ORDER (1) if err := db.createTable(chainID, sysTables[db.Tables.Dictionary], true); err != nil { if !db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeDuplicatedTable) { - db.Log.Info("msg", "Error creating Dictionary table", "err", err) + db.Log.InfoMsg("Error creating Dictionary table", "err", err) return err } } @@ -76,7 +76,7 @@ func (db *SQLDB) Init(chainID, burrowVersion string) error { // IMPORTANT: DO NOT CHANGE TABLE CREATION ORDER (2) if err := db.createTable(chainID, sysTables[db.Tables.Log], true); err != nil { if !db.DBAdapter.ErrorEquals(err, 
types.SQLErrorTypeDuplicatedTable) { - db.Log.Info("msg", "Error creating Log table", "err", err) + db.Log.InfoMsg("Error creating Log table", "err", err) return err } } @@ -84,7 +84,7 @@ func (db *SQLDB) Init(chainID, burrowVersion string) error { // IMPORTANT: DO NOT CHANGE TABLE CREATION ORDER (3) if err := db.createTable(chainID, sysTables[db.Tables.ChainInfo], true); err != nil { if !db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeDuplicatedTable) { - db.Log.Info("msg", "Error creating Chain Info table", "err", err) + db.Log.InfoMsg("Error creating Chain Info table", "err", err) return err } } @@ -104,7 +104,7 @@ func (db *SQLDB) Init(chainID, burrowVersion string) error { db.Queries, err = db.prepareQueries() if err != nil { - db.Log.Info("msg", "Could not prepare queries", "err", err) + db.Log.InfoMsg("Could not prepare queries", "err", err) return err } @@ -137,7 +137,7 @@ func (db *SQLDB) InitChain(chainID, burrowVersion string) (chainIDChanged bool, // Read chainID query = cleanQueries.SelectChainIDQry if err := db.DB.QueryRow(query).Scan(&savedRows, &savedChainID, &savedBurrowVersion); err != nil { - db.Log.Info("msg", "Error selecting CHAIN ID", "err", err, "query", query) + db.Log.InfoMsg("Error selecting CHAIN ID", "err", err, "query", query) return false, err } @@ -155,7 +155,7 @@ func (db *SQLDB) InitChain(chainID, burrowVersion string) (chainIDChanged bool, _, err := db.DB.Exec(query, chainID, burrowVersion, 0) if err != nil { - db.Log.Info("msg", "Error inserting CHAIN ID", "err", err, "query", query) + db.Log.InfoMsg("Error inserting CHAIN ID", "err", err, "query", query) } return false, err @@ -172,7 +172,7 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { // Begin tx if tx, err = db.DB.Begin(); err != nil { - db.Log.Info("msg", "Error beginning transaction", "err", err) + db.Log.InfoMsg("Error beginning transaction", "err", err) return err } defer tx.Rollback() @@ -180,14 +180,14 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { // Delete chainID query := cleanQueries.DeleteChainIDQry if _, err = tx.Exec(query); err != nil { - db.Log.Info("msg", "Error deleting CHAIN ID", "err", err, "query", query) + db.Log.InfoMsg("Error deleting CHAIN ID", "err", err, "query", query) return err } // Insert chainID query = cleanQueries.InsertChainIDQry if _, err := tx.Exec(query, chainID, burrowVersion, 0); err != nil { - db.Log.Info("msg", "Error inserting CHAIN ID", "err", err, "query", query) + db.Log.InfoMsg("Error inserting CHAIN ID", "err", err, "query", query) return err } @@ -195,19 +195,19 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { query = cleanQueries.SelectDictionaryQry rows, err := tx.Query(query) if err != nil { - db.Log.Info("msg", "error querying dictionary", "err", err, "query", query) + db.Log.InfoMsg("error querying dictionary", "err", err, "query", query) return err } defer rows.Close() for rows.Next() { if err = rows.Scan(&tableName); err != nil { - db.Log.Info("msg", "error scanning table structure", "err", err) + db.Log.InfoMsg("error scanning table structure", "err", err) return err } if err = rows.Err(); err != nil { - db.Log.Info("msg", "error scanning table structure", "err", err) + db.Log.InfoMsg("error scanning table structure", "err", err) return err } tables = append(tables, tableName) @@ -216,14 +216,14 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { // Delete Dictionary query = cleanQueries.DeleteDictionaryQry if _, err = tx.Exec(query); err != nil { - 
db.Log.Info("msg", "Error deleting dictionary", "err", err, "query", query) + db.Log.InfoMsg("Error deleting dictionary", "err", err, "query", query) return err } // Delete Log query = cleanQueries.DeleteLogQry if _, err = tx.Exec(query); err != nil { - db.Log.Info("msg", "Error deleting log", "err", err, "query", query) + db.Log.InfoMsg("Error deleting log", "err", err, "query", query) return err } // Drop database tables @@ -232,7 +232,7 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { if _, err = tx.Exec(query); err != nil { // if error == table does not exists, continue if !db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeUndefinedTable) { - db.Log.Info("msg", "error dropping tables", "err", err, "value", tableName, "query", query) + db.Log.InfoMsg("error dropping tables", "err", err, "value", tableName, "query", query) return err } } @@ -240,7 +240,7 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { // Commit if err = tx.Commit(); err != nil { - db.Log.Info("msg", "Error commiting transaction", "err", err) + db.Log.InfoMsg("Error commiting transaction", "err", err) return err } @@ -250,14 +250,14 @@ func (db *SQLDB) CleanTables(chainID, burrowVersion string) error { // Close database connection func (db *SQLDB) Close() { if err := db.DB.Close(); err != nil { - db.Log.Error("msg", "Error closing database", "err", err) + db.Log.InfoMsg("Error closing database", "err", err) } } // Ping database func (db *SQLDB) Ping() error { if err := db.DB.Ping(); err != nil { - db.Log.Info("msg", "Error database not available", "err", err) + db.Log.InfoMsg("Error database not available", "err", err) return err } @@ -266,7 +266,7 @@ func (db *SQLDB) Ping() error { // SynchronizeDB synchronize db tables structures from given tables specifications func (db *SQLDB) SynchronizeDB(chainID string, eventTables types.EventTables) error { - db.Log.Info("msg", "Synchronizing DB") + db.Log.InfoMsg("Synchronizing DB") for _, table := range eventTables { found, err := db.findTable(table.Name) @@ -289,12 +289,12 @@ func (db *SQLDB) SynchronizeDB(chainID string, eventTables types.EventTables) er // SetBlock inserts or updates multiple rows and stores log info in SQL tables func (db *SQLDB) SetBlock(chainID string, eventTables types.EventTables, eventData types.EventData) error { - db.Log.Info("msg", "Synchronize Block..........") + db.Log.InfoMsg("Synchronize Block..........") // Begin tx tx, err := db.DB.Beginx() if err != nil { - db.Log.Info("msg", "Error beginning transaction", "err", err) + db.Log.InfoMsg("Error beginning transaction", "err", err) return err } defer tx.Rollback() @@ -303,7 +303,7 @@ func (db *SQLDB) SetBlock(chainID string, eventTables types.EventTables, eventDa logQuery := db.DBAdapter.InsertLogQuery() logStmt, err := tx.Prepare(logQuery) if err != nil { - db.Log.Info("msg", "Error preparing log stmt", "err", err) + db.Log.InfoMsg("Error preparing log stmt", "err", err) return err } defer logStmt.Close() @@ -324,19 +324,19 @@ loop: case types.ActionUpsert: //Prepare Upsert if queryVal, txHash, errQuery = db.DBAdapter.UpsertQuery(table, row); errQuery != nil { - db.Log.Info("msg", "Error building upsert query", "err", errQuery, "value", fmt.Sprintf("%v %v", table, row)) + db.Log.InfoMsg("Error building upsert query", "err", errQuery, "value", fmt.Sprintf("%v %v", table, row)) break loop // exits from all loops -> continue in close log stmt } case types.ActionDelete: //Prepare Delete if queryVal, errQuery = db.DBAdapter.DeleteQuery(table, row); errQuery != 
nil { - db.Log.Info("msg", "Error building delete query", "err", errQuery, "value", fmt.Sprintf("%v %v", table, row)) + db.Log.InfoMsg("Error building delete query", "err", errQuery, "value", fmt.Sprintf("%v %v", table, row)) break loop // exits from all loops -> continue in close log stmt } default: //Invalid Action - db.Log.Info("msg", "invalid action", "value", row.Action) + db.Log.InfoMsg("invalid action", "value", row.Action) err = fmt.Errorf("invalid row action %s", row.Action) break loop // exits from all loops -> continue in close log stmt } @@ -344,34 +344,34 @@ loop: query := queryVal.Query // Perform row action - db.Log.Info("msg", row.Action, "query", query, "value", queryVal.Values) + db.Log.InfoMsg("msg", "action", row.Action, "query", query, "value", queryVal.Values) if _, err = tx.Exec(query, queryVal.Pointers...); err != nil { - db.Log.Info("msg", fmt.Sprintf("error performing %s on row", row.Action), "err", err, "value", queryVal.Values) + db.Log.InfoMsg(fmt.Sprintf("error performing %s on row", row.Action), "err", err, "value", queryVal.Values) break loop // exits from all loops -> continue in close log stmt } // Marshal the rowData map jsonData, err := getJSON(row.RowData) if err != nil { - db.Log.Info("msg", "error marshaling rowData", "err", err, "value", fmt.Sprintf("%v", row.RowData)) + db.Log.InfoMsg("error marshaling rowData", "err", err, "value", fmt.Sprintf("%v", row.RowData)) break loop // exits from all loops -> continue in close log stmt } // Marshal sql values sqlValues, err := getJSONFromValues(queryVal.Pointers) if err != nil { - db.Log.Info("msg", "error marshaling rowdata", "err", err, "value", fmt.Sprintf("%v", row.RowData)) + db.Log.InfoMsg("error marshaling rowdata", "err", err, "value", fmt.Sprintf("%v", row.RowData)) break loop // exits from all loops -> continue in close log stmt } eventName, _ := row.RowData[db.Columns.EventName].(string) // Insert in log - db.Log.Info("msg", "INSERT LOG", "query", logQuery, "value", + db.Log.InfoMsg("INSERT LOG", "query", logQuery, "value", fmt.Sprintf("chainid = %s tableName = %s eventName = %s block = %d", chainID, safeTable, en, eventData.BlockHeight)) if _, err = logStmt.Exec(chainID, safeTable, eventName, row.EventClass.GetFilter(), eventData.BlockHeight, txHash, row.Action, jsonData, query, sqlValues); err != nil { - db.Log.Info("msg", "Error inserting into log", "err", err) + db.Log.InfoMsg("Error inserting into log", "err", err) break loop // exits from all loops -> continue in close log stmt } } @@ -380,7 +380,7 @@ loop: // Close log statement if err == nil { if err = logStmt.Close(); err != nil { - db.Log.Info("msg", "Error closing log stmt", "err", err) + db.Log.InfoMsg("Error closing log stmt", "err", err) } } @@ -388,7 +388,7 @@ loop: if err != nil { // Rollback error if errRb := tx.Rollback(); errRb != nil { - db.Log.Info("msg", "Error on rollback", "err", errRb) + db.Log.InfoMsg("Error on rollback", "err", errRb) return errRb } @@ -397,7 +397,7 @@ loop: // Table does not exists if db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeUndefinedTable) { - db.Log.Warn("msg", "Table not found", "value", safeTable) + db.Log.InfoMsg("Table not found", "value", safeTable) //Synchronize DB if err = db.SynchronizeDB(chainID, eventTables); err != nil { return err @@ -408,7 +408,7 @@ loop: // Columns do not match if db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeUndefinedColumn) { - db.Log.Warn("msg", "Column not found", "value", safeTable) + db.Log.InfoMsg("Column not found", "value", safeTable) //Synchronize DB 
if err = db.SynchronizeDB(chainID, eventTables); err != nil { return err @@ -421,17 +421,17 @@ loop: return err } - db.Log.Info("msg", "COMMIT") + db.Log.InfoMsg("COMMIT") err = db.SetBlockHeight(tx, chainID, eventData.BlockHeight) if err != nil { - db.Log.Info("msg", "Could not commit block height", "err", err) + db.Log.InfoMsg("Could not commit block height", "err", err) return err } err = tx.Commit() if err != nil { - db.Log.Info("msg", "Error on commit", "err", err) + db.Log.InfoMsg("Error on commit", "err", err) return err } @@ -457,20 +457,20 @@ func (db *SQLDB) GetBlock(chainID string, height uint64) (types.EventData, error // get query for table query, err = db.getSelectQuery(table, height) if err != nil { - db.Log.Info("msg", "Error building table query", "err", err) + db.Log.InfoMsg("Error building table query", "err", err) return data, err } - db.Log.Info("msg", "Query table data", "query", query) + db.Log.InfoMsg("Query table data", "query", query) rows, err := db.DB.Query(query) if err != nil { - db.Log.Info("msg", "Error querying table data", "err", err) + db.Log.InfoMsg("Error querying table data", "err", err) return data, err } defer rows.Close() cols, err := rows.Columns() if err != nil { - db.Log.Info("msg", "Error getting row columns", "err", err) + db.Log.InfoMsg("Error getting row columns", "err", err) return data, err } @@ -491,10 +491,10 @@ func (db *SQLDB) GetBlock(chainID string, height uint64) (types.EventData, error row := make(map[string]interface{}) if err = rows.Scan(pointers...); err != nil { - db.Log.Info("msg", "Error scanning data", "err", err) + db.Log.InfoMsg("Error scanning data", "err", err) return data, err } - db.Log.Info("msg", "Query resultset", "value", fmt.Sprintf("%+v", containers)) + db.Log.InfoMsg("Query resultset", "value", fmt.Sprintf("%+v", containers)) // for each column in row for i, col := range cols { @@ -507,7 +507,7 @@ func (db *SQLDB) GetBlock(chainID string, height uint64) (types.EventData, error } if err = rows.Err(); err != nil { - db.Log.Info("msg", "Error during rows iteration", "err", err) + db.Log.InfoMsg("Error during rows iteration", "err", err) return data, err } data.Tables[table.Name] = dataRows @@ -566,20 +566,20 @@ func (db *SQLDB) RestoreDB(restoreTime time.Time, prefix string) error { } strTime := restoreTime.Format(yymmddhhmmss) - db.Log.Info("msg", "RESTORING DB..................................") + db.Log.InfoMsg("RESTORING DB..................................") - db.Log.Info("msg", "open log", "query", query) + db.Log.InfoMsg("open log", "query", query) // Postgres does not work is run within same tx as updates, see: https://github.com/lib/pq/issues/81 rows, err := db.DB.Query(query, strTime) if err != nil { - db.Log.Info("msg", "error querying log", "err", err) + db.Log.InfoMsg("error querying log", "err", err) return err } defer rows.Close() tx, err := db.DB.Begin() if err != nil { - db.Log.Info("msg", "could not open transaction for restore", "err", err) + db.Log.InfoMsg("could not open transaction for restore", "err", err) return err } defer tx.Rollback() @@ -592,13 +592,13 @@ func (db *SQLDB) RestoreDB(restoreTime time.Time, prefix string) error { err = rows.Err() if err != nil { - db.Log.Info("msg", "error scanning table structure", "err", err) + db.Log.InfoMsg("error scanning table structure", "err", err) return err } err = rows.Scan(&id, &tableName, &action, &sqlSmt, &sqlValues) if err != nil { - db.Log.Info("msg", "error scanning table structure", "err", err) + db.Log.InfoMsg("error scanning table 
structure", "err", err) return err } @@ -611,7 +611,7 @@ func (db *SQLDB) RestoreDB(restoreTime time.Time, prefix string) error { case types.ActionUpsert, types.ActionDelete: // get row values if pointers, err = getValuesFromJSON(sqlValues); err != nil { - db.Log.Info("msg", "error unmarshaling json", "err", err, "value", sqlValues) + db.Log.InfoMsg("error unmarshaling json", "err", err, "value", sqlValues) return err } @@ -622,9 +622,9 @@ func (db *SQLDB) RestoreDB(restoreTime time.Time, prefix string) error { query = strings.Replace(sqlSmt, tableName, restoreTable, -1) } - db.Log.Info("msg", "SQL COMMAND", "sql", query, "log_id", id) + db.Log.InfoMsg("SQL COMMAND", "sql", query, "log_id", id) if _, err = tx.Exec(query, pointers...); err != nil { - db.Log.Info("msg", "Error executing upsert/delete ", "err", err, "value", sqlSmt, "data", sqlValues) + db.Log.InfoMsg("Error executing upsert/delete ", "err", err, "value", sqlSmt, "data", sqlValues) return err } @@ -639,21 +639,21 @@ func (db *SQLDB) RestoreDB(restoreTime time.Time, prefix string) error { // Prepare Alter/Create Table query = strings.Replace(sqlSmt, tableName, restoreTable, -1) - db.Log.Info("msg", "SQL COMMAND", "sql", query) + db.Log.InfoMsg("SQL COMMAND", "sql", query) _, err = tx.Exec(query) if err != nil { - db.Log.Info("msg", "Error executing alter/create table command ", "err", err, "value", sqlSmt) + db.Log.InfoMsg("Error executing alter/create table command ", "err", err, "value", sqlSmt) return err } default: // Invalid Action - db.Log.Info("msg", "invalid action", "value", action) + db.Log.InfoMsg("invalid action", "value", action) return fmt.Errorf("invalid row action %s", action) } } err = tx.Commit() if err != nil { - db.Log.Info("msg", "could not commit restore tx", "err", err) + db.Log.InfoMsg("could not commit restore tx", "err", err) return err } return nil diff --git a/vent/sqldb/sqldb_postgres_test.go b/vent/sqldb/sqldb_postgres_test.go index 11994b85b..9e99ff2d9 100644 --- a/vent/sqldb/sqldb_postgres_test.go +++ b/vent/sqldb/sqldb_postgres_test.go @@ -9,12 +9,10 @@ import ( "testing" "time" - "github.com/lib/pq" - "github.com/hyperledger/burrow/vent/sqldb/adapters" - "github.com/hyperledger/burrow/vent/types" - "github.com/hyperledger/burrow/vent/test" + "github.com/hyperledger/burrow/vent/types" + "github.com/lib/pq" "github.com/stretchr/testify/require" ) diff --git a/vent/sqldb/utils.go b/vent/sqldb/utils.go index da5847596..b24eefb9f 100644 --- a/vent/sqldb/utils.go +++ b/vent/sqldb/utils.go @@ -28,14 +28,14 @@ func (db *SQLDB) findTable(tableName string) (bool, error) { safeTable := safe(tableName) query := db.DBAdapter.FindTableQuery() - db.Log.Info("msg", "FIND TABLE", "query", query, "value", safeTable) + db.Log.InfoMsg("FIND TABLE", "query", query, "value", safeTable) if err := db.DB.QueryRow(query, tableName).Scan(&found); err != nil { - db.Log.Info("msg", "Error finding table", "err", err) + db.Log.InfoMsg("Error finding table", "err", err) return false, err } if found == 0 { - db.Log.Warn("msg", "Table not found", "value", safeTable) + db.Log.InfoMsg("Table not found", "value", safeTable) return false, nil } @@ -185,16 +185,16 @@ func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { } if !found { - db.Log.Info("msg", "Error table not found", "value", table.Name) + db.Log.InfoMsg("Error table not found", "value", table.Name) return nil, errors.New("Error table not found " + table.Name) } query := db.DBAdapter.TableDefinitionQuery() - db.Log.Info("msg", "QUERY STRUCTURE", 
"query", query, "value", table.Name) + db.Log.InfoMsg("QUERY STRUCTURE", "query", query, "value", table.Name) rows, err := db.DB.Query(query, safe(tableName)) if err != nil { - db.Log.Info("msg", "Error querying table structure", "err", err) + db.Log.InfoMsg("Error querying table structure", "err", err) return nil, err } defer rows.Close() @@ -208,7 +208,7 @@ func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { var columnLength int if err = rows.Scan(&columnName, &columnSQLType, &columnLength, &columnIsPK); err != nil { - db.Log.Info("msg", "Error scanning table structure", "err", err) + db.Log.InfoMsg("Error scanning table structure", "err", err) return nil, err } @@ -225,7 +225,7 @@ func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { } if err = rows.Err(); err != nil { - db.Log.Info("msg", "Error during rows iteration", "err", err) + db.Log.InfoMsg("Error during rows iteration", "err", err) return nil, err } @@ -235,7 +235,7 @@ func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { // alterTable alters the structure of a SQL table & add info to the dictionary func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { - db.Log.Info("msg", "Altering table", "value", table.Name) + db.Log.InfoMsg("Altering table", "value", table.Name) // prepare log query logQuery := db.DBAdapter.InsertLogQuery() @@ -267,22 +267,22 @@ func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { query, dictionary := db.DBAdapter.AlterColumnQuery(safeTable, safeCol, newColumn.Type, newColumn.Length, order) //alter column - db.Log.Info("msg", "ALTER TABLE", "query", safe(query)) + db.Log.InfoMsg("ALTER TABLE", "query", safe(query)) _, err = db.DB.Exec(safe(query)) if err != nil { if db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeDuplicatedColumn) { - db.Log.Warn("msg", "Duplicate column", "value", safeCol) + db.Log.InfoMsg("Duplicate column", "value", safeCol) } else { - db.Log.Info("msg", "Error altering table", "err", err) + db.Log.InfoMsg("Error altering table", "err", err) return err } } else { //store dictionary - db.Log.Info("msg", "STORE DICTIONARY", "query", dictionary) + db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) _, err = db.DB.Exec(dictionary) if err != nil { - db.Log.Info("msg", "Error storing dictionary", "err", err) + db.Log.InfoMsg("Error storing dictionary", "err", err) return err } @@ -290,13 +290,13 @@ func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { var jsonData []byte jsonData, err = getJSON(newColumn) if err != nil { - db.Log.Info("msg", "error marshaling column", "err", err, "value", fmt.Sprintf("%v", newColumn)) + db.Log.InfoMsg("error marshaling column", "err", err, "value", fmt.Sprintf("%v", newColumn)) return err } //insert log _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionAlterTable, jsonData, query, sqlValues) if err != nil { - db.Log.Info("msg", "Error inserting log", "err", err) + db.Log.InfoMsg("Error inserting log", "err", err) return err } } @@ -306,7 +306,7 @@ func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { // Ensure triggers are defined err = db.createTableTriggers(table) if err != nil { - db.Log.Info("msg", "error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) + db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) return fmt.Errorf("could not create table notification triggers: %v", err) } return nil @@ 
-314,7 +314,7 @@ func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { // createTable creates a new table func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise bool) error { - db.Log.Info("msg", "Creating Table", "value", table.Name) + db.Log.InfoMsg("Creating Table", "value", table.Name) // prepare log query logQuery := db.DBAdapter.InsertLogQuery() @@ -323,28 +323,28 @@ func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise safeTable := safe(table.Name) query, dictionary := db.DBAdapter.CreateTableQuery(safeTable, table.Columns) if query == "" { - db.Log.Info("msg", "empty CREATE TABLE query") + db.Log.InfoMsg("empty CREATE TABLE query") return errors.New("empty CREATE TABLE query") } // create table - db.Log.Info("msg", "CREATE TABLE", "query", query) + db.Log.InfoMsg("CREATE TABLE", "query", query) _, err := db.DB.Exec(query) if err != nil { return err } //store dictionary - db.Log.Info("msg", "STORE DICTIONARY", "query", dictionary) + db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) _, err = db.DB.Exec(dictionary) if err != nil { - db.Log.Info("msg", "Error storing dictionary", "err", err) + db.Log.InfoMsg("Error storing dictionary", "err", err) return err } err = db.createTableTriggers(table) if err != nil { - db.Log.Info("msg", "error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) + db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) return fmt.Errorf("could not create table notification triggers: %v", err) } @@ -354,7 +354,7 @@ func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise var jsonData []byte jsonData, err = getJSON(table) if err != nil { - db.Log.Info("msg", "error marshaling table", "err", err, "value", fmt.Sprintf("%v", table)) + db.Log.InfoMsg("error marshaling table", "err", err, "value", fmt.Sprintf("%v", table)) return err } sqlValues, _ := getJSON(nil) @@ -362,7 +362,7 @@ func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise //insert log _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionCreateTable, jsonData, query, sqlValues) if err != nil { - db.Log.Info("msg", "Error inserting log", "err", err) + db.Log.InfoMsg("Error inserting log", "err", err) return err } } @@ -378,7 +378,7 @@ func (db *SQLDB) createTableTriggers(table *types.SQLTable) error { function := fmt.Sprintf("%s_%s_notify_function", table.Name, channel) query := dbNotify.CreateNotifyFunctionQuery(function, channel, columns...) 
- db.Log.Info("msg", "CREATE NOTIFICATION FUNCTION", "query", query) + db.Log.InfoMsg("CREATE NOTIFICATION FUNCTION", "query", query) _, err := db.DB.Exec(query) if err != nil { return fmt.Errorf("could not create notification function: %v", err) @@ -386,7 +386,7 @@ func (db *SQLDB) createTableTriggers(table *types.SQLTable) error { trigger := fmt.Sprintf("%s_%s_notify_trigger", table.Name, channel) query = dbNotify.CreateTriggerQuery(trigger, table.Name, function) - db.Log.Info("msg", "CREATE NOTIFICATION TRIGGER", "query", query) + db.Log.InfoMsg("CREATE NOTIFICATION TRIGGER", "query", query) _, err = db.DB.Exec(query) if err != nil { return fmt.Errorf("could not create notification trigger: %v", err) @@ -422,11 +422,11 @@ func (db *SQLDB) getBlockTables(chainid string, height uint64) (types.EventTable tables := make(types.EventTables) query := db.DBAdapter.SelectLogQuery() - db.Log.Info("msg", "QUERY LOG", "query", query, "height", height, "chainid", chainid) + db.Log.InfoMsg("QUERY LOG", "query", query, "height", height, "chainid", chainid) rows, err := db.DB.Query(query, height, chainid) if err != nil { - db.Log.Info("msg", "Error querying log", "err", err) + db.Log.InfoMsg("Error querying log", "err", err) return tables, err } @@ -438,13 +438,13 @@ func (db *SQLDB) getBlockTables(chainid string, height uint64) (types.EventTable err = rows.Scan(&tableName, &eventName) if err != nil { - db.Log.Info("msg", "Error scanning table structure", "err", err) + db.Log.InfoMsg("Error scanning table structure", "err", err) return tables, err } err = rows.Err() if err != nil { - db.Log.Info("msg", "Error scanning table structure", "err", err) + db.Log.InfoMsg("Error scanning table structure", "err", err) return tables, err } diff --git a/vent/test/db.go b/vent/test/db.go index 1473294c6..20dc7a195 100644 --- a/vent/test/db.go +++ b/vent/test/db.go @@ -4,18 +4,16 @@ import ( "fmt" "io/ioutil" "math/rand" + "os" "syscall" "testing" "time" - "github.com/stretchr/testify/require" - - "os" - + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/logger" "github.com/hyperledger/burrow/vent/sqldb" "github.com/hyperledger/burrow/vent/types" + "github.com/stretchr/testify/require" ) const ( @@ -45,7 +43,7 @@ func NewTestDB(t *testing.T, cfg *config.VentConfig) (*sqldb.SQLDB, func()) { DBURL: cfg.DBURL, DBSchema: cfg.DBSchema, - Log: logger.NewLogger(""), + Log: logging.NewNoopLogger(), } db, err := sqldb.NewSQLDB(connection) @@ -95,13 +93,13 @@ func PostgresVentConfig(grpcAddress string) *config.VentConfig { } func destroySchema(db *sqldb.SQLDB, dbSchema string) error { - db.Log.Info("msg", "Dropping schema") + db.Log.InfoMsg("Dropping schema") query := fmt.Sprintf("DROP SCHEMA %s CASCADE;", dbSchema) - db.Log.Info("msg", "Drop schema", "query", query) + db.Log.InfoMsg("Drop schema", "query", query) if _, err := db.DB.Exec(query); err != nil { - db.Log.Info("msg", "Error dropping schema", "err", err) + db.Log.InfoMsg("Error dropping schema", "err", err) return err } diff --git a/vent/types/sql_utils.go b/vent/types/sql_utils.go index 9766d7721..114043ff4 100644 --- a/vent/types/sql_utils.go +++ b/vent/types/sql_utils.go @@ -1,13 +1,13 @@ package types -import "github.com/hyperledger/burrow/vent/logger" +import "github.com/hyperledger/burrow/logging" // SQLConnection stores parameters to build a new db connection & initialize the database type SQLConnection struct { DBAdapter string DBURL string DBSchema string - Log *logger.Logger + Log 
*logging.Logger } // SQLCleanDBQuery stores queries needed to clean the database From 97d33f0236ae88fdfd94218e63dbdf19ff6fd641 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Wed, 19 Jun 2019 21:33:23 +0100 Subject: [PATCH 02/70] Use deterministic protobuf serialisation in state. Replacing amino - avoid risk of serialising magic protobuf fields and benefit from table marshalling performance. Signed-off-by: Silas Davis --- acm/account.go | 30 +- acm/account_test.go | 34 - acm/acm.pb.go | 102 +- acm/balance/balance.pb.go | 38 +- acm/validator/validator.go | 16 - acm/validator/validator.pb.go | 341 +- bcm/bcm.pb.go | 406 +- bcm/block_store.go | 2 +- bcm/blockchain.go | 19 +- cmd/burrow/commands/configure.go | 1 + consensus/tendermint/tendermint.pb.go | 574 +-- consensus/tendermint/tendermint_test.go | 28 + core/kernel.go | 2 +- core/processes.go | 2 +- crypto/crypto.pb.go | 68 +- dump/dump.pb.go | 178 +- encoding/encoding.pb.go | 372 ++ encoding/protobuf.go | 120 + encoding/protobuf_test.go | 81 + execution/contexts/call_context.go | 2 +- execution/errors/errors.pb.go | 37 +- execution/exec/block_execution.go | 8 +- execution/exec/codec.go | 2 +- execution/exec/exec.pb.go | 910 ++-- execution/exec/stream_event.go | 9 +- execution/exec/tx_execution.go | 34 +- execution/names/names.go | 16 - execution/names/names.pb.go | 42 +- execution/names/names_test.go | 14 - execution/state/accounts.go | 20 +- execution/state/events.go | 40 +- execution/state/events_test.go | 3 +- execution/state/names.go | 9 +- execution/state/state.go | 18 +- execution/state/state_test.go | 6 +- execution/state/validators.go | 10 +- execution/transactor_test.go | 2 +- genesis/spec/spec.pb.go | 95 +- go.mod | 3 +- go.sum | 13 +- keys/keys.pb.go | 4082 ++--------------- permission/permission.pb.go | 22 +- protobuf/acm.proto | 1 + protobuf/balance.proto | 4 +- protobuf/bcm.proto | 8 + protobuf/crypto.proto | 1 + protobuf/dump.proto | 3 +- protobuf/encoding.proto | 21 + protobuf/errors.proto | 1 + protobuf/exec.proto | 6 + .../tendermint/abci/types/types.proto | 3 +- .../tendermint/libs/common/types.proto | 3 +- protobuf/keys.proto | 5 +- protobuf/names.proto | 1 + protobuf/rpc.proto | 3 +- protobuf/rpcdump.proto | 3 +- protobuf/rpcquery.proto | 5 +- protobuf/spec.proto | 2 +- protobuf/storage.proto | 22 + protobuf/tendermint.proto | 3 +- protobuf/validator.proto | 3 +- rpc/rpc.pb.go | 597 +-- rpc/rpcdump/rpcdump.pb.go | 266 +- rpc/rpcevents/rpcevents.pb.go | 50 +- rpc/rpcquery/rpcquery.pb.go | 2534 +--------- rpc/rpctransact/rpctransact.pb.go | 83 +- storage/forest.go | 25 +- storage/forest_test.go | 12 +- storage/storage.pb.go | 390 ++ txs/amino_codec.go | 43 - txs/payload/payload.pb.go | 172 +- txs/payload/proposal_tx.go | 19 +- txs/protobuf_codec.go | 25 + ...o_codec_test.go => protobuf_codec_test.go} | 6 +- txs/tx.go | 8 +- txs/txs.pb.go | 46 +- 76 files changed, 3047 insertions(+), 9138 deletions(-) create mode 100644 consensus/tendermint/tendermint_test.go create mode 100644 encoding/encoding.pb.go create mode 100644 encoding/protobuf.go create mode 100644 encoding/protobuf_test.go create mode 100644 protobuf/encoding.proto create mode 100644 protobuf/storage.proto create mode 100644 storage/storage.pb.go delete mode 100644 txs/amino_codec.go create mode 100644 txs/protobuf_codec.go rename txs/{amino_codec_test.go => protobuf_codec_test.go} (95%) diff --git a/acm/account.go b/acm/account.go index 1cf3eebe0..7a8d19f5d 100644 --- a/acm/account.go +++ b/acm/account.go @@ -18,13 +18,12 @@ import ( "bytes" "fmt" -
"github.com/hyperledger/burrow/execution/errors" - - amino "github.com/tendermint/go-amino" + "github.com/gogo/protobuf/proto" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/event/query" + "github.com/hyperledger/burrow/execution/errors" "github.com/hyperledger/burrow/permission" ) @@ -63,23 +62,6 @@ func (acc *Account) SubtractFromBalance(amount uint64) error { return nil } -///---- Serialisation methods - -var cdc = amino.NewCodec() - -func (acc *Account) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(acc) -} - -func Decode(accBytes []byte) (*Account, error) { - ca := new(Account) - err := cdc.UnmarshalBinaryBare(accBytes, ca) - if err != nil { - return nil, err - } - return ca, nil -} - // Conversions // // Using the naming convention is this package of 'As' being @@ -111,14 +93,18 @@ func (acc *Account) Copy() *Account { } func (acc *Account) Equal(accOther *Account) bool { - accEnc, err := acc.Encode() + buf := proto.NewBuffer(nil) + err := buf.Marshal(acc) if err != nil { return false } - accOtherEnc, err := acc.Encode() + accEnc := buf.Bytes() + buf.Reset() + err = buf.Marshal(accOther) if err != nil { return false } + accOtherEnc := buf.Bytes() return bytes.Equal(accEnc, accOtherEnc) } diff --git a/acm/account_test.go b/acm/account_test.go index 445d3ef82..06d94350f 100644 --- a/acm/account_test.go +++ b/acm/account_test.go @@ -49,40 +49,6 @@ func TestAddress(t *testing.T) { assert.Equal(t, addr, addrFromWord256) } -func TestDecodeConcrete(t *testing.T) { - concreteAcc := NewAccountFromSecret("Super Semi Secret") - concreteAcc.Permissions = permission.AccountPermissions{ - Base: permission.BasePermissions{ - Perms: permission.SetGlobal, - SetBit: permission.SetGlobal, - }, - Roles: []string{"bums"}, - } - acc := concreteAcc - encodedAcc, err := acc.Encode() - require.NoError(t, err) - - concreteAccOut, err := Decode(encodedAcc) - require.NoError(t, err) - - assert.Equal(t, concreteAcc, concreteAccOut) - _, err = Decode([]byte("flungepliffery munknut tolopops")) - assert.Error(t, err) -} - -func TestDecode(t *testing.T) { - acc := NewAccountFromSecret("Super Semi Secret") - encodedAcc, err := acc.Encode() - require.NoError(t, err) - accOut, err := Decode(encodedAcc) - require.NoError(t, err) - assert.Equal(t, NewAccountFromSecret("Super Semi Secret"), accOut) - - accOut, err = Decode([]byte("flungepliffery munknut tolopops")) - require.Error(t, err) - assert.Nil(t, accOut) -} - func TestMarshalJSON(t *testing.T) { acc := NewAccountFromSecret("Super Semi Secret") acc.EVMCode = []byte{60, 23, 45} diff --git a/acm/acm.pb.go b/acm/acm.pb.go index 716122b25..dcc36a128 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -50,16 +51,12 @@ func (m *Account) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Account) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Account.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Account) XXX_Merge(src proto.Message) { xxx_messageInfo_Account.Merge(m, src) @@ -113,30 +110,30 @@ func init() { proto.RegisterFile("acm.proto", 
fileDescriptor_49ed775bc0a6adf6) } func init() { golang_proto.RegisterFile("acm.proto", fileDescriptor_49ed775bc0a6adf6) } var fileDescriptor_49ed775bc0a6adf6 = []byte{ - // 355 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xbf, 0x4f, 0xfa, 0x40, - 0x1c, 0xe5, 0xa0, 0x5f, 0x0a, 0x07, 0x03, 0xdf, 0x9b, 0x1a, 0x86, 0x16, 0x9d, 0x88, 0xc1, 0x36, - 0xf1, 0x47, 0x4c, 0xd8, 0xa8, 0xd1, 0xc5, 0x68, 0x48, 0x49, 0x34, 0x71, 0x6b, 0xaf, 0x67, 0x69, - 0xc2, 0x71, 0xf5, 0x7a, 0x8d, 0xe9, 0x7f, 0xe2, 0xe8, 0x9f, 0xe2, 0xc8, 0xe8, 0xec, 0x40, 0x0c, - 0x6c, 0xfe, 0x0d, 0x0e, 0x86, 0xe3, 0xa8, 0x8d, 0x83, 0x5b, 0x5f, 0xdf, 0x7b, 0x9f, 0xf7, 0xf2, - 0x0e, 0x36, 0x7d, 0x4c, 0xed, 0x84, 0x33, 0xc1, 0x50, 0xcd, 0xc7, 0xb4, 0x7b, 0x18, 0xc5, 0x62, - 0x9a, 0x05, 0x36, 0x66, 0xd4, 0x89, 0x58, 0xc4, 0x1c, 0xc9, 0x05, 0xd9, 0x83, 0x44, 0x12, 0xc8, - 0xaf, 0xad, 0xa7, 0xdb, 0x49, 0x08, 0xa7, 0x71, 0x9a, 0xc6, 0x6c, 0xae, 0xfe, 0xb4, 0x31, 0xcf, - 0x13, 0xa1, 0xf8, 0xfd, 0xaf, 0x2a, 0xd4, 0x47, 0x18, 0xb3, 0x6c, 0x2e, 0xd0, 0x0d, 0xd4, 0x47, - 0x61, 0xc8, 0x49, 0x9a, 0x1a, 0xa0, 0x07, 0xfa, 0x6d, 0xf7, 0x64, 0xb1, 0xb4, 0x2a, 0xef, 0x4b, - 0x6b, 0x50, 0xca, 0x9c, 0xe6, 0x09, 0xe1, 0x33, 0x12, 0x46, 0x84, 0x3b, 0x41, 0xc6, 0x39, 0x7b, - 0x72, 0xd4, 0x41, 0xe5, 0xf5, 0x76, 0x47, 0xd0, 0x29, 0x6c, 0x8e, 0xb3, 0x60, 0x16, 0xe3, 0x2b, - 0x92, 0x1b, 0xd5, 0x1e, 0xe8, 0xb7, 0x8e, 0xfe, 0xdb, 0x4a, 0x5c, 0x10, 0xae, 0xb6, 0x09, 0xf1, - 0x7e, 0x94, 0xa8, 0x0b, 0x1b, 0x13, 0xf2, 0x98, 0x91, 0x39, 0x26, 0x46, 0xad, 0x07, 0xfa, 0x9a, - 0x57, 0x60, 0x64, 0x40, 0xdd, 0xf5, 0x67, 0xfe, 0x86, 0xd2, 0x24, 0xb5, 0x83, 0xe8, 0x00, 0xea, - 0x17, 0xb7, 0xd7, 0xe7, 0x2c, 0x24, 0xc6, 0x3f, 0x59, 0xbe, 0xa3, 0xca, 0x37, 0xdc, 0x5c, 0x10, - 0xcc, 0x42, 0xe2, 0xed, 0x04, 0xe8, 0x12, 0xb6, 0xc6, 0xc5, 0x2c, 0xa9, 0x51, 0x97, 0xd5, 0x4c, - 0xbb, 0x34, 0x95, 0x9a, 0xa4, 0xa4, 0x52, 0x3d, 0xcb, 0x46, 0x34, 0x84, 0x8d, 0xbb, 0xd1, 0x64, - 0x1b, 0xaa, 0xcb, 0x50, 0xf3, 0x77, 0xe8, 0xe7, 0xd2, 0x82, 0x03, 0x46, 0x63, 0x41, 0x68, 0x22, - 0x72, 0xaf, 0xd0, 0x0f, 0xb5, 0xe7, 0x17, 0xab, 0xe2, 0x9e, 0x2d, 0x56, 0x26, 0x78, 0x5b, 0x99, - 0xe0, 0x63, 0x65, 0x82, 0xd7, 0xb5, 0x09, 0x16, 0x6b, 0x13, 0xdc, 0xef, 0xfd, 0xbd, 0xb7, 0x8f, - 0x69, 0x50, 0x97, 0xcf, 0x77, 0xfc, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x65, 0x02, 0xa3, 0x8e, 0x1f, - 0x02, 0x00, 0x00, + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x3f, 0x4f, 0xc2, 0x40, + 0x1c, 0xe5, 0xa0, 0x52, 0x38, 0x18, 0xf0, 0xa6, 0x86, 0xe1, 0x8a, 0x4e, 0xc4, 0x60, 0x9b, 0xf8, + 0x67, 0xc1, 0x89, 0x1a, 0x5d, 0x8c, 0x86, 0x94, 0x44, 0x13, 0xb7, 0xf6, 0x7a, 0x96, 0x26, 0x94, + 0xab, 0xd7, 0x36, 0xa6, 0xdf, 0xc4, 0xd1, 0x8f, 0xe2, 0xc8, 0xe8, 0x68, 0x1c, 0x88, 0x29, 0x9b, + 0x9f, 0xc1, 0xc1, 0x70, 0x1c, 0xb5, 0x71, 0x70, 0xeb, 0xeb, 0x7b, 0xef, 0xf7, 0x5e, 0xde, 0xc1, + 0xa6, 0x43, 0x42, 0x23, 0xe2, 0x2c, 0x61, 0xa8, 0xe6, 0x90, 0xb0, 0x7b, 0xe8, 0x07, 0xc9, 0x34, + 0x75, 0x0d, 0xc2, 0x42, 0xd3, 0x67, 0x3e, 0x33, 0x05, 0xe7, 0xa6, 0x0f, 0x02, 0x09, 0x20, 0xbe, + 0x36, 0x9e, 0x6e, 0x27, 0xa2, 0x3c, 0x0c, 0xe2, 0x38, 0x60, 0x73, 0xf9, 0xa7, 0x4d, 0x78, 0x16, + 0x25, 0x92, 0xdf, 0xff, 0xae, 0x42, 0x75, 0x44, 0x08, 0x4b, 0xe7, 0x09, 0xba, 0x81, 0xea, 0xc8, + 0xf3, 0x38, 0x8d, 0x63, 0x0d, 0xf4, 0x40, 0xbf, 0x6d, 0x9d, 0x2c, 0x96, 0x7a, 0xe5, 0x63, 0xa9, + 0x0f, 0x4a, 0x99, 0xd3, 0x2c, 0xa2, 0x7c, 0x46, 0x3d, 0x9f, 0x72, 0xd3, 0x4d, 0x39, 0x67, 0x4f, + 0xa6, 0x3c, 0x28, 0xbd, 0xf6, 
0xf6, 0x08, 0x3a, 0x85, 0xcd, 0x71, 0xea, 0xce, 0x02, 0x72, 0x45, + 0x33, 0xad, 0xda, 0x03, 0xfd, 0xd6, 0xd1, 0xae, 0x21, 0xc5, 0x05, 0x61, 0x29, 0xeb, 0x10, 0xfb, + 0x57, 0x89, 0xba, 0xb0, 0x31, 0xa1, 0x8f, 0x29, 0x9d, 0x13, 0xaa, 0xd5, 0x7a, 0xa0, 0xaf, 0xd8, + 0x05, 0x46, 0x1a, 0x54, 0x2d, 0x67, 0xe6, 0xac, 0x29, 0x45, 0x50, 0x5b, 0x88, 0x0e, 0xa0, 0x7a, + 0x71, 0x7b, 0x7d, 0xce, 0x3c, 0xaa, 0xed, 0x88, 0xf2, 0x1d, 0x59, 0xbe, 0x61, 0x65, 0x09, 0x25, + 0xcc, 0xa3, 0xf6, 0x56, 0x80, 0x2e, 0x61, 0x6b, 0x5c, 0xcc, 0x12, 0x6b, 0x75, 0x51, 0x0d, 0x1b, + 0xa5, 0xa9, 0xe4, 0x24, 0x25, 0x95, 0xec, 0x59, 0x36, 0xa2, 0x21, 0x6c, 0xdc, 0x8d, 0x26, 0x9b, + 0x50, 0x55, 0x84, 0xe2, 0xbf, 0xa1, 0x5f, 0x4b, 0x1d, 0x0e, 0x58, 0x18, 0x24, 0x34, 0x8c, 0x92, + 0xcc, 0x2e, 0xf4, 0x43, 0xe5, 0xf9, 0x45, 0xaf, 0x58, 0x67, 0x8b, 0x1c, 0x83, 0xb7, 0x1c, 0x83, + 0xf7, 0x1c, 0x83, 0xcf, 0x1c, 0x83, 0xd7, 0x15, 0x06, 0x8b, 0x15, 0x06, 0xf7, 0x7b, 0xff, 0x6f, + 0xee, 0x90, 0xd0, 0xad, 0x8b, 0x27, 0x3c, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0xba, 0x95, 0x25, + 0xdd, 0x23, 0x02, 0x00, 0x00, } func (m *Account) Marshal() (dAtA []byte, err error) { @@ -157,17 +154,17 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintAcm(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Address.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintAcm(dAtA, i, uint64(m.PublicKey.Size())) - n2, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 if m.Sequence != 0 { @@ -183,25 +180,25 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintAcm(dAtA, i, uint64(m.EVMCode.Size())) - n3, err := m.EVMCode.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.EVMCode.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 dAtA[i] = 0x32 i++ i = encodeVarintAcm(dAtA, i, uint64(m.Permissions.Size())) - n4, err := m.Permissions.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Permissions.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 dAtA[i] = 0x3a i++ i = encodeVarintAcm(dAtA, i, uint64(m.WASMCode.Size())) - n5, err := m.WASMCode.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.WASMCode.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.XXX_unrecognized != nil { @@ -248,14 +245,7 @@ func (m *Account) Size() (n int) { } func sovAcm(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozAcm(x uint64) (n int) { return sovAcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/balance/balance.pb.go b/acm/balance/balance.pb.go index 1e52810f1..163d4dfa2 100644 --- a/acm/balance/balance.pb.go +++ b/acm/balance/balance.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -40,16 +41,12 @@ func (m *Balance) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Balance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Balance.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = 
b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Balance) XXX_Merge(src proto.Message) { xxx_messageInfo_Balance.Merge(m, src) @@ -89,7 +86,7 @@ func init() { proto.RegisterFile("balance.proto", fileDescriptor_ee25a00b628521b func init() { golang_proto.RegisterFile("balance.proto", fileDescriptor_ee25a00b628521b1) } var fileDescriptor_ee25a00b628521b1 = []byte{ - // 195 bytes of a gzipped FileDescriptorProto + // 199 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4d, 0x4a, 0xcc, 0x49, 0xcc, 0x4b, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x87, 0x72, 0xa5, 0x74, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, @@ -97,12 +94,12 @@ var fileDescriptor_ee25a00b628521b1 = []byte{ 0x41, 0x74, 0x0a, 0xc9, 0x70, 0xb1, 0x84, 0x54, 0x16, 0xa4, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0xf0, 0x3a, 0x71, 0xfc, 0xba, 0x27, 0x0f, 0xe6, 0x07, 0x81, 0x49, 0x21, 0x31, 0x2e, 0x36, 0xc7, 0xdc, 0xfc, 0xd2, 0xbc, 0x12, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x96, 0x20, 0x28, 0xcf, 0x8a, 0x67, 0xc6, - 0x02, 0x79, 0x86, 0x09, 0x8b, 0xe4, 0x19, 0x66, 0x2c, 0x92, 0x67, 0x70, 0xb2, 0x3f, 0xf1, 0x48, - 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x0f, 0x3c, 0x96, 0x63, 0x3c, 0xf1, - 0x58, 0x8e, 0x31, 0x4a, 0x13, 0xc9, 0x4d, 0x19, 0x95, 0x05, 0xa9, 0x45, 0x39, 0xa9, 0x29, 0xe9, - 0xa9, 0x45, 0xfa, 0x49, 0xa5, 0x45, 0x45, 0xf9, 0xe5, 0xfa, 0x89, 0xc9, 0xb9, 0xfa, 0x50, 0xe7, - 0x27, 0xb1, 0x81, 0x9d, 0x65, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xac, 0x60, 0x98, 0x58, 0xdf, - 0x00, 0x00, 0x00, + 0x02, 0x79, 0x86, 0x09, 0x8b, 0xe4, 0x19, 0x66, 0x2c, 0x92, 0x67, 0x70, 0x72, 0x3e, 0xf1, 0x48, + 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x1b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x3c, 0xf0, + 0x58, 0x8e, 0xf1, 0xc4, 0x63, 0x39, 0xc6, 0x28, 0x4d, 0x24, 0x77, 0x65, 0x54, 0x16, 0xa4, 0x16, + 0xe5, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x27, 0x95, 0x16, 0x15, 0xe5, 0x97, 0xeb, 0x27, 0x26, + 0xe7, 0xea, 0x43, 0xbd, 0x90, 0xc4, 0x06, 0x76, 0x9a, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0xcb, + 0xf5, 0xf6, 0x66, 0xe3, 0x00, 0x00, 0x00, } func (m *Balance) Marshal() (dAtA []byte, err error) { @@ -164,14 +161,7 @@ func (m *Balance) Size() (n int) { } func sovBalance(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozBalance(x uint64) (n int) { return sovBalance(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/validator/validator.go b/acm/validator/validator.go index e0ef0c168..c428e9be7 100644 --- a/acm/validator/validator.go +++ b/acm/validator/validator.go @@ -7,7 +7,6 @@ import ( "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/acm" - amino "github.com/tendermint/go-amino" ) func New(publicKey crypto.PublicKey, power *big.Int) *Validator { @@ -46,18 +45,3 @@ func FromAccount(acc *acm.Account, power uint64) *Validator { Power: power, } } - -var cdc = amino.NewCodec() - -func (v *Validator) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(v) -} - -func Decode(bs []byte) (*Validator, error) { - v := new(Validator) - err := cdc.UnmarshalBinaryBare(bs, v) - if err != nil { - return nil, err - } - return v, nil -} diff --git a/acm/validator/validator.pb.go b/acm/validator/validator.pb.go index 14decd2a3..2caeb7c8a 100644 --- a/acm/validator/validator.pb.go +++ b/acm/validator/validator.pb.go @@ -5,8 +5,8 @@ package validator import ( fmt 
"fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -42,25 +42,16 @@ func (*Validator) Descriptor() ([]byte, []int) { return fileDescriptor_bf1c6ec7c0d80dd5, []int{0} } func (m *Validator) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_Validator.Unmarshal(m, b) } func (m *Validator) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Validator.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_Validator.Marshal(b, m, deterministic) } func (m *Validator) XXX_Merge(src proto.Message) { xxx_messageInfo_Validator.Merge(m, src) } func (m *Validator) XXX_Size() int { - return m.Size() + return xxx_messageInfo_Validator.Size(m) } func (m *Validator) XXX_DiscardUnknown() { xxx_messageInfo_Validator.DiscardUnknown(m) @@ -94,7 +85,7 @@ func init() { proto.RegisterFile("validator.proto", fileDescriptor_bf1c6ec7c0d80 func init() { golang_proto.RegisterFile("validator.proto", fileDescriptor_bf1c6ec7c0d80dd5) } var fileDescriptor_bf1c6ec7c0d80dd5 = []byte{ - // 242 bytes of a gzipped FileDescriptorProto + // 238 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2f, 0x4b, 0xcc, 0xc9, 0x4c, 0x49, 0x2c, 0xc9, 0x2f, 0xd2, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x84, 0x0b, 0x48, 0xe9, 0xa6, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xa7, 0xe7, @@ -106,66 +97,12 @@ var fileDescriptor_bf1c6ec7c0d80dd5 = []byte{ 0x60, 0x06, 0x08, 0x99, 0x72, 0x71, 0x06, 0x94, 0x26, 0xe5, 0x64, 0x26, 0x7b, 0xa7, 0x56, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x09, 0xea, 0x41, 0x15, 0xc3, 0x25, 0x9c, 0x58, 0x4e, 0xdc, 0x93, 0x67, 0x08, 0x42, 0xa8, 0x14, 0x12, 0xe1, 0x62, 0x0d, 0xc8, 0x2f, 0x4f, 0x2d, 0x92, 0x60, - 0x56, 0x60, 0xd4, 0x60, 0x09, 0x82, 0x70, 0xac, 0x58, 0x66, 0x2c, 0x90, 0x67, 0x70, 0x72, 0x3c, - 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x0f, 0x3c, 0x96, 0x63, - 0x3c, 0xf1, 0x58, 0x8e, 0x31, 0x4a, 0x1b, 0xbf, 0xfb, 0x12, 0x93, 0x73, 0xf5, 0xe1, 0x81, 0x95, - 0xc4, 0x06, 0xf6, 0xb6, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x5d, 0xe5, 0x4a, 0x65, 0x51, 0x01, - 0x00, 0x00, -} - -func (m *Validator) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil + 0x56, 0x60, 0xd4, 0x60, 0x09, 0x82, 0x70, 0xac, 0x58, 0x66, 0x2c, 0x90, 0x67, 0x70, 0xb2, 0xbd, + 0xf1, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x03, 0x8f, 0xe5, 0x18, 0x4f, 0x3c, 0x96, 0x63, + 0x8c, 0xd2, 0xc6, 0xef, 0xb6, 0xc4, 0xe4, 0x5c, 0x7d, 0x78, 0x40, 0x25, 0xb1, 0x81, 0xbd, 0x6c, + 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xcf, 0xfa, 0x61, 0xd2, 0x4d, 0x01, 0x00, 0x00, } -func (m *Validator) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Address != nil { - dAtA[i] = 0xa - i++ - i = encodeVarintValidator(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n1 - } - dAtA[i] = 0x12 - i++ - i = encodeVarintValidator(dAtA, i, uint64(m.PublicKey.Size())) - n2, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n2 - if m.Power != 0 { - dAtA[i] = 0x18 - i++ - i = encodeVarintValidator(dAtA, i, 
uint64(m.Power)) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func encodeVarintValidator(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} func (m *Validator) Size() (n int) { if m == nil { return 0 @@ -188,266 +125,8 @@ func (m *Validator) Size() (n int) { } func sovValidator(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozValidator(x uint64) (n int) { return sovValidator(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *Validator) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowValidator - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Validator: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Validator: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowValidator - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthValidator - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthValidator - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - var v github_com_hyperledger_burrow_crypto.Address - m.Address = &v - if err := m.Address.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowValidator - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthValidator - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthValidator - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.PublicKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Power", wireType) - } - m.Power = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowValidator - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Power |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipValidator(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthValidator - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthValidator - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipValidator(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowValidator - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowValidator - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowValidator - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthValidator - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthValidator - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowValidator - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipValidator(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthValidator - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthValidator = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowValidator = fmt.Errorf("proto: integer overflow") -) diff --git a/bcm/bcm.pb.go b/bcm/bcm.pb.go index 9173306b5..4afb005b5 100644 --- a/bcm/bcm.pb.go +++ b/bcm/bcm.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" time "time" _ "github.com/gogo/protobuf/gogoproto" @@ -56,16 +57,12 @@ func (m *SyncInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SyncInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_SyncInfo.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *SyncInfo) XXX_Merge(src proto.Message) { xxx_messageInfo_SyncInfo.Merge(m, src) @@ -110,38 +107,103 @@ func (m *SyncInfo) GetLatestBlockDuration() time.Duration { func (*SyncInfo) XXX_MessageName() string { return "bcm.SyncInfo" } + +type PersistedState struct { + AppHashAfterLastBlock github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=AppHashAfterLastBlock,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"AppHashAfterLastBlock"` + LastBlockTime time.Time `protobuf:"bytes,2,opt,name=LastBlockTime,proto3,stdtime" json:"LastBlockTime"` + LastBlockHeight uint64 
`protobuf:"varint,3,opt,name=LastBlockHeight,proto3" json:"LastBlockHeight,omitempty"` + GenesisHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,4,opt,name=GenesisHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"GenesisHash"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PersistedState) Reset() { *m = PersistedState{} } +func (m *PersistedState) String() string { return proto.CompactTextString(m) } +func (*PersistedState) ProtoMessage() {} +func (*PersistedState) Descriptor() ([]byte, []int) { + return fileDescriptor_0c9ff3e1ca1cc0f1, []int{1} +} +func (m *PersistedState) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *PersistedState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *PersistedState) XXX_Merge(src proto.Message) { + xxx_messageInfo_PersistedState.Merge(m, src) +} +func (m *PersistedState) XXX_Size() int { + return m.Size() +} +func (m *PersistedState) XXX_DiscardUnknown() { + xxx_messageInfo_PersistedState.DiscardUnknown(m) +} + +var xxx_messageInfo_PersistedState proto.InternalMessageInfo + +func (m *PersistedState) GetLastBlockTime() time.Time { + if m != nil { + return m.LastBlockTime + } + return time.Time{} +} + +func (m *PersistedState) GetLastBlockHeight() uint64 { + if m != nil { + return m.LastBlockHeight + } + return 0 +} + +func (*PersistedState) XXX_MessageName() string { + return "bcm.PersistedState" +} func init() { proto.RegisterType((*SyncInfo)(nil), "bcm.SyncInfo") golang_proto.RegisterType((*SyncInfo)(nil), "bcm.SyncInfo") + proto.RegisterType((*PersistedState)(nil), "bcm.PersistedState") + golang_proto.RegisterType((*PersistedState)(nil), "bcm.PersistedState") } func init() { proto.RegisterFile("bcm.proto", fileDescriptor_0c9ff3e1ca1cc0f1) } func init() { golang_proto.RegisterFile("bcm.proto", fileDescriptor_0c9ff3e1ca1cc0f1) } var fileDescriptor_0c9ff3e1ca1cc0f1 = []byte{ - // 350 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4c, 0x4a, 0xce, 0xd5, - 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x4e, 0x4a, 0xce, 0x95, 0xd2, 0x4d, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0x4f, 0xcf, 0xd7, 0x07, 0xcb, 0x25, 0x95, - 0xa6, 0x81, 0x79, 0x60, 0x0e, 0x98, 0x05, 0xd1, 0x23, 0x25, 0x9f, 0x9e, 0x9f, 0x9f, 0x9e, 0x93, - 0x8a, 0x50, 0x55, 0x92, 0x99, 0x9b, 0x5a, 0x5c, 0x92, 0x98, 0x5b, 0x00, 0x55, 0x20, 0x87, 0xae, - 0x20, 0xa5, 0xb4, 0x28, 0xb1, 0x24, 0x33, 0x3f, 0x0f, 0x22, 0xaf, 0xf4, 0x9b, 0x99, 0x8b, 0x23, - 0xb8, 0x32, 0x2f, 0xd9, 0x33, 0x2f, 0x2d, 0x5f, 0xc8, 0x88, 0x4b, 0xd0, 0x27, 0xb1, 0x24, 0xb5, - 0xb8, 0xc4, 0x29, 0x27, 0x3f, 0x39, 0xdb, 0x23, 0x35, 0x33, 0x3d, 0xa3, 0x44, 0x82, 0x51, 0x81, - 0x51, 0x83, 0xc5, 0x89, 0xe5, 0xd5, 0x3d, 0x79, 0x86, 0x20, 0x4c, 0x69, 0xa1, 0x78, 0x2e, 0x7e, - 0x64, 0xc1, 0xc4, 0xe2, 0x0c, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x1e, 0x27, 0xd3, 0x13, 0xf7, 0xe4, - 0x19, 0x6e, 0xdd, 0x93, 0x47, 0xf6, 0x51, 0x46, 0x65, 0x41, 0x6a, 0x51, 0x4e, 0x6a, 0x4a, 0x7a, - 0x6a, 0x91, 0x7e, 0x52, 0x69, 0x51, 0x51, 0x7e, 0xb9, 0x7e, 0x52, 0x66, 0x5e, 0x62, 0x51, 0xa5, - 0x9e, 0x47, 0x6a, 0x85, 0x53, 0x65, 0x49, 0x6a, 0x71, 0x10, 0xba, 0x69, 0x42, 0xd1, 0x5c, 0xbc, - 0x10, 0x21, 0xc7, 0x82, 0x02, 0xb0, 0xf1, 0xcc, 0x94, 0x18, 0x8f, 0x6a, 0x96, 0x90, 0x1f, 0x8a, - 0xeb, 0x43, 0x32, 
0x73, 0x53, 0x25, 0x58, 0x14, 0x18, 0x35, 0xb8, 0x8d, 0xa4, 0xf4, 0x20, 0x01, - 0xa7, 0x07, 0x0b, 0x38, 0xbd, 0x10, 0x58, 0xc8, 0x3a, 0x71, 0x80, 0xac, 0x9e, 0x70, 0x5f, 0x9e, - 0x31, 0x08, 0x5d, 0xb3, 0x50, 0x18, 0x97, 0x30, 0x92, 0x50, 0x70, 0x6a, 0x6a, 0x1e, 0xd8, 0x4c, - 0x56, 0x12, 0xcc, 0xc4, 0x66, 0x80, 0x50, 0x28, 0x8a, 0xb9, 0x2e, 0xd0, 0x38, 0x94, 0x60, 0x03, - 0x9b, 0x2b, 0x89, 0x61, 0x2e, 0x4c, 0x01, 0xc4, 0xd8, 0x19, 0xe8, 0xc6, 0xc2, 0xa5, 0xcd, 0x4f, - 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x03, 0x8f, 0xe5, 0x18, - 0x4f, 0x3c, 0x96, 0x63, 0x8c, 0x52, 0x24, 0x10, 0xa4, 0xc9, 0xb9, 0x49, 0x6c, 0x60, 0xab, 0x8c, - 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x40, 0x34, 0x06, 0xbf, 0x02, 0x00, 0x00, + // 437 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0x3d, 0x8f, 0x94, 0x40, + 0x18, 0xc7, 0x6f, 0x76, 0xf1, 0x72, 0xce, 0xf9, 0x12, 0xc7, 0x98, 0xe0, 0x16, 0xb0, 0x5e, 0x45, + 0x23, 0x24, 0x67, 0xac, 0xac, 0x8e, 0x98, 0x78, 0x9a, 0x8b, 0x31, 0xec, 0xa9, 0x89, 0x16, 0x06, + 0xd8, 0x67, 0x61, 0x72, 0x0b, 0x43, 0x66, 0x86, 0x28, 0xdf, 0xc2, 0xd2, 0x8f, 0x63, 0xb9, 0x85, + 0x85, 0xa5, 0xb1, 0x58, 0x0d, 0xd7, 0xf9, 0x15, 0x6c, 0x0c, 0x03, 0x44, 0xe0, 0x2e, 0x31, 0xeb, + 0x76, 0x3c, 0x6f, 0x3f, 0xe6, 0xf9, 0x3f, 0x7f, 0x7c, 0x35, 0x08, 0x13, 0x3b, 0xe3, 0x4c, 0x32, + 0x32, 0x0e, 0xc2, 0x64, 0x72, 0x3f, 0xa2, 0x32, 0xce, 0x03, 0x3b, 0x64, 0x89, 0x13, 0xb1, 0x88, + 0x39, 0xaa, 0x16, 0xe4, 0x0b, 0x15, 0xa9, 0x40, 0x7d, 0xd5, 0x33, 0x13, 0x33, 0x62, 0x2c, 0x5a, + 0xc2, 0xdf, 0x2e, 0x49, 0x13, 0x10, 0xd2, 0x4f, 0xb2, 0xa6, 0xc1, 0x18, 0x36, 0xcc, 0x73, 0xee, + 0x4b, 0xca, 0xd2, 0xba, 0x7e, 0xf0, 0x7b, 0x8c, 0xf7, 0x66, 0x45, 0x1a, 0x3e, 0x4d, 0x17, 0x8c, + 0x1c, 0xe2, 0x5b, 0x27, 0xbe, 0x04, 0x21, 0xdd, 0x25, 0x0b, 0xcf, 0x8e, 0x81, 0x46, 0xb1, 0xd4, + 0xd1, 0x14, 0x59, 0x9a, 0xab, 0xfd, 0x5a, 0x9b, 0x3b, 0xde, 0xc5, 0x32, 0x79, 0x87, 0x6f, 0x76, + 0x93, 0xbe, 0x88, 0xf5, 0xd1, 0x14, 0x59, 0xd7, 0xdc, 0x87, 0xab, 0xb5, 0xb9, 0xf3, 0x7d, 0x6d, + 0x76, 0x37, 0x8a, 0x8b, 0x0c, 0xf8, 0x12, 0xe6, 0x11, 0x70, 0x27, 0xc8, 0x39, 0x67, 0xef, 0x9d, + 0x80, 0xa6, 0x3e, 0x2f, 0xec, 0x63, 0xf8, 0xe0, 0x16, 0x12, 0x84, 0x37, 0xa4, 0x91, 0xb7, 0xf8, + 0x7a, 0x9d, 0x3a, 0xca, 0x32, 0x85, 0x1f, 0x6f, 0x83, 0xef, 0xb3, 0xc8, 0xf3, 0xde, 0xeb, 0x4f, + 0x69, 0x02, 0xba, 0x36, 0x45, 0xd6, 0xfe, 0xe1, 0xc4, 0xae, 0x85, 0xb3, 0x5b, 0xe1, 0xec, 0xd3, + 0x56, 0x59, 0x77, 0xaf, 0xfa, 0xf5, 0xc7, 0x1f, 0x26, 0xf2, 0x86, 0xc3, 0xe4, 0x15, 0xbe, 0xdd, + 0x49, 0xcd, 0x00, 0x52, 0xc5, 0xbc, 0xb2, 0x01, 0xf3, 0x32, 0x00, 0x79, 0xd9, 0xe3, 0x3e, 0x6e, + 0x6e, 0xa8, 0xef, 0x2a, 0xee, 0xdd, 0x0b, 0xdc, 0xb6, 0xa1, 0xc6, 0x7e, 0x1a, 0x62, 0xdb, 0xf2, + 0xc1, 0x97, 0x11, 0xbe, 0xf1, 0x02, 0xb8, 0xa0, 0x42, 0xc2, 0x7c, 0x26, 0x7d, 0x09, 0xe4, 0x0c, + 0xdf, 0x69, 0xc4, 0x39, 0x5a, 0x48, 0xe0, 0x27, 0x7e, 0x33, 0xa3, 0x7c, 0xf0, 0xdf, 0xb2, 0x5f, + 0xce, 0x24, 0xcf, 0xaa, 0xdb, 0x76, 0xc5, 0x1f, 0x6d, 0x20, 0x54, 0x7f, 0x94, 0x58, 0xd5, 0x29, + 0xfb, 0xd6, 0xad, 0x9c, 0xa2, 0x79, 0xc3, 0x34, 0x79, 0x8d, 0xf7, 0x9f, 0x40, 0x0a, 0x82, 0x0a, + 0xe5, 0x27, 0x6d, 0x9b, 0xc5, 0xba, 0x24, 0xf7, 0xd1, 0xaa, 0x34, 0xd0, 0xd7, 0xd2, 0x40, 0xdf, + 0x4a, 0x03, 0xfd, 0x2c, 0x0d, 0xf4, 0xf9, 0xdc, 0x40, 0xab, 0x73, 0x03, 0xbd, 0xb9, 0xf7, 0x0f, + 0x6a, 0x98, 0x04, 0xbb, 0x6a, 0xd9, 0x07, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x25, 0xfd, 0xe5, + 0xbc, 0x12, 0x04, 0x00, 0x00, } func (m *SyncInfo) Marshal() (dAtA []byte, err error) { @@ -167,41 +229,41 
@@ func (m *SyncInfo) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestBlockHash.Size())) - n1, err := m.LatestBlockHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.LatestBlockHash.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x1a i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestAppHash.Size())) - n2, err := m.LatestAppHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.LatestAppHash.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 dAtA[i] = 0x22 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockTime))) - n3, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 dAtA[i] = 0x2a i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockSeenTime))) - n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 dAtA[i] = 0x32 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.LatestBlockDuration))) - n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.XXX_unrecognized != nil { @@ -210,6 +272,56 @@ func (m *SyncInfo) MarshalTo(dAtA []byte) (int, error) { return i, nil } +func (m *PersistedState) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *PersistedState) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + dAtA[i] = 0xa + i++ + i = encodeVarintBcm(dAtA, i, uint64(m.AppHashAfterLastBlock.Size())) + n6, err6 := m.AppHashAfterLastBlock.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 + } + i += n6 + dAtA[i] = 0x12 + i++ + i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LastBlockTime))) + n7, err7 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastBlockTime, dAtA[i:]) + if err7 != nil { + return 0, err7 + } + i += n7 + if m.LastBlockHeight != 0 { + dAtA[i] = 0x18 + i++ + i = encodeVarintBcm(dAtA, i, uint64(m.LastBlockHeight)) + } + dAtA[i] = 0x22 + i++ + i = encodeVarintBcm(dAtA, i, uint64(m.GenesisHash.Size())) + n8, err8 := m.GenesisHash.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 + } + i += n8 + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil +} + func encodeVarintBcm(dAtA []byte, offset int, v uint64) int { for v >= 1<<7 { dAtA[offset] = uint8(v&0x7f | 0x80) @@ -244,16 +356,30 @@ func (m *SyncInfo) Size() (n int) { return n } -func sovBcm(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } +func (m *PersistedState) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.AppHashAfterLastBlock.Size() + n += 1 + l + sovBcm(uint64(l)) + l = 
github_com_gogo_protobuf_types.SizeOfStdTime(m.LastBlockTime) + n += 1 + l + sovBcm(uint64(l)) + if m.LastBlockHeight != 0 { + n += 1 + sovBcm(uint64(m.LastBlockHeight)) + } + l = m.GenesisHash.Size() + n += 1 + l + sovBcm(uint64(l)) + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) } return n } + +func sovBcm(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} func sozBcm(x uint64) (n int) { return sovBcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } @@ -495,6 +621,178 @@ func (m *SyncInfo) Unmarshal(dAtA []byte) error { } return nil } +func (m *PersistedState) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: PersistedState: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: PersistedState: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field AppHashAfterLastBlock", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthBcm + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthBcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.AppHashAfterLastBlock.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field LastBlockTime", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthBcm + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthBcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.LastBlockTime, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LastBlockHeight", wireType) + } + m.LastBlockHeight = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LastBlockHeight |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field GenesisHash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthBcm + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthBcm + } + 
if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.GenesisHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipBcm(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthBcm + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthBcm + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipBcm(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/bcm/block_store.go b/bcm/block_store.go index 09542095f..3e3084b17 100644 --- a/bcm/block_store.go +++ b/bcm/block_store.go @@ -18,7 +18,7 @@ type BlockStore struct { func NewBlockStore(blockStore state.BlockStoreRPC) *BlockStore { return &BlockStore{ - txDecoder: txs.NewAminoCodec(), + txDecoder: txs.NewProtobufCodec(), BlockStoreRPC: blockStore, } } diff --git a/bcm/blockchain.go b/bcm/blockchain.go index 2fabb1a95..1207843ed 100644 --- a/bcm/blockchain.go +++ b/bcm/blockchain.go @@ -20,11 +20,11 @@ import ( "sync" "time" + "github.com/hyperledger/burrow/encoding" "github.com/tendermint/tendermint/types" "github.com/hyperledger/burrow/genesis" "github.com/hyperledger/burrow/logging" - amino "github.com/tendermint/go-amino" dbm "github.com/tendermint/tendermint/libs/db" ) @@ -60,13 +60,6 @@ type Blockchain struct { var _ BlockchainInfo = &Blockchain{} -type PersistedState struct { - AppHashAfterLastBlock []byte - LastBlockTime time.Time - LastBlockHeight uint64 - GenesisHash []byte -} - // LoadOrNewBlockchain returns true if state already exists func LoadOrNewBlockchain(db dbm.DB, genesisDoc *genesis.GenesisDoc, logger *logging.Logger) (_ *Blockchain, exists bool, _ error) { logger = logger.WithScope("LoadOrNewBlockchain") @@ -175,19 +168,13 @@ func (bc *Blockchain) save() error { return nil } -var cdc = amino.NewCodec() - func (bc *Blockchain) Encode() ([]byte, error) { - encodedState, err := cdc.MarshalBinaryBare(bc.persistedState) - if err != nil { - return nil, err - } - return encodedState, nil + return encoding.Encode(&bc.persistedState) } func decodeBlockchain(encodedState []byte, genesisDoc *genesis.GenesisDoc) (*Blockchain, error) { bc := NewBlockchain(nil, genesisDoc) - err := cdc.UnmarshalBinaryBare(encodedState, &bc.persistedState) + err := encoding.Decode(encodedState, &bc.persistedState) if err != nil { return nil, err } diff --git a/cmd/burrow/commands/configure.go b/cmd/burrow/commands/configure.go index c51edec42..e8a57b8f2 100644 --- a/cmd/burrow/commands/configure.go +++ b/cmd/burrow/commands/configure.go @@ -234,6 +234,7 @@ func Configure(output Output) func(cmd *cli.Cmd) { peers := make([]string, 0) if conf.GenesisDoc != nil { + // NOTE: amino is needed here to add type metadata to JSON envelope for deserialisation to work cdc := amino.NewCodec() cryptoAmino.RegisterAmino(cdc) pkg.GenesisDoc = conf.GenesisDoc diff --git a/consensus/tendermint/tendermint.pb.go b/consensus/tendermint/tendermint.pb.go index f3bf40096..7031992bb 100644 --- a/consensus/tendermint/tendermint.pb.go +++ b/consensus/tendermint/tendermint.pb.go @@ -5,8 +5,8 @@ package tendermint import ( fmt "fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -48,25 +48,16 @@ func (*NodeInfo) Descriptor() ([]byte, []int) { return 
fileDescriptor_04f926c8da23c367, []int{0} } func (m *NodeInfo) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_NodeInfo.Unmarshal(m, b) } func (m *NodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_NodeInfo.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_NodeInfo.Marshal(b, m, deterministic) } func (m *NodeInfo) XXX_Merge(src proto.Message) { xxx_messageInfo_NodeInfo.Merge(m, src) } func (m *NodeInfo) XXX_Size() int { - return m.Size() + return xxx_messageInfo_NodeInfo.Size(m) } func (m *NodeInfo) XXX_DiscardUnknown() { xxx_messageInfo_NodeInfo.DiscardUnknown(m) @@ -128,112 +119,30 @@ func init() { proto.RegisterFile("tendermint.proto", fileDescriptor_04f926c8da23 func init() { golang_proto.RegisterFile("tendermint.proto", fileDescriptor_04f926c8da23c367) } var fileDescriptor_04f926c8da23c367 = []byte{ - // 324 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xcf, 0x4e, 0xc2, 0x40, - 0x10, 0xc6, 0xdd, 0xaa, 0x80, 0x1b, 0x4d, 0x4c, 0x4f, 0x1b, 0x0f, 0x85, 0x18, 0x0f, 0x1c, 0xa4, - 0x4d, 0xfc, 0xf3, 0x00, 0x02, 0x07, 0x9a, 0x28, 0xd1, 0xc6, 0x78, 0xf0, 0x46, 0xe9, 0x50, 0x36, - 0xc0, 0x0e, 0xd9, 0xdd, 0x06, 0xfa, 0x76, 0x1e, 0x39, 0xea, 0xd5, 0x03, 0x31, 0xf0, 0x22, 0xa6, - 0xdb, 0x22, 0x78, 0xd1, 0xdb, 0xfe, 0xbe, 0x6f, 0x67, 0xe6, 0xdb, 0x1d, 0x7a, 0xaa, 0x41, 0x44, - 0x20, 0x27, 0x5c, 0x68, 0x77, 0x2a, 0x51, 0xa3, 0x4d, 0xb7, 0xca, 0x59, 0x23, 0xe6, 0x7a, 0x98, - 0x84, 0x6e, 0x1f, 0x27, 0x5e, 0x8c, 0x31, 0x7a, 0xe6, 0x4a, 0x98, 0x0c, 0x0c, 0x19, 0x30, 0xa7, - 0xbc, 0xf4, 0xfc, 0xc3, 0xa2, 0x95, 0x2e, 0x46, 0xe0, 0x8b, 0x01, 0xda, 0x6d, 0x6a, 0xf9, 0x6d, - 0x46, 0x6a, 0xa4, 0x7e, 0xdc, 0xbc, 0x59, 0x2c, 0xab, 0x7b, 0x9f, 0xcb, 0xea, 0xe5, 0x4e, 0xbf, - 0x61, 0x3a, 0x05, 0x39, 0x86, 0x28, 0x06, 0xe9, 0x85, 0x89, 0x94, 0x38, 0xf3, 0xfa, 0x32, 0x9d, - 0x6a, 0x74, 0xef, 0xa2, 0x48, 0x82, 0x52, 0x81, 0xe5, 0xb7, 0xed, 0x0b, 0x7a, 0x72, 0xcf, 0x95, - 0x06, 0x51, 0x88, 0xcc, 0xaa, 0x91, 0xfa, 0x51, 0xf0, 0x5b, 0xb4, 0x19, 0x2d, 0x77, 0x41, 0xcf, - 0x50, 0x8e, 0xd8, 0xbe, 0xf1, 0x37, 0x98, 0x39, 0x2f, 0x20, 0x15, 0x47, 0xc1, 0x0e, 0x72, 0xa7, - 0x40, 0xfb, 0x89, 0x56, 0x5a, 0xc3, 0x9e, 0x10, 0x30, 0x56, 0xec, 0xd0, 0xa4, 0xbc, 0x2d, 0x52, - 0x36, 0xfe, 0x4e, 0x19, 0x72, 0xd1, 0x93, 0xa9, 0xdb, 0x81, 0x79, 0x33, 0xd5, 0xa0, 0x82, 0x9f, - 0x36, 0xd9, 0xb0, 0x07, 0x14, 0x7c, 0x04, 0x92, 0x95, 0xf2, 0x61, 0x05, 0xda, 0x0e, 0xa5, 0xc1, - 0x63, 0x6b, 0xf3, 0x86, 0xb2, 0x31, 0x77, 0x94, 0xac, 0xf2, 0x79, 0xee, 0x8b, 0x08, 0xe6, 0xac, - 0x92, 0x57, 0x16, 0xd8, 0xec, 0x2c, 0x56, 0x0e, 0x79, 0x5f, 0x39, 0xe4, 0x6b, 0xe5, 0x90, 0xb7, - 0xb5, 0x43, 0x16, 0x6b, 0x87, 0xbc, 0x5e, 0xfd, 0xf3, 0x91, 0x28, 0x14, 0x08, 0x95, 0x28, 0x6f, - 0xbb, 0xcc, 0xb0, 0x64, 0x96, 0x74, 0xfd, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x30, 0x44, 0xcf, 0x05, - 0xf3, 0x01, 0x00, 0x00, -} - -func (m *NodeInfo) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *NodeInfo) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - dAtA[i] = 0xa - i++ - i = encodeVarintTendermint(dAtA, i, uint64(m.ID.Size())) - n1, err := m.ID.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n1 - if 
len(m.ListenAddress) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.ListenAddress))) - i += copy(dAtA[i:], m.ListenAddress) - } - if len(m.Network) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.Network))) - i += copy(dAtA[i:], m.Network) - } - if len(m.Version) > 0 { - dAtA[i] = 0x22 - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.Version))) - i += copy(dAtA[i:], m.Version) - } - dAtA[i] = 0x2a - i++ - i = encodeVarintTendermint(dAtA, i, uint64(m.Channels.Size())) - n2, err := m.Channels.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n2 - if len(m.Moniker) > 0 { - dAtA[i] = 0x32 - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.Moniker))) - i += copy(dAtA[i:], m.Moniker) - } - if len(m.RPCAddress) > 0 { - dAtA[i] = 0x3a - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.RPCAddress))) - i += copy(dAtA[i:], m.RPCAddress) - } - if len(m.TxIndex) > 0 { - dAtA[i] = 0x42 - i++ - i = encodeVarintTendermint(dAtA, i, uint64(len(m.TxIndex))) - i += copy(dAtA[i:], m.TxIndex) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x4f, 0x4f, 0xf2, 0x40, + 0x10, 0xc6, 0xdf, 0xed, 0xab, 0x80, 0x1b, 0x4d, 0x4c, 0x4f, 0x1b, 0x0f, 0x85, 0x18, 0x0f, 0x1c, + 0xa4, 0x4d, 0xfc, 0xf3, 0x01, 0x84, 0x1e, 0x6c, 0xa2, 0x44, 0x1b, 0xe3, 0xc1, 0x1b, 0xa5, 0x43, + 0xd9, 0x00, 0x3b, 0x64, 0x77, 0x1b, 0xe8, 0xb7, 0xf3, 0xc8, 0xd5, 0x9b, 0xf1, 0x40, 0x0c, 0x7c, + 0x11, 0xd3, 0x6d, 0x11, 0xbc, 0xe8, 0x6d, 0x7f, 0xcf, 0xb3, 0x33, 0xf3, 0xec, 0x0e, 0x3d, 0xd6, + 0x20, 0x62, 0x90, 0x13, 0x2e, 0xb4, 0x3b, 0x95, 0xa8, 0xd1, 0xa6, 0x5b, 0xe5, 0xa4, 0x95, 0x70, + 0x3d, 0x4c, 0x23, 0xb7, 0x8f, 0x13, 0x2f, 0xc1, 0x04, 0x3d, 0x73, 0x25, 0x4a, 0x07, 0x86, 0x0c, + 0x98, 0x53, 0x51, 0x7a, 0xfa, 0x66, 0xd1, 0x5a, 0x17, 0x63, 0x08, 0xc4, 0x00, 0x6d, 0x9f, 0x5a, + 0x81, 0xcf, 0x48, 0x83, 0x34, 0x0f, 0xdb, 0x57, 0x8b, 0x65, 0xfd, 0xdf, 0xc7, 0xb2, 0x7e, 0xbe, + 0xd3, 0x6f, 0x98, 0x4d, 0x41, 0x8e, 0x21, 0x4e, 0x40, 0x7a, 0x51, 0x2a, 0x25, 0xce, 0xbc, 0xbe, + 0xcc, 0xa6, 0x1a, 0xdd, 0x9b, 0x38, 0x96, 0xa0, 0x54, 0x68, 0x05, 0xbe, 0x7d, 0x46, 0x8f, 0xee, + 0xb8, 0xd2, 0x20, 0x4a, 0x91, 0x59, 0x0d, 0xd2, 0x3c, 0x08, 0x7f, 0x8a, 0x36, 0xa3, 0xd5, 0x2e, + 0xe8, 0x19, 0xca, 0x11, 0xfb, 0x6f, 0xfc, 0x0d, 0xe6, 0xce, 0x33, 0x48, 0xc5, 0x51, 0xb0, 0xbd, + 0xc2, 0x29, 0xd1, 0x7e, 0xa4, 0xb5, 0xce, 0xb0, 0x27, 0x04, 0x8c, 0x15, 0xdb, 0x37, 0x29, 0xaf, + 0xcb, 0x94, 0xad, 0xdf, 0x53, 0x46, 0x5c, 0xf4, 0x64, 0xe6, 0xde, 0xc2, 0xbc, 0x9d, 0x69, 0x50, + 0xe1, 0x77, 0x9b, 0x7c, 0xd8, 0x3d, 0x0a, 0x3e, 0x02, 0xc9, 0x2a, 0xc5, 0xb0, 0x12, 0x6d, 0x87, + 0xd2, 0xf0, 0xa1, 0xb3, 0x79, 0x43, 0xd5, 0x98, 0x3b, 0x4a, 0x5e, 0xf9, 0x34, 0x0f, 0x44, 0x0c, + 0x73, 0x56, 0x2b, 0x2a, 0x4b, 0x6c, 0xfb, 0xef, 0x2b, 0x87, 0x7c, 0xae, 0x1c, 0xf2, 0xba, 0x76, + 0xc8, 0x62, 0xed, 0x90, 0x97, 0x8b, 0x3f, 0x3e, 0x11, 0x85, 0x02, 0xa1, 0x52, 0xe5, 0x6d, 0x17, + 0x19, 0x55, 0xcc, 0x82, 0x2e, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xba, 0x9e, 0x0e, 0xac, 0xef, + 0x01, 0x00, 0x00, } -func encodeVarintTendermint(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} func (m *NodeInfo) Size() (n int) { if m == nil { return 0 @@ -275,437 +184,8 @@ func (m *NodeInfo) Size() (n int) { } func sovTendermint(x 
uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozTendermint(x uint64) (n int) { return sovTendermint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *NodeInfo) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: NodeInfo: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: NodeInfo: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.ID.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ListenAddress", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ListenAddress = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Network", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Network = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } 
- if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Version = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Channels", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.Channels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Moniker", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Moniker = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 7: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field RPCAddress", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.RPCAddress = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 8: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field TxIndex", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTendermint - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTendermint - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTendermint - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.TxIndex = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipTendermint(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthTendermint - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthTendermint - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipTendermint(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTendermint - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTendermint - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTendermint - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthTendermint - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthTendermint - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTendermint - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipTendermint(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthTendermint - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthTendermint = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowTendermint = fmt.Errorf("proto: integer overflow") -) diff --git a/consensus/tendermint/tendermint_test.go b/consensus/tendermint/tendermint_test.go new file mode 100644 index 000000000..1b11ad318 --- /dev/null +++ b/consensus/tendermint/tendermint_test.go @@ -0,0 +1,28 @@ +package tendermint + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "github.com/tendermint/go-amino" + cryptoAmino "github.com/tendermint/tendermint/crypto/encoding/amino" +) + +func TestMarshalNodeKey(t *testing.T) { + + cdc := amino.NewCodec() + cryptoAmino.RegisterAmino(cdc) + + nodeKey := NewNodeKey() + + bsAmino, err := cdc.MarshalJSON(nodeKey) + require.NoError(t, err) + fmt.Println(string(bsAmino)) + + bs, err := json.Marshal(nodeKey) + require.NoError(t, err) + + fmt.Println(string(bs)) +} diff --git a/core/kernel.go b/core/kernel.go index b751678a3..f3dee657c 100644 --- a/core/kernel.go +++ b/core/kernel.go @@ -96,7 +96,7 @@ func NewKernel(dbDir string) (*Kernel, error) { processes: make(map[string]process.Process), listeners: make(map[string]net.Listener), shutdownNotify: make(chan struct{}), - txCodec: txs.NewAminoCodec(), + txCodec: txs.NewProtobufCodec(), database: dbm.NewDB(BurrowDBName, dbm.GoLevelDBBackend, dbDir), }, err } diff --git a/core/processes.go b/core/processes.go index e1178d774..f5cf9bbed 100644 --- a/core/processes.go +++ b/core/processes.go @@ -281,7 +281,7 @@ func 
GRPCLauncher(kern *Kernel, conf *rpc.ServerConfig, keyConfig *keys.KeysConf rpcquery.RegisterQueryServer(grpcServer, rpcquery.NewQueryServer(kern.State, nameRegState, proposalRegState, kern.Blockchain, kern.State, nodeView, kern.Logger)) - txCodec := txs.NewAminoCodec() + txCodec := txs.NewProtobufCodec() rpctransact.RegisterTransactServer(grpcServer, rpctransact.NewTransactServer(kern.State, kern.Blockchain, kern.Transactor, txCodec, kern.Logger)) diff --git a/crypto/crypto.pb.go b/crypto/crypto.pb.go index a7bce82f6..161cb490d 100644 --- a/crypto/crypto.pb.go +++ b/crypto/crypto.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -43,16 +44,12 @@ func (m *PublicKey) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *PublicKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PublicKey.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *PublicKey) XXX_Merge(src proto.Message) { xxx_messageInfo_PublicKey.Merge(m, src) @@ -96,16 +93,12 @@ func (m *PrivateKey) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *PrivateKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PrivateKey.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *PrivateKey) XXX_Merge(src proto.Message) { xxx_messageInfo_PrivateKey.Merge(m, src) @@ -140,16 +133,12 @@ func (m *Signature) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Signature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Signature.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Signature) XXX_Merge(src proto.Message) { xxx_messageInfo_Signature.Merge(m, src) @@ -193,7 +182,7 @@ func init() { proto.RegisterFile("crypto.proto", fileDescriptor_527278fb02d03321 func init() { golang_proto.RegisterFile("crypto.proto", fileDescriptor_527278fb02d03321) } var fileDescriptor_527278fb02d03321 = []byte{ - // 278 bytes of a gzipped FileDescriptorProto + // 282 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x49, 0x2e, 0xaa, 0x2c, 0x28, 0xc9, 0xd7, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x83, 0xf0, 0xa4, 0x74, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, 0xd2, @@ -208,10 +197,10 @@ var fileDescriptor_527278fb02d03321 = []byte{ 0xb1, 0x24, 0x95, 0x64, 0x67, 0xc9, 0x60, 0x38, 0x0b, 0xc9, 0x7c, 0x21, 0x39, 0x64, 0x83, 0x25, 0x98, 0xc1, 0xd2, 0x48, 0x22, 0x56, 0x1c, 0x1d, 0x0b, 0xe4, 0x19, 0xc0, 0x6e, 0x88, 0xe1, 0xe2, 0x0c, 0xce, 0x4c, 0xcf, 0x4b, 0x2c, 0x29, 0x2d, 0x4a, 0x25, 0xd9, 0x05, 0x70, 0x9d, 0x30, 0x17, - 0xc0, 0x05, 0x20, 0x3e, 0x74, 0xb2, 0x3a, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, - 0x8f, 
0xe4, 0x18, 0x0f, 0x3c, 0x96, 0x63, 0x3c, 0xf1, 0x58, 0x8e, 0x31, 0x4a, 0x05, 0x7f, 0xb8, - 0x41, 0xa2, 0x38, 0x89, 0x0d, 0x1c, 0x75, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe4, 0xcc, - 0x65, 0x14, 0x01, 0x02, 0x00, 0x00, + 0xc0, 0x05, 0x20, 0x3e, 0x74, 0xb2, 0x3b, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x1b, + 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x3c, 0xf0, 0x58, 0x8e, 0xf1, 0xc4, 0x63, 0x39, 0xc6, + 0x28, 0x15, 0xfc, 0x61, 0x07, 0x89, 0xe6, 0x24, 0x36, 0x70, 0xf4, 0x19, 0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0x20, 0x25, 0x33, 0xe2, 0x05, 0x02, 0x00, 0x00, } func (m *PublicKey) Marshal() (dAtA []byte, err error) { @@ -237,9 +226,9 @@ func (m *PublicKey) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintCrypto(dAtA, i, uint64(m.PublicKey.Size())) - n1, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.PublicKey.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if m.XXX_unrecognized != nil { @@ -387,14 +376,7 @@ func (m *Signature) Size() (n int) { } func sovCrypto(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozCrypto(x uint64) (n int) { return sovCrypto(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/dump/dump.pb.go b/dump/dump.pb.go index adb99f74b..a0990f643 100644 --- a/dump/dump.pb.go +++ b/dump/dump.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" time "time" _ "github.com/gogo/protobuf/gogoproto" @@ -52,16 +53,12 @@ func (m *Storage) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Storage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Storage.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Storage) XXX_Merge(src proto.Message) { xxx_messageInfo_Storage.Merge(m, src) @@ -97,16 +94,12 @@ func (m *AccountStorage) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *AccountStorage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_AccountStorage.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *AccountStorage) XXX_Merge(src proto.Message) { xxx_messageInfo_AccountStorage.Merge(m, src) @@ -153,16 +146,12 @@ func (m *EVMEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EVMEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_EVMEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *EVMEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_EVMEvent.Merge(m, src) @@ -222,16 +211,12 @@ func (m *Dump) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Dump) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Dump.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] 
- n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Dump) XXX_Merge(src proto.Message) { xxx_messageInfo_Dump.Merge(m, src) @@ -298,37 +283,37 @@ func init() { proto.RegisterFile("dump.proto", fileDescriptor_58418148159c29a6) func init() { golang_proto.RegisterFile("dump.proto", fileDescriptor_58418148159c29a6) } var fileDescriptor_58418148159c29a6 = []byte{ - // 475 bytes of a gzipped FileDescriptorProto + // 479 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0xbf, 0x6f, 0x13, 0x31, 0x14, 0xc6, 0xcd, 0xa5, 0x69, 0x9d, 0xd2, 0xc1, 0x42, 0xe8, 0x94, 0xe1, 0x12, 0x9d, 0x10, 0x54, - 0x88, 0x3a, 0x52, 0xa0, 0xa8, 0x03, 0x4b, 0x43, 0x83, 0x8a, 0x0a, 0x1d, 0x4c, 0x55, 0x24, 0xb6, - 0xfb, 0xf1, 0xf0, 0x9d, 0x94, 0x3b, 0x9f, 0x7c, 0x3e, 0xe8, 0xed, 0x2c, 0x6c, 0xfc, 0x05, 0xfc, - 0x2d, 0x8c, 0x19, 0x11, 0x23, 0x43, 0x41, 0xe9, 0x3f, 0x82, 0xce, 0x67, 0x13, 0xe8, 0x80, 0xe8, - 0xf6, 0xde, 0xfb, 0xfc, 0x3e, 0x7f, 0xfe, 0x3e, 0x63, 0x1c, 0x57, 0x59, 0x41, 0x0b, 0x29, 0x94, - 0x20, 0x4e, 0x53, 0x0f, 0x76, 0x79, 0xaa, 0x92, 0x2a, 0xa4, 0x91, 0xc8, 0xc6, 0x5c, 0x70, 0x31, - 0xd6, 0x60, 0x58, 0xbd, 0xd5, 0x9d, 0x6e, 0x74, 0xd5, 0x2e, 0x0d, 0x86, 0x5c, 0x08, 0x3e, 0x87, - 0xd5, 0x29, 0x95, 0x66, 0x50, 0xaa, 0xc0, 0xb2, 0x0e, 0x36, 0x83, 0x28, 0x33, 0x25, 0x86, 0x73, - 0x88, 0x4c, 0xdd, 0xcf, 0x83, 0x0c, 0xca, 0xb6, 0xf1, 0x3f, 0x23, 0xdc, 0x7b, 0xa5, 0x84, 0x0c, - 0x38, 0x90, 0x67, 0xb8, 0x73, 0x0c, 0xb5, 0x8b, 0x46, 0x68, 0x67, 0x6b, 0xfa, 0x68, 0x71, 0x31, - 0xbc, 0xf1, 0xfd, 0x62, 0xf8, 0xe0, 0x0f, 0x51, 0x49, 0x5d, 0x80, 0x9c, 0x43, 0xcc, 0x41, 0x8e, - 0xc3, 0x4a, 0x4a, 0xf1, 0x7e, 0x1c, 0xa6, 0x79, 0x20, 0x6b, 0xfa, 0x5a, 0xc8, 0x78, 0xb2, 0xf7, - 0x98, 0x35, 0x04, 0xe4, 0x18, 0x77, 0xcf, 0x82, 0x79, 0x05, 0xee, 0x9a, 0x66, 0xda, 0x33, 0x4c, - 0xbb, 0xff, 0xc5, 0x74, 0x04, 0xe7, 0xd3, 0x5a, 0x41, 0xc9, 0x5a, 0x0e, 0xff, 0x23, 0xc2, 0xdb, - 0x07, 0x51, 0x24, 0xaa, 0x5c, 0x59, 0x9d, 0x27, 0xb8, 0x77, 0x10, 0xc7, 0x12, 0xca, 0xf2, 0x7a, - 0x5a, 0x23, 0x59, 0x17, 0x4a, 0x50, 0xb3, 0xcb, 0x2c, 0x09, 0xb9, 0xf7, 0xdb, 0x02, 0x77, 0x6d, - 0xd4, 0xd9, 0xe9, 0x4f, 0x6e, 0x52, 0x9d, 0x8d, 0x19, 0x32, 0x8b, 0xfa, 0x1f, 0x10, 0xde, 0x98, - 0x9d, 0xbd, 0x9c, 0xbd, 0x83, 0x5c, 0x11, 0x17, 0xf7, 0x9e, 0x26, 0x41, 0x9a, 0x3f, 0x3f, 0xd4, - 0x2a, 0x36, 0x99, 0x6d, 0xc9, 0x3e, 0x76, 0x4e, 0xd3, 0xac, 0x7d, 0x7e, 0x7f, 0x32, 0xa0, 0x6d, - 0x4e, 0xd4, 0xe6, 0x44, 0x4f, 0x6d, 0x4e, 0xd3, 0x8d, 0x46, 0xf8, 0xa7, 0x1f, 0x43, 0xc4, 0xf4, - 0x06, 0xb9, 0x83, 0xbb, 0x9a, 0xdc, 0xed, 0xe8, 0xd5, 0x6d, 0xaa, 0x63, 0x7b, 0x21, 0xb8, 0x9e, - 0xb2, 0x16, 0xf4, 0xbf, 0x21, 0xec, 0x1c, 0x56, 0x59, 0x41, 0x6e, 0xe3, 0xf5, 0x23, 0x48, 0x79, - 0xa2, 0xb4, 0x02, 0x87, 0x99, 0x8e, 0xdc, 0xc5, 0x3d, 0x63, 0x99, 0xd1, 0xb0, 0x45, 0x9b, 0xaf, - 0x60, 0x66, 0xcc, 0x82, 0xe4, 0xc9, 0x55, 0x6b, 0xcd, 0xbd, 0xb7, 0xda, 0xf7, 0xff, 0x8d, 0xb1, - 0xab, 0x31, 0xdc, 0x5f, 0x99, 0xe1, 0x3a, 0x46, 0xaf, 0xde, 0xb3, 0x53, 0xb6, 0x32, 0x6b, 0x84, - 0x9d, 0x93, 0x20, 0x03, 0xb7, 0x6b, 0xe4, 0xb4, 0x5f, 0x70, 0x96, 0x2b, 0x59, 0x33, 0x8d, 0x4c, - 0xf7, 0x17, 0x4b, 0x0f, 0x7d, 0x5d, 0x7a, 0xe8, 0xe7, 0xd2, 0x43, 0x5f, 0x2e, 0x3d, 0xb4, 0xb8, - 0xf4, 0xd0, 0x1b, 0xff, 0xdf, 0x89, 0x36, 0xd7, 0x85, 0xeb, 0xda, 0xd8, 0x87, 0xbf, 0x02, 0x00, - 0x00, 0xff, 0xff, 0x60, 0x4c, 0x49, 0xe5, 0x51, 0x03, 0x00, 0x00, + 0x88, 0x3a, 0x52, 0xa0, 0x88, 0xa1, 0x4b, 
0x43, 0x83, 0x8a, 0x0a, 0x1d, 0x4c, 0x55, 0x24, 0xb6, + 0xfb, 0xf1, 0x70, 0x4e, 0xca, 0x9d, 0x4f, 0x3e, 0x1f, 0xf4, 0x76, 0x16, 0x36, 0xfe, 0x02, 0xfe, + 0x16, 0xc6, 0x8c, 0x88, 0x09, 0x31, 0x14, 0x74, 0xfd, 0x47, 0xd0, 0xf9, 0x6c, 0x02, 0x1d, 0x10, + 0x6c, 0xef, 0xbd, 0xcf, 0xef, 0xf3, 0xe7, 0xef, 0x33, 0xc6, 0x71, 0x99, 0xe6, 0x34, 0x97, 0x42, + 0x09, 0xe2, 0x34, 0xf5, 0x60, 0x97, 0x27, 0x6a, 0x5e, 0x86, 0x34, 0x12, 0xe9, 0x98, 0x0b, 0x2e, + 0xc6, 0x1a, 0x0c, 0xcb, 0xd7, 0xba, 0xd3, 0x8d, 0xae, 0xda, 0xa5, 0xc1, 0x90, 0x0b, 0xc1, 0x17, + 0xb0, 0x3a, 0xa5, 0x92, 0x14, 0x0a, 0x15, 0x58, 0xd6, 0xc1, 0x66, 0x10, 0xa5, 0xa6, 0xc4, 0x70, + 0x0e, 0x91, 0xa9, 0xfb, 0x59, 0x90, 0x42, 0xd1, 0x36, 0xfe, 0x47, 0x84, 0x7b, 0x2f, 0x94, 0x90, + 0x01, 0x07, 0xf2, 0x04, 0x77, 0x8e, 0xa1, 0x72, 0xd1, 0x08, 0xed, 0x6c, 0x4d, 0x1f, 0x2c, 0x2f, + 0x86, 0xd7, 0xbe, 0x5d, 0x0c, 0xef, 0xfd, 0x26, 0x6a, 0x5e, 0xe5, 0x20, 0x17, 0x10, 0x73, 0x90, + 0xe3, 0xb0, 0x94, 0x52, 0xbc, 0x1d, 0x87, 0x49, 0x16, 0xc8, 0x8a, 0xbe, 0x14, 0x32, 0x9e, 0xec, + 0x3d, 0x64, 0x0d, 0x01, 0x39, 0xc6, 0xdd, 0xb3, 0x60, 0x51, 0x82, 0xbb, 0xa6, 0x99, 0xf6, 0x0c, + 0xd3, 0xee, 0x3f, 0x31, 0x1d, 0xc1, 0xf9, 0xb4, 0x52, 0x50, 0xb0, 0x96, 0xc3, 0x7f, 0x8f, 0xf0, + 0xf6, 0x41, 0x14, 0x89, 0x32, 0x53, 0x56, 0xe7, 0x09, 0xee, 0x1d, 0xc4, 0xb1, 0x84, 0xa2, 0xf8, + 0x3f, 0xad, 0x91, 0xac, 0x72, 0x25, 0xa8, 0xd9, 0x65, 0x96, 0x84, 0xdc, 0xf9, 0x65, 0x81, 0xbb, + 0x36, 0xea, 0xec, 0xf4, 0x27, 0xd7, 0xa9, 0xce, 0xc6, 0x0c, 0x99, 0x45, 0xfd, 0x77, 0x08, 0x6f, + 0xcc, 0xce, 0x9e, 0xcf, 0xde, 0x40, 0xa6, 0x88, 0x8b, 0x7b, 0x8f, 0xe7, 0x41, 0x92, 0x3d, 0x3d, + 0xd4, 0x2a, 0x36, 0x99, 0x6d, 0xc9, 0x23, 0xec, 0x9c, 0x26, 0x69, 0xfb, 0xfc, 0xfe, 0x64, 0x40, + 0xdb, 0x9c, 0xa8, 0xcd, 0x89, 0x9e, 0xda, 0x9c, 0xa6, 0x1b, 0x8d, 0xf0, 0x0f, 0xdf, 0x87, 0x88, + 0xe9, 0x0d, 0x72, 0x0b, 0x77, 0x35, 0xb9, 0xdb, 0xd1, 0xab, 0xdb, 0x54, 0xc7, 0xf6, 0x4c, 0x70, + 0x3d, 0x65, 0x2d, 0xe8, 0x7f, 0x41, 0xd8, 0x39, 0x2c, 0xd3, 0x9c, 0xdc, 0xc4, 0xeb, 0x47, 0x90, + 0xf0, 0xb9, 0xd2, 0x0a, 0x1c, 0x66, 0x3a, 0x72, 0x1b, 0xf7, 0x8c, 0x65, 0x46, 0xc3, 0x16, 0x6d, + 0xbe, 0x82, 0x99, 0x31, 0x0b, 0x92, 0xfd, 0xab, 0xd6, 0x9a, 0x7b, 0x6f, 0xb4, 0xef, 0xff, 0x13, + 0x63, 0x57, 0x63, 0xb8, 0xbb, 0x32, 0xc3, 0x75, 0x8c, 0x5e, 0xbd, 0x67, 0xa7, 0x6c, 0x65, 0xd6, + 0x08, 0x3b, 0x27, 0x41, 0x0a, 0x6e, 0xd7, 0xc8, 0x69, 0xbf, 0xe0, 0x2c, 0x53, 0xb2, 0x62, 0x1a, + 0x99, 0xee, 0x2f, 0x6b, 0x0f, 0x7d, 0xae, 0x3d, 0xf4, 0xb5, 0xf6, 0xd0, 0x8f, 0xda, 0x43, 0x9f, + 0x2e, 0x3d, 0xb4, 0xbc, 0xf4, 0xd0, 0x2b, 0xff, 0xef, 0xa9, 0x36, 0x57, 0x86, 0xeb, 0xda, 0xdc, + 0xfb, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe9, 0xd0, 0x90, 0x5d, 0x55, 0x03, 0x00, 0x00, } func (m *Storage) Marshal() (dAtA []byte, err error) { @@ -349,17 +334,17 @@ func (m *Storage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Key.Size())) - n1, err := m.Key.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Key.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Value.Size())) - n2, err := m.Value.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Value.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 if m.XXX_unrecognized != nil { @@ -386,9 +371,9 @@ func (m *AccountStorage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Address.Size())) - n3, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, 
err + n3, err3 := m.Address.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 if len(m.Storage) > 0 { @@ -433,18 +418,18 @@ func (m *EVMEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 if m.Event != nil { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.Event.Size())) - n5, err := m.Event.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.Event.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -478,9 +463,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Account.Size())) - n6, err := m.Account.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.Account.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -488,9 +473,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.AccountStorage.Size())) - n7, err := m.AccountStorage.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.AccountStorage.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -498,9 +483,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintDump(dAtA, i, uint64(m.EVMEvent.Size())) - n8, err := m.EVMEvent.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.EVMEvent.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -508,9 +493,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintDump(dAtA, i, uint64(m.Name.Size())) - n9, err := m.Name.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.Name.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -619,14 +604,7 @@ func (m *Dump) Size() (n int) { } func sovDump(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozDump(x uint64) (n int) { return sovDump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/encoding/encoding.pb.go b/encoding/encoding.pb.go new file mode 100644 index 000000000..11209f200 --- /dev/null +++ b/encoding/encoding.pb.go @@ -0,0 +1,372 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: encoding.proto + +package encoding + +import ( + fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + golang_proto "github.com/golang/protobuf/proto" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = golang_proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package + +// For testing +type TestMessage struct { + Type uint32 `protobuf:"varint,1,opt,name=Type,proto3" json:"Type,omitempty"` + Amount uint64 `protobuf:"varint,2,opt,name=Amount,proto3" json:"Amount,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestMessage) Reset() { *m = TestMessage{} } +func (*TestMessage) ProtoMessage() {} +func (*TestMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_ac330e3fa468db3c, []int{0} +} +func (m *TestMessage) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *TestMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *TestMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestMessage.Merge(m, src) +} +func (m *TestMessage) XXX_Size() int { + return m.Size() +} +func (m *TestMessage) XXX_DiscardUnknown() { + xxx_messageInfo_TestMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_TestMessage proto.InternalMessageInfo + +func (m *TestMessage) GetType() uint32 { + if m != nil { + return m.Type + } + return 0 +} + +func (m *TestMessage) GetAmount() uint64 { + if m != nil { + return m.Amount + } + return 0 +} + +func (*TestMessage) XXX_MessageName() string { + return "encoding.TestMessage" +} +func init() { + proto.RegisterType((*TestMessage)(nil), "encoding.TestMessage") + golang_proto.RegisterType((*TestMessage)(nil), "encoding.TestMessage") +} + +func init() { proto.RegisterFile("encoding.proto", fileDescriptor_ac330e3fa468db3c) } +func init() { golang_proto.RegisterFile("encoding.proto", fileDescriptor_ac330e3fa468db3c) } + +var fileDescriptor_ac330e3fa468db3c = []byte{ + // 189 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4b, 0xcd, 0x4b, 0xce, + 0x4f, 0xc9, 0xcc, 0x4b, 0xd7, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x80, 0xf1, 0xa5, 0x74, + 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, + 0xc1, 0x0a, 0x92, 0x4a, 0xd3, 0xc0, 0x3c, 0x30, 0x07, 0xcc, 0x82, 0x68, 0x54, 0xb2, 0xe7, 0xe2, + 0x0e, 0x49, 0x2d, 0x2e, 0xf1, 0x4d, 0x2d, 0x2e, 0x4e, 0x4c, 0x4f, 0x15, 0x12, 0xe2, 0x62, 0x09, + 0xa9, 0x2c, 0x48, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0d, 0x02, 0xb3, 0x85, 0xc4, 0xb8, 0xd8, + 0x1c, 0x73, 0xf3, 0x4b, 0xf3, 0x4a, 0x24, 0x98, 0x14, 0x18, 0x35, 0x58, 0x82, 0xa0, 0x3c, 0x2b, + 0x96, 0x19, 0x0b, 0xe4, 0x19, 0x9c, 0x1c, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, + 0xc6, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x0f, 0x3c, 0x96, 0x63, 0x3c, 0xf1, 0x58, 0x8e, + 0x31, 0x4a, 0x0d, 0xc9, 0x25, 0x19, 0x95, 0x05, 0xa9, 0x45, 0x39, 0xa9, 0x29, 0xe9, 0xa9, 0x45, + 0xfa, 0x49, 0xa5, 0x45, 0x45, 0xf9, 0xe5, 0xfa, 0x30, 0x17, 0x27, 0xb1, 0x81, 0x5d, 0x62, 0x0c, + 0x08, 0x00, 0x00, 0xff, 0xff, 0xe3, 0x76, 0xda, 0xe3, 0xd4, 0x00, 0x00, 0x00, +} + +func (m *TestMessage) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *TestMessage) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + if m.Type != 0 { + dAtA[i] = 0x8 + i++ + i = encodeVarintEncoding(dAtA, i, uint64(m.Type)) + } + if m.Amount != 0 { + dAtA[i] = 0x10 + i++ + i = encodeVarintEncoding(dAtA, 
i, uint64(m.Amount)) + } + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil +} + +func encodeVarintEncoding(dAtA []byte, offset int, v uint64) int { + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return offset + 1 +} +func (m *TestMessage) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Type != 0 { + n += 1 + sovEncoding(uint64(m.Type)) + } + if m.Amount != 0 { + n += 1 + sovEncoding(uint64(m.Amount)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func sovEncoding(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozEncoding(x uint64) (n int) { + return sovEncoding(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *TestMessage) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEncoding + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: TestMessage: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: TestMessage: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) + } + m.Type = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEncoding + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Type |= uint32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Amount", wireType) + } + m.Amount = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEncoding + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Amount |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipEncoding(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthEncoding + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthEncoding + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipEncoding(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEncoding + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEncoding + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + return iNdEx, nil + case 1: + iNdEx += 8 + return iNdEx, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEncoding + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthEncoding + } + iNdEx += length + if iNdEx < 0 { + return 0, ErrInvalidLengthEncoding + } + return iNdEx, nil + case 3: + for { + var innerWire uint64 + var start int = iNdEx + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEncoding + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipEncoding(dAtA[start:]) + if err != nil { + return 0, err + } + iNdEx = start + next + if iNdEx < 0 { + return 0, ErrInvalidLengthEncoding + } + } + return iNdEx, nil + case 4: + return iNdEx, nil + case 5: + iNdEx += 4 + return iNdEx, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +var ( + ErrInvalidLengthEncoding = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowEncoding = fmt.Errorf("proto: integer overflow") +) diff --git a/encoding/protobuf.go b/encoding/protobuf.go new file mode 100644 index 000000000..211209848 --- /dev/null +++ b/encoding/protobuf.go @@ -0,0 +1,120 @@ +package encoding + +import ( + "encoding/binary" + "fmt" + "io" + + "github.com/golang/protobuf/proto" +) + +type Buffer struct { + *proto.Buffer +} + +// Centralise proto.Buffer constructor. gogo protobuf freaks out about deterministic +// marshalling on objects with custom marshallers so we use google protobuf here +func NewBuffer(bs []byte) *Buffer { + buf := proto.NewBuffer(bs) + buf.SetDeterministic(true) + return &Buffer{Buffer: buf} +} + +// Single shot encoding +func Encode(msg proto.Message) ([]byte, error) { + buf := NewBuffer(nil) + err := buf.Marshal(msg) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +// Single shot decoding +func Decode(bs []byte, msg proto.Message) error { + return NewBuffer(bs).Unmarshal(msg) +} + +// Write messages with length-prefix framing to the provider Writer. Returns the number of bytes written. 
+func WriteMessage(w io.Writer, pb proto.Message) (int, error) { + const errHeader = "WriteMessage()" + buf := NewBuffer(nil) + err := buf.Marshal(pb) + if err != nil { + return 0, fmt.Errorf("%s: %v", errHeader, err) + } + // Write length prefix + bs := make([]byte, binary.MaxVarintLen64) + n := binary.PutVarint(bs, int64(len(buf.Bytes()))) + written, err := w.Write(bs[:n]) + if err != nil { + return written, fmt.Errorf("%s: %v", errHeader, err) + } + // Write message + n, err = w.Write(buf.Bytes()) + written += n + return written, nil +} + +// Read messages with length-prefix framing from the provided Reader. Returns the number of bytes read and io.EOF if +// ReadMessage is called exactly on the end of a stream. +func ReadMessage(r io.Reader, pb proto.Message) (int, error) { + const errHeader = "ReadMessage()" + // Read varint + br := newByteReader(r) + msgLength, err := binary.ReadVarint(br) + if err != nil { + // Only return EOF if called precisely at the end of stream + if err == io.EOF && br.read == 0 { + return 0, io.EOF + } + return br.read, fmt.Errorf("%s: %v", errHeader, err) + } + read := br.read + // Use any message bytes at end of buffer + bs := make([]byte, msgLength) + n, err := r.Read(bs) + read += n + if err != nil { + return read, fmt.Errorf("%s: %v", errHeader, err) + } + if len(bs) != n { + return read, fmt.Errorf("%s: expected protobuf message of %d bytes but could only read %d bytes", + errHeader, msgLength, n) + } + err = proto.NewBuffer(bs).Unmarshal(pb) + if err != nil { + return read, fmt.Errorf("%s: %v", errHeader, err) + } + return read, nil +} + +type byteReader struct { + io.Reader + byte []byte + read int +} + +func newByteReader(r io.Reader) *byteReader { + return &byteReader{ + Reader: r, + byte: make([]byte, 1), + } +} + +func (br *byteReader) ReadByte() (byte, error) { + br.byte[0] = 0 + n, err := br.Read(br.byte) + if err != nil { + return 0, err + } + if n == 0 { + return 0, io.EOF + } + br.read++ + return br.byte[0], nil +} + +func (tm TestMessage) String() string { + return fmt.Sprintf("{Type: %d, Amount: %d}", tm.Type, tm.Amount) +} diff --git a/encoding/protobuf_test.go b/encoding/protobuf_test.go new file mode 100644 index 000000000..abd8236a7 --- /dev/null +++ b/encoding/protobuf_test.go @@ -0,0 +1,81 @@ +package encoding + +import ( + "bytes" + "fmt" + "io" + "math/rand" + "reflect" + "strings" + "testing" + "testing/quick" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWriteMessages(t *testing.T) { + var fErr error + + f := func(msgs []TestMessage) bool { + var n, written, read int + buf := new(bytes.Buffer) + // encode + for _, msg := range msgs { + n, fErr = WriteMessage(buf, &msg) + written += n + if fErr != nil { + return false + } + } + + // Require non nil for equality check later + msgOut := []TestMessage{} + // decode + for { + msg := new(TestMessage) + n, fErr = ReadMessage(buf, msg) + read += n + if fErr != nil { + if fErr == io.EOF { + fErr = nil + break + } + return false + } + + msgOut = append(msgOut, *msg) + } + + return assert.Equal(t, msgs, msgOut, "messages read should equal those written") && + assert.Equal(t, written, read, "should read the same number of bytes as written") + } + err := quick.Check(f, &quick.Config{ + // Takes about a second on my machine + MaxCount: 9994, + Rand: rand.New(rand.NewSource(320492384234234)), + // Custom value function because arbitrary values for some of the XXX fields can mess things up for proto.Marshal + Values: func(values []reflect.Value, 
rand *rand.Rand) { + for i := 0; i < len(values); i++ { + msgs := make([]TestMessage, rand.Intn(200)) + for j := range msgs { + msgs[j] = TestMessage{Type: rand.Uint32(), Amount: rand.Uint64()} + } + values[i] = reflect.ValueOf(msgs) + } + }, + }) + if err != nil { + var literal string + err := err.(*quick.CheckError) + for _, in := range err.In { + var str []string + for _, v := range in.([]TestMessage) { + str = append(str, v.String()) + } + literal = fmt.Sprintf("msgs := []TestMessage{%s}", strings.Join(str, ", ")) + } + t.Logf("CheckError with:\n%s", literal) + } + require.NoError(t, fErr) +} diff --git a/execution/contexts/call_context.go b/execution/contexts/call_context.go index 75a1f5132..dfd9ff328 100644 --- a/execution/contexts/call_context.go +++ b/execution/contexts/call_context.go @@ -191,7 +191,7 @@ func (ctx *CallContext) Deliver(inAcc, outAcc *acm.Account, value uint64) error txHash := ctx.txe.Envelope.Tx.Hash() logger := ctx.Logger.With(structure.TxHashKey, txHash) var exception errors.CodedError - if wcode != nil { + if len(wcode) != 0 { if createContract { txCache.InitWASMCode(callee, wcode) } diff --git a/execution/errors/errors.pb.go b/execution/errors/errors.pb.go index a75c80951..f88cc6abe 100644 --- a/execution/errors/errors.pb.go +++ b/execution/errors/errors.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -42,16 +43,12 @@ func (m *Exception) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Exception) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Exception.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Exception) XXX_Merge(src proto.Message) { xxx_messageInfo_Exception.Merge(m, src) @@ -91,7 +88,7 @@ func init() { proto.RegisterFile("errors.proto", fileDescriptor_24fe73c7f0ddb19c func init() { golang_proto.RegisterFile("errors.proto", fileDescriptor_24fe73c7f0ddb19c) } var fileDescriptor_24fe73c7f0ddb19c = []byte{ - // 190 bytes of a gzipped FileDescriptorProto + // 194 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x49, 0x2d, 0x2a, 0xca, 0x2f, 0x2a, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x83, 0xf0, 0xa4, 0x74, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, 0xd2, @@ -99,11 +96,12 @@ var fileDescriptor_24fe73c7f0ddb19c = []byte{ 0x24, 0xa7, 0x16, 0x94, 0x64, 0xe6, 0xe7, 0x09, 0xc9, 0x70, 0xb1, 0x38, 0xe7, 0xa7, 0xa4, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0xf0, 0x3a, 0x71, 0xfc, 0xba, 0x27, 0x0f, 0xe6, 0x07, 0x81, 0x49, 0x21, 0x19, 0x24, 0xa5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x08, 0x01, 0x2b, 0x96, 0x19, 0x0b, - 0xe4, 0x19, 0x9c, 0x5c, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, - 0xc6, 0x03, 0x8f, 0xe5, 0x18, 0x4f, 0x3c, 0x96, 0x63, 0x8c, 0xd2, 0x43, 0x72, 0x53, 0x46, 0x65, - 0x41, 0x6a, 0x51, 0x4e, 0x6a, 0x4a, 0x7a, 0x6a, 0x91, 0x7e, 0x52, 0x69, 0x51, 0x51, 0x7e, 0xb9, - 0x7e, 0x6a, 0x45, 0x6a, 0x72, 0x29, 0xc8, 0x10, 0x7d, 0x88, 0x1f, 0x92, 0xd8, 0xc0, 0x6e, 0x33, - 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x33, 0x9a, 0x19, 0x05, 0xe2, 0x00, 0x00, 0x00, + 0xe4, 0x19, 0x9c, 0x3c, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 
0x48, 0x8e, 0xf1, 0xc6, 0x23, 0x39, + 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x0f, 0x3c, 0x96, 0x63, 0x3c, 0xf1, 0x58, 0x8e, 0x31, 0x4a, 0x0f, + 0xc9, 0x5d, 0x19, 0x95, 0x05, 0xa9, 0x45, 0x39, 0xa9, 0x29, 0xe9, 0xa9, 0x45, 0xfa, 0x49, 0xa5, + 0x45, 0x45, 0xf9, 0xe5, 0xfa, 0xa9, 0x15, 0xa9, 0xc9, 0xa5, 0x20, 0x83, 0xf4, 0x21, 0xfe, 0x48, + 0x62, 0x03, 0xbb, 0xcf, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x38, 0x0d, 0x6e, 0xa5, 0xe6, 0x00, + 0x00, 0x00, } func (m *Exception) Marshal() (dAtA []byte, err error) { @@ -167,14 +165,7 @@ func (m *Exception) Size() (n int) { } func sovErrors(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozErrors(x uint64) (n int) { return sovErrors(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/exec/block_execution.go b/execution/exec/block_execution.go index 9e7fc2b2c..3cf328d69 100644 --- a/execution/exec/block_execution.go +++ b/execution/exec/block_execution.go @@ -11,8 +11,8 @@ import ( func EventStringBlockExecution(height uint64) string { return fmt.Sprintf("Execution/Block/%v", height) } // Write out TxExecutions parenthetically -func (be *BlockExecution) StreamEvents() StreamEvents { - var ses StreamEvents +func (be *BlockExecution) StreamEvents() []*StreamEvent { + var ses []*StreamEvent ses = append(ses, &StreamEvent{ BeginBlock: &BeginBlock{ Height: be.Height, @@ -29,10 +29,6 @@ func (be *BlockExecution) StreamEvents() StreamEvents { }) } -func (be *BlockExecution) EncodeHeader() ([]byte, error) { - return cdc.MarshalBinaryBare(be.Header) -} - func (*BlockExecution) EventType() EventType { return TypeBlockExecution } diff --git a/execution/exec/codec.go b/execution/exec/codec.go index beb9d32a7..f67e572ed 100644 --- a/execution/exec/codec.go +++ b/execution/exec/codec.go @@ -2,4 +2,4 @@ package exec import "github.com/hyperledger/burrow/txs" -var cdc = txs.NewAminoCodec() +var cdc = txs.NewProtobufCodec() diff --git a/execution/exec/exec.pb.go b/execution/exec/exec.pb.go index bee8cee9c..30c9a7708 100644 --- a/execution/exec/exec.pb.go +++ b/execution/exec/exec.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" time "time" _ "github.com/gogo/protobuf/gogoproto" @@ -39,6 +40,54 @@ var _ = time.Kitchen // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package +// This message exists purely for framing []StreamEvent +type StreamEvents struct { + StreamEvents []*StreamEvent `protobuf:"bytes,1,rep,name=StreamEvents,proto3" json:"StreamEvents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamEvents) Reset() { *m = StreamEvents{} } +func (m *StreamEvents) String() string { return proto.CompactTextString(m) } +func (*StreamEvents) ProtoMessage() {} +func (*StreamEvents) Descriptor() ([]byte, []int) { + return fileDescriptor_4d737c7315c25422, []int{0} +} +func (m *StreamEvents) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *StreamEvents) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *StreamEvents) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamEvents.Merge(m, src) +} +func (m *StreamEvents) XXX_Size() int { + return m.Size() +} +func (m *StreamEvents) XXX_DiscardUnknown() { + xxx_messageInfo_StreamEvents.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamEvents proto.InternalMessageInfo + +func (m *StreamEvents) GetStreamEvents() []*StreamEvent { + if m != nil { + return m.StreamEvents + } + return nil +} + +func (*StreamEvents) XXX_MessageName() string { + return "exec.StreamEvents" +} + type StreamEvent struct { BeginBlock *BeginBlock `protobuf:"bytes,1,opt,name=BeginBlock,proto3" json:"BeginBlock,omitempty"` BeginTx *BeginTx `protobuf:"bytes,2,opt,name=BeginTx,proto3" json:"BeginTx,omitempty"` @@ -55,22 +104,18 @@ func (m *StreamEvent) Reset() { *m = StreamEvent{} } func (m *StreamEvent) String() string { return proto.CompactTextString(m) } func (*StreamEvent) ProtoMessage() {} func (*StreamEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{0} + return fileDescriptor_4d737c7315c25422, []int{1} } func (m *StreamEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *StreamEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_StreamEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *StreamEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_StreamEvent.Merge(m, src) @@ -136,22 +181,18 @@ func (m *BeginBlock) Reset() { *m = BeginBlock{} } func (m *BeginBlock) String() string { return proto.CompactTextString(m) } func (*BeginBlock) ProtoMessage() {} func (*BeginBlock) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{1} + return fileDescriptor_4d737c7315c25422, []int{2} } func (m *BeginBlock) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *BeginBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_BeginBlock.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *BeginBlock) XXX_Merge(src proto.Message) { xxx_messageInfo_BeginBlock.Merge(m, src) @@ -194,22 +235,18 @@ func (m *EndBlock) Reset() { *m = 
EndBlock{} } func (m *EndBlock) String() string { return proto.CompactTextString(m) } func (*EndBlock) ProtoMessage() {} func (*EndBlock) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{2} + return fileDescriptor_4d737c7315c25422, []int{3} } func (m *EndBlock) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EndBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_EndBlock.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *EndBlock) XXX_Merge(src proto.Message) { xxx_messageInfo_EndBlock.Merge(m, src) @@ -249,22 +286,18 @@ func (m *BeginTx) Reset() { *m = BeginTx{} } func (m *BeginTx) String() string { return proto.CompactTextString(m) } func (*BeginTx) ProtoMessage() {} func (*BeginTx) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{3} + return fileDescriptor_4d737c7315c25422, []int{4} } func (m *BeginTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *BeginTx) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_BeginTx.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *BeginTx) XXX_Merge(src proto.Message) { xxx_messageInfo_BeginTx.Merge(m, src) @@ -315,22 +348,18 @@ func (m *EndTx) Reset() { *m = EndTx{} } func (m *EndTx) String() string { return proto.CompactTextString(m) } func (*EndTx) ProtoMessage() {} func (*EndTx) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{4} + return fileDescriptor_4d737c7315c25422, []int{5} } func (m *EndTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EndTx) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_EndTx.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *EndTx) XXX_Merge(src proto.Message) { xxx_messageInfo_EndTx.Merge(m, src) @@ -368,22 +397,18 @@ func (m *TxHeader) Reset() { *m = TxHeader{} } func (m *TxHeader) String() string { return proto.CompactTextString(m) } func (*TxHeader) ProtoMessage() {} func (*TxHeader) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{5} + return fileDescriptor_4d737c7315c25422, []int{6} } func (m *TxHeader) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TxHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_TxHeader.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *TxHeader) XXX_Merge(src proto.Message) { xxx_messageInfo_TxHeader.Merge(m, src) @@ -443,22 +468,18 @@ func (m *BlockExecution) Reset() { *m = BlockExecution{} } func (m *BlockExecution) String() string { return 
proto.CompactTextString(m) } func (*BlockExecution) ProtoMessage() {} func (*BlockExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{6} + return fileDescriptor_4d737c7315c25422, []int{7} } func (m *BlockExecution) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *BlockExecution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_BlockExecution.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *BlockExecution) XXX_Merge(src proto.Message) { xxx_messageInfo_BlockExecution.Merge(m, src) @@ -511,22 +532,18 @@ func (m *TxExecutionKey) Reset() { *m = TxExecutionKey{} } func (m *TxExecutionKey) String() string { return proto.CompactTextString(m) } func (*TxExecutionKey) ProtoMessage() {} func (*TxExecutionKey) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{7} + return fileDescriptor_4d737c7315c25422, []int{8} } func (m *TxExecutionKey) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TxExecutionKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_TxExecutionKey.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *TxExecutionKey) XXX_Merge(src proto.Message) { xxx_messageInfo_TxExecutionKey.Merge(m, src) @@ -581,22 +598,18 @@ func (m *TxExecution) Reset() { *m = TxExecution{} } func (m *TxExecution) String() string { return proto.CompactTextString(m) } func (*TxExecution) ProtoMessage() {} func (*TxExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{8} + return fileDescriptor_4d737c7315c25422, []int{9} } func (m *TxExecution) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TxExecution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_TxExecution.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *TxExecution) XXX_Merge(src proto.Message) { xxx_messageInfo_TxExecution.Merge(m, src) @@ -667,22 +680,18 @@ func (m *Origin) Reset() { *m = Origin{} } func (m *Origin) String() string { return proto.CompactTextString(m) } func (*Origin) ProtoMessage() {} func (*Origin) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{9} + return fileDescriptor_4d737c7315c25422, []int{10} } func (m *Origin) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Origin) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Origin.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Origin) XXX_Merge(src proto.Message) { xxx_messageInfo_Origin.Merge(m, src) @@ -751,22 +760,18 @@ type Header struct { func (m 
*Header) Reset() { *m = Header{} } func (*Header) ProtoMessage() {} func (*Header) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{10} + return fileDescriptor_4d737c7315c25422, []int{11} } func (m *Header) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Header.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Header) XXX_Merge(src proto.Message) { xxx_messageInfo_Header.Merge(m, src) @@ -841,22 +846,18 @@ type Event struct { func (m *Event) Reset() { *m = Event{} } func (*Event) ProtoMessage() {} func (*Event) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{11} + return fileDescriptor_4d737c7315c25422, []int{12} } func (m *Event) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Event) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Event.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Event) XXX_Merge(src proto.Message) { xxx_messageInfo_Event.Merge(m, src) @@ -935,22 +936,18 @@ func (m *Result) Reset() { *m = Result{} } func (m *Result) String() string { return proto.CompactTextString(m) } func (*Result) ProtoMessage() {} func (*Result) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{12} + return fileDescriptor_4d737c7315c25422, []int{13} } func (m *Result) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Result.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Result) XXX_Merge(src proto.Message) { xxx_messageInfo_Result.Merge(m, src) @@ -1009,22 +1006,18 @@ func (m *LogEvent) Reset() { *m = LogEvent{} } func (m *LogEvent) String() string { return proto.CompactTextString(m) } func (*LogEvent) ProtoMessage() {} func (*LogEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{13} + return fileDescriptor_4d737c7315c25422, []int{14} } func (m *LogEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *LogEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_LogEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *LogEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_LogEvent.Merge(m, src) @@ -1057,22 +1050,18 @@ func (m *CallEvent) Reset() { *m = CallEvent{} } func (m *CallEvent) String() string { return proto.CompactTextString(m) } func (*CallEvent) ProtoMessage() {} func (*CallEvent) Descriptor() ([]byte, []int) { - return 
fileDescriptor_4d737c7315c25422, []int{14} + return fileDescriptor_4d737c7315c25422, []int{15} } func (m *CallEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *CallEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_CallEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *CallEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_CallEvent.Merge(m, src) @@ -1122,22 +1111,18 @@ func (m *GovernAccountEvent) Reset() { *m = GovernAccountEvent{} } func (m *GovernAccountEvent) String() string { return proto.CompactTextString(m) } func (*GovernAccountEvent) ProtoMessage() {} func (*GovernAccountEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{15} + return fileDescriptor_4d737c7315c25422, []int{16} } func (m *GovernAccountEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *GovernAccountEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GovernAccountEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *GovernAccountEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_GovernAccountEvent.Merge(m, src) @@ -1173,22 +1158,18 @@ func (m *InputEvent) Reset() { *m = InputEvent{} } func (m *InputEvent) String() string { return proto.CompactTextString(m) } func (*InputEvent) ProtoMessage() {} func (*InputEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{16} + return fileDescriptor_4d737c7315c25422, []int{17} } func (m *InputEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *InputEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_InputEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *InputEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_InputEvent.Merge(m, src) @@ -1217,22 +1198,18 @@ func (m *OutputEvent) Reset() { *m = OutputEvent{} } func (m *OutputEvent) String() string { return proto.CompactTextString(m) } func (*OutputEvent) ProtoMessage() {} func (*OutputEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{17} + return fileDescriptor_4d737c7315c25422, []int{18} } func (m *OutputEvent) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *OutputEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_OutputEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *OutputEvent) XXX_Merge(src proto.Message) { xxx_messageInfo_OutputEvent.Merge(m, src) @@ -1265,22 +1242,18 @@ func (m *CallData) Reset() { *m = CallData{} } func (m *CallData) String() string { return 
proto.CompactTextString(m) } func (*CallData) ProtoMessage() {} func (*CallData) Descriptor() ([]byte, []int) { - return fileDescriptor_4d737c7315c25422, []int{18} + return fileDescriptor_4d737c7315c25422, []int{19} } func (m *CallData) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *CallData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_CallData.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *CallData) XXX_Merge(src proto.Message) { xxx_messageInfo_CallData.Merge(m, src) @@ -1312,6 +1285,8 @@ func (*CallData) XXX_MessageName() string { return "exec.CallData" } func init() { + proto.RegisterType((*StreamEvents)(nil), "exec.StreamEvents") + golang_proto.RegisterType((*StreamEvents)(nil), "exec.StreamEvents") proto.RegisterType((*StreamEvent)(nil), "exec.StreamEvent") golang_proto.RegisterType((*StreamEvent)(nil), "exec.StreamEvent") proto.RegisterType((*BeginBlock)(nil), "exec.BeginBlock") @@ -1356,86 +1331,120 @@ func init() { proto.RegisterFile("exec.proto", fileDescriptor_4d737c7315c25422) func init() { golang_proto.RegisterFile("exec.proto", fileDescriptor_4d737c7315c25422) } var fileDescriptor_4d737c7315c25422 = []byte{ - // 1252 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xef, 0xda, 0xeb, 0x7f, 0x63, 0xa7, 0x94, 0x51, 0xa9, 0xac, 0x1e, 0xec, 0xb0, 0x85, 0x52, - 0x42, 0xbb, 0xae, 0x02, 0x01, 0x14, 0x24, 0x44, 0xdc, 0x44, 0x49, 0x68, 0x68, 0x60, 0xea, 0x16, - 0x81, 0xe0, 0xb0, 0xde, 0x9d, 0xac, 0x57, 0xb5, 0x77, 0x57, 0xbb, 0xe3, 0xb0, 0xfe, 0x02, 0x1c, - 0x10, 0x07, 0xb8, 0x95, 0x0b, 0xea, 0xf7, 0xe0, 0xc2, 0x31, 0x37, 0x7a, 0xee, 0xc1, 0xa0, 0xf4, - 0x13, 0x20, 0x4e, 0xe4, 0x84, 0x66, 0xe6, 0xcd, 0x7a, 0x97, 0xa6, 0x49, 0x85, 0x73, 0xe0, 0x62, - 0xcd, 0x7b, 0xef, 0x37, 0xcf, 0x6f, 0xde, 0xfb, 0xbd, 0xf7, 0x16, 0x21, 0x9a, 0x50, 0xdb, 0x0c, - 0xa3, 0x80, 0x05, 0x58, 0xe7, 0xe7, 0xcb, 0x37, 0x5c, 0x8f, 0x0d, 0xc6, 0x7d, 0xd3, 0x0e, 0x46, - 0x1d, 0x37, 0x70, 0x83, 0x8e, 0x30, 0xf6, 0xc7, 0x7b, 0x42, 0x12, 0x82, 0x38, 0xc9, 0x4b, 0x97, - 0xdf, 0xcb, 0xc0, 0x19, 0xf5, 0x1d, 0x1a, 0x8d, 0x3c, 0x9f, 0x65, 0x8f, 0x56, 0xdf, 0xf6, 0x3a, - 0x6c, 0x12, 0xd2, 0x58, 0xfe, 0xc2, 0xc5, 0xb6, 0x1b, 0x04, 0xee, 0x90, 0xce, 0xdc, 0x33, 0x6f, - 0x44, 0x63, 0x66, 0x8d, 0x42, 0x00, 0x34, 0x68, 0x14, 0x05, 0x91, 0x82, 0xd7, 0x7d, 0x6b, 0x94, - 0xde, 0xad, 0xb1, 0x44, 0x1d, 0x2f, 0x84, 0xfc, 0x6f, 0xe2, 0xd8, 0x0b, 0x7c, 0xd0, 0xa0, 0x38, - 0x54, 0x4f, 0x32, 0x7e, 0x29, 0xa0, 0xfa, 0x5d, 0x16, 0x51, 0x6b, 0xb4, 0xb1, 0x4f, 0x7d, 0x86, - 0x6f, 0x22, 0xd4, 0xa5, 0xae, 0xe7, 0x77, 0x87, 0x81, 0xfd, 0xa0, 0xa9, 0x2d, 0x6a, 0xd7, 0xea, - 0xcb, 0x17, 0x4c, 0x91, 0x83, 0x99, 0x9e, 0x64, 0x30, 0xf8, 0x0d, 0x54, 0x11, 0x52, 0x2f, 0x69, - 0x16, 0x04, 0x7c, 0x21, 0x03, 0xef, 0x25, 0x44, 0x59, 0xf1, 0x17, 0xa8, 0xba, 0xe1, 0xef, 0xd3, - 0x61, 0x10, 0xd2, 0x66, 0x11, 0x90, 0x3c, 0x4c, 0xa5, 0xec, 0x9a, 0x4f, 0xa6, 0xed, 0xa5, 0x4c, - 0xb6, 0x06, 0x93, 0x90, 0x46, 0x43, 0xea, 0xb8, 0x34, 0xea, 0xf4, 0xc7, 0x51, 0x14, 0x7c, 0xd3, - 0xc9, 0xe2, 0x49, 0xea, 0x0e, 0xbf, 0x8a, 0x4a, 0x22, 0xfc, 0xa6, 0x2e, 0xfc, 0xd6, 0x65, 0x04, - 0x42, 0x45, 0xa4, 0x45, 0x40, 0x7c, 0xa7, 0x97, 0x34, 0x4b, 0x39, 0x08, 0x57, 0x11, 0x69, 0xc1, - 0x4b, 0x3c, 
0x40, 0x47, 0xbe, 0xbc, 0x2c, 0x50, 0xe7, 0x53, 0x94, 0x7c, 0x77, 0x6a, 0x5f, 0xd5, - 0x0f, 0x1e, 0xb5, 0x35, 0xe3, 0x76, 0x36, 0x5b, 0xf8, 0x12, 0x2a, 0x6f, 0x51, 0xcf, 0x1d, 0x30, - 0x91, 0x37, 0x9d, 0x80, 0x84, 0x5f, 0xe7, 0x7a, 0xcb, 0xa1, 0x51, 0x9a, 0x20, 0x59, 0x66, 0xa9, - 0x24, 0x60, 0x34, 0x8c, 0xd9, 0xdf, 0x3f, 0xcf, 0x95, 0xf1, 0xbd, 0x96, 0x66, 0x9b, 0x87, 0xdb, - 0x4b, 0xc0, 0xb1, 0x96, 0x0d, 0x57, 0x69, 0x49, 0x6a, 0xc7, 0xaf, 0xa1, 0x32, 0xa1, 0xf1, 0x78, - 0xc8, 0x20, 0x84, 0x86, 0x44, 0x4a, 0x1d, 0x01, 0x1b, 0xee, 0xa0, 0xda, 0x46, 0x62, 0xd3, 0x90, - 0x79, 0x81, 0x0f, 0xa9, 0x7c, 0xd9, 0x04, 0x92, 0xa5, 0x06, 0x32, 0xc3, 0x18, 0xf7, 0x21, 0xa9, - 0xf8, 0x13, 0x54, 0xee, 0x25, 0x5b, 0x56, 0x3c, 0x10, 0x95, 0x6d, 0x74, 0x57, 0x0e, 0xa6, 0xed, - 0x73, 0x4f, 0xa6, 0xed, 0x1b, 0x27, 0x97, 0xb3, 0xef, 0xf9, 0x56, 0x34, 0x31, 0xb7, 0x68, 0xd2, - 0x9d, 0x30, 0x1a, 0x13, 0x70, 0x62, 0xfc, 0xad, 0xcd, 0xde, 0x86, 0x3f, 0xe6, 0xbe, 0x7b, 0x93, - 0x90, 0x8a, 0x57, 0x2e, 0x74, 0x97, 0x8f, 0xa6, 0x6d, 0xf3, 0x54, 0x9a, 0x74, 0x42, 0x6b, 0x32, - 0x0c, 0x2c, 0xc7, 0xe4, 0x37, 0x09, 0x78, 0xc8, 0xc4, 0x59, 0x38, 0x83, 0x38, 0x33, 0x65, 0x2a, - 0xe6, 0x2a, 0x7e, 0x11, 0x95, 0xb6, 0x7d, 0x87, 0x26, 0x22, 0x89, 0x3a, 0x91, 0x02, 0x2f, 0xc2, - 0x6e, 0xe4, 0xb9, 0x9e, 0x0f, 0x1c, 0x84, 0x22, 0x48, 0x1d, 0x01, 0x9b, 0xf1, 0xad, 0x86, 0xce, - 0x0b, 0x12, 0x6c, 0x24, 0xd4, 0x1e, 0xf3, 0x34, 0xcf, 0x49, 0x2c, 0xbc, 0x82, 0x1a, 0xbd, 0x24, - 0xf5, 0x16, 0x37, 0x8b, 0x8b, 0x45, 0x59, 0x59, 0x49, 0x96, 0xd4, 0x42, 0x72, 0x30, 0xe3, 0x23, - 0x74, 0x3e, 0x23, 0xdf, 0xa6, 0x93, 0xe7, 0xc6, 0x71, 0x09, 0x95, 0x77, 0xf7, 0xf6, 0x62, 0x2a, - 0xd9, 0xa5, 0x13, 0x90, 0x8c, 0x3f, 0x0b, 0xa8, 0x9e, 0x71, 0x81, 0xaf, 0xa7, 0xf1, 0x1e, 0xcb, - 0xd7, 0xae, 0xfe, 0x78, 0xda, 0xd6, 0xd2, 0xb0, 0xb3, 0xf3, 0xa2, 0x7c, 0xb6, 0xf3, 0xe2, 0x0a, - 0x2a, 0x8b, 0xa9, 0x10, 0x37, 0x2b, 0x22, 0x17, 0xb9, 0x81, 0x01, 0xa6, 0x4c, 0xcf, 0x54, 0x4f, - 0xe8, 0x99, 0xab, 0xa8, 0x42, 0xa8, 0x4d, 0xbd, 0x90, 0x35, 0x6b, 0x00, 0xe3, 0x7f, 0x0a, 0x3a, - 0xa2, 0x8c, 0xf9, 0xde, 0x42, 0xa7, 0xf7, 0xd6, 0x33, 0x55, 0xab, 0xbf, 0x58, 0xd5, 0xbe, 0xd3, - 0x14, 0xcb, 0x70, 0x13, 0x55, 0x6e, 0x0d, 0x2c, 0xcf, 0xdf, 0x5e, 0x17, 0xf9, 0xae, 0x11, 0x25, - 0x66, 0x0a, 0x59, 0x38, 0x9e, 0xb7, 0xc5, 0x2c, 0x6f, 0xdf, 0x47, 0x7a, 0xcf, 0x1b, 0x51, 0x98, - 0x08, 0x97, 0x4d, 0xb9, 0x97, 0x4c, 0xb5, 0x97, 0xcc, 0x9e, 0xda, 0x4b, 0xdd, 0x2a, 0x6f, 0xa7, - 0x1f, 0x7e, 0x6f, 0x6b, 0x44, 0xdc, 0x30, 0x7e, 0x2b, 0xa8, 0x8a, 0xff, 0x9f, 0xbb, 0xf8, 0x2d, - 0x54, 0x13, 0x25, 0x17, 0xd1, 0x15, 0x45, 0x74, 0x0b, 0x47, 0xd3, 0xf6, 0x4c, 0x49, 0x66, 0x47, - 0x9e, 0x54, 0x21, 0x6c, 0xaf, 0x8b, 0x7c, 0xd4, 0x88, 0x12, 0x33, 0x49, 0x2d, 0x1d, 0x9f, 0xd4, - 0x72, 0x36, 0xa9, 0x39, 0x3e, 0x54, 0x4e, 0xe7, 0xc3, 0xaa, 0xfe, 0xf0, 0x51, 0xfb, 0x9c, 0xf1, - 0x63, 0x01, 0x56, 0x1d, 0xa7, 0x67, 0xae, 0x99, 0x80, 0x9e, 0xff, 0xea, 0xfd, 0xab, 0xfc, 0xcf, - 0xc3, 0xb1, 0x9a, 0xfb, 0xb0, 0xca, 0x85, 0x0a, 0xd6, 0xa3, 0x38, 0xe3, 0x37, 0x51, 0x79, 0x77, - 0xcc, 0x38, 0xb0, 0xa8, 0x62, 0x11, 0xb3, 0x49, 0xe8, 0xa0, 0x2f, 0xa4, 0x80, 0xaf, 0x20, 0xfd, - 0x96, 0x35, 0x1c, 0x02, 0x1d, 0x5e, 0x92, 0x40, 0xae, 0x91, 0x30, 0x61, 0xc4, 0x8b, 0xa8, 0xb8, - 0x13, 0xb8, 0x30, 0xe8, 0xa0, 0xcf, 0x77, 0x02, 0x57, 0x42, 0xb8, 0x09, 0x7f, 0x88, 0x16, 0x36, - 0x83, 0x7d, 0x1a, 0xf9, 0x6b, 0xb6, 0x1d, 0x8c, 0x7d, 0x06, 0x3d, 0xde, 0x94, 0xd8, 0x9c, 0x49, - 0xde, 0xca, 0xc3, 0x57, 0xab, 0x3c, 0x1f, 0x62, 0x0b, 0x3f, 0xd4, 0x54, 0xa7, 0xf2, 0x1a, 0x10, - 0xca, 0xc6, 0x91, 0x2f, 0x92, 0xd2, 
0x20, 0x20, 0xf1, 0xaa, 0x6d, 0x5a, 0xf1, 0xbd, 0x98, 0x3a, - 0xc0, 0x78, 0x25, 0xe2, 0x25, 0x54, 0xbb, 0x63, 0x8d, 0xe8, 0x86, 0xcf, 0xa2, 0x09, 0xbc, 0xbd, - 0x61, 0xca, 0x4f, 0x29, 0xa1, 0x23, 0x33, 0x33, 0xbe, 0x89, 0xaa, 0x9f, 0xd2, 0x68, 0xb4, 0x16, - 0xb9, 0x31, 0xbc, 0xfe, 0xa2, 0x99, 0xf9, 0xba, 0x52, 0x36, 0x92, 0xa2, 0x8c, 0xbf, 0x34, 0x54, - 0x55, 0xcf, 0xc6, 0x77, 0x50, 0x65, 0xcd, 0x71, 0x22, 0x1a, 0xc7, 0x32, 0xba, 0xee, 0x3b, 0xc0, - 0xdb, 0xeb, 0x27, 0xf3, 0xd6, 0x8e, 0x26, 0x21, 0x0b, 0x4c, 0xb8, 0x4b, 0x94, 0x13, 0xbc, 0x8d, - 0xf4, 0x75, 0x8b, 0x59, 0xf3, 0x35, 0x81, 0x70, 0x81, 0x77, 0x50, 0xb9, 0x17, 0x84, 0x9e, 0x2d, - 0x97, 0xc3, 0x0b, 0x47, 0x06, 0xce, 0x3e, 0x0f, 0x22, 0x67, 0x79, 0xe5, 0x5d, 0x02, 0x3e, 0x8c, - 0x9f, 0x0b, 0xa8, 0x96, 0x12, 0x02, 0x5f, 0x43, 0x55, 0x2e, 0x88, 0xee, 0x2a, 0x89, 0xee, 0x6a, - 0x1c, 0x4d, 0xdb, 0xa9, 0x8e, 0xa4, 0x27, 0xfe, 0x45, 0xc3, 0xcf, 0xe2, 0x51, 0xb9, 0x0d, 0xa1, - 0xb4, 0x24, 0xb5, 0xf3, 0x88, 0x61, 0x99, 0x16, 0xe6, 0xc8, 0xa5, 0x1a, 0x95, 0x2d, 0x84, 0xee, - 0x32, 0xcb, 0x7e, 0xb0, 0x4e, 0x43, 0x36, 0x80, 0xe9, 0x97, 0xd1, 0xf0, 0x89, 0x03, 0xbc, 0xd2, - 0xe7, 0x9a, 0x38, 0xd2, 0x89, 0xf1, 0x19, 0xc2, 0xcf, 0x12, 0x1c, 0x7f, 0x80, 0x16, 0x40, 0xbe, - 0x17, 0x3a, 0x16, 0xa3, 0x90, 0x83, 0x57, 0x4c, 0xf1, 0xbd, 0xde, 0xa3, 0xa3, 0x70, 0x68, 0x31, - 0x0a, 0x10, 0x92, 0xc7, 0x1a, 0x5f, 0x21, 0x34, 0xeb, 0xea, 0xb3, 0xa6, 0x9a, 0xf1, 0x35, 0xaa, - 0x67, 0x46, 0xc1, 0x99, 0xbb, 0xff, 0xa9, 0x80, 0x72, 0x95, 0xe5, 0x67, 0x18, 0x6c, 0xff, 0xb9, - 0xb2, 0xd2, 0x47, 0xea, 0x8d, 0xce, 0xc7, 0x13, 0xe9, 0x23, 0x6d, 0xb9, 0xe2, 0xfc, 0x2d, 0x77, - 0x11, 0x95, 0xee, 0x5b, 0xc3, 0x31, 0x55, 0xdf, 0x88, 0x42, 0xc0, 0x17, 0x50, 0x71, 0xd3, 0x8a, - 0x61, 0x83, 0xf0, 0x63, 0xb7, 0x7b, 0x70, 0xd8, 0xd2, 0x1e, 0x1f, 0xb6, 0xb4, 0x3f, 0x0e, 0x5b, - 0xda, 0xaf, 0x4f, 0x5b, 0xda, 0xc1, 0xd3, 0x96, 0xf6, 0xe5, 0x29, 0xe1, 0x53, 0xf5, 0x41, 0x20, - 0x4e, 0xfd, 0xb2, 0xd8, 0xd5, 0x6f, 0xff, 0x13, 0x00, 0x00, 0xff, 0xff, 0xba, 0x03, 0xcc, 0x65, - 0xcd, 0x0e, 0x00, 0x00, + // 1275 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0xcf, 0x93, 0x13, 0xc5, + 0x17, 0x67, 0x92, 0xc9, 0xaf, 0x4e, 0x96, 0x2f, 0x74, 0xf1, 0xa5, 0x52, 0x1c, 0x12, 0x1c, 0x14, + 0x11, 0x61, 0x42, 0xad, 0xa2, 0x16, 0x56, 0x59, 0x12, 0x36, 0xc2, 0x0a, 0x82, 0x36, 0x01, 0x4b, + 0x4b, 0x0f, 0x93, 0x99, 0x66, 0x32, 0x45, 0x32, 0x33, 0x35, 0xd3, 0xc1, 0xc9, 0x3f, 0xe0, 0xc1, + 0xf2, 0xa0, 0x37, 0xbc, 0x58, 0xfc, 0x1f, 0x5e, 0x3c, 0xee, 0x4d, 0x8e, 0x16, 0x87, 0x68, 0x2d, + 0x7f, 0x81, 0xe5, 0xc9, 0x3d, 0x59, 0xdd, 0xfd, 0x7a, 0xd2, 0x23, 0xcb, 0x2e, 0x65, 0xf6, 0xe0, + 0x25, 0xd5, 0xef, 0xbd, 0x4f, 0xbf, 0xbc, 0x7e, 0xef, 0xf3, 0xde, 0x1b, 0x84, 0x68, 0x46, 0x5d, + 0x3b, 0x4e, 0x22, 0x16, 0x61, 0x93, 0x9f, 0x4f, 0x9c, 0xf7, 0x03, 0x36, 0x9e, 0x8d, 0x6c, 0x37, + 0x9a, 0xf6, 0xfc, 0xc8, 0x8f, 0x7a, 0xc2, 0x38, 0x9a, 0xdd, 0x13, 0x92, 0x10, 0xc4, 0x49, 0x5e, + 0x3a, 0xf1, 0xb6, 0x06, 0x67, 0x34, 0xf4, 0x68, 0x32, 0x0d, 0x42, 0xa6, 0x1f, 0x9d, 0x91, 0x1b, + 0xf4, 0xd8, 0x3c, 0xa6, 0xa9, 0xfc, 0x85, 0x8b, 0x5d, 0x3f, 0x8a, 0xfc, 0x09, 0x5d, 0xba, 0x67, + 0xc1, 0x94, 0xa6, 0xcc, 0x99, 0xc6, 0x00, 0x68, 0xd1, 0x24, 0x89, 0x12, 0x05, 0x6f, 0x86, 0xce, + 0x34, 0xbf, 0xdb, 0x60, 0x99, 0x3a, 0x1e, 0x89, 0xf9, 0xdf, 0xa4, 0x69, 0x10, 0x85, 0xa0, 0x41, + 0x69, 0xac, 0x9e, 0x64, 0x0d, 0x50, 0xeb, 0x36, 0x4b, 0xa8, 0x33, 0x1d, 0x3c, 0xa0, 0x21, 0x4b, + 0xf1, 0xc5, 0xa2, 0xdc, 0x36, 0x4e, 0x96, 0xcf, 0x34, 0xd7, 0x8f, 0xda, 0x22, 0x0b, 
0x9a, 0x85, + 0x14, 0x60, 0xd6, 0x4f, 0x25, 0xd4, 0xd4, 0x14, 0xf8, 0x02, 0x42, 0x7d, 0xea, 0x07, 0x61, 0x7f, + 0x12, 0xb9, 0xf7, 0xdb, 0xc6, 0x49, 0xe3, 0x4c, 0x73, 0xfd, 0x88, 0x74, 0xb2, 0xd4, 0x13, 0x0d, + 0x83, 0x5f, 0x45, 0x35, 0x21, 0x0d, 0xb3, 0x76, 0x49, 0xc0, 0xd7, 0x34, 0xf8, 0x30, 0x23, 0xca, + 0x8a, 0x3f, 0x43, 0xf5, 0x41, 0xf8, 0x80, 0x4e, 0xa2, 0x98, 0xb6, 0xcb, 0x80, 0xe4, 0xaf, 0x55, + 0xca, 0xbe, 0xfd, 0x64, 0xd1, 0x3d, 0xab, 0x25, 0x7d, 0x3c, 0x8f, 0x69, 0x32, 0xa1, 0x9e, 0x4f, + 0x93, 0xde, 0x68, 0x96, 0x24, 0xd1, 0x57, 0x3d, 0x1d, 0x4f, 0x72, 0x77, 0xf8, 0x25, 0x54, 0x11, + 0xe1, 0xb7, 0x4d, 0xe1, 0xb7, 0x29, 0x23, 0x90, 0xef, 0x95, 0x16, 0x01, 0x09, 0xbd, 0x61, 0xd6, + 0xae, 0x14, 0x20, 0x5c, 0x45, 0xa4, 0x05, 0x9f, 0xe5, 0x01, 0x7a, 0xf2, 0xe5, 0x55, 0x81, 0x3a, + 0x9c, 0xa3, 0xe4, 0xbb, 0x73, 0xfb, 0x25, 0x73, 0xeb, 0x51, 0xd7, 0xb0, 0xae, 0xeb, 0xd9, 0xc2, + 0xc7, 0x51, 0xf5, 0x1a, 0x0d, 0xfc, 0x31, 0x13, 0x79, 0x33, 0x09, 0x48, 0xf8, 0x15, 0xae, 0x77, + 0x3c, 0x9a, 0xe4, 0x09, 0x92, 0x6c, 0x91, 0x4a, 0x02, 0x46, 0xcb, 0x5a, 0xfe, 0xfd, 0xf3, 0x5c, + 0x59, 0xdf, 0x1a, 0x79, 0xb6, 0x79, 0xb8, 0xc3, 0x0c, 0x1c, 0x1b, 0x7a, 0xb8, 0x4a, 0x4b, 0x72, + 0x3b, 0x7e, 0x19, 0x55, 0x09, 0x4d, 0x67, 0x13, 0x06, 0x21, 0xb4, 0x24, 0x52, 0xea, 0x08, 0xd8, + 0x70, 0x0f, 0x35, 0x06, 0x99, 0x4b, 0x63, 0x16, 0x44, 0x21, 0xa4, 0xf2, 0xa8, 0x0d, 0x5c, 0xcd, + 0x0d, 0x64, 0x89, 0xb1, 0xee, 0x42, 0x52, 0xf1, 0x47, 0xa8, 0x3a, 0xcc, 0xae, 0x39, 0xe9, 0x58, + 0x54, 0xb6, 0xd5, 0xbf, 0xb8, 0xb5, 0xe8, 0x1e, 0x7a, 0xb2, 0xe8, 0x9e, 0xdf, 0xbb, 0x9c, 0xa3, + 0x20, 0x74, 0x92, 0xb9, 0x7d, 0x8d, 0x66, 0xfd, 0x39, 0xa3, 0x29, 0x01, 0x27, 0xd6, 0x5f, 0xc6, + 0xf2, 0x6d, 0xf8, 0x43, 0xee, 0x7b, 0x38, 0x8f, 0xa9, 0x78, 0xe5, 0x5a, 0x7f, 0x7d, 0x67, 0xd1, + 0xb5, 0xf7, 0xa5, 0x49, 0x2f, 0x76, 0xe6, 0x93, 0xc8, 0xf1, 0x6c, 0x7e, 0x93, 0x80, 0x07, 0x2d, + 0xce, 0xd2, 0x01, 0xc4, 0xa9, 0x95, 0xa9, 0x5c, 0xa8, 0xf8, 0x31, 0x54, 0xd9, 0x0c, 0x3d, 0x9a, + 0x89, 0x24, 0x9a, 0x44, 0x0a, 0xbc, 0x08, 0xb7, 0x92, 0xc0, 0x0f, 0x42, 0xe0, 0x20, 0x14, 0x41, + 0xea, 0x08, 0xd8, 0xac, 0xaf, 0x0d, 0x74, 0x58, 0x90, 0x60, 0x90, 0x51, 0x77, 0xc6, 0xd3, 0xbc, + 0x22, 0xb1, 0xf8, 0x68, 0x18, 0x66, 0xb9, 0xb7, 0xb4, 0x5d, 0xd6, 0x47, 0x83, 0x66, 0x21, 0x05, + 0x98, 0xf5, 0x3e, 0x3a, 0xac, 0xc9, 0xd7, 0xe9, 0xfc, 0xb9, 0x71, 0x1c, 0x47, 0xd5, 0x5b, 0xf7, + 0xee, 0xa5, 0x54, 0xb2, 0xcb, 0x24, 0x20, 0x59, 0x7f, 0x94, 0x50, 0x53, 0x73, 0x81, 0xcf, 0xe5, + 0xf1, 0xee, 0xca, 0xd7, 0xbe, 0xf9, 0x78, 0xd1, 0x35, 0xf2, 0xb0, 0xf5, 0x79, 0x51, 0x3d, 0xd8, + 0x79, 0x71, 0x0a, 0x55, 0x61, 0x4c, 0xd6, 0x44, 0x2e, 0x0a, 0x03, 0x03, 0x4c, 0x5a, 0xcf, 0xd4, + 0xf7, 0xe8, 0x99, 0xd3, 0xa8, 0x46, 0xa8, 0x4b, 0x83, 0x98, 0xb5, 0x1b, 0x00, 0xe3, 0x7f, 0x0a, + 0x3a, 0xa2, 0x8c, 0xc5, 0xde, 0x42, 0xfb, 0xf7, 0xd6, 0x33, 0x55, 0x6b, 0xbe, 0x58, 0xd5, 0xbe, + 0x31, 0x14, 0xcb, 0x70, 0x1b, 0xd5, 0xae, 0x8c, 0x9d, 0x20, 0xdc, 0xdc, 0x10, 0xf9, 0x6e, 0x10, + 0x25, 0x6a, 0x85, 0x2c, 0xed, 0xce, 0xdb, 0xb2, 0xce, 0xdb, 0x77, 0x90, 0x39, 0x0c, 0xa6, 0x14, + 0x26, 0xc2, 0x09, 0x5b, 0xae, 0x37, 0x5b, 0xad, 0x37, 0x7b, 0xa8, 0xd6, 0x5b, 0xbf, 0xce, 0xdb, + 0xe9, 0xbb, 0xdf, 0xba, 0x06, 0x11, 0x37, 0xac, 0x5f, 0x4a, 0xaa, 0xe2, 0xff, 0xe5, 0x2e, 0x7e, + 0x1d, 0x35, 0x44, 0xc9, 0x45, 0x74, 0x65, 0x11, 0xdd, 0xda, 0xce, 0xa2, 0xbb, 0x54, 0x92, 0xe5, + 0x91, 0x27, 0x55, 0x08, 0x9b, 0x1b, 0x22, 0x1f, 0x0d, 0xa2, 0x44, 0x2d, 0xa9, 0x95, 0xdd, 0x93, + 0x5a, 0xd5, 0x93, 0x5a, 0xe0, 0x43, 0x6d, 0x7f, 0x3e, 0x5c, 0x32, 0x1f, 0x3e, 0xea, 0x1e, 0xb2, + 0xbe, 0x2f, 
0xc1, 0xaa, 0xe3, 0xf4, 0x2c, 0x34, 0x13, 0xd0, 0xf3, 0x1f, 0xbd, 0x7f, 0x9a, 0xff, + 0x79, 0x3c, 0x53, 0x73, 0x1f, 0x56, 0xb9, 0x50, 0xc1, 0x7a, 0x14, 0x67, 0xfc, 0x1a, 0xaa, 0xde, + 0x9a, 0x31, 0x0e, 0x2c, 0xab, 0x58, 0xc4, 0x6c, 0x12, 0x3a, 0xe8, 0x0b, 0x29, 0xe0, 0x53, 0xc8, + 0xbc, 0xe2, 0x4c, 0x26, 0x40, 0x87, 0xff, 0x49, 0x20, 0xd7, 0x48, 0x98, 0x30, 0xe2, 0x93, 0xa8, + 0x7c, 0x23, 0xf2, 0x61, 0xd0, 0x41, 0x9f, 0xdf, 0x88, 0x7c, 0x09, 0xe1, 0x26, 0xfc, 0x1e, 0x5a, + 0xbb, 0x1a, 0x3d, 0xa0, 0x49, 0x78, 0xd9, 0x75, 0xa3, 0x59, 0xc8, 0xa0, 0xc7, 0xdb, 0x12, 0x5b, + 0x30, 0xc9, 0x5b, 0x45, 0xf8, 0xa5, 0x3a, 0xcf, 0x87, 0xd8, 0xc2, 0x0f, 0x0d, 0xd5, 0xa9, 0xbc, + 0x06, 0x84, 0xb2, 0x59, 0x12, 0x8a, 0xa4, 0xb4, 0x08, 0x48, 0xbc, 0x6a, 0x57, 0x9d, 0xf4, 0x4e, + 0x4a, 0x3d, 0x60, 0xbc, 0x12, 0xf1, 0x59, 0xd4, 0xb8, 0xe9, 0x4c, 0xe9, 0x20, 0x64, 0xc9, 0x1c, + 0xde, 0xde, 0xb2, 0xe5, 0x17, 0x99, 0xd0, 0x91, 0xa5, 0x19, 0x5f, 0x40, 0xf5, 0x8f, 0x69, 0x32, + 0xbd, 0x9c, 0xf8, 0x29, 0xbc, 0xfe, 0x98, 0xad, 0x7d, 0xa4, 0x29, 0x1b, 0xc9, 0x51, 0xd6, 0x9f, + 0x06, 0xaa, 0xab, 0x67, 0xe3, 0x9b, 0xa8, 0x76, 0xd9, 0xf3, 0x12, 0x9a, 0xa6, 0x32, 0xba, 0xfe, + 0x9b, 0xc0, 0xdb, 0x73, 0x7b, 0xf3, 0xd6, 0x4d, 0xe6, 0x31, 0x8b, 0x6c, 0xb8, 0x4b, 0x94, 0x13, + 0xbc, 0x89, 0xcc, 0x0d, 0x87, 0x39, 0xab, 0x35, 0x81, 0x70, 0x81, 0x6f, 0xa0, 0xea, 0x30, 0x8a, + 0x03, 0x57, 0x2e, 0x87, 0x17, 0x8e, 0x0c, 0x9c, 0x7d, 0x1a, 0x25, 0xde, 0xfa, 0xc5, 0xb7, 0x08, + 0xf8, 0xb0, 0x7e, 0x2c, 0xa1, 0x46, 0x4e, 0x08, 0x7c, 0x06, 0xd5, 0xb9, 0x20, 0xba, 0xab, 0x22, + 0xba, 0xab, 0xb5, 0xb3, 0xe8, 0xe6, 0x3a, 0x92, 0x9f, 0xf8, 0x17, 0x0d, 0x3f, 0x8b, 0x47, 0x15, + 0x36, 0x84, 0xd2, 0x92, 0xdc, 0xce, 0x23, 0x86, 0x65, 0x5a, 0x5a, 0x21, 0x97, 0x6a, 0x54, 0x76, + 0x10, 0xba, 0xcd, 0x1c, 0xf7, 0xfe, 0x06, 0x8d, 0xd9, 0x18, 0xa6, 0x9f, 0xa6, 0xe1, 0x13, 0x07, + 0x78, 0x65, 0xae, 0x34, 0x71, 0xa4, 0x13, 0xeb, 0x13, 0x84, 0x9f, 0x25, 0x38, 0x7e, 0x17, 0xad, + 0x81, 0x7c, 0x27, 0xf6, 0x1c, 0x46, 0x21, 0x07, 0xff, 0xb7, 0xc5, 0x67, 0xff, 0x90, 0x4e, 0xe3, + 0x89, 0xc3, 0x28, 0x40, 0x48, 0x11, 0x6b, 0x7d, 0x81, 0xd0, 0xb2, 0xab, 0x0f, 0x9a, 0x6a, 0xd6, + 0x97, 0xa8, 0xa9, 0x8d, 0x82, 0x03, 0x77, 0xff, 0x43, 0x09, 0x15, 0x2a, 0xcb, 0xcf, 0x30, 0xd8, + 0xfe, 0x75, 0x65, 0xa5, 0x8f, 0xdc, 0x1b, 0x5d, 0x8d, 0x27, 0xd2, 0x47, 0xde, 0x72, 0xe5, 0xd5, + 0x5b, 0xee, 0x18, 0xaa, 0xdc, 0x75, 0x26, 0x33, 0xaa, 0xbe, 0x11, 0x85, 0x80, 0x8f, 0xa0, 0xf2, + 0x55, 0x27, 0x85, 0x0d, 0xc2, 0x8f, 0xfd, 0x0f, 0xb6, 0xb6, 0x3b, 0xc6, 0xe3, 0xed, 0x8e, 0xf1, + 0xeb, 0x76, 0xc7, 0xf8, 0x7d, 0xbb, 0x63, 0xfc, 0xfc, 0xb4, 0x63, 0x6c, 0x3d, 0xed, 0x18, 0x9f, + 0xef, 0xf3, 0x04, 0xaa, 0x3e, 0x0a, 0xc4, 0x69, 0x54, 0x15, 0xfb, 0xfa, 0x8d, 0xbf, 0x03, 0x00, + 0x00, 0xff, 0xff, 0xf5, 0x7c, 0x70, 0x97, 0x18, 0x0f, 0x00, 0x00, +} + +func (m *StreamEvents) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *StreamEvents) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + if len(m.StreamEvents) > 0 { + for _, msg := range m.StreamEvents { + dAtA[i] = 0xa + i++ + i = encodeVarintExec(dAtA, i, uint64(msg.Size())) + n, err := msg.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n + } + } + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil } func (m *StreamEvent) Marshal() (dAtA []byte, err error) { @@ -1457,9 +1466,9 @@ func (m *StreamEvent) 
MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginBlock.Size())) - n1, err := m.BeginBlock.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.BeginBlock.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -1467,9 +1476,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginTx.Size())) - n2, err := m.BeginTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.BeginTx.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1477,9 +1486,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n3, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1487,9 +1496,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Event.Size())) - n4, err := m.Event.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Event.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1497,9 +1506,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.EndTx.Size())) - n5, err := m.EndTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.EndTx.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -1507,9 +1516,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.EndBlock.Size())) - n6, err := m.EndBlock.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.EndBlock.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -1543,9 +1552,9 @@ func (m *BeginBlock) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n7, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.Header.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -1600,9 +1609,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n8, err := m.TxHeader.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.TxHeader.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -1610,9 +1619,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n9, err := m.Result.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.Result.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -1620,9 +1629,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n10, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n10, err10 := m.Exception.MarshalTo(dAtA[i:]) + if err10 != nil { + return 0, err10 } i += n10 } @@ -1650,9 +1659,9 @@ func (m *EndTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n11, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n11, err11 := m.TxHash.MarshalTo(dAtA[i:]) + if err11 != nil { + return 0, err11 } i += n11 if 
m.XXX_unrecognized != nil { @@ -1684,9 +1693,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n12, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n12, err12 := m.TxHash.MarshalTo(dAtA[i:]) + if err12 != nil { + return 0, err12 } i += n12 if m.Height != 0 { @@ -1703,9 +1712,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n13, err := m.Origin.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n13, err13 := m.Origin.MarshalTo(dAtA[i:]) + if err13 != nil { + return 0, err13 } i += n13 } @@ -1739,9 +1748,9 @@ func (m *BlockExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n14, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n14, err14 := m.Header.MarshalTo(dAtA[i:]) + if err14 != nil { + return 0, err14 } i += n14 } @@ -1813,9 +1822,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n15, err := m.TxHeader.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n15, err15 := m.TxHeader.MarshalTo(dAtA[i:]) + if err15 != nil { + return 0, err15 } i += n15 } @@ -1823,9 +1832,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n16, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n16, err16 := m.Envelope.MarshalTo(dAtA[i:]) + if err16 != nil { + return 0, err16 } i += n16 } @@ -1845,9 +1854,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n17, err := m.Result.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n17, err17 := m.Result.MarshalTo(dAtA[i:]) + if err17 != nil { + return 0, err17 } i += n17 } @@ -1855,9 +1864,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintExec(dAtA, i, uint64(m.Receipt.Size())) - n18, err := m.Receipt.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n18, err18 := m.Receipt.MarshalTo(dAtA[i:]) + if err18 != nil { + return 0, err18 } i += n18 } @@ -1865,9 +1874,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x52 i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n19, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n19, err19 := m.Exception.MarshalTo(dAtA[i:]) + if err19 != nil { + return 0, err19 } i += n19 } @@ -1923,9 +1932,9 @@ func (m *Origin) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n20, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err != nil { - return 0, err + n20, err20 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err20 != nil { + return 0, err20 } i += n20 if m.XXX_unrecognized != nil { @@ -1957,9 +1966,9 @@ func (m *Header) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n21, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n21, err21 := m.TxHash.MarshalTo(dAtA[i:]) + if err21 != nil { + return 0, err21 } i += n21 if m.EventType != 0 { @@ -1987,9 +1996,9 @@ func (m *Header) 
MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n22, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n22, err22 := m.Exception.MarshalTo(dAtA[i:]) + if err22 != nil { + return 0, err22 } i += n22 } @@ -2018,9 +2027,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n23, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n23, err23 := m.Header.MarshalTo(dAtA[i:]) + if err23 != nil { + return 0, err23 } i += n23 } @@ -2028,9 +2037,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Input.Size())) - n24, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n24, err24 := m.Input.MarshalTo(dAtA[i:]) + if err24 != nil { + return 0, err24 } i += n24 } @@ -2038,9 +2047,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Output.Size())) - n25, err := m.Output.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n25, err25 := m.Output.MarshalTo(dAtA[i:]) + if err25 != nil { + return 0, err25 } i += n25 } @@ -2048,9 +2057,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Call.Size())) - n26, err := m.Call.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n26, err26 := m.Call.MarshalTo(dAtA[i:]) + if err26 != nil { + return 0, err26 } i += n26 } @@ -2058,9 +2067,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Log.Size())) - n27, err := m.Log.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n27, err27 := m.Log.MarshalTo(dAtA[i:]) + if err27 != nil { + return 0, err27 } i += n27 } @@ -2068,9 +2077,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.GovernAccount.Size())) - n28, err := m.GovernAccount.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n28, err28 := m.GovernAccount.MarshalTo(dAtA[i:]) + if err28 != nil { + return 0, err28 } i += n28 } @@ -2110,9 +2119,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.NameEntry.Size())) - n29, err := m.NameEntry.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n29, err29 := m.NameEntry.MarshalTo(dAtA[i:]) + if err29 != nil { + return 0, err29 } i += n29 } @@ -2120,9 +2129,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.PermArgs.Size())) - n30, err := m.PermArgs.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n30, err30 := m.PermArgs.MarshalTo(dAtA[i:]) + if err30 != nil { + return 0, err30 } i += n30 } @@ -2150,17 +2159,17 @@ func (m *LogEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n31, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n31, err31 := m.Address.MarshalTo(dAtA[i:]) + if err31 != nil { + return 0, err31 } i += n31 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n32, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n32, err32 := m.Data.MarshalTo(dAtA[i:]) + if err32 != nil { + return 0, err32 } i += n32 if len(m.Topics) > 0 { @@ -2200,18 +2209,18 @@ func (m *CallEvent) MarshalTo(dAtA []byte) 
(int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.CallData.Size())) - n33, err := m.CallData.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n33, err33 := m.CallData.MarshalTo(dAtA[i:]) + if err33 != nil { + return 0, err33 } i += n33 } dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n34, err := m.Origin.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n34, err34 := m.Origin.MarshalTo(dAtA[i:]) + if err34 != nil { + return 0, err34 } i += n34 if m.StackDepth != 0 { @@ -2222,9 +2231,9 @@ func (m *CallEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Return.Size())) - n35, err := m.Return.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n35, err35 := m.Return.MarshalTo(dAtA[i:]) + if err35 != nil { + return 0, err35 } i += n35 if m.CallType != 0 { @@ -2257,9 +2266,9 @@ func (m *GovernAccountEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.AccountUpdate.Size())) - n36, err := m.AccountUpdate.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n36, err36 := m.AccountUpdate.MarshalTo(dAtA[i:]) + if err36 != nil { + return 0, err36 } i += n36 } @@ -2287,9 +2296,9 @@ func (m *InputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n37, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n37, err37 := m.Address.MarshalTo(dAtA[i:]) + if err37 != nil { + return 0, err37 } i += n37 if m.XXX_unrecognized != nil { @@ -2316,9 +2325,9 @@ func (m *OutputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n38, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n38, err38 := m.Address.MarshalTo(dAtA[i:]) + if err38 != nil { + return 0, err38 } i += n38 if m.XXX_unrecognized != nil { @@ -2345,25 +2354,25 @@ func (m *CallData) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Caller.Size())) - n39, err := m.Caller.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n39, err39 := m.Caller.MarshalTo(dAtA[i:]) + if err39 != nil { + return 0, err39 } i += n39 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Callee.Size())) - n40, err := m.Callee.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n40, err40 := m.Callee.MarshalTo(dAtA[i:]) + if err40 != nil { + return 0, err40 } i += n40 dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n41, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n41, err41 := m.Data.MarshalTo(dAtA[i:]) + if err41 != nil { + return 0, err41 } i += n41 if m.Value != 0 { @@ -2391,6 +2400,24 @@ func encodeVarintExec(dAtA []byte, offset int, v uint64) int { dAtA[offset] = uint8(v) return offset + 1 } +func (m *StreamEvents) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.StreamEvents) > 0 { + for _, e := range m.StreamEvents { + l = e.Size() + n += 1 + l + sovExec(uint64(l)) + } + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + func (m *StreamEvent) Size() (n int) { if m == nil { return 0 @@ -2851,14 +2878,7 @@ func (m *CallData) Size() (n int) { } func sovExec(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozExec(x uint64) (n int) { return sovExec(uint64((x << 1) ^ 
uint64((int64(x) >> 63)))) @@ -2950,6 +2970,94 @@ func (this *Event) SetValue(value interface{}) bool { } return true } +func (m *StreamEvents) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowExec + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: StreamEvents: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: StreamEvents: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field StreamEvents", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowExec + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthExec + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthExec + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.StreamEvents = append(m.StreamEvents, &StreamEvent{}) + if err := m.StreamEvents[len(m.StreamEvents)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipExec(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthExec + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthExec + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *StreamEvent) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/execution/exec/stream_event.go b/execution/exec/stream_event.go index 2794755d0..14e6bb254 100644 --- a/execution/exec/stream_event.go +++ b/execution/exec/stream_event.go @@ -8,15 +8,12 @@ type EventStream interface { Recv() (*StreamEvent, error) } -type StreamEvents []*StreamEvent - func (ses *StreamEvents) Recv() (*StreamEvent, error) { - evs := *ses - if len(evs) == 0 { + if len(ses.StreamEvents) == 0 { return nil, io.EOF } - ev := evs[0] - *ses = evs[1:] + ev := ses.StreamEvents[0] + ses.StreamEvents = ses.StreamEvents[1:] return ev, nil } diff --git a/execution/exec/tx_execution.go b/execution/exec/tx_execution.go index 949db96a9..8d7ffacc3 100644 --- a/execution/exec/tx_execution.go +++ b/execution/exec/tx_execution.go @@ -34,34 +34,8 @@ func NewTxExecution(txEnv *txs.Envelope) *TxExecution { } } -func DecodeTxExecution(bs []byte) (*TxExecution, error) { - txe := new(TxExecution) - err := cdc.UnmarshalBinaryBare(bs, txe) - if err != nil { - return nil, err - } - return txe, nil -} - -func DecodeTxExecutionKey(bs []byte) (*TxExecutionKey, error) { - be := new(TxExecutionKey) - - err := cdc.UnmarshalBinaryLengthPrefixed(bs, be) - if err != nil { - return nil, err - } - return be, nil -} - -func (key *TxExecutionKey) Encode() ([]byte, error) { - // At height 0 index 0, the B cdc.MarshalBinaryBase() returns a string of 0 bytes, - // which cannot be stored in iavl. 
So, abuse MarshalBinaryLengthPrefixed() to - // ensure we have > 0 bytes. - return cdc.MarshalBinaryLengthPrefixed(key) -} - -func (txe *TxExecution) StreamEvents() StreamEvents { - var ses StreamEvents +func (txe *TxExecution) StreamEvents() []*StreamEvent { + var ses []*StreamEvent ses = append(ses, &StreamEvent{ BeginTx: &BeginTx{ @@ -89,10 +63,6 @@ func (txe *TxExecution) StreamEvents() StreamEvents { }) } -func (txe *TxExecution) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(txe) -} - func (*TxExecution) EventType() EventType { return TypeTxExecution } diff --git a/execution/names/names.go b/execution/names/names.go index a08a45078..ffef04ed2 100644 --- a/execution/names/names.go +++ b/execution/names/names.go @@ -18,7 +18,6 @@ import ( "fmt" "github.com/hyperledger/burrow/event/query" - amino "github.com/tendermint/go-amino" ) var MinNameRegistrationPeriod uint64 = 5 @@ -37,12 +36,6 @@ const ( MaxDataLength = 1 << 16 ) -var cdc = amino.NewCodec() - -func (e *Entry) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(e) -} - func (e *Entry) String() string { return fmt.Sprintf("NameEntry{%v -> %v; Expires: %v, Owner: %v}", e.Name, e.Data, e.Expires, e.Owner) } @@ -59,15 +52,6 @@ func (e *Entry) Tagged() *TaggedEntry { } } -func DecodeEntry(entryBytes []byte) (*Entry, error) { - entry := new(Entry) - err := cdc.UnmarshalBinaryBare(entryBytes, entry) - if err != nil { - return nil, err - } - return entry, nil -} - type Reader interface { GetName(name string) (*Entry, error) } diff --git a/execution/names/names.pb.go b/execution/names/names.pb.go index c6e76166f..5a34f0ce9 100644 --- a/execution/names/names.pb.go +++ b/execution/names/names.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -51,16 +52,12 @@ func (m *Entry) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Entry.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *Entry) XXX_Merge(src proto.Message) { xxx_messageInfo_Entry.Merge(m, src) @@ -107,7 +104,7 @@ func init() { proto.RegisterFile("names.proto", fileDescriptor_f4268625867c617c) func init() { golang_proto.RegisterFile("names.proto", fileDescriptor_f4268625867c617c) } var fileDescriptor_f4268625867c617c = []byte{ - // 245 bytes of a gzipped FileDescriptorProto + // 249 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0xce, 0x4b, 0xcc, 0x4d, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x05, 0x73, 0xa4, 0x74, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, 0xb2, 0x49, @@ -119,11 +116,11 @@ var fileDescriptor_f4268625867c617c = []byte{ 0xfc, 0x72, 0xfd, 0xe4, 0xa2, 0xca, 0x82, 0x92, 0x7c, 0x3d, 0xc7, 0x94, 0x94, 0xa2, 0xd4, 0xe2, 0xe2, 0x20, 0x88, 0x11, 0x20, 0xf3, 0x5d, 0x12, 0x4b, 0x12, 0x25, 0x98, 0x21, 0xe6, 0x83, 0xd8, 0x42, 0x12, 0x5c, 0xec, 0xae, 0x15, 0x05, 0x99, 0x45, 0xa9, 0xc5, 0x12, 0x2c, 0x0a, 0x8c, 0x1a, - 0x2c, 0x41, 0x30, 0xae, 0x15, 0xcb, 0x8c, 0x05, 0xf2, 0x0c, 0x4e, 0xce, 0x27, 0x1e, 0xc9, 0x31, - 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x81, 0xc7, 0x72, 
0x8c, 0x27, 0x1e, 0xcb, - 0x31, 0x46, 0xe9, 0xe2, 0xb7, 0x3e, 0xb5, 0x22, 0x35, 0xb9, 0xb4, 0x24, 0x33, 0x3f, 0x4f, 0x1f, - 0x1c, 0x22, 0x49, 0x6c, 0x60, 0x9f, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x03, 0xd1, 0x42, - 0x7f, 0x2e, 0x01, 0x00, 0x00, + 0x2c, 0x41, 0x30, 0xae, 0x15, 0xcb, 0x8c, 0x05, 0xf2, 0x0c, 0x4e, 0xee, 0x27, 0x1e, 0xc9, 0x31, + 0x5e, 0x78, 0x24, 0xc7, 0x78, 0xe3, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x07, 0x1e, 0xcb, + 0x31, 0x9e, 0x78, 0x2c, 0xc7, 0x18, 0xa5, 0x8b, 0xdf, 0x09, 0xa9, 0x15, 0xa9, 0xc9, 0xa5, 0x25, + 0x99, 0xf9, 0x79, 0xfa, 0xe0, 0x50, 0x49, 0x62, 0x03, 0xfb, 0xd6, 0x18, 0x10, 0x00, 0x00, 0xff, + 0xff, 0x3c, 0xd9, 0x3b, 0x5f, 0x32, 0x01, 0x00, 0x00, } func (m *Entry) Marshal() (dAtA []byte, err error) { @@ -150,9 +147,9 @@ func (m *Entry) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintNames(dAtA, i, uint64(m.Owner.Size())) - n1, err := m.Owner.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Owner.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Data) > 0 { @@ -207,14 +204,7 @@ func (m *Entry) Size() (n int) { } func sovNames(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozNames(x uint64) (n int) { return sovNames(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/names/names_test.go b/execution/names/names_test.go index 3ee467ddc..bb9717cb8 100644 --- a/execution/names/names_test.go +++ b/execution/names/names_test.go @@ -9,20 +9,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestEncodeAmino(t *testing.T) { - entry := &Entry{ - Name: "Foo", - Data: "oh noes", - Expires: 24423432, - Owner: crypto.Address{1, 2, 0, 9, 8, 8, 1, 2}, - } - encoded, err := entry.Encode() - require.NoError(t, err) - entryOut, err := DecodeEntry(encoded) - require.NoError(t, err) - assert.Equal(t, entry, entryOut) -} - func TestEncodeProtobuf(t *testing.T) { entry := &Entry{ Name: "Foo", diff --git a/execution/state/accounts.go b/execution/state/accounts.go index 28b1352c1..6524e5486 100644 --- a/execution/state/accounts.go +++ b/execution/state/accounts.go @@ -7,6 +7,7 @@ import ( "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/encoding" ) // Returns nil if account does not exist with given address. 
@@ -19,7 +20,12 @@ func (s *ReadState) GetAccount(address crypto.Address) (*acm.Account, error) { if accBytes == nil { return nil, nil } - return acm.Decode(accBytes) + account := new(acm.Account) + err = encoding.Decode(accBytes, account) + if err != nil { + return nil, fmt.Errorf("could not decode Account: %v", err) + } + return account, nil } func (ws *writeState) statsAddAccount(acc *acm.Account) { @@ -46,7 +52,7 @@ func (ws *writeState) UpdateAccount(account *acm.Account) error { if account == nil { return fmt.Errorf("UpdateAccount passed nil account in State") } - encodedAccount, err := account.Encode() + bs, err := encoding.Encode(account) if err != nil { return fmt.Errorf("UpdateAccount could not encode account: %v", err) } @@ -54,7 +60,7 @@ func (ws *writeState) UpdateAccount(account *acm.Account) error { if err != nil { return err } - updated := tree.Set(keys.Account.KeyNoPrefix(account.Address), encodedAccount) + updated := tree.Set(keys.Account.KeyNoPrefix(account.Address), bs) if updated { ws.statsAddAccount(account) } @@ -68,11 +74,12 @@ func (ws *writeState) RemoveAccount(address crypto.Address) error { } accBytes, deleted := tree.Delete(keys.Account.KeyNoPrefix(address)) if deleted { - acc, err := acm.Decode(accBytes) + account := new(acm.Account) + err := encoding.Decode(accBytes, account) if err != nil { return err } - ws.statsRemoveAccount(acc) + ws.statsRemoveAccount(account) // Delete storage associated with account too _, err = ws.forest.Delete(keys.Storage.Key(address)) if err != nil { @@ -88,7 +95,8 @@ func (s *ReadState) IterateAccounts(consumer func(*acm.Account) error) error { return err } return tree.Iterate(nil, nil, true, func(key []byte, value []byte) error { - account, err := acm.Decode(value) + account := new(acm.Account) + err := encoding.Decode(value, account) if err != nil { return fmt.Errorf("IterateAccounts could not decode account: %v", err) } diff --git a/execution/state/events.go b/execution/state/events.go index 06d912870..3b4029334 100644 --- a/execution/state/events.go +++ b/execution/state/events.go @@ -5,13 +5,10 @@ import ( "fmt" "io" - "github.com/hyperledger/burrow/txs" - + "github.com/hyperledger/burrow/encoding" "github.com/hyperledger/burrow/execution/exec" ) -var cdc = txs.NewAminoCodec() - func (ws *writeState) AddBlock(be *exec.BlockExecution) error { // If there are no transactions, do not store anything. This reduces the amount of data we store and // prevents the iavl tree from changing, which means the AppHash does not change. @@ -19,12 +16,12 @@ func (ws *writeState) AddBlock(be *exec.BlockExecution) error { return nil } - streamEventBytes := make([]byte, 0) - + buf := new(bytes.Buffer) + var offset int for _, ev := range be.StreamEvents() { if ev.BeginTx != nil { - val := &exec.TxExecutionKey{Height: be.Height, Offset: uint64(len(streamEventBytes))} - bs, err := val.Encode() + val := &exec.TxExecutionKey{Height: be.Height, Offset: uint64(offset)} + bs, err := encoding.Encode(val) if err != nil { return err } @@ -32,12 +29,11 @@ func (ws *writeState) AddBlock(be *exec.BlockExecution) error { ws.plain.Set(keys.TxHash.Key(ev.BeginTx.TxHeader.TxHash), bs) } - bs, err := cdc.MarshalBinaryLengthPrefixed(ev) + n, err := encoding.WriteMessage(buf, ev) if err != nil { return err } - - streamEventBytes = append(streamEventBytes, bs...) 
+ offset += n } tree, err := ws.forest.Writer(keys.Event.Prefix()) @@ -45,7 +41,7 @@ func (ws *writeState) AddBlock(be *exec.BlockExecution) error { return err } key := keys.Event.KeyNoPrefix(be.Height) - tree.Set(key, streamEventBytes) + tree.Set(key, buf.Bytes()) return nil } @@ -63,22 +59,23 @@ func (s *ReadState) IterateStreamEvents(start, end *uint64, consumer func(*exec. endKey = keys.Event.KeyNoPrefix(*end) } return tree.Iterate(startKey, endKey, true, func(_, value []byte) error { - r := bytes.NewReader(value) + buf := bytes.NewBuffer(value) - for r.Len() > 0 { + for { ev := new(exec.StreamEvent) - _, err := cdc.UnmarshalBinaryLengthPrefixedReader(r, ev, 0) + _, err := encoding.ReadMessage(buf, ev) if err != nil { + if err == io.EOF { + return nil + } return err } err = consumer(ev) if err != nil { - break + return err } } - - return err }) } @@ -109,7 +106,8 @@ func (s *ReadState) TxByHash(txHash []byte) (*exec.TxExecution, error) { return nil, nil } - key, err := exec.DecodeTxExecutionKey(bs) + key := new(exec.TxExecutionKey) + err := encoding.Decode(bs, key) if err != nil { return nil, err } @@ -125,12 +123,12 @@ func (s *ReadState) TxByHash(txHash []byte) (*exec.TxExecution, error) { errHeader, txHash) } - r := bytes.NewReader(bs[key.Offset:]) + buf := bytes.NewBuffer(bs[key.Offset:]) var stack exec.TxStack for { ev := new(exec.StreamEvent) - _, err := cdc.UnmarshalBinaryLengthPrefixedReader(r, ev, 0) + _, err := encoding.ReadMessage(buf, ev) if err != nil { return nil, err } diff --git a/execution/state/events_test.go b/execution/state/events_test.go index b15fb7c3e..b927da6e0 100644 --- a/execution/state/events_test.go +++ b/execution/state/events_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/hyperledger/burrow/binary" + "github.com/hyperledger/burrow/config/source" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/crypto/sha3" "github.com/hyperledger/burrow/execution/exec" @@ -113,7 +114,7 @@ func TestReadState_TxByHash(t *testing.T) { require.NotNil(t, txOut, "should retrieve non-nil transaction by TxHash %v", tx.TxHash) // Make sure we get the same tx require.Equal(t, txHash, txOut.TxHash.String(), "TxHash does not match as string") - require.Equal(t, *tx, *txOut) + require.Equal(t, source.JSONString(tx), source.JSONString(txOut)) } } } diff --git a/execution/state/names.go b/execution/state/names.go index 1fc8620c3..bcf135af0 100644 --- a/execution/state/names.go +++ b/execution/state/names.go @@ -3,6 +3,7 @@ package state import ( "fmt" + "github.com/hyperledger/burrow/encoding" "github.com/hyperledger/burrow/execution/names" ) @@ -18,7 +19,8 @@ func (s *ReadState) GetName(name string) (*names.Entry, error) { return nil, nil } - return names.DecodeEntry(entryBytes) + entry := new(names.Entry) + return entry, encoding.Decode(entryBytes, entry) } func (ws *writeState) UpdateName(entry *names.Entry) error { @@ -26,7 +28,7 @@ func (ws *writeState) UpdateName(entry *names.Entry) error { if err != nil { return err } - bs, err := entry.Encode() + bs, err := encoding.Encode(entry) if err != nil { return err } @@ -49,7 +51,8 @@ func (s *ReadState) IterateNames(consumer func(*names.Entry) error) error { return err } return tree.Iterate(nil, nil, true, func(key []byte, value []byte) error { - entry, err := names.DecodeEntry(value) + entry := new(names.Entry) + err := encoding.Decode(value, entry) if err != nil { return fmt.Errorf("State.IterateNames() could not iterate over names: %v", err) } diff --git a/execution/state/state.go 
b/execution/state/state.go index 3b125c567..15f4a948f 100644 --- a/execution/state/state.go +++ b/execution/state/state.go @@ -125,13 +125,19 @@ func NewState(db dbm.DB) *State { } plain := storage.NewPrefixDB(db, plainPrefix) ring := validator.NewRing(nil, DefaultValidatorsWindowSize) - rs := ReadState{Forest: forest, Plain: plain, History: ring} - ws := writeState{forest: forest, plain: plain, ring: ring} return &State{ - db: db, - ReadState: rs, - writeState: ws, - logger: logging.NewNoopLogger(), + db: db, + ReadState: ReadState{ + Forest: forest, + Plain: plain, + History: ring, + }, + writeState: writeState{ + forest: forest, + plain: plain, + ring: ring, + }, + logger: logging.NewNoopLogger(), } } diff --git a/execution/state/state_test.go b/execution/state/state_test.go index c116c1cd3..08584e8d0 100644 --- a/execution/state/state_test.go +++ b/execution/state/state_test.go @@ -1,5 +1,5 @@ // Copyright 2017 Monax Industries Limited -// +//. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -18,6 +18,7 @@ import ( "testing" "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/config/source" "github.com/hyperledger/burrow/permission" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -27,6 +28,7 @@ import ( func TestState_UpdateAccount(t *testing.T) { s := NewState(dbm.NewMemDB()) account := acm.NewAccountFromSecret("Foo") + account.EVMCode = acm.Bytecode{1, 2, 3} account.Permissions.Base.Perms = permission.SetGlobal | permission.HasRole _, _, err := s.Update(func(ws Updatable) error { return ws.UpdateAccount(account) @@ -36,5 +38,5 @@ func TestState_UpdateAccount(t *testing.T) { require.NoError(t, err) accountOut, err := s.GetAccount(account.Address) require.NoError(t, err) - assert.Equal(t, account, accountOut) + assert.Equal(t, source.JSONString(account), source.JSONString(accountOut)) } diff --git a/execution/state/validators.go b/execution/state/validators.go index d43dc1385..362897af9 100644 --- a/execution/state/validators.go +++ b/execution/state/validators.go @@ -3,6 +3,7 @@ package state import ( "math/big" + "github.com/hyperledger/burrow/encoding" "github.com/hyperledger/burrow/genesis" "github.com/hyperledger/burrow/acm/validator" @@ -98,7 +99,8 @@ func (s *ReadState) Power(id crypto.Address) (*big.Int, error) { if len(bs) == 0 { return new(big.Int), nil } - v, err := validator.Decode(bs) + v := new(validator.Validator) + err = encoding.Decode(bs, v) if err != nil { return nil, err } @@ -111,7 +113,8 @@ func (s *ReadState) IterateValidators(fn func(id crypto.Addressable, power *big. 
return err } return tree.Iterate(nil, nil, true, func(_, value []byte) error { - v, err := validator.Decode(value) + v := new(validator.Validator) + err = encoding.Decode(value, v) if err != nil { return err } @@ -149,8 +152,7 @@ func (ws *writeState) setPower(id crypto.PublicKey, power *big.Int) error { tree.Delete(key) return nil } - v := validator.New(id, power) - bs, err := v.Encode() + bs, err := encoding.Encode(validator.New(id, power)) if err != nil { return err } diff --git a/execution/transactor_test.go b/execution/transactor_test.go index 6759f62d0..85e18e8b5 100644 --- a/execution/transactor_test.go +++ b/execution/transactor_test.go @@ -40,7 +40,7 @@ func TestTransactor_BroadcastTxSync(t *testing.T) { bc := &bcm.Blockchain{} evc := event.NewEmitter() evc.SetLogger(logging.NewNoopLogger()) - txCodec := txs.NewAminoCodec() + txCodec := txs.NewProtobufCodec() privAccount := acm.GeneratePrivateAccountFromSecret("frogs") tx := &payload.CallTx{ Input: &payload.TxInput{ diff --git a/genesis/spec/spec.pb.go b/genesis/spec/spec.pb.go index e5dc717d4..886e49e0f 100644 --- a/genesis/spec/spec.pb.go +++ b/genesis/spec/spec.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -50,16 +51,12 @@ func (m *TemplateAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TemplateAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_TemplateAccount.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err } + return b[:n], nil } func (m *TemplateAccount) XXX_Merge(src proto.Message) { xxx_messageInfo_TemplateAccount.Merge(m, src) @@ -120,32 +117,33 @@ func init() { proto.RegisterFile("spec.proto", fileDescriptor_423806180556987f) func init() { golang_proto.RegisterFile("spec.proto", fileDescriptor_423806180556987f) } var fileDescriptor_423806180556987f = []byte{ - // 396 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x31, 0x8f, 0xd3, 0x30, - 0x14, 0xc7, 0x6b, 0x9a, 0xbb, 0x52, 0xf7, 0x10, 0x9c, 0xa7, 0xe8, 0x86, 0x38, 0x2a, 0x03, 0x11, - 0x3a, 0x12, 0xa9, 0x4c, 0xdc, 0x44, 0x83, 0x60, 0x41, 0x42, 0xa7, 0xdc, 0x4d, 0x6c, 0x89, 0xf3, - 0xc8, 0x45, 0x8a, 0xe3, 0xc8, 0x76, 0x84, 0xf2, 0x2d, 0x90, 0x58, 0x98, 0xef, 0x93, 0x30, 0x66, - 0x64, 0xee, 0x10, 0xa1, 0x76, 0x63, 0xe4, 0x13, 0xa0, 0x3a, 0x2d, 0xed, 0x04, 0x99, 0xfc, 0xfe, - 0x7e, 0xfa, 0xff, 0xde, 0xb3, 0xdf, 0xc3, 0x58, 0x55, 0xc0, 0xfc, 0x4a, 0x0a, 0x2d, 0x88, 0xb5, - 0x8d, 0x2f, 0x5e, 0x64, 0xb9, 0xbe, 0xab, 0x13, 0x9f, 0x09, 0x1e, 0x64, 0x22, 0x13, 0x81, 0x49, - 0x26, 0xf5, 0x27, 0xa3, 0x8c, 0x30, 0x51, 0x6f, 0xba, 0x38, 0x63, 0xb2, 0xa9, 0xf4, 0x5e, 0x3d, - 0x4a, 0xe2, 0x22, 0x2e, 0x19, 0xf4, 0x72, 0xfe, 0xd5, 0xc2, 0x8f, 0x6f, 0x81, 0x57, 0x45, 0xac, - 0x61, 0xc9, 0x98, 0xa8, 0x4b, 0x4d, 0x08, 0xb6, 0x3e, 0xc4, 0x1c, 0x6c, 0xe4, 0x22, 0x6f, 0x1a, - 0x99, 0x98, 0x70, 0x3c, 0x59, 0xa6, 0xa9, 0x04, 0xa5, 0xec, 0x07, 0x2e, 0xf2, 0xce, 0xc2, 0x9b, - 0x55, 0x47, 0x2f, 0x8f, 0x1a, 0xb9, 0x6b, 0x2a, 0x90, 0x05, 0xa4, 0x19, 0xc8, 0x20, 0xa9, 0xa5, - 0x14, 0x9f, 0x83, 0x5d, 0xdd, 0x9d, 0xef, 0x57, 0x47, 0xf1, 0xa5, 0xe0, 0xb9, 0x06, 0x5e, 0xe9, - 0xe6, 0x77, 0x47, 0xcf, 0xb5, 0xe0, 0xc5, 0xd5, 0xfc, 0x70, 0x37, 0x8f, 0xf6, 0x35, 
0xc8, 0x2d, - 0x9e, 0x5e, 0xd7, 0x49, 0x91, 0xb3, 0xf7, 0xd0, 0xd8, 0x63, 0x17, 0x79, 0xb3, 0xc5, 0xb9, 0xbf, - 0xe3, 0xfd, 0x4d, 0x84, 0x4f, 0x87, 0x30, 0x0f, 0x20, 0x72, 0x83, 0x27, 0x4b, 0xbe, 0x7d, 0xa2, - 0xb2, 0x2d, 0x77, 0xec, 0xcd, 0x16, 0x4f, 0xfc, 0xfd, 0x6f, 0x84, 0xfd, 0x19, 0x3e, 0x6b, 0x3b, - 0x3a, 0x1a, 0xd6, 0x6a, 0x4f, 0x22, 0x6f, 0xf1, 0xec, 0x1a, 0x24, 0xcf, 0x95, 0xca, 0x45, 0xa9, - 0xec, 0x13, 0x77, 0xec, 0x4d, 0x87, 0x75, 0x76, 0xec, 0x23, 0xaf, 0xf0, 0x49, 0x24, 0x0a, 0x50, - 0xf6, 0xe9, 0x70, 0x40, 0xef, 0x20, 0xef, 0xb0, 0xf5, 0x46, 0xa4, 0x60, 0x4f, 0xcc, 0x60, 0x16, - 0x6d, 0x47, 0xd1, 0xaa, 0xa3, 0xcf, 0xff, 0x3d, 0x9c, 0x98, 0x71, 0x3f, 0x6c, 0x34, 0x30, 0x91, - 0x42, 0x64, 0xfc, 0x57, 0x0f, 0xbf, 0xdc, 0xd3, 0xd1, 0xb7, 0x7b, 0x3a, 0x0a, 0x5f, 0xb7, 0x6b, - 0x07, 0xfd, 0x58, 0x3b, 0xe8, 0xe7, 0xda, 0x41, 0xdf, 0x37, 0x0e, 0x6a, 0x37, 0x0e, 0xfa, 0xf8, - 0x1f, 0x62, 0x06, 0x25, 0xa8, 0x5c, 0x05, 0xdb, 0x1d, 0x4d, 0x4e, 0xcd, 0x7a, 0xbd, 0xfc, 0x13, - 0x00, 0x00, 0xff, 0xff, 0x3b, 0xf1, 0x47, 0x6d, 0xbe, 0x02, 0x00, 0x00, + // 401 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xbf, 0x6f, 0xd4, 0x30, + 0x14, 0xc7, 0xcf, 0x5c, 0xda, 0xe3, 0x7c, 0x45, 0x50, 0x4f, 0x51, 0x87, 0x38, 0x3a, 0x06, 0x22, + 0x54, 0x12, 0xe9, 0x98, 0xe8, 0x76, 0xe1, 0xc7, 0x82, 0x84, 0xaa, 0xb4, 0x13, 0x5b, 0xe2, 0x3c, + 0xd2, 0x48, 0x71, 0x1c, 0xd9, 0x8e, 0x50, 0xfe, 0x0b, 0x24, 0x16, 0xe6, 0xfe, 0x25, 0x8c, 0x19, + 0x19, 0x51, 0x87, 0x08, 0xa5, 0x1b, 0x23, 0x7f, 0x01, 0x3a, 0xe7, 0x8e, 0xde, 0x44, 0x33, 0xf9, + 0x7d, 0xfd, 0xf4, 0xfd, 0xbc, 0x67, 0xbf, 0x87, 0xb1, 0xaa, 0x80, 0xf9, 0x95, 0x14, 0x5a, 0x10, + 0x6b, 0x13, 0x9f, 0xbc, 0xc8, 0x72, 0x7d, 0x55, 0x27, 0x3e, 0x13, 0x3c, 0xc8, 0x44, 0x26, 0x02, + 0x93, 0x4c, 0xea, 0x4f, 0x46, 0x19, 0x61, 0xa2, 0xc1, 0x74, 0x72, 0xc4, 0x64, 0x53, 0xe9, 0x9d, + 0x7a, 0x94, 0xc4, 0x45, 0x5c, 0x32, 0x18, 0xe4, 0xf2, 0xab, 0x85, 0x1f, 0x5f, 0x02, 0xaf, 0x8a, + 0x58, 0xc3, 0x9a, 0x31, 0x51, 0x97, 0x9a, 0x10, 0x6c, 0x7d, 0x88, 0x39, 0xd8, 0xc8, 0x45, 0xde, + 0x3c, 0x32, 0x31, 0xe1, 0x78, 0xb6, 0x4e, 0x53, 0x09, 0x4a, 0xd9, 0x0f, 0x5c, 0xe4, 0x1d, 0x85, + 0x17, 0x37, 0x1d, 0x3d, 0xdd, 0x6b, 0xe4, 0xaa, 0xa9, 0x40, 0x16, 0x90, 0x66, 0x20, 0x83, 0xa4, + 0x96, 0x52, 0x7c, 0x0e, 0xb6, 0x75, 0xb7, 0xbe, 0xdf, 0x1d, 0xc5, 0xa7, 0x82, 0xe7, 0x1a, 0x78, + 0xa5, 0x9b, 0x3f, 0x1d, 0x3d, 0xd6, 0x82, 0x17, 0x67, 0xcb, 0xbb, 0xbb, 0x65, 0xb4, 0xab, 0x41, + 0x2e, 0xf1, 0xfc, 0xbc, 0x4e, 0x8a, 0x9c, 0xbd, 0x87, 0xc6, 0x9e, 0xba, 0xc8, 0x5b, 0xac, 0x8e, + 0xfd, 0x2d, 0xef, 0x5f, 0x22, 0x7c, 0x3a, 0x86, 0x79, 0x07, 0x22, 0x17, 0x78, 0xb6, 0xe6, 0x9b, + 0x27, 0x2a, 0xdb, 0x72, 0xa7, 0xde, 0x62, 0xf5, 0xc4, 0xdf, 0xfd, 0x46, 0x38, 0x9c, 0xe1, 0xb3, + 0xb6, 0xa3, 0x93, 0x71, 0xad, 0x0e, 0x24, 0xf2, 0x16, 0x2f, 0xce, 0x41, 0xf2, 0x5c, 0xa9, 0x5c, + 0x94, 0xca, 0x3e, 0x70, 0xa7, 0xde, 0x7c, 0x5c, 0x67, 0xfb, 0x3e, 0xf2, 0x0a, 0x1f, 0x44, 0xa2, + 0x00, 0x65, 0x1f, 0x8e, 0x07, 0x0c, 0x0e, 0xf2, 0x0e, 0x5b, 0xaf, 0x45, 0x0a, 0xf6, 0xcc, 0x0c, + 0x66, 0xd5, 0x76, 0x14, 0xdd, 0x74, 0xf4, 0xf9, 0xff, 0x87, 0x13, 0x33, 0xee, 0x87, 0x8d, 0x06, + 0x26, 0x52, 0x88, 0x8c, 0xff, 0xec, 0xe1, 0x97, 0x6b, 0x3a, 0xf9, 0x76, 0x4d, 0x27, 0xe1, 0x9b, + 0xb6, 0x77, 0xd0, 0x8f, 0xde, 0x41, 0x3f, 0x7b, 0x07, 0xfd, 0xea, 0x1d, 0xf4, 0xfd, 0xd6, 0x41, + 0xed, 0xad, 0x83, 0x3e, 0xde, 0x43, 0xcd, 0xa0, 0x04, 0x95, 0xab, 0x60, 0xb3, 0xa7, 0xc9, 0xa1, + 0x59, 0xb1, 0x97, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x8e, 0xcf, 0x89, 0xb2, 0xc2, 
0x02, 0x00, + 0x00, } func (m *TemplateAccount) Marshal() (dAtA []byte, err error) { @@ -173,9 +171,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintSpec(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Address.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -183,9 +181,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintSpec(dAtA, i, uint64(m.PublicKey.Size())) - n2, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -235,9 +233,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintSpec(dAtA, i, uint64(m.Code.Size())) - n3, err := m.Code.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Code.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -303,14 +301,7 @@ func (m *TemplateAccount) Size() (n int) { } func sovSpec(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozSpec(x uint64) (n int) { return sovSpec(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/go.mod b/go.mod index 1792aba69..3669e6a43 100644 --- a/go.mod +++ b/go.mod @@ -34,6 +34,7 @@ require ( github.com/jawher/mow.cli v1.1.0 github.com/jmhodges/levigo v1.0.0 // indirect github.com/jmoiron/sqlx v1.2.0 + github.com/kr/pretty v0.1.0 // indirect github.com/lib/pq v1.1.1 github.com/magiconair/properties v1.8.0 github.com/mattn/go-sqlite3 v1.10.0 @@ -64,6 +65,6 @@ require ( golang.org/x/text v0.3.2 // indirect google.golang.org/grpc v1.20.1 gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect + gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect gopkg.in/yaml.v2 v2.2.2 - honnef.co/go/tools v0.0.0-20190614002413-cb51c254f01b // indirect ) diff --git a/go.sum b/go.sum index deaaddd66..9ace375e2 100644 --- a/go.sum +++ b/go.sum @@ -71,7 +71,6 @@ github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pO github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= @@ -101,8 +100,10 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGi github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text 
v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= @@ -144,7 +145,6 @@ github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a h1:9a8MnZMP0X2nL github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a h1:9ZKAASQSHhDYGoxY8uLVpewe1GDZ2vu2Tr/vTdVAkFQ= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/cors v1.6.0 h1:G9tHG9lebljV9mfp9SNPDL36nCDxmo3zTlAf1YgvzmI= github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= @@ -192,11 +192,9 @@ github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1: golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f h1:R423Cnkcp5JABoeemiGEPlt9tHXFfw5kvc0yqlxRPWo= golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -206,7 +204,6 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -222,8 +219,6 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb h1:mnQlcVx8Qq8L70HV0DxUGuiuAtiEHTwF1gYJE/EL9nU= -golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= @@ -234,8 +229,8 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= @@ -244,5 +239,3 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190614002413-cb51c254f01b h1:SWAO5HXhUnouVG7YwFyCqej4vr94EiYYu4O7YRXsxBU= -honnef.co/go/tools v0.0.0-20190614002413-cb51c254f01b/go.mod h1:JlmFZigtG9vBVR3QGIQ9g/Usz4BzH+Xm6Z8iHQWRYUw= diff --git a/keys/keys.pb.go b/keys/keys.pb.go index d38370086..7458fbb32 100644 --- a/keys/keys.pb.go +++ b/keys/keys.pb.go @@ -6,14 +6,16 @@ package keys import ( context "context" fmt "fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -42,25 +44,16 @@ func (*ListRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{0} } func (m *ListRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ListRequest.Unmarshal(m, b) } func (m *ListRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ListRequest.Marshal(b, m, deterministic) } func (m *ListRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ListRequest.Merge(m, src) } func (m *ListRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ListRequest.Size(m) } func (m *ListRequest) XXX_DiscardUnknown() { xxx_messageInfo_ListRequest.DiscardUnknown(m) @@ -92,25 +85,16 @@ func (*VerifyResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{1} } func (m *VerifyResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_VerifyResponse.Unmarshal(m, b) } func (m *VerifyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_VerifyResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_VerifyResponse.Marshal(b, m, deterministic) } func (m *VerifyResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_VerifyResponse.Merge(m, src) } func (m *VerifyResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_VerifyResponse.Size(m) } func (m *VerifyResponse) XXX_DiscardUnknown() { xxx_messageInfo_VerifyResponse.DiscardUnknown(m) @@ -135,25 +119,16 @@ func (*RemoveNameResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{2} } func (m *RemoveNameResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_RemoveNameResponse.Unmarshal(m, b) } func (m *RemoveNameResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_RemoveNameResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_RemoveNameResponse.Marshal(b, m, deterministic) } func (m *RemoveNameResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_RemoveNameResponse.Merge(m, src) } func (m *RemoveNameResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_RemoveNameResponse.Size(m) } func (m *RemoveNameResponse) XXX_DiscardUnknown() { xxx_messageInfo_RemoveNameResponse.DiscardUnknown(m) @@ -178,25 +153,16 @@ func (*AddNameResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{3} } func (m *AddNameResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_AddNameResponse.Unmarshal(m, b) } func (m *AddNameResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_AddNameResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_AddNameResponse.Marshal(b, m, deterministic) } func (m *AddNameResponse) XXX_Merge(src proto.Message) { 
xxx_messageInfo_AddNameResponse.Merge(m, src) } func (m *AddNameResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_AddNameResponse.Size(m) } func (m *AddNameResponse) XXX_DiscardUnknown() { xxx_messageInfo_AddNameResponse.DiscardUnknown(m) @@ -222,25 +188,16 @@ func (*RemoveNameRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{4} } func (m *RemoveNameRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_RemoveNameRequest.Unmarshal(m, b) } func (m *RemoveNameRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_RemoveNameRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_RemoveNameRequest.Marshal(b, m, deterministic) } func (m *RemoveNameRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_RemoveNameRequest.Merge(m, src) } func (m *RemoveNameRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_RemoveNameRequest.Size(m) } func (m *RemoveNameRequest) XXX_DiscardUnknown() { xxx_messageInfo_RemoveNameRequest.DiscardUnknown(m) @@ -275,25 +232,16 @@ func (*GenRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{5} } func (m *GenRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GenRequest.Unmarshal(m, b) } func (m *GenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GenRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GenRequest.Marshal(b, m, deterministic) } func (m *GenRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_GenRequest.Merge(m, src) } func (m *GenRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GenRequest.Size(m) } func (m *GenRequest) XXX_DiscardUnknown() { xxx_messageInfo_GenRequest.DiscardUnknown(m) @@ -340,25 +288,16 @@ func (*GenResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{6} } func (m *GenResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GenResponse.Unmarshal(m, b) } func (m *GenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GenResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GenResponse.Marshal(b, m, deterministic) } func (m *GenResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_GenResponse.Merge(m, src) } func (m *GenResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GenResponse.Size(m) } func (m *GenResponse) XXX_DiscardUnknown() { xxx_messageInfo_GenResponse.DiscardUnknown(m) @@ -392,25 +331,16 @@ func (*PubRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{7} } func (m *PubRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_PubRequest.Unmarshal(m, b) } func (m *PubRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PubRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - 
return b[:n], nil - } + return xxx_messageInfo_PubRequest.Marshal(b, m, deterministic) } func (m *PubRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_PubRequest.Merge(m, src) } func (m *PubRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_PubRequest.Size(m) } func (m *PubRequest) XXX_DiscardUnknown() { xxx_messageInfo_PubRequest.DiscardUnknown(m) @@ -451,25 +381,16 @@ func (*PubResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{8} } func (m *PubResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_PubResponse.Unmarshal(m, b) } func (m *PubResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PubResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_PubResponse.Marshal(b, m, deterministic) } func (m *PubResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_PubResponse.Merge(m, src) } func (m *PubResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_PubResponse.Size(m) } func (m *PubResponse) XXX_DiscardUnknown() { xxx_messageInfo_PubResponse.DiscardUnknown(m) @@ -510,25 +431,16 @@ func (*ImportJSONRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{9} } func (m *ImportJSONRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ImportJSONRequest.Unmarshal(m, b) } func (m *ImportJSONRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ImportJSONRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ImportJSONRequest.Marshal(b, m, deterministic) } func (m *ImportJSONRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ImportJSONRequest.Merge(m, src) } func (m *ImportJSONRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ImportJSONRequest.Size(m) } func (m *ImportJSONRequest) XXX_DiscardUnknown() { xxx_messageInfo_ImportJSONRequest.DiscardUnknown(m) @@ -568,25 +480,16 @@ func (*ImportResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{10} } func (m *ImportResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ImportResponse.Unmarshal(m, b) } func (m *ImportResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ImportResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ImportResponse.Marshal(b, m, deterministic) } func (m *ImportResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ImportResponse.Merge(m, src) } func (m *ImportResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ImportResponse.Size(m) } func (m *ImportResponse) XXX_DiscardUnknown() { xxx_messageInfo_ImportResponse.DiscardUnknown(m) @@ -622,25 +525,16 @@ func (*ImportRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{11} } func (m *ImportRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ImportRequest.Unmarshal(m, b) } func (m *ImportRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if 
deterministic { - return xxx_messageInfo_ImportRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ImportRequest.Marshal(b, m, deterministic) } func (m *ImportRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ImportRequest.Merge(m, src) } func (m *ImportRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ImportRequest.Size(m) } func (m *ImportRequest) XXX_DiscardUnknown() { xxx_messageInfo_ImportRequest.DiscardUnknown(m) @@ -696,25 +590,16 @@ func (*ExportRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{12} } func (m *ExportRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ExportRequest.Unmarshal(m, b) } func (m *ExportRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ExportRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ExportRequest.Marshal(b, m, deterministic) } func (m *ExportRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ExportRequest.Merge(m, src) } func (m *ExportRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ExportRequest.Size(m) } func (m *ExportRequest) XXX_DiscardUnknown() { xxx_messageInfo_ExportRequest.DiscardUnknown(m) @@ -764,25 +649,16 @@ func (*ExportResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{13} } func (m *ExportResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ExportResponse.Unmarshal(m, b) } func (m *ExportResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ExportResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ExportResponse.Marshal(b, m, deterministic) } func (m *ExportResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ExportResponse.Merge(m, src) } func (m *ExportResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ExportResponse.Size(m) } func (m *ExportResponse) XXX_DiscardUnknown() { xxx_messageInfo_ExportResponse.DiscardUnknown(m) @@ -839,25 +715,16 @@ func (*SignRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{14} } func (m *SignRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_SignRequest.Unmarshal(m, b) } func (m *SignRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_SignRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_SignRequest.Marshal(b, m, deterministic) } func (m *SignRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_SignRequest.Merge(m, src) } func (m *SignRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_SignRequest.Size(m) } func (m *SignRequest) XXX_DiscardUnknown() { xxx_messageInfo_SignRequest.DiscardUnknown(m) @@ -911,25 +778,16 @@ func (*SignResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{15} } func (m *SignResponse) XXX_Unmarshal(b []byte) error { - return 
m.Unmarshal(b) + return xxx_messageInfo_SignResponse.Unmarshal(m, b) } func (m *SignResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_SignResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_SignResponse.Marshal(b, m, deterministic) } func (m *SignResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_SignResponse.Merge(m, src) } func (m *SignResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_SignResponse.Size(m) } func (m *SignResponse) XXX_DiscardUnknown() { xxx_messageInfo_SignResponse.DiscardUnknown(m) @@ -964,25 +822,16 @@ func (*VerifyRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{16} } func (m *VerifyRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_VerifyRequest.Unmarshal(m, b) } func (m *VerifyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_VerifyRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_VerifyRequest.Marshal(b, m, deterministic) } func (m *VerifyRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_VerifyRequest.Merge(m, src) } func (m *VerifyRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_VerifyRequest.Size(m) } func (m *VerifyRequest) XXX_DiscardUnknown() { xxx_messageInfo_VerifyRequest.DiscardUnknown(m) @@ -1030,25 +879,16 @@ func (*HashRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{17} } func (m *HashRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_HashRequest.Unmarshal(m, b) } func (m *HashRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_HashRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_HashRequest.Marshal(b, m, deterministic) } func (m *HashRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_HashRequest.Merge(m, src) } func (m *HashRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_HashRequest.Size(m) } func (m *HashRequest) XXX_DiscardUnknown() { xxx_messageInfo_HashRequest.DiscardUnknown(m) @@ -1088,25 +928,16 @@ func (*HashResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{18} } func (m *HashResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_HashResponse.Unmarshal(m, b) } func (m *HashResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_HashResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_HashResponse.Marshal(b, m, deterministic) } func (m *HashResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_HashResponse.Merge(m, src) } func (m *HashResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_HashResponse.Size(m) } func (m *HashResponse) XXX_DiscardUnknown() { xxx_messageInfo_HashResponse.DiscardUnknown(m) @@ -1140,25 +971,16 @@ func (*KeyID) Descriptor() ([]byte, []int) { 
return fileDescriptor_9084e97af2346a26, []int{19} } func (m *KeyID) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_KeyID.Unmarshal(m, b) } func (m *KeyID) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_KeyID.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_KeyID.Marshal(b, m, deterministic) } func (m *KeyID) XXX_Merge(src proto.Message) { xxx_messageInfo_KeyID.Merge(m, src) } func (m *KeyID) XXX_Size() int { - return m.Size() + return xxx_messageInfo_KeyID.Size(m) } func (m *KeyID) XXX_DiscardUnknown() { xxx_messageInfo_KeyID.DiscardUnknown(m) @@ -1198,25 +1020,16 @@ func (*ListResponse) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{20} } func (m *ListResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ListResponse.Unmarshal(m, b) } func (m *ListResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ListResponse.Marshal(b, m, deterministic) } func (m *ListResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ListResponse.Merge(m, src) } func (m *ListResponse) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ListResponse.Size(m) } func (m *ListResponse) XXX_DiscardUnknown() { xxx_messageInfo_ListResponse.DiscardUnknown(m) @@ -1250,25 +1063,16 @@ func (*AddNameRequest) Descriptor() ([]byte, []int) { return fileDescriptor_9084e97af2346a26, []int{21} } func (m *AddNameRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_AddNameRequest.Unmarshal(m, b) } func (m *AddNameRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_AddNameRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_AddNameRequest.Marshal(b, m, deterministic) } func (m *AddNameRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_AddNameRequest.Merge(m, src) } func (m *AddNameRequest) XXX_Size() int { - return m.Size() + return xxx_messageInfo_AddNameRequest.Size(m) } func (m *AddNameRequest) XXX_DiscardUnknown() { xxx_messageInfo_AddNameRequest.DiscardUnknown(m) @@ -1344,56 +1148,55 @@ func init() { proto.RegisterFile("keys.proto", fileDescriptor_9084e97af2346a26) func init() { golang_proto.RegisterFile("keys.proto", fileDescriptor_9084e97af2346a26) } var fileDescriptor_9084e97af2346a26 = []byte{ - // 769 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x5d, 0x4f, 0x13, 0x4d, - 0x14, 0xce, 0x76, 0x17, 0x5e, 0x7a, 0xb6, 0xf4, 0xa5, 0x2b, 0xc6, 0x66, 0x83, 0x0d, 0x99, 0x1b, - 0x88, 0x49, 0x5b, 0x53, 0x12, 0x63, 0xe4, 0x82, 0xf0, 0x61, 0x10, 0xab, 0x48, 0x8a, 0xf1, 0xc2, - 0xc4, 0x8b, 0x2d, 0x3d, 0xb4, 0x0d, 0xb4, 0x5b, 0x67, 0x76, 0x91, 0xbd, 0xf0, 0xd6, 0xdf, 0xe5, - 0x25, 0x57, 0xc6, 0x9f, 0x60, 0xe0, 0x8f, 0x98, 0x99, 0xd9, 0xe9, 0xce, 0x2c, 0x88, 0x4d, 0xbc, - 0x9b, 0xf3, 0x35, 0xcf, 0x39, 0x67, 0x9e, 0x3e, 0x5b, 0x80, 0x33, 0x4c, 0x58, 0x63, 0x42, 0xc3, - 0x28, 0xf4, 0x1c, 0x7e, 0xf6, 0xeb, 0xfd, 
0x61, 0x34, 0x88, 0xbb, 0x8d, 0x93, 0x70, 0xd4, 0xec, - 0x87, 0xfd, 0xb0, 0x29, 0x82, 0xdd, 0xf8, 0x54, 0x58, 0xc2, 0x10, 0x27, 0x59, 0xe4, 0x97, 0x4e, - 0x68, 0x32, 0x89, 0x52, 0x8b, 0xac, 0x81, 0xfb, 0x66, 0xc8, 0xa2, 0x0e, 0x7e, 0x8e, 0x91, 0x45, - 0x5e, 0x15, 0xfe, 0x6b, 0x63, 0x72, 0x18, 0x8c, 0xb0, 0x6a, 0xad, 0x5a, 0xeb, 0xc5, 0x8e, 0x32, - 0xc9, 0x12, 0x94, 0x3f, 0x20, 0x1d, 0x9e, 0x26, 0x1d, 0x64, 0x93, 0x70, 0xcc, 0x90, 0x2c, 0x83, - 0xd7, 0xc1, 0x51, 0x78, 0x81, 0x3c, 0x3e, 0xf5, 0x56, 0xe0, 0xff, 0xed, 0x5e, 0xcf, 0x70, 0xd5, - 0xa1, 0xa2, 0x27, 0xfe, 0x0d, 0xa9, 0x07, 0xb0, 0x8f, 0x63, 0x95, 0x57, 0x03, 0x38, 0x0a, 0x18, - 0x9b, 0x0c, 0x68, 0xc0, 0x54, 0xaa, 0xe6, 0xf1, 0x56, 0xa0, 0xb8, 0x1b, 0xd3, 0x0b, 0x7c, 0x9f, - 0x4c, 0xb0, 0x5a, 0x10, 0xe1, 0xcc, 0xa1, 0xa3, 0xd8, 0x26, 0xca, 0x1a, 0xb8, 0x02, 0x45, 0xf6, - 0xc8, 0x13, 0xb7, 0x7b, 0x3d, 0x8a, 0x8c, 0xa9, 0x76, 0x52, 0x93, 0xbc, 0x00, 0x38, 0x8a, 0xbb, - 0x5a, 0xdb, 0x77, 0xe7, 0x79, 0x1e, 0x38, 0x02, 0x47, 0xf6, 0x20, 0xce, 0xe4, 0x00, 0x5c, 0x51, - 0x9b, 0x82, 0xac, 0x40, 0xf1, 0x28, 0xee, 0x9e, 0x0f, 0x4f, 0xda, 0x98, 0x88, 0xf2, 0x52, 0x27, - 0x73, 0xdc, 0x3f, 0x09, 0xd9, 0x87, 0xca, 0xc1, 0x68, 0x12, 0xd2, 0xe8, 0xf5, 0xf1, 0xbb, 0xc3, - 0x59, 0x97, 0xe3, 0x81, 0xc3, 0xd3, 0x55, 0x4f, 0xfc, 0x4c, 0x9e, 0x40, 0x59, 0x5e, 0x34, 0xc3, - 0xec, 0x5f, 0x61, 0x51, 0xe5, 0xce, 0x0c, 0x98, 0x5f, 0x82, 0x39, 0x97, 0x9d, 0x7f, 0x21, 0x1f, - 0x16, 0xda, 0x98, 0xec, 0x24, 0x11, 0xb2, 0xaa, 0x23, 0x56, 0x32, 0xb5, 0xc9, 0x27, 0x58, 0x7c, - 0x79, 0xf9, 0xaf, 0xf0, 0xda, 0x74, 0xb6, 0x39, 0xdd, 0x37, 0x0b, 0xca, 0xea, 0xfe, 0xfc, 0x0b, - 0x9d, 0xe5, 0x5f, 0xe8, 0x0c, 0x13, 0x01, 0x4f, 0x87, 0x17, 0x41, 0x84, 0x3c, 0x5c, 0x10, 0x61, - 0xcd, 0x93, 0x87, 0x2a, 0x65, 0xe4, 0x30, 0x76, 0xe0, 0xe4, 0xdf, 0x36, 0x06, 0xf7, 0x78, 0xd8, - 0x9f, 0x99, 0xf2, 0x1a, 0x4c, 0xe1, 0x6e, 0x0e, 0xda, 0xe6, 0xfc, 0x6f, 0x91, 0xb1, 0xa0, 0x8f, - 0xe9, 0x7e, 0x95, 0x49, 0xb6, 0xa0, 0x24, 0x61, 0xd3, 0xe1, 0x9b, 0x50, 0xe4, 0x76, 0x10, 0xc5, - 0x54, 0x5e, 0xe1, 0xb6, 0x2a, 0x8d, 0x54, 0x2d, 0xa6, 0x81, 0x4e, 0x96, 0x43, 0x2e, 0x61, 0x51, - 0x69, 0x82, 0xec, 0xdc, 0x20, 0x78, 0x21, 0x4f, 0x70, 0xad, 0x13, 0xdb, 0xe8, 0xc4, 0x44, 0x9e, - 0x9b, 0x01, 0x79, 0x17, 0xdc, 0x57, 0x01, 0x1b, 0x28, 0x5c, 0x1f, 0x16, 0xb8, 0x19, 0xf1, 0xed, - 0xca, 0x7d, 0x4d, 0x6d, 0x1d, 0xb5, 0x60, 0xce, 0x4f, 0xa0, 0x24, 0x2f, 0x49, 0xe7, 0xf7, 0xc0, - 0xe1, 0x76, 0x7a, 0x83, 0x38, 0x93, 0x4d, 0x98, 0x6b, 0x63, 0x72, 0xb0, 0x77, 0xcf, 0x0f, 0x5f, - 0xd3, 0x98, 0xc2, 0xaa, 0xad, 0x6b, 0x4c, 0x1d, 0x4a, 0x52, 0x5c, 0x53, 0x80, 0xc7, 0x60, 0x4b, - 0x5e, 0xd9, 0xeb, 0x6e, 0xcb, 0x6d, 0x08, 0x25, 0x17, 0xb7, 0x77, 0xb8, 0x9f, 0xec, 0x41, 0x79, - 0x2a, 0x9d, 0xba, 0x48, 0x8e, 0x4d, 0x91, 0x1c, 0xe7, 0x58, 0x6d, 0x72, 0xa0, 0xf5, 0xc3, 0x01, - 0xa7, 0x8d, 0x09, 0xf3, 0x5a, 0x42, 0xe1, 0x90, 0x06, 0x11, 0xf2, 0xed, 0x2f, 0x49, 0xbc, 0x4c, - 0x5a, 0xfd, 0x8a, 0xe6, 0x49, 0x3b, 0x7c, 0xaa, 0x3d, 0xa0, 0xaa, 0xc8, 0xd4, 0x4f, 0x55, 0xe8, - 0x9a, 0x56, 0x07, 0x87, 0x3f, 0x8b, 0x97, 0x86, 0x34, 0x1e, 0xfb, 0x9e, 0xee, 0x4a, 0xd3, 0x37, - 0x60, 0x5e, 0x52, 0xc6, 0x7b, 0x20, 0xa3, 0x06, 0x81, 0xfc, 0x65, 0xd3, 0x99, 0x15, 0x49, 0x19, - 0x52, 0x45, 0x86, 0x28, 0xa9, 0xa2, 0x9c, 0xaa, 0x6d, 0x02, 0x64, 0x82, 0xe9, 0x3d, 0xd2, 0x73, - 0x34, 0x09, 0xfd, 0x43, 0xf1, 0x06, 0xcc, 0x4b, 0x65, 0x50, 0x88, 0x86, 0x0e, 0xa9, 0xa2, 0x9c, - 0x78, 0xd4, 0x25, 0x7f, 0xd4, 0x2a, 0x34, 0x82, 0xaa, 0x55, 0x18, 0x74, 0xdb, 0x02, 0xc8, 0x3e, - 0x8b, 0xaa, 0xc1, 0x5b, 0x1f, 0x4a, 0xbf, 0x7a, 0x3b, 0x90, 0xe1, 
0x71, 0x7a, 0x29, 0x3c, 0xed, - 0x3b, 0xae, 0xf0, 0x0c, 0xf6, 0x3d, 0x13, 0x94, 0x11, 0x60, 0x69, 0xff, 0x26, 0xdb, 0xfc, 0x87, - 0x39, 0xaf, 0xac, 0xdb, 0x79, 0x7e, 0x75, 0x5d, 0xb3, 0x7e, 0x5e, 0xd7, 0xac, 0x5f, 0xd7, 0x35, - 0xeb, 0xfb, 0x4d, 0xcd, 0xba, 0xba, 0xa9, 0x59, 0x1f, 0x89, 0xf6, 0xaf, 0x63, 0x90, 0x4c, 0x90, - 0x9e, 0x63, 0xaf, 0x8f, 0xb4, 0xd9, 0x8d, 0x29, 0x0d, 0xbf, 0x34, 0xf9, 0x4d, 0xdd, 0x79, 0xf1, - 0x1f, 0x63, 0xe3, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6c, 0x2b, 0x01, 0x49, 0xb4, 0x08, 0x00, - 0x00, + // 763 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xcf, 0x4f, 0x13, 0x41, + 0x14, 0x4e, 0xbb, 0x0b, 0xd2, 0xb7, 0xa5, 0xd2, 0x15, 0x63, 0xb3, 0xc1, 0x86, 0xcc, 0x05, 0x62, + 0xd2, 0xd6, 0x14, 0xe3, 0x41, 0x0e, 0x84, 0x1f, 0x06, 0xb1, 0x8a, 0xa4, 0x18, 0x0f, 0x26, 0x1e, + 0xb6, 0xf4, 0xd1, 0x36, 0xd0, 0xee, 0x3a, 0xb3, 0x8b, 0xec, 0xc1, 0xab, 0x7f, 0x97, 0x47, 0x4f, + 0x9e, 0x3d, 0x1a, 0xf8, 0x47, 0xcc, 0xcc, 0xec, 0x74, 0x67, 0x16, 0xc4, 0x26, 0xde, 0xe6, 0xfd, + 0x9a, 0xef, 0xbd, 0x37, 0x5f, 0xbf, 0x2d, 0xc0, 0x19, 0x26, 0xac, 0x19, 0xd2, 0x20, 0x0a, 0x5c, + 0x9b, 0x9f, 0xbd, 0xc6, 0x60, 0x14, 0x0d, 0xe3, 0x5e, 0xf3, 0x24, 0x18, 0xb7, 0x06, 0xc1, 0x20, + 0x68, 0x89, 0x60, 0x2f, 0x3e, 0x15, 0x96, 0x30, 0xc4, 0x49, 0x16, 0x79, 0xe5, 0x13, 0x9a, 0x84, + 0x51, 0x6a, 0x91, 0x35, 0x70, 0xde, 0x8c, 0x58, 0xd4, 0xc5, 0xcf, 0x31, 0xb2, 0xc8, 0xad, 0xc1, + 0xbd, 0x0e, 0x26, 0x87, 0xfe, 0x18, 0x6b, 0x85, 0xd5, 0xc2, 0x7a, 0xa9, 0xab, 0x4c, 0xb2, 0x04, + 0x95, 0x0f, 0x48, 0x47, 0xa7, 0x49, 0x17, 0x59, 0x18, 0x4c, 0x18, 0x92, 0x65, 0x70, 0xbb, 0x38, + 0x0e, 0x2e, 0x90, 0xc7, 0xa7, 0xde, 0x2a, 0xdc, 0xdf, 0xee, 0xf7, 0x0d, 0x57, 0x03, 0xaa, 0x7a, + 0xe2, 0xbf, 0x90, 0xfa, 0x00, 0xfb, 0x38, 0x51, 0x79, 0x75, 0x80, 0x23, 0x9f, 0xb1, 0x70, 0x48, + 0x7d, 0xa6, 0x52, 0x35, 0x8f, 0xbb, 0x02, 0xa5, 0xdd, 0x98, 0x5e, 0xe0, 0xfb, 0x24, 0xc4, 0x5a, + 0x51, 0x84, 0x33, 0x87, 0x8e, 0x62, 0x99, 0x28, 0x6b, 0xe0, 0x08, 0x14, 0xd9, 0x23, 0x4f, 0xdc, + 0xee, 0xf7, 0x29, 0x32, 0xa6, 0xda, 0x49, 0x4d, 0xf2, 0x02, 0xe0, 0x28, 0xee, 0x69, 0x6d, 0xdf, + 0x9e, 0xe7, 0xba, 0x60, 0x0b, 0x1c, 0xd9, 0x83, 0x38, 0x93, 0x03, 0x70, 0x44, 0x6d, 0x0a, 0xb2, + 0x02, 0xa5, 0xa3, 0xb8, 0x77, 0x3e, 0x3a, 0xe9, 0x60, 0x22, 0xca, 0xcb, 0xdd, 0xcc, 0x71, 0xf7, + 0x24, 0x64, 0x1f, 0xaa, 0x07, 0xe3, 0x30, 0xa0, 0xd1, 0xeb, 0xe3, 0x77, 0x87, 0xb3, 0x2e, 0xc7, + 0x05, 0x9b, 0xa7, 0xab, 0x9e, 0xf8, 0x99, 0x3c, 0x81, 0x8a, 0xbc, 0x68, 0x86, 0xd9, 0xbf, 0xc2, + 0xa2, 0xca, 0x9d, 0x19, 0x30, 0xbf, 0x04, 0x73, 0x2e, 0x2b, 0xff, 0x42, 0x1e, 0x2c, 0x74, 0x30, + 0xd9, 0x49, 0x22, 0x64, 0x35, 0x5b, 0xac, 0x64, 0x6a, 0x93, 0x4f, 0xb0, 0xf8, 0xf2, 0xf2, 0x7f, + 0xe1, 0xb5, 0xe9, 0x2c, 0x73, 0xba, 0x6f, 0x05, 0xa8, 0xa8, 0xfb, 0xf3, 0x2f, 0x74, 0x96, 0x7f, + 0xa1, 0x33, 0x4c, 0x04, 0x3c, 0x1d, 0x5d, 0xf8, 0x11, 0xf2, 0x70, 0x51, 0x84, 0x35, 0x4f, 0x1e, + 0xaa, 0x9c, 0x91, 0xc3, 0xd8, 0x81, 0x9d, 0x7f, 0xdb, 0x18, 0x9c, 0xe3, 0xd1, 0x60, 0x66, 0xca, + 0x6b, 0x30, 0xc5, 0xdb, 0x39, 0x68, 0x99, 0xf3, 0xbf, 0x45, 0xc6, 0xfc, 0x01, 0xa6, 0xfb, 0x55, + 0x26, 0xd9, 0x82, 0xb2, 0x84, 0x4d, 0x87, 0x6f, 0x41, 0x89, 0xdb, 0x7e, 0x14, 0x53, 0x79, 0x85, + 0xd3, 0xae, 0x36, 0x53, 0xb5, 0x98, 0x06, 0xba, 0x59, 0x0e, 0xb9, 0x84, 0x45, 0xa5, 0x09, 0xb2, + 0x73, 0x83, 0xe0, 0xc5, 0x3c, 0xc1, 0xb5, 0x4e, 0x2c, 0xa3, 0x13, 0x13, 0x79, 0x6e, 0x06, 0xe4, + 0x5d, 0x70, 0x5e, 0xf9, 0x6c, 0xa8, 0x70, 0x3d, 0x58, 0xe0, 0x66, 0xc4, 0xb7, 0x2b, 0xf7, 0x35, + 0xb5, 0x75, 0xd4, 0xa2, 0x39, 0x3f, 
0x81, 0xb2, 0xbc, 0x24, 0x9d, 0xdf, 0x05, 0x9b, 0xdb, 0xe9, + 0x0d, 0xe2, 0x4c, 0x36, 0x61, 0xae, 0x83, 0xc9, 0xc1, 0xde, 0x1d, 0x3f, 0x7c, 0x4d, 0x63, 0x8a, + 0xab, 0x96, 0xae, 0x31, 0x0d, 0x28, 0x4b, 0x71, 0x4d, 0x01, 0x1e, 0x83, 0x25, 0x79, 0x65, 0xad, + 0x3b, 0x6d, 0xa7, 0x29, 0x94, 0x5c, 0xdc, 0xde, 0xe5, 0x7e, 0xb2, 0x07, 0x95, 0xa9, 0x74, 0xea, + 0x22, 0x39, 0x31, 0x45, 0x72, 0x92, 0x63, 0xb5, 0xc9, 0x81, 0xf6, 0x4f, 0x1b, 0xec, 0x0e, 0x26, + 0xcc, 0x6d, 0x0b, 0x85, 0x43, 0xea, 0x47, 0xc8, 0xb7, 0xbf, 0x24, 0xf1, 0x32, 0x69, 0xf5, 0xaa, + 0x9a, 0x27, 0xed, 0xf0, 0xa9, 0xf6, 0x80, 0xaa, 0x22, 0x53, 0x3f, 0x55, 0xa1, 0x6b, 0x5a, 0x03, + 0x6c, 0xfe, 0x2c, 0x6e, 0x1a, 0xd2, 0x78, 0xec, 0xb9, 0xba, 0x2b, 0x4d, 0xdf, 0x80, 0x79, 0x49, + 0x19, 0xf7, 0x81, 0x8c, 0x1a, 0x04, 0xf2, 0x96, 0x4d, 0x67, 0x56, 0x24, 0x65, 0x48, 0x15, 0x19, + 0xa2, 0xa4, 0x8a, 0x72, 0xaa, 0xb6, 0x09, 0x90, 0x09, 0xa6, 0xfb, 0x48, 0xcf, 0xd1, 0x24, 0xf4, + 0x2f, 0xc5, 0x1b, 0x30, 0x2f, 0x95, 0x41, 0x21, 0x1a, 0x3a, 0xa4, 0x8a, 0x72, 0xe2, 0xd1, 0x90, + 0xfc, 0x51, 0xab, 0xd0, 0x08, 0xaa, 0x56, 0x61, 0xd0, 0x6d, 0x0b, 0x20, 0xfb, 0x2c, 0xaa, 0x06, + 0x6f, 0x7c, 0x28, 0xbd, 0xda, 0xcd, 0x40, 0x86, 0xc7, 0xe9, 0xa5, 0xf0, 0xb4, 0xef, 0xb8, 0xc2, + 0x33, 0xd8, 0xf7, 0x5c, 0x50, 0x46, 0x80, 0xa5, 0xfd, 0x9b, 0x6c, 0xf3, 0x1e, 0xe6, 0xbc, 0xb2, + 0x6e, 0xe7, 0xd9, 0xaf, 0xab, 0x7a, 0xe1, 0xf7, 0x55, 0xbd, 0xf0, 0xfd, 0xba, 0x5e, 0xf8, 0x71, + 0x5d, 0x2f, 0x7c, 0x24, 0xda, 0x3f, 0x8e, 0x61, 0x12, 0x22, 0x3d, 0xc7, 0xfe, 0x00, 0x69, 0xab, + 0x17, 0x53, 0x1a, 0x7c, 0x69, 0xf1, 0x5b, 0x7a, 0xf3, 0xe2, 0xff, 0xc5, 0xc6, 0x9f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x32, 0xe4, 0x71, 0xd8, 0xb0, 0x08, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1543,6 +1346,44 @@ type KeysServer interface { AddName(context.Context, *AddNameRequest) (*AddNameResponse, error) } +// UnimplementedKeysServer can be embedded to have forward compatible implementations. 
+type UnimplementedKeysServer struct { +} + +func (*UnimplementedKeysServer) GenerateKey(ctx context.Context, req *GenRequest) (*GenResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GenerateKey not implemented") +} +func (*UnimplementedKeysServer) PublicKey(ctx context.Context, req *PubRequest) (*PubResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PublicKey not implemented") +} +func (*UnimplementedKeysServer) Sign(ctx context.Context, req *SignRequest) (*SignResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") +} +func (*UnimplementedKeysServer) Verify(ctx context.Context, req *VerifyRequest) (*VerifyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") +} +func (*UnimplementedKeysServer) Import(ctx context.Context, req *ImportRequest) (*ImportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Import not implemented") +} +func (*UnimplementedKeysServer) ImportJSON(ctx context.Context, req *ImportJSONRequest) (*ImportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ImportJSON not implemented") +} +func (*UnimplementedKeysServer) Export(ctx context.Context, req *ExportRequest) (*ExportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Export not implemented") +} +func (*UnimplementedKeysServer) Hash(ctx context.Context, req *HashRequest) (*HashResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Hash not implemented") +} +func (*UnimplementedKeysServer) RemoveName(ctx context.Context, req *RemoveNameRequest) (*RemoveNameResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemoveName not implemented") +} +func (*UnimplementedKeysServer) List(ctx context.Context, req *ListRequest) (*ListResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method List not implemented") +} +func (*UnimplementedKeysServer) AddName(ctx context.Context, req *AddNameRequest) (*AddNameResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddName not implemented") +} + func RegisterKeysServer(s *grpc.Server, srv KeysServer) { s.RegisterService(&_Keys_serviceDesc, srv) } @@ -1798,974 +1639,240 @@ var _Keys_serviceDesc = grpc.ServiceDesc{ Metadata: "keys.proto", } -func (m *ListRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *ListRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *ListRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.KeyName) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.KeyName))) - i += copy(dAtA[i:], m.KeyName) + l = len(m.KeyName) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *VerifyResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *VerifyResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *VerifyResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += 
len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *RemoveNameResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *RemoveNameResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *RemoveNameResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *AddNameResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *AddNameResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *AddNameResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *RemoveNameRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *RemoveNameRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *RemoveNameRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.KeyName) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.KeyName))) - i += copy(dAtA[i:], m.KeyName) + l = len(m.KeyName) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *GenRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *GenRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *GenRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Passphrase) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Passphrase))) - i += copy(dAtA[i:], m.Passphrase) - } - if len(m.CurveType) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.CurveType))) - i += copy(dAtA[i:], m.CurveType) + l = len(m.Passphrase) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } - if len(m.KeyName) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.KeyName))) - i += copy(dAtA[i:], m.KeyName) + l = len(m.CurveType) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.KeyName) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *GenResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *GenResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *GenResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Address) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) + l = len(m.Address) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if 
m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *PubRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *PubRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *PubRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Address) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) - } - if len(m.Name) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Name))) - i += copy(dAtA[i:], m.Name) + l = len(m.Address) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.Name) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *PubResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *PubResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *PubResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.PublicKey) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.PublicKey))) - i += copy(dAtA[i:], m.PublicKey) - } - if len(m.CurveType) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.CurveType))) - i += copy(dAtA[i:], m.CurveType) + l = len(m.PublicKey) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.CurveType) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *ImportJSONRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *ImportJSONRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *ImportJSONRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Passphrase) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Passphrase))) - i += copy(dAtA[i:], m.Passphrase) - } - if len(m.JSON) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.JSON))) - i += copy(dAtA[i:], m.JSON) + l = len(m.Passphrase) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.JSON) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *ImportResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *ImportResponse) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *ImportResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Address) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) + l = len(m.Address) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += 
copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *ImportRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *ImportRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *ImportRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Passphrase) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Passphrase))) - i += copy(dAtA[i:], m.Passphrase) - } - if len(m.Name) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Name))) - i += copy(dAtA[i:], m.Name) - } - if len(m.CurveType) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.CurveType))) - i += copy(dAtA[i:], m.CurveType) - } - if len(m.KeyBytes) > 0 { - dAtA[i] = 0x22 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.KeyBytes))) - i += copy(dAtA[i:], m.KeyBytes) + l = len(m.Passphrase) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + l = len(m.Name) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } - return i, nil -} - -func (m *ExportRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err + l = len(m.CurveType) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } - return dAtA[:n], nil -} - -func (m *ExportRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Passphrase) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Passphrase))) - i += copy(dAtA[i:], m.Passphrase) - } - if len(m.Name) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Name))) - i += copy(dAtA[i:], m.Name) - } - if len(m.Address) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) + l = len(m.KeyBytes) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += len(m.XXX_unrecognized) } - return i, nil + return n } -func (m *ExportResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err +func (m *ExportRequest) Size() (n int) { + if m == nil { + return 0 } - return dAtA[:n], nil -} - -func (m *ExportResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i var l int _ = l - if len(m.Publickey) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Publickey))) - i += copy(dAtA[i:], m.Publickey) - } - if len(m.Privatekey) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Privatekey))) - i += copy(dAtA[i:], m.Privatekey) - } - if len(m.Address) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) - } - if len(m.CurveType) > 0 { - dAtA[i] = 0x22 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.CurveType))) - i += copy(dAtA[i:], m.CurveType) + l = len(m.Passphrase) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.Name) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) + } + l = len(m.Address) + if l > 0 { + n += 1 + l + sovKeys(uint64(l)) } if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) + n += 
len(m.XXX_unrecognized) } - return i, nil -} - -func (m *SignRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *SignRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Passphrase) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Passphrase))) - i += copy(dAtA[i:], m.Passphrase) - } - if len(m.Address) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) - } - if len(m.Name) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Name))) - i += copy(dAtA[i:], m.Name) - } - if len(m.Message) > 0 { - dAtA[i] = 0x22 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Message))) - i += copy(dAtA[i:], m.Message) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *SignResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *SignResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Signature != nil { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(m.Signature.Size())) - n1, err := m.Signature.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n1 - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *VerifyRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *VerifyRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.PublicKey) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.PublicKey))) - i += copy(dAtA[i:], m.PublicKey) - } - if len(m.Message) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Message))) - i += copy(dAtA[i:], m.Message) - } - if m.Signature != nil { - dAtA[i] = 0x2a - i++ - i = encodeVarintKeys(dAtA, i, uint64(m.Signature.Size())) - n2, err := m.Signature.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n2 - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *HashRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *HashRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Hashtype) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Hashtype))) - i += copy(dAtA[i:], m.Hashtype) - } - if len(m.Message) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Message))) - i += copy(dAtA[i:], m.Message) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *HashResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *HashResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if 
len(m.Hash) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Hash))) - i += copy(dAtA[i:], m.Hash) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *KeyID) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *KeyID) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Address) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) - } - if len(m.KeyName) > 0 { - for _, s := range m.KeyName { - dAtA[i] = 0x12 - i++ - l = len(s) - for l >= 1<<7 { - dAtA[i] = uint8(uint64(l)&0x7f | 0x80) - l >>= 7 - i++ - } - dAtA[i] = uint8(l) - i++ - i += copy(dAtA[i:], s) - } - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ListResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ListResponse) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Key) > 0 { - for _, msg := range m.Key { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(msg.Size())) - n, err := msg.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n - } - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *AddNameRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *AddNameRequest) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Keyname) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Keyname))) - i += copy(dAtA[i:], m.Keyname) - } - if len(m.Address) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintKeys(dAtA, i, uint64(len(m.Address))) - i += copy(dAtA[i:], m.Address) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func encodeVarintKeys(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} -func (m *ListRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.KeyName) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *VerifyResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *RemoveNameResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *AddNameResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *RemoveNameRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.KeyName) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func 
(m *GenRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Passphrase) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.CurveType) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.KeyName) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *GenResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Address) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *PubRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Address) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.Name) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *PubResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.PublicKey) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.CurveType) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *ImportJSONRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Passphrase) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.JSON) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *ImportResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Address) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *ImportRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Passphrase) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.Name) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.CurveType) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.KeyBytes) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *ExportRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Passphrase) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.Name) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - l = len(m.Address) - if l > 0 { - n += 1 + l + sovKeys(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n + return n } func (m *ExportResponse) Size() (n int) { @@ -2961,2621 +2068,8 @@ func (m *AddNameRequest) Size() (n int) { } func sovKeys(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozKeys(x uint64) (n int) { return sovKeys(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *ListRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: 
ListRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field KeyName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.KeyName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *VerifyResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: VerifyResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: VerifyResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RemoveNameResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RemoveNameResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RemoveNameResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *AddNameResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: AddNameResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: AddNameResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RemoveNameRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RemoveNameRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RemoveNameRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field KeyName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.KeyName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GenRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GenRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GenRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Passphrase", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Passphrase = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field CurveType", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.CurveType = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field KeyName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.KeyName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GenResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GenResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GenResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PubRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PubRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PubRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PubResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PubResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PubResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PublicKey = append(m.PublicKey[:0], dAtA[iNdEx:postIndex]...) - if m.PublicKey == nil { - m.PublicKey = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field CurveType", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.CurveType = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ImportJSONRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ImportJSONRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ImportJSONRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Passphrase", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Passphrase = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field JSON", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.JSON = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ImportResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ImportResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ImportResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ImportRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ImportRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ImportRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Passphrase", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Passphrase = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field CurveType", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.CurveType = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field KeyBytes", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.KeyBytes = append(m.KeyBytes[:0], dAtA[iNdEx:postIndex]...) 
- if m.KeyBytes == nil { - m.KeyBytes = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ExportRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ExportRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ExportRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Passphrase", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Passphrase = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = 
append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ExportResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ExportResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ExportResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Publickey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Publickey = append(m.Publickey[:0], dAtA[iNdEx:postIndex]...) - if m.Publickey == nil { - m.Publickey = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Privatekey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Privatekey = append(m.Privatekey[:0], dAtA[iNdEx:postIndex]...) - if m.Privatekey == nil { - m.Privatekey = []byte{} - } - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = append(m.Address[:0], dAtA[iNdEx:postIndex]...) 
- if m.Address == nil { - m.Address = []byte{} - } - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field CurveType", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.CurveType = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *SignRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: SignRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: SignRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Passphrase", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Passphrase = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= 
uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Message = append(m.Message[:0], dAtA[iNdEx:postIndex]...) - if m.Message == nil { - m.Message = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *SignResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: SignResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: SignResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Signature", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Signature == nil { - m.Signature = &crypto.Signature{} - } - if err := m.Signature.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *VerifyRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: VerifyRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: VerifyRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PublicKey = append(m.PublicKey[:0], dAtA[iNdEx:postIndex]...) - if m.PublicKey == nil { - m.PublicKey = []byte{} - } - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Message = append(m.Message[:0], dAtA[iNdEx:postIndex]...) - if m.Message == nil { - m.Message = []byte{} - } - iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Signature", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Signature == nil { - m.Signature = &crypto.Signature{} - } - if err := m.Signature.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *HashRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: HashRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: HashRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Hashtype", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Hashtype = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Message = append(m.Message[:0], dAtA[iNdEx:postIndex]...) - if m.Message == nil { - m.Message = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *HashResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: HashResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: HashResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Hash = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *KeyID) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: KeyID: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: KeyID: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field KeyName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.KeyName = append(m.KeyName, string(dAtA[iNdEx:postIndex])) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ListResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ListResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Key = append(m.Key, &KeyID{}) - if err := m.Key[len(m.Key)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *AddNameRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: AddNameRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: AddNameRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Keyname", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Keyname = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowKeys - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthKeys - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthKeys - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipKeys(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthKeys - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipKeys(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowKeys - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowKeys - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowKeys - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthKeys - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthKeys - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowKeys - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipKeys(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthKeys - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthKeys = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowKeys = fmt.Errorf("proto: integer overflow") -) diff --git a/permission/permission.pb.go b/permission/permission.pb.go index b91f0d764..41abb94c9 100644 --- a/permission/permission.pb.go +++ b/permission/permission.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -272,9 +273,9 @@ func (m *AccountPermissions) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPermission(dAtA, i, uint64(m.Base.Size())) - n1, err := m.Base.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Base.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Roles) > 0 { @@ -347,9 +348,9 @@ func (m *PermArgs) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPermission(dAtA, i, uint64(m.Target.Size())) - n2, err := m.Target.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Target.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -445,14 +446,7 @@ func (m *PermArgs) Size() (n int) { } func sovPermission(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozPermission(x uint64) (n int) { return sovPermission(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/protobuf/acm.proto b/protobuf/acm.proto index 76966b83b..cc886e111 100644 --- a/protobuf/acm.proto +++ 
b/protobuf/acm.proto @@ -9,6 +9,7 @@ import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "permission.proto"; import "crypto.proto"; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; diff --git a/protobuf/balance.proto b/protobuf/balance.proto index 7275fd58c..eefaf786c 100644 --- a/protobuf/balance.proto +++ b/protobuf/balance.proto @@ -1,4 +1,3 @@ -// Needed to proto2 rather than proto3 to get pointer field for PermArg syntax = 'proto3'; option go_package = "github.com/hyperledger/burrow/acm/balance"; @@ -7,6 +6,7 @@ import "github.com/gogo/protobuf/gogoproto/gogo.proto"; package balance; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; @@ -19,4 +19,4 @@ message Balance { option (gogoproto.goproto_stringer) = false; uint32 Type = 1 [(gogoproto.casttype) = "Type"]; uint64 Amount = 2; -} \ No newline at end of file +} diff --git a/protobuf/bcm.proto b/protobuf/bcm.proto index a2bc1fd2a..7c81bfb8b 100644 --- a/protobuf/bcm.proto +++ b/protobuf/bcm.proto @@ -9,6 +9,7 @@ import "google/protobuf/duration.proto"; package bcm; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; @@ -26,3 +27,10 @@ message SyncInfo { // Time elapsed since last commit google.protobuf.Duration LatestBlockDuration = 6 [(gogoproto.nullable) = false, (gogoproto.stdduration) = true]; } + +message PersistedState { + bytes AppHashAfterLastBlock = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; + google.protobuf.Timestamp LastBlockTime = 2 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + uint64 LastBlockHeight = 3; + bytes GenesisHash = 4 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; +} diff --git a/protobuf/crypto.proto b/protobuf/crypto.proto index bdb1abf32..5fb24c09c 100644 --- a/protobuf/crypto.proto +++ b/protobuf/crypto.proto @@ -6,6 +6,7 @@ option go_package = "github.com/hyperledger/burrow/crypto"; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; diff --git a/protobuf/dump.proto b/protobuf/dump.proto index 9f0bc24f0..ef5c84a2e 100644 --- a/protobuf/dump.proto +++ b/protobuf/dump.proto @@ -11,6 +11,7 @@ import "names.proto"; package dump; +option (gogoproto.stable_marshaler_all) = true; // Enable custom Marshal method. option (gogoproto.marshaler_all) = true; // Enable custom Unmarshal method. 
@@ -38,7 +39,7 @@ message EVMEvent { // The original block time for this transaction google.protobuf.Timestamp Time = 2 [(gogoproto.nullable)=false, (gogoproto.stdtime)=true]; // The event itself - exec.LogEvent Event = 3; + exec.LogEvent Event = 3; } diff --git a/protobuf/encoding.proto b/protobuf/encoding.proto new file mode 100644 index 000000000..be0d97090 --- /dev/null +++ b/protobuf/encoding.proto @@ -0,0 +1,21 @@ +syntax = 'proto3'; + +option go_package = "github.com/hyperledger/burrow/encoding"; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; + +package encoding; + +option (gogoproto.stable_marshaler_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.sizer_all) = true; +option (gogoproto.goproto_registration) = true; +option (gogoproto.messagename_all) = true; + +// For testing +message TestMessage { + option (gogoproto.goproto_stringer) = false; + uint32 Type = 1; + uint64 Amount = 2; +} diff --git a/protobuf/errors.proto b/protobuf/errors.proto index 6f81e457b..d3ac4cd20 100644 --- a/protobuf/errors.proto +++ b/protobuf/errors.proto @@ -6,6 +6,7 @@ option go_package = "github.com/hyperledger/burrow/execution/errors"; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +option (gogoproto.stable_marshaler_all) = true; // Enable custom Marshal method. option (gogoproto.marshaler_all) = true; // Enable custom Unmarshal method. diff --git a/protobuf/exec.proto b/protobuf/exec.proto index b4bb44c7a..53180130e 100644 --- a/protobuf/exec.proto +++ b/protobuf/exec.proto @@ -14,12 +14,18 @@ import "txs.proto"; import "permission.proto"; import "spec.proto"; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; +// This message exists purely for framing []StreamEvent +message StreamEvents { + repeated StreamEvent StreamEvents = 1; +} + message StreamEvent { option (gogoproto.onlyone) = true; BeginBlock BeginBlock = 1; diff --git a/protobuf/github.com/tendermint/tendermint/abci/types/types.proto b/protobuf/github.com/tendermint/tendermint/abci/types/types.proto index 75a53ac46..daf2cc914 100644 --- a/protobuf/github.com/tendermint/tendermint/abci/types/types.proto +++ b/protobuf/github.com/tendermint/tendermint/abci/types/types.proto @@ -11,8 +11,7 @@ import "github.com/tendermint/tendermint/libs/common/types.proto"; // NOTE: When using custom types, mind the warnings. 
// https://github.com/gogo/protobuf/blob/master/custom_types.md#warnings-and-issues -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; // Generate tests diff --git a/protobuf/github.com/tendermint/tendermint/libs/common/types.proto b/protobuf/github.com/tendermint/tendermint/libs/common/types.proto index 518e7ca09..a8583bc9b 100644 --- a/protobuf/github.com/tendermint/tendermint/libs/common/types.proto +++ b/protobuf/github.com/tendermint/tendermint/libs/common/types.proto @@ -3,8 +3,7 @@ package common; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; // Generate tests diff --git a/protobuf/keys.proto b/protobuf/keys.proto index 2406e2dad..0e0af5f53 100644 --- a/protobuf/keys.proto +++ b/protobuf/keys.proto @@ -7,8 +7,7 @@ package keys; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "crypto.proto"; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; @@ -63,7 +62,7 @@ message GenResponse { message PubRequest { string Address = 1; string Name = 2; -} +} message PubResponse { bytes PublicKey = 1; diff --git a/protobuf/names.proto b/protobuf/names.proto index 0be792b2d..5607d4d7a 100644 --- a/protobuf/names.proto +++ b/protobuf/names.proto @@ -6,6 +6,7 @@ option go_package = "github.com/hyperledger/burrow/execution/names"; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +option (gogoproto.stable_marshaler_all) = true; // Enable custom Marshal method. option (gogoproto.marshaler_all) = true; // Enable custom Unmarshal method. 
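The proto files above and below all gain option (gogoproto.stable_marshaler_all) = true. In gogoproto this asks the generator for deterministic marshalling: encoding the same message repeatedly yields byte-identical output (map fields, where present, are written in sorted key order), which matters when the encoded bytes are hashed, signed or compared. A minimal sketch of that property, assuming the regenerated rpcquery package exposes GetBlockParam as declared in the diff below:

package main

import (
	"bytes"
	"fmt"

	"github.com/gogo/protobuf/proto"
	"github.com/hyperledger/burrow/rpc/rpcquery"
)

func main() {
	// With stable marshalling two encodings of the same message are identical,
	// so the bytes can be hashed or compared directly.
	msg := &rpcquery.GetBlockParam{Height: 42}
	first, err := proto.Marshal(msg)
	if err != nil {
		panic(err)
	}
	second, err := proto.Marshal(msg)
	if err != nil {
		panic(err)
	}
	fmt.Println("deterministic:", bytes.Equal(first, second))
}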
diff --git a/protobuf/rpc.proto b/protobuf/rpc.proto index 625e51795..229754b36 100644 --- a/protobuf/rpc.proto +++ b/protobuf/rpc.proto @@ -10,8 +10,7 @@ import "bcm.proto"; package rpc; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; diff --git a/protobuf/rpcdump.proto b/protobuf/rpcdump.proto index 66f65eab3..d832cf062 100644 --- a/protobuf/rpcdump.proto +++ b/protobuf/rpcdump.proto @@ -8,8 +8,7 @@ import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "dump.proto"; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; diff --git a/protobuf/rpcquery.proto b/protobuf/rpcquery.proto index c52bf156a..9ec2814c9 100644 --- a/protobuf/rpcquery.proto +++ b/protobuf/rpcquery.proto @@ -13,8 +13,7 @@ import "validator.proto"; import "rpc.proto"; import "payload.proto"; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; @@ -112,4 +111,4 @@ message Stats { message GetBlockParam { uint64 Height = 1; -} \ No newline at end of file +} diff --git a/protobuf/spec.proto b/protobuf/spec.proto index b28f8a827..770f8183c 100644 --- a/protobuf/spec.proto +++ b/protobuf/spec.proto @@ -1,4 +1,3 @@ -// Needed to proto2 rather than proto3 to get pointer field for PermArg syntax = 'proto3'; option go_package = "github.com/hyperledger/burrow/genesis/spec"; @@ -10,6 +9,7 @@ import "balance.proto"; package spec; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; option (gogoproto.sizer_all) = true; diff --git a/protobuf/storage.proto b/protobuf/storage.proto new file mode 100644 index 000000000..f462b55b7 --- /dev/null +++ b/protobuf/storage.proto @@ -0,0 +1,22 @@ +syntax = 'proto3'; + +option go_package = "github.com/hyperledger/burrow/storage"; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; + +package storage; + +option (gogoproto.stable_marshaler_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.sizer_all) = true; +option (gogoproto.goproto_registration) = true; +option (gogoproto.messagename_all) = true; + +// This is the object that is stored in the leaves of the commitsTree - it captures the sub-tree hashes so that the +// commitsTree's hash becomes a mixture of the hashes of all the sub-trees. 
+message CommitID { + option (gogoproto.goproto_stringer) = false; + int64 Version = 1; + bytes Hash = 2; +} diff --git a/protobuf/tendermint.proto b/protobuf/tendermint.proto index 57a83d670..751cddde2 100644 --- a/protobuf/tendermint.proto +++ b/protobuf/tendermint.proto @@ -7,8 +7,7 @@ import "github.com/gogo/protobuf/gogoproto/gogo.proto"; package tendermint; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; diff --git a/protobuf/validator.proto b/protobuf/validator.proto index dc621ab7a..531a380a5 100644 --- a/protobuf/validator.proto +++ b/protobuf/validator.proto @@ -8,8 +8,7 @@ import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "crypto.proto"; -option (gogoproto.marshaler_all) = true; -option (gogoproto.unmarshaler_all) = true; +option (gogoproto.stable_marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.goproto_registration) = true; option (gogoproto.messagename_all) = true; diff --git a/rpc/rpc.pb.go b/rpc/rpc.pb.go index ad538aa98..4e387f401 100644 --- a/rpc/rpc.pb.go +++ b/rpc/rpc.pb.go @@ -5,8 +5,8 @@ package rpc import ( fmt "fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -51,25 +51,16 @@ func (*ResultStatus) Descriptor() ([]byte, []int) { return fileDescriptor_77a6da22d6a3feb1, []int{0} } func (m *ResultStatus) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ResultStatus.Unmarshal(m, b) } func (m *ResultStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ResultStatus.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ResultStatus.Marshal(b, m, deterministic) } func (m *ResultStatus) XXX_Merge(src proto.Message) { xxx_messageInfo_ResultStatus.Merge(m, src) } func (m *ResultStatus) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ResultStatus.Size(m) } func (m *ResultStatus) XXX_DiscardUnknown() { xxx_messageInfo_ResultStatus.DiscardUnknown(m) @@ -138,128 +129,32 @@ func init() { proto.RegisterFile("rpc.proto", fileDescriptor_77a6da22d6a3feb1) } func init() { golang_proto.RegisterFile("rpc.proto", fileDescriptor_77a6da22d6a3feb1) } var fileDescriptor_77a6da22d6a3feb1 = []byte{ - // 368 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x3f, 0x6f, 0xda, 0x40, - 0x18, 0xc6, 0x39, 0xfe, 0x9a, 0x03, 0xd4, 0xea, 0xc4, 0x60, 0x31, 0x18, 0xb7, 0x62, 0x70, 0x87, - 0xda, 0x55, 0xab, 0xaa, 0x52, 0x47, 0x13, 0x29, 0xb0, 0x64, 0x38, 0x14, 0x22, 0x65, 0xf3, 0x9f, - 0xc3, 0x3e, 0x09, 0xee, 0xac, 0xf3, 0x39, 0x89, 0xbf, 0x5d, 0x46, 0xc6, 0xcc, 0x19, 0x50, 0x04, - 0x5b, 0x3e, 0x43, 0x86, 0x88, 0x03, 0x13, 0xb3, 0x64, 0x7b, 0x9f, 0xdf, 0xf3, 0xbe, 0x8f, 0xec, - 0xe7, 0x60, 0x5b, 0x24, 0x81, 0x9d, 0x08, 0x2e, 0x39, 0xaa, 0x89, 0x24, 0x18, 0xfc, 0x8c, 0xa8, - 0x8c, 0x33, 0xdf, 0x0e, 0xf8, 0xca, 0x89, 0x78, 0xc4, 0x1d, 0xe5, 0xf9, 0xd9, 0x42, 0x29, 0x25, - 0xd4, 0x74, 0xb8, 0x19, 0x7c, 0x95, 0x84, 0x85, 0x44, 0xac, 0x28, 0x93, 0x47, 0xf2, 0xe5, 0xce, - 0x5b, 0xd2, 0xd0, 0x93, 0x5c, 0x1c, 0x41, 0xdb, 0x0f, 0x56, 0x87, 0xf1, 0xfb, 0x5b, 0x15, 0x76, - 0x31, 0x49, 0xb3, 0xa5, 0x9c, 
0x49, 0x4f, 0x66, 0x29, 0xd2, 0x61, 0x6b, 0x1c, 0x7b, 0x94, 0x4d, - 0x2f, 0x74, 0x60, 0x02, 0xab, 0x8d, 0x0b, 0x89, 0xfa, 0xb0, 0x81, 0xb3, 0x3d, 0xaf, 0x2a, 0x7e, - 0x10, 0x68, 0x04, 0x7b, 0x6e, 0x26, 0x04, 0xbf, 0x9f, 0x13, 0x91, 0x52, 0xce, 0xf4, 0x9a, 0x72, - 0xcf, 0x21, 0xba, 0x81, 0x9d, 0x4b, 0xc2, 0x48, 0x4a, 0xd3, 0x89, 0x97, 0xc6, 0x7a, 0xdd, 0x04, - 0x56, 0xd7, 0xfd, 0xbb, 0xde, 0x0c, 0x2b, 0xcf, 0x9b, 0x61, 0xf9, 0x07, 0xe3, 0x3c, 0x21, 0x62, - 0x49, 0xc2, 0x88, 0x08, 0xc7, 0x57, 0x11, 0x8e, 0x4f, 0x99, 0x27, 0x72, 0x7b, 0x42, 0x1e, 0xdc, - 0x5c, 0x92, 0x14, 0x97, 0x93, 0xd0, 0x2f, 0xa8, 0x5d, 0xf1, 0x90, 0x4c, 0xd9, 0x82, 0xeb, 0x0d, - 0x13, 0x58, 0x9d, 0xdf, 0x7d, 0xbb, 0x54, 0x40, 0xe1, 0xe1, 0xd3, 0x16, 0xfa, 0x01, 0xb5, 0x59, - 0xce, 0x02, 0x75, 0xd1, 0x54, 0x17, 0x3d, 0x7b, 0xdf, 0x47, 0x01, 0xf1, 0xc9, 0x46, 0x23, 0x08, - 0xc7, 0x9e, 0x0c, 0x62, 0xca, 0xa2, 0xeb, 0x44, 0xd7, 0x4c, 0x60, 0x69, 0x6e, 0xfd, 0x75, 0x33, - 0xac, 0xe0, 0x12, 0x47, 0xff, 0x61, 0x6f, 0x5e, 0x14, 0xac, 0x52, 0x5b, 0xc7, 0xef, 0xf8, 0xa8, - 0xfd, 0xe4, 0xe3, 0xf3, 0x55, 0xf7, 0xdf, 0x7a, 0x6b, 0x80, 0xa7, 0xad, 0x01, 0x5e, 0xb6, 0x06, - 0x78, 0xdc, 0x19, 0x60, 0xbd, 0x33, 0xc0, 0xed, 0xb7, 0xcf, 0x0b, 0x11, 0x49, 0xe0, 0x37, 0xd5, - 0xf3, 0xfd, 0x79, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x85, 0xc2, 0xfd, 0xda, 0x2d, 0x02, 0x00, 0x00, -} - -func (m *ResultStatus) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xbd, 0x6e, 0xe2, 0x40, + 0x14, 0x85, 0x19, 0x7e, 0xcd, 0x00, 0xda, 0xd5, 0x88, 0xc2, 0xa2, 0x30, 0xde, 0x15, 0x85, 0xb7, + 0x58, 0x7b, 0xb5, 0x68, 0x9b, 0x2d, 0xcd, 0x4a, 0x0b, 0x4d, 0x8a, 0x41, 0x21, 0x52, 0x3a, 0xff, + 0x0c, 0xf6, 0x48, 0x30, 0x63, 0x8d, 0xc7, 0x49, 0xfc, 0x76, 0x29, 0x79, 0x84, 0x28, 0x05, 0x8a, + 0xa0, 0xcb, 0x33, 0xa4, 0x88, 0x18, 0x30, 0x31, 0x4d, 0xba, 0x7b, 0xbe, 0x73, 0xef, 0x91, 0x7d, + 0x06, 0xb6, 0x45, 0x12, 0xd8, 0x89, 0xe0, 0x92, 0xa3, 0x9a, 0x48, 0x82, 0xc1, 0xcf, 0x88, 0xca, + 0x38, 0xf3, 0xed, 0x80, 0xaf, 0x9d, 0x88, 0x47, 0xdc, 0x51, 0x9e, 0x9f, 0x2d, 0x95, 0x52, 0x42, + 0x4d, 0xc7, 0x9b, 0xc1, 0x57, 0x49, 0x58, 0x48, 0xc4, 0x9a, 0x32, 0x79, 0x22, 0x5f, 0xee, 0xbc, + 0x15, 0x0d, 0x3d, 0xc9, 0xc5, 0x09, 0xb4, 0xfd, 0x60, 0x7d, 0x1c, 0xbf, 0xbf, 0x55, 0x61, 0x17, + 0x93, 0x34, 0x5b, 0xc9, 0xb9, 0xf4, 0x64, 0x96, 0x22, 0x1d, 0xb6, 0x26, 0xb1, 0x47, 0xd9, 0xec, + 0x9f, 0x0e, 0x4c, 0x60, 0xb5, 0x71, 0x21, 0x51, 0x1f, 0x36, 0x70, 0x76, 0xe0, 0x55, 0xc5, 0x8f, + 0x02, 0x8d, 0x60, 0xcf, 0xcd, 0x84, 0xe0, 0xf7, 0x0b, 0x22, 0x52, 0xca, 0x99, 0x5e, 0x53, 0xee, + 0x25, 0x44, 0x37, 0xb0, 0xf3, 0x9f, 0x30, 0x92, 0xd2, 0x74, 0xea, 0xa5, 0xb1, 0x5e, 0x37, 0x81, + 0xd5, 0x75, 0xff, 0x6c, 0xb6, 0xc3, 0xca, 0xf3, 0x76, 0x58, 0xfe, 0xc1, 0x38, 0x4f, 0x88, 0x58, + 0x91, 0x30, 0x22, 0xc2, 0xf1, 0x55, 0x84, 0xe3, 0x53, 0xe6, 0x89, 0xdc, 0x9e, 0x92, 0x07, 0x37, + 0x97, 0x24, 0xc5, 0xe5, 0x24, 0xf4, 0x0b, 0x6a, 0x57, 0x3c, 0x24, 0x33, 0xb6, 0xe4, 0x7a, 0xc3, + 0x04, 0x56, 0xe7, 0x77, 0xdf, 0x2e, 0x15, 0x50, 0x78, 0xf8, 0xbc, 0x85, 0x7e, 0x40, 0x6d, 0x9e, + 0xb3, 0x40, 0x5d, 0x34, 0xd5, 0x45, 0xcf, 0x3e, 0xf4, 0x51, 0x40, 0x7c, 0xb6, 0xd1, 0x08, 0xc2, + 0x89, 0x27, 0x83, 0x98, 0xb2, 0xe8, 0x3a, 0xd1, 0x35, 0x13, 0x58, 0x9a, 0x5b, 0x7f, 0xdd, 0x0e, + 0x2b, 0xb8, 0xc4, 0xd1, 0x5f, 0xd8, 0x5b, 0x14, 0x05, 0xab, 0xd4, 0xd6, 0xe9, 0x3b, 0x3e, 0x6a, 
+ 0x3f, 0xfb, 0xf8, 0x72, 0xd5, 0x1d, 0x3f, 0xed, 0x0c, 0xf0, 0xb2, 0x33, 0xc0, 0xe3, 0xde, 0x00, + 0x9b, 0xbd, 0x01, 0x6e, 0xbf, 0x7d, 0x5e, 0x86, 0x48, 0x02, 0xbf, 0xa9, 0x9e, 0x6e, 0xfc, 0x1e, + 0x00, 0x00, 0xff, 0xff, 0xd7, 0x87, 0x65, 0xef, 0x29, 0x02, 0x00, 0x00, } -func (m *ResultStatus) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.ChainID) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpc(dAtA, i, uint64(len(m.ChainID))) - i += copy(dAtA[i:], m.ChainID) - } - if len(m.RunID) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintRpc(dAtA, i, uint64(len(m.RunID))) - i += copy(dAtA[i:], m.RunID) - } - if len(m.BurrowVersion) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintRpc(dAtA, i, uint64(len(m.BurrowVersion))) - i += copy(dAtA[i:], m.BurrowVersion) - } - dAtA[i] = 0x22 - i++ - i = encodeVarintRpc(dAtA, i, uint64(m.GenesisHash.Size())) - n1, err := m.GenesisHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n1 - if m.NodeInfo != nil { - dAtA[i] = 0x2a - i++ - i = encodeVarintRpc(dAtA, i, uint64(m.NodeInfo.Size())) - n2, err := m.NodeInfo.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n2 - } - if m.SyncInfo != nil { - dAtA[i] = 0x32 - i++ - i = encodeVarintRpc(dAtA, i, uint64(m.SyncInfo.Size())) - n3, err := m.SyncInfo.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n3 - } - if m.ValidatorInfo != nil { - dAtA[i] = 0x3a - i++ - i = encodeVarintRpc(dAtA, i, uint64(m.ValidatorInfo.Size())) - n4, err := m.ValidatorInfo.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n4 - } - if m.CatchingUp { - dAtA[i] = 0x40 - i++ - if m.CatchingUp { - dAtA[i] = 1 - } else { - dAtA[i] = 0 - } - i++ - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func encodeVarintRpc(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} func (m *ResultStatus) Size() (n int) { if m == nil { return 0 @@ -288,13 +183,13 @@ func (m *ResultStatus) Size() (n int) { l = m.SyncInfo.Size() n += 1 + l + sovRpc(uint64(l)) } + if m.CatchingUp { + n += 2 + } if m.ValidatorInfo != nil { l = m.ValidatorInfo.Size() n += 1 + l + sovRpc(uint64(l)) } - if m.CatchingUp { - n += 2 - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -302,436 +197,8 @@ func (m *ResultStatus) Size() (n int) { } func sovRpc(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpc(x uint64) (n int) { return sovRpc(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *ResultStatus) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ResultStatus: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ResultStatus: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ChainID", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 
7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ChainID = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field RunID", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.RunID = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field BurrowVersion", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.BurrowVersion = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field GenesisHash", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.GenesisHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NodeInfo", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.NodeInfo == nil { - m.NodeInfo = &tendermint.NodeInfo{} - } - if err := m.NodeInfo.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field SyncInfo", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - 
iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.SyncInfo == nil { - m.SyncInfo = &bcm.SyncInfo{} - } - if err := m.SyncInfo.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 7: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ValidatorInfo", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpc - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpc - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.ValidatorInfo == nil { - m.ValidatorInfo = &validator.Validator{} - } - if err := m.ValidatorInfo.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 8: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field CatchingUp", wireType) - } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.CatchingUp = bool(v != 0) - default: - iNdEx = preIndex - skippy, err := skipRpc(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpc - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpc - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipRpc(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpc - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpc - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpc - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthRpc - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthRpc - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpc - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipRpc(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthRpc - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthRpc = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowRpc = fmt.Errorf("proto: integer overflow") -) diff --git a/rpc/rpcdump/rpcdump.pb.go b/rpc/rpcdump/rpcdump.pb.go index 22a0239d7..34bb4554b 100644 --- a/rpc/rpcdump/rpcdump.pb.go +++ b/rpc/rpcdump/rpcdump.pb.go @@ -6,14 +6,16 @@ package rpcdump import ( context "context" fmt "fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" dump "github.com/hyperledger/burrow/dump" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. 
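The regenerated size helpers (sovRpc above, sovRpcdump and sovRpcevents below) swap the shift loop for a closed form built on math/bits: bits.Len64(x|1) is the number of significant bits in x, floored at 1 so that zero still costs one byte, and since each varint byte carries 7 payload bits, adding 6 before dividing by 7 rounds up to the byte count. A small sketch showing the two forms agree; sovLoop and sovBits are illustrative names, not identifiers from this patch:

package main

import (
	"fmt"
	"math/bits"
)

// sovLoop is the loop-based varint size that the generated code used to emit.
func sovLoop(x uint64) (n int) {
	for {
		n++
		x >>= 7
		if x == 0 {
			break
		}
	}
	return n
}

// sovBits is the closed form: bits.Len64(x|1) significant bits, 7 payload
// bits per varint byte, rounded up.
func sovBits(x uint64) int {
	return (bits.Len64(x|1) + 6) / 7
}

func main() {
	for _, x := range []uint64{0, 1, 127, 128, 16383, 16384, 1 << 63} {
		fmt.Println(x, sovLoop(x), sovBits(x))
	}
}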
@@ -42,25 +44,16 @@ func (*GetDumpParam) Descriptor() ([]byte, []int) { return fileDescriptor_80c0fd6a8168e015, []int{0} } func (m *GetDumpParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetDumpParam.Unmarshal(m, b) } func (m *GetDumpParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetDumpParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetDumpParam.Marshal(b, m, deterministic) } func (m *GetDumpParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetDumpParam.Merge(m, src) } func (m *GetDumpParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetDumpParam.Size(m) } func (m *GetDumpParam) XXX_DiscardUnknown() { xxx_messageInfo_GetDumpParam.DiscardUnknown(m) @@ -87,7 +80,7 @@ func init() { proto.RegisterFile("rpcdump.proto", fileDescriptor_80c0fd6a8168e01 func init() { golang_proto.RegisterFile("rpcdump.proto", fileDescriptor_80c0fd6a8168e015) } var fileDescriptor_80c0fd6a8168e015 = []byte{ - // 198 bytes of a gzipped FileDescriptorProto + // 194 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2d, 0x2a, 0x48, 0x4e, 0x29, 0xcd, 0x2d, 0xd0, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x87, 0x72, 0xa5, 0x74, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, @@ -96,11 +89,11 @@ var fileDescriptor_80c0fd6a8168e015 = []byte{ 0x12, 0xe3, 0x62, 0xcb, 0x48, 0xcd, 0x4c, 0xcf, 0x28, 0x91, 0x60, 0x54, 0x60, 0xd4, 0x60, 0x09, 0x82, 0xf2, 0x8c, 0xcc, 0xb8, 0x58, 0x40, 0x8a, 0x84, 0xf4, 0xb8, 0xd8, 0xa1, 0xea, 0x85, 0x44, 0xf5, 0x60, 0xce, 0x41, 0x36, 0x41, 0x8a, 0x4b, 0x0f, 0x2c, 0x06, 0x12, 0x30, 0x60, 0x74, 0xb2, - 0x3f, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x0f, 0x3c, 0x96, - 0x63, 0x3c, 0xf1, 0x58, 0x8e, 0x31, 0x4a, 0x13, 0xc9, 0xc1, 0x19, 0x95, 0x05, 0xa9, 0x45, 0x39, - 0xa9, 0x29, 0xe9, 0xa9, 0x45, 0xfa, 0x49, 0xa5, 0x45, 0x45, 0xf9, 0xe5, 0xfa, 0x45, 0x05, 0xc9, - 0xfa, 0x50, 0xb3, 0x93, 0xd8, 0xc0, 0xee, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xba, 0x2f, - 0xc9, 0x63, 0xfc, 0x00, 0x00, 0x00, + 0xbe, 0xf1, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x03, 0x8f, 0xe5, 0x18, 0x4f, 0x3c, 0x96, + 0x63, 0x8c, 0xd2, 0x44, 0x72, 0x6c, 0x46, 0x65, 0x41, 0x6a, 0x51, 0x4e, 0x6a, 0x4a, 0x7a, 0x6a, + 0x91, 0x7e, 0x52, 0x69, 0x51, 0x51, 0x7e, 0xb9, 0x7e, 0x51, 0x41, 0xb2, 0x3e, 0xd4, 0xdc, 0x24, + 0x36, 0xb0, 0x1b, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x75, 0xfd, 0xde, 0x63, 0xf8, 0x00, + 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -163,6 +156,14 @@ type DumpServer interface { GetDump(*GetDumpParam, Dump_GetDumpServer) error } +// UnimplementedDumpServer can be embedded to have forward compatible implementations. 
+type UnimplementedDumpServer struct { +} + +func (*UnimplementedDumpServer) GetDump(req *GetDumpParam, srv Dump_GetDumpServer) error { + return status.Errorf(codes.Unimplemented, "method GetDump not implemented") +} + func RegisterDumpServer(s *grpc.Server, srv DumpServer) { s.RegisterService(&_Dump_serviceDesc, srv) } @@ -202,41 +203,6 @@ var _Dump_serviceDesc = grpc.ServiceDesc{ Metadata: "rpcdump.proto", } -func (m *GetDumpParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetDumpParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Height != 0 { - dAtA[i] = 0x8 - i++ - i = encodeVarintRpcdump(dAtA, i, uint64(m.Height)) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func encodeVarintRpcdump(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} func (m *GetDumpParam) Size() (n int) { if m == nil { return 0 @@ -253,198 +219,8 @@ func (m *GetDumpParam) Size() (n int) { } func sovRpcdump(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcdump(x uint64) (n int) { return sovRpcdump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *GetDumpParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcdump - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetDumpParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetDumpParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) - } - m.Height = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcdump - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Height |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipRpcdump(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcdump - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcdump - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipRpcdump(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcdump - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcdump - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcdump - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthRpcdump - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthRpcdump - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcdump - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipRpcdump(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthRpcdump - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthRpcdump = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowRpcdump = fmt.Errorf("proto: integer overflow") -) diff --git a/rpc/rpcevents/rpcevents.pb.go b/rpc/rpcevents/rpcevents.pb.go index ea0229c54..ba120cae2 100644 --- a/rpc/rpcevents/rpcevents.pb.go +++ b/rpc/rpcevents/rpcevents.pb.go @@ -8,6 +8,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -15,6 +16,8 @@ import ( github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" exec "github.com/hyperledger/burrow/execution/exec" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -763,6 +766,20 @@ type ExecutionEventsServer interface { Events(*BlocksRequest, ExecutionEvents_EventsServer) error } +// UnimplementedExecutionEventsServer can be embedded to have forward compatible implementations. 
+type UnimplementedExecutionEventsServer struct { +} + +func (*UnimplementedExecutionEventsServer) Stream(req *BlocksRequest, srv ExecutionEvents_StreamServer) error { + return status.Errorf(codes.Unimplemented, "method Stream not implemented") +} +func (*UnimplementedExecutionEventsServer) Tx(ctx context.Context, req *TxRequest) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method Tx not implemented") +} +func (*UnimplementedExecutionEventsServer) Events(req *BlocksRequest, srv ExecutionEvents_EventsServer) error { + return status.Errorf(codes.Unimplemented, "method Events not implemented") +} + func RegisterExecutionEventsServer(s *grpc.Server, srv ExecutionEventsServer) { s.RegisterService(&_ExecutionEvents_serviceDesc, srv) } @@ -905,9 +922,9 @@ func (m *TxRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.TxHash.Size())) - n1, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.TxHash.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if m.Wait { @@ -945,9 +962,9 @@ func (m *BlocksRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.BlockRange.Size())) - n2, err := m.BlockRange.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.BlockRange.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1126,9 +1143,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.Start.Size())) - n3, err := m.Start.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Start.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1136,9 +1153,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.End.Size())) - n4, err := m.End.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.End.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1315,14 +1332,7 @@ func (m *BlockRange) Size() (n int) { } func sovRpcevents(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcevents(x uint64) (n int) { return sovRpcevents(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index d9ea7463d..7e44f38e5 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -6,8 +6,8 @@ package rpcquery import ( context "context" fmt "fmt" - io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -21,6 +21,8 @@ import ( payload "github.com/hyperledger/burrow/txs/payload" types "github.com/tendermint/tendermint/abci/types" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. 
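The regeneration also adds UnimplementedDumpServer and UnimplementedExecutionEventsServer. Embedding one of these in a concrete server gives forward compatibility: methods that are not overridden fall back to returning codes.Unimplemented instead of breaking the build when the service grows a new RPC. A minimal sketch against the regenerated rpcevents package; txOnlyServer is a hypothetical name used only for illustration:

package main

import (
	"context"
	"net"

	"google.golang.org/grpc"

	"github.com/hyperledger/burrow/execution/exec"
	"github.com/hyperledger/burrow/rpc/rpcevents"
)

// txOnlyServer implements only Tx; the embedded UnimplementedExecutionEventsServer
// answers Stream and Events with codes.Unimplemented.
type txOnlyServer struct {
	rpcevents.UnimplementedExecutionEventsServer
}

func (s *txOnlyServer) Tx(ctx context.Context, req *rpcevents.TxRequest) (*exec.TxExecution, error) {
	// A real implementation would look the transaction up by req.TxHash.
	return &exec.TxExecution{}, nil
}

func main() {
	listener, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		panic(err)
	}
	server := grpc.NewServer()
	rpcevents.RegisterExecutionEventsServer(server, &txOnlyServer{})
	_ = server.Serve(listener)
}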
@@ -50,25 +52,16 @@ func (*StatusParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{0} } func (m *StatusParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_StatusParam.Unmarshal(m, b) } func (m *StatusParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_StatusParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_StatusParam.Marshal(b, m, deterministic) } func (m *StatusParam) XXX_Merge(src proto.Message) { xxx_messageInfo_StatusParam.Merge(m, src) } func (m *StatusParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_StatusParam.Size(m) } func (m *StatusParam) XXX_DiscardUnknown() { xxx_messageInfo_StatusParam.DiscardUnknown(m) @@ -108,25 +101,16 @@ func (*GetAccountParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{1} } func (m *GetAccountParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetAccountParam.Unmarshal(m, b) } func (m *GetAccountParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetAccountParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetAccountParam.Marshal(b, m, deterministic) } func (m *GetAccountParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetAccountParam.Merge(m, src) } func (m *GetAccountParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetAccountParam.Size(m) } func (m *GetAccountParam) XXX_DiscardUnknown() { xxx_messageInfo_GetAccountParam.DiscardUnknown(m) @@ -153,25 +137,16 @@ func (*GetStorageParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{2} } func (m *GetStorageParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetStorageParam.Unmarshal(m, b) } func (m *GetStorageParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetStorageParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetStorageParam.Marshal(b, m, deterministic) } func (m *GetStorageParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetStorageParam.Merge(m, src) } func (m *GetStorageParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetStorageParam.Size(m) } func (m *GetStorageParam) XXX_DiscardUnknown() { xxx_messageInfo_GetStorageParam.DiscardUnknown(m) @@ -197,25 +172,16 @@ func (*StorageValue) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{3} } func (m *StorageValue) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_StorageValue.Unmarshal(m, b) } func (m *StorageValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_StorageValue.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_StorageValue.Marshal(b, m, deterministic) } func (m *StorageValue) XXX_Merge(src proto.Message) { xxx_messageInfo_StorageValue.Merge(m, src) } func (m *StorageValue) 
XXX_Size() int { - return m.Size() + return xxx_messageInfo_StorageValue.Size(m) } func (m *StorageValue) XXX_DiscardUnknown() { xxx_messageInfo_StorageValue.DiscardUnknown(m) @@ -241,25 +207,16 @@ func (*ListAccountsParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{4} } func (m *ListAccountsParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ListAccountsParam.Unmarshal(m, b) } func (m *ListAccountsParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListAccountsParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ListAccountsParam.Marshal(b, m, deterministic) } func (m *ListAccountsParam) XXX_Merge(src proto.Message) { xxx_messageInfo_ListAccountsParam.Merge(m, src) } func (m *ListAccountsParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ListAccountsParam.Size(m) } func (m *ListAccountsParam) XXX_DiscardUnknown() { xxx_messageInfo_ListAccountsParam.DiscardUnknown(m) @@ -292,25 +249,16 @@ func (*GetNameParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{5} } func (m *GetNameParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetNameParam.Unmarshal(m, b) } func (m *GetNameParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetNameParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetNameParam.Marshal(b, m, deterministic) } func (m *GetNameParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetNameParam.Merge(m, src) } func (m *GetNameParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetNameParam.Size(m) } func (m *GetNameParam) XXX_DiscardUnknown() { xxx_messageInfo_GetNameParam.DiscardUnknown(m) @@ -343,25 +291,16 @@ func (*ListNamesParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{6} } func (m *ListNamesParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ListNamesParam.Unmarshal(m, b) } func (m *ListNamesParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListNamesParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ListNamesParam.Marshal(b, m, deterministic) } func (m *ListNamesParam) XXX_Merge(src proto.Message) { xxx_messageInfo_ListNamesParam.Merge(m, src) } func (m *ListNamesParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ListNamesParam.Size(m) } func (m *ListNamesParam) XXX_DiscardUnknown() { xxx_messageInfo_ListNamesParam.DiscardUnknown(m) @@ -393,25 +332,16 @@ func (*GetValidatorSetParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{7} } func (m *GetValidatorSetParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetValidatorSetParam.Unmarshal(m, b) } func (m *GetValidatorSetParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetValidatorSetParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { 
- return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetValidatorSetParam.Marshal(b, m, deterministic) } func (m *GetValidatorSetParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetValidatorSetParam.Merge(m, src) } func (m *GetValidatorSetParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetValidatorSetParam.Size(m) } func (m *GetValidatorSetParam) XXX_DiscardUnknown() { xxx_messageInfo_GetValidatorSetParam.DiscardUnknown(m) @@ -438,25 +368,16 @@ func (*GetValidatorSetHistoryParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{8} } func (m *GetValidatorSetHistoryParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetValidatorSetHistoryParam.Unmarshal(m, b) } func (m *GetValidatorSetHistoryParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetValidatorSetHistoryParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetValidatorSetHistoryParam.Marshal(b, m, deterministic) } func (m *GetValidatorSetHistoryParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetValidatorSetHistoryParam.Merge(m, src) } func (m *GetValidatorSetHistoryParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetValidatorSetHistoryParam.Size(m) } func (m *GetValidatorSetHistoryParam) XXX_DiscardUnknown() { xxx_messageInfo_GetValidatorSetHistoryParam.DiscardUnknown(m) @@ -489,25 +410,16 @@ func (*ValidatorSetHistory) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{9} } func (m *ValidatorSetHistory) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ValidatorSetHistory.Unmarshal(m, b) } func (m *ValidatorSetHistory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ValidatorSetHistory.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ValidatorSetHistory.Marshal(b, m, deterministic) } func (m *ValidatorSetHistory) XXX_Merge(src proto.Message) { xxx_messageInfo_ValidatorSetHistory.Merge(m, src) } func (m *ValidatorSetHistory) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ValidatorSetHistory.Size(m) } func (m *ValidatorSetHistory) XXX_DiscardUnknown() { xxx_messageInfo_ValidatorSetHistory.DiscardUnknown(m) @@ -541,25 +453,16 @@ func (*ValidatorSet) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{10} } func (m *ValidatorSet) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ValidatorSet.Unmarshal(m, b) } func (m *ValidatorSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ValidatorSet.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ValidatorSet.Marshal(b, m, deterministic) } func (m *ValidatorSet) XXX_Merge(src proto.Message) { xxx_messageInfo_ValidatorSet.Merge(m, src) } func (m *ValidatorSet) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ValidatorSet.Size(m) } func (m *ValidatorSet) XXX_DiscardUnknown() { xxx_messageInfo_ValidatorSet.DiscardUnknown(m) @@ -599,25 +502,16 @@ func (*GetProposalParam) Descriptor() ([]byte, 
[]int) { return fileDescriptor_88e25d9b99e39f02, []int{11} } func (m *GetProposalParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetProposalParam.Unmarshal(m, b) } func (m *GetProposalParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetProposalParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetProposalParam.Marshal(b, m, deterministic) } func (m *GetProposalParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetProposalParam.Merge(m, src) } func (m *GetProposalParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetProposalParam.Size(m) } func (m *GetProposalParam) XXX_DiscardUnknown() { xxx_messageInfo_GetProposalParam.DiscardUnknown(m) @@ -650,25 +544,16 @@ func (*ListProposalsParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{12} } func (m *ListProposalsParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ListProposalsParam.Unmarshal(m, b) } func (m *ListProposalsParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListProposalsParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ListProposalsParam.Marshal(b, m, deterministic) } func (m *ListProposalsParam) XXX_Merge(src proto.Message) { xxx_messageInfo_ListProposalsParam.Merge(m, src) } func (m *ListProposalsParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ListProposalsParam.Size(m) } func (m *ListProposalsParam) XXX_DiscardUnknown() { xxx_messageInfo_ListProposalsParam.DiscardUnknown(m) @@ -702,25 +587,16 @@ func (*ProposalResult) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{13} } func (m *ProposalResult) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_ProposalResult.Unmarshal(m, b) } func (m *ProposalResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ProposalResult.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_ProposalResult.Marshal(b, m, deterministic) } func (m *ProposalResult) XXX_Merge(src proto.Message) { xxx_messageInfo_ProposalResult.Merge(m, src) } func (m *ProposalResult) XXX_Size() int { - return m.Size() + return xxx_messageInfo_ProposalResult.Size(m) } func (m *ProposalResult) XXX_DiscardUnknown() { xxx_messageInfo_ProposalResult.DiscardUnknown(m) @@ -759,25 +635,16 @@ func (*GetStatsParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{14} } func (m *GetStatsParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetStatsParam.Unmarshal(m, b) } func (m *GetStatsParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetStatsParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetStatsParam.Marshal(b, m, deterministic) } func (m *GetStatsParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetStatsParam.Merge(m, src) 
} func (m *GetStatsParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetStatsParam.Size(m) } func (m *GetStatsParam) XXX_DiscardUnknown() { xxx_messageInfo_GetStatsParam.DiscardUnknown(m) @@ -804,25 +671,16 @@ func (*Stats) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{15} } func (m *Stats) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_Stats.Unmarshal(m, b) } func (m *Stats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Stats.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_Stats.Marshal(b, m, deterministic) } func (m *Stats) XXX_Merge(src proto.Message) { xxx_messageInfo_Stats.Merge(m, src) } func (m *Stats) XXX_Size() int { - return m.Size() + return xxx_messageInfo_Stats.Size(m) } func (m *Stats) XXX_DiscardUnknown() { xxx_messageInfo_Stats.DiscardUnknown(m) @@ -862,25 +720,16 @@ func (*GetBlockParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{16} } func (m *GetBlockParam) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) + return xxx_messageInfo_GetBlockParam.Unmarshal(m, b) } func (m *GetBlockParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetBlockParam.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalTo(b) - if err != nil { - return nil, err - } - return b[:n], nil - } + return xxx_messageInfo_GetBlockParam.Marshal(b, m, deterministic) } func (m *GetBlockParam) XXX_Merge(src proto.Message) { xxx_messageInfo_GetBlockParam.Merge(m, src) } func (m *GetBlockParam) XXX_Size() int { - return m.Size() + return xxx_messageInfo_GetBlockParam.Size(m) } func (m *GetBlockParam) XXX_DiscardUnknown() { xxx_messageInfo_GetBlockParam.DiscardUnknown(m) @@ -939,63 +788,62 @@ func init() { proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f func init() { golang_proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f02) } var fileDescriptor_88e25d9b99e39f02 = []byte{ - // 883 bytes of a gzipped FileDescriptorProto + // 879 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x5f, 0x8f, 0xdb, 0x44, - 0x10, 0xc7, 0x77, 0xbd, 0xdc, 0xdd, 0x24, 0x77, 0x69, 0xb7, 0x47, 0x08, 0x2e, 0xa4, 0xd5, 0x4a, - 0x5c, 0x8f, 0x8a, 0x3a, 0x51, 0x68, 0x00, 0xc1, 0x03, 0x34, 0x08, 0x92, 0x53, 0xe1, 0x74, 0x38, + 0x10, 0xc7, 0x77, 0xbd, 0xdc, 0xdd, 0x24, 0x97, 0xb4, 0xdb, 0x23, 0x04, 0x17, 0xd2, 0x6a, 0x25, + 0xae, 0x47, 0x45, 0x9d, 0x28, 0x34, 0x80, 0x00, 0x09, 0x35, 0x08, 0x92, 0x53, 0xe1, 0x74, 0x38, 0xa8, 0x95, 0x40, 0x42, 0xda, 0xd8, 0x4b, 0x62, 0xe1, 0x78, 0xcd, 0x7a, 0x5d, 0xf0, 0x47, 0xe2, - 0x5b, 0xf0, 0x78, 0x8f, 0x3c, 0xf3, 0x50, 0xa1, 0xeb, 0xb7, 0xe0, 0x09, 0x79, 0xff, 0x38, 0xb6, - 0x2f, 0xad, 0xee, 0x85, 0x97, 0x68, 0x66, 0xf6, 0x37, 0xbf, 0x71, 0x66, 0xe7, 0x37, 0x0b, 0x87, - 0x3c, 0xf6, 0x7e, 0x4d, 0x29, 0xcf, 0x9c, 0x98, 0x33, 0xc1, 0xd0, 0x9e, 0xf1, 0xed, 0x87, 0x8b, - 0x40, 0x2c, 0xd3, 0xb9, 0xe3, 0xb1, 0x55, 0x7f, 0xc1, 0x16, 0xac, 0x2f, 0x01, 0xf3, 0xf4, 0x67, - 0xe9, 0x49, 0x47, 0x5a, 0x2a, 0xd1, 0xfe, 0xb8, 0x04, 0x17, 0x34, 0xf2, 0x29, 0x5f, 0x05, 0x91, - 0x28, 0x9b, 0x64, 0xee, 0x05, 0x7d, 0x91, 0xc5, 0x34, 0x51, 0xbf, 0x3a, 0xb1, 0x19, 0x91, 0x55, - 0xe1, 0xec, 0x13, 0x6f, 0xa5, 0xcd, 0xf6, 0x73, 0x12, 0x06, 0x3e, 0x11, 
0x8c, 0x9b, 0x33, 0x1e, - 0x7b, 0xda, 0x3c, 0x88, 0x49, 0x16, 0x32, 0xe2, 0x2b, 0x17, 0x07, 0xd0, 0x9c, 0x09, 0x22, 0xd2, - 0xe4, 0x9c, 0x70, 0xb2, 0x42, 0x27, 0xd0, 0x1e, 0x87, 0xcc, 0xfb, 0xe5, 0xfb, 0x60, 0x45, 0x9f, - 0x05, 0x62, 0x19, 0x44, 0x5d, 0xeb, 0x9e, 0x75, 0xb2, 0xef, 0xd6, 0xc3, 0x68, 0x00, 0xb7, 0x65, - 0x68, 0x46, 0x69, 0x54, 0x42, 0x6f, 0x49, 0xf4, 0xa6, 0x23, 0x4c, 0xa0, 0x3d, 0xa1, 0xe2, 0xb1, - 0xe7, 0xb1, 0x34, 0x12, 0xaa, 0xdc, 0x19, 0xec, 0x3e, 0xf6, 0x7d, 0x4e, 0x93, 0x44, 0x96, 0x69, - 0x8d, 0x1f, 0x5d, 0xbc, 0xb8, 0xfb, 0xc6, 0xdf, 0x2f, 0xee, 0x7e, 0x50, 0x6a, 0xc9, 0x32, 0x8b, - 0x29, 0x0f, 0xa9, 0xbf, 0xa0, 0xbc, 0x3f, 0x4f, 0x39, 0x67, 0xbf, 0xf5, 0x3d, 0x9e, 0xc5, 0x82, - 0x39, 0x3a, 0xd7, 0x35, 0x24, 0xf8, 0x0f, 0x4b, 0xd6, 0x98, 0x09, 0xc6, 0xc9, 0x82, 0xfe, 0x2f, - 0x35, 0xd0, 0xd7, 0xb0, 0xfd, 0x84, 0x66, 0xf2, 0x8f, 0x5e, 0x9b, 0x6b, 0x1e, 0x44, 0x84, 0x67, - 0xce, 0x33, 0xc6, 0xfd, 0xe1, 0xe8, 0x23, 0x37, 0x27, 0xc0, 0x3f, 0x42, 0x4b, 0x7f, 0xe7, 0x53, - 0x12, 0xa6, 0x14, 0x3d, 0x81, 0x1d, 0x69, 0xe8, 0xaf, 0x1c, 0x69, 0xe6, 0x87, 0xd7, 0x62, 0x9e, - 0xd2, 0xdf, 0xc7, 0x99, 0xa0, 0x89, 0xab, 0x38, 0xf0, 0xfb, 0x70, 0xeb, 0x9b, 0x20, 0x31, 0xcd, - 0xd6, 0x97, 0x7b, 0x04, 0x3b, 0xdf, 0xe5, 0xf3, 0xa9, 0xaf, 0x54, 0x39, 0x18, 0x43, 0x6b, 0x42, - 0xc5, 0x19, 0x59, 0xe9, 0x7e, 0x21, 0xb8, 0x91, 0x3b, 0x1a, 0x24, 0x6d, 0x7c, 0x0c, 0x87, 0x39, - 0x5d, 0x6e, 0xbf, 0x96, 0xab, 0x03, 0x47, 0x13, 0x2a, 0x9e, 0x9a, 0xe9, 0x9b, 0x51, 0x75, 0xcf, - 0x78, 0x02, 0x77, 0x6a, 0xf1, 0x69, 0x90, 0x08, 0xc6, 0xb3, 0x62, 0xea, 0x4e, 0x23, 0x2f, 0x4c, - 0x7d, 0x7a, 0xce, 0xe9, 0xf3, 0x80, 0xa5, 0xea, 0xaa, 0xb6, 0xdd, 0x7a, 0x18, 0x4f, 0xe0, 0xf6, - 0x06, 0x16, 0x34, 0x80, 0x5d, 0x6d, 0x76, 0xad, 0x7b, 0xdb, 0x27, 0xcd, 0x61, 0xc7, 0x29, 0xc4, - 0x59, 0xc6, 0xbb, 0x06, 0x86, 0xcf, 0xa0, 0x55, 0x3e, 0x40, 0x1d, 0x68, 0x2c, 0x69, 0xb0, 0x58, - 0x0a, 0x59, 0xf9, 0x86, 0xab, 0x3d, 0x74, 0x0c, 0xdb, 0x33, 0x2a, 0xba, 0x5b, 0x92, 0xf5, 0xc8, - 0x59, 0x0b, 0xab, 0xc8, 0x76, 0x73, 0x00, 0x3e, 0x86, 0x9b, 0x13, 0x2a, 0xce, 0x39, 0x8b, 0x59, - 0x42, 0xc2, 0xa2, 0x93, 0x53, 0x92, 0x2c, 0xd5, 0x85, 0xba, 0xd2, 0xc6, 0x03, 0x40, 0x79, 0x27, - 0x0d, 0x50, 0x77, 0xd3, 0x86, 0x3d, 0x15, 0xa1, 0xbe, 0x44, 0xef, 0xb9, 0x85, 0x8f, 0xbf, 0x85, - 0x43, 0x83, 0x76, 0x69, 0x92, 0x86, 0x62, 0x13, 0x2f, 0xba, 0x0f, 0x8d, 0x31, 0x09, 0x43, 0x26, - 0xe4, 0x60, 0x36, 0x87, 0x6d, 0xc7, 0xe8, 0x5c, 0x85, 0x5d, 0x7d, 0x8c, 0xdb, 0x70, 0x20, 0x15, - 0x42, 0xf4, 0x54, 0x60, 0x0a, 0x3b, 0xd2, 0x43, 0x0f, 0xe0, 0xa6, 0x99, 0x97, 0x5c, 0xb1, 0x5f, - 0x32, 0x9f, 0xea, 0x66, 0x5c, 0x89, 0xe7, 0xea, 0x2f, 0xc7, 0x58, 0x2a, 0x24, 0x7c, 0x4b, 0xc2, - 0x37, 0x1d, 0xe1, 0xfb, 0xb2, 0xae, 0xdc, 0x0b, 0xea, 0x3f, 0x77, 0xa0, 0x31, 0xad, 0x74, 0x5c, - 0x79, 0xc3, 0x7f, 0x77, 0xf4, 0x68, 0xa1, 0x21, 0x34, 0xd4, 0x6e, 0x42, 0x6f, 0xae, 0xaf, 0xb3, - 0xb4, 0xad, 0xec, 0x5b, 0x79, 0xd8, 0x51, 0x5d, 0xd1, 0xc8, 0x11, 0xc0, 0x7a, 0xc9, 0xa0, 0xb7, - 0xd7, 0x79, 0xb5, 0xd5, 0x63, 0xb7, 0x9c, 0x7c, 0x5f, 0x1a, 0xe0, 0xe7, 0x32, 0x4d, 0xeb, 0xb1, - 0x96, 0x56, 0xde, 0x26, 0x76, 0xa7, 0xfc, 0x25, 0x25, 0xf5, 0x7e, 0x06, 0xad, 0xb2, 0xe0, 0xd0, - 0x9d, 0x35, 0xee, 0x8a, 0x10, 0xab, 0xb5, 0x07, 0x16, 0xea, 0xc3, 0xae, 0x96, 0x20, 0xea, 0x54, - 0x4a, 0x17, 0xaa, 0xb4, 0x5b, 0x8e, 0xda, 0xf5, 0x5f, 0x45, 0x82, 0x67, 0x68, 0x04, 0xfb, 0x85, - 0x1e, 0x51, 0xb7, 0x5a, 0x6a, 0x2d, 0xd2, 0x6a, 0xd2, 0xc0, 0x42, 0xa7, 0x72, 0x3b, 0x56, 0xe6, - 0xbe, 0x57, 0xa9, 0x77, 0x45, 0xb9, 0xf6, 0x2b, 0x84, 0x84, 0x7e, 0x82, 0xce, 0x66, 0x45, 0xa3, - 
0xf7, 0x5e, 0xc9, 0x58, 0xd6, 0xbc, 0xfd, 0xee, 0x66, 0x62, 0xc3, 0xf2, 0x29, 0x34, 0x4b, 0x7a, - 0x42, 0x76, 0x85, 0xb4, 0x22, 0x33, 0xbb, 0x3e, 0xea, 0xe8, 0x14, 0x0e, 0x2a, 0x1a, 0x43, 0xef, - 0x54, 0x3b, 0x54, 0x15, 0x9f, 0x5d, 0xea, 0x5f, 0x55, 0x68, 0x03, 0x0b, 0x3d, 0x82, 0x3d, 0xa3, - 0x16, 0xf4, 0x56, 0x6d, 0x2a, 0x8c, 0x82, 0xec, 0x76, 0x75, 0x3a, 0x13, 0xf4, 0x09, 0x1c, 0x9a, - 0x59, 0x9f, 0x52, 0xe2, 0x53, 0x5e, 0xcb, 0x5d, 0xab, 0xc0, 0x3e, 0x70, 0xd4, 0x83, 0xae, 0x70, - 0xe3, 0x2f, 0x2e, 0x2e, 0x7b, 0xd6, 0x5f, 0x97, 0x3d, 0xeb, 0x9f, 0xcb, 0x9e, 0xf5, 0xe7, 0xcb, - 0x9e, 0x75, 0xf1, 0xb2, 0x67, 0xfd, 0xf0, 0xe0, 0xf5, 0x6f, 0x00, 0x8f, 0xbd, 0xbe, 0xa1, 0x9f, - 0x37, 0xe4, 0xbb, 0xfe, 0xe1, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xac, 0x5a, 0x15, 0x64, 0x9e, - 0x08, 0x00, 0x00, + 0x5b, 0xf0, 0xd8, 0x8f, 0x80, 0x78, 0xa8, 0x50, 0xfb, 0x2d, 0x78, 0x42, 0xde, 0x3f, 0x8e, 0xed, + 0x4b, 0xab, 0x7b, 0xe1, 0x25, 0x9a, 0x99, 0xfd, 0xcd, 0x6f, 0x36, 0xb3, 0xf3, 0x1b, 0x43, 0x9b, + 0xc7, 0xde, 0xaf, 0x29, 0xe5, 0x99, 0x13, 0x73, 0x26, 0x18, 0x3a, 0x30, 0xbe, 0x7d, 0x7f, 0x19, + 0x88, 0x55, 0xba, 0x70, 0x3c, 0xb6, 0x1e, 0x2c, 0xd9, 0x92, 0x0d, 0x24, 0x60, 0x91, 0xfe, 0x2c, + 0x3d, 0xe9, 0x48, 0x4b, 0x25, 0xda, 0x1f, 0x97, 0xe0, 0x82, 0x46, 0x3e, 0xe5, 0xeb, 0x20, 0x12, + 0x65, 0x93, 0x2c, 0xbc, 0x60, 0x20, 0xb2, 0x98, 0x26, 0xea, 0x57, 0x27, 0x36, 0x23, 0xb2, 0x2e, + 0x9c, 0x43, 0xe2, 0xad, 0xb5, 0xd9, 0x79, 0x4a, 0xc2, 0xc0, 0x27, 0x82, 0x71, 0x73, 0xc6, 0x63, + 0x4f, 0x9b, 0x47, 0x31, 0xc9, 0x42, 0x46, 0x7c, 0xe5, 0xe2, 0x00, 0x9a, 0x73, 0x41, 0x44, 0x9a, + 0x5c, 0x10, 0x4e, 0xd6, 0xe8, 0x14, 0x3a, 0x93, 0x90, 0x79, 0xbf, 0x7c, 0x1f, 0xac, 0xe9, 0x93, + 0x40, 0xac, 0x82, 0xa8, 0x67, 0xdd, 0xb1, 0x4e, 0x0f, 0xdd, 0x7a, 0x18, 0x0d, 0xe1, 0xa6, 0x0c, + 0xcd, 0x29, 0x8d, 0x4a, 0xe8, 0x1d, 0x89, 0xde, 0x76, 0x84, 0x09, 0x74, 0xa6, 0x54, 0x3c, 0xf4, + 0x3c, 0x96, 0x46, 0x42, 0x95, 0x3b, 0x87, 0xfd, 0x87, 0xbe, 0xcf, 0x69, 0x92, 0xc8, 0x32, 0xad, + 0xc9, 0x83, 0x67, 0xcf, 0x6f, 0xbf, 0xf1, 0xf7, 0xf3, 0xdb, 0x1f, 0x94, 0x5a, 0xb2, 0xca, 0x62, + 0xca, 0x43, 0xea, 0x2f, 0x29, 0x1f, 0x2c, 0x52, 0xce, 0xd9, 0x6f, 0x03, 0x8f, 0x67, 0xb1, 0x60, + 0x8e, 0xce, 0x75, 0x0d, 0x09, 0xfe, 0xc3, 0x92, 0x35, 0xe6, 0x82, 0x71, 0xb2, 0xa4, 0xff, 0x4b, + 0x0d, 0xf4, 0x35, 0xec, 0x3e, 0xa2, 0x99, 0xfc, 0xa3, 0x57, 0xe6, 0x5a, 0x04, 0x11, 0xe1, 0x99, + 0xf3, 0x84, 0x71, 0x7f, 0x34, 0xfe, 0xc8, 0xcd, 0x09, 0xf0, 0x8f, 0xd0, 0xd2, 0xf7, 0x7c, 0x4c, + 0xc2, 0x94, 0xa2, 0x47, 0xb0, 0x27, 0x0d, 0x7d, 0xcb, 0xb1, 0x66, 0xbe, 0x7f, 0x25, 0xe6, 0x19, + 0xfd, 0x7d, 0x92, 0x09, 0x9a, 0xb8, 0x8a, 0x03, 0xbf, 0x0f, 0x37, 0xbe, 0x09, 0x12, 0xd3, 0x6c, + 0xfd, 0xb8, 0xc7, 0xb0, 0xf7, 0x5d, 0x3e, 0x9f, 0xfa, 0x49, 0x95, 0x83, 0x31, 0xb4, 0xa6, 0x54, + 0x9c, 0x93, 0xb5, 0xee, 0x17, 0x82, 0x6b, 0xb9, 0xa3, 0x41, 0xd2, 0xc6, 0x27, 0xd0, 0xce, 0xe9, + 0x72, 0xfb, 0xb5, 0x5c, 0x5d, 0x38, 0x9e, 0x52, 0xf1, 0xd8, 0x4c, 0xdf, 0x9c, 0xaa, 0x77, 0xc6, + 0x53, 0xb8, 0x55, 0x8b, 0xcf, 0x82, 0x44, 0x30, 0x9e, 0x15, 0x53, 0x77, 0x16, 0x79, 0x61, 0xea, + 0xd3, 0x0b, 0x4e, 0x9f, 0x06, 0x2c, 0x55, 0x4f, 0xb5, 0xeb, 0xd6, 0xc3, 0x78, 0x0a, 0x37, 0xb7, + 0xb0, 0xa0, 0x21, 0xec, 0x6b, 0xb3, 0x67, 0xdd, 0xd9, 0x3d, 0x6d, 0x8e, 0xba, 0x4e, 0x21, 0xce, + 0x32, 0xde, 0x35, 0x30, 0x7c, 0x0e, 0xad, 0xf2, 0x01, 0xea, 0x42, 0x63, 0x45, 0x83, 0xe5, 0x4a, + 0xc8, 0xca, 0xd7, 0x5c, 0xed, 0xa1, 0x13, 0xd8, 0x9d, 0x53, 0xd1, 0xdb, 0x91, 0xac, 0xc7, 0xce, + 0x46, 0x58, 0x45, 0xb6, 0x9b, 0x03, 0xf0, 0x09, 0x5c, 0x9f, 0x52, 0x71, 0xc1, 0x59, 0xcc, 0x12, + 0x12, 
0x16, 0x9d, 0x9c, 0x91, 0x64, 0xa5, 0x1e, 0xd4, 0x95, 0x36, 0x1e, 0x02, 0xca, 0x3b, 0x69, + 0x80, 0xba, 0x9b, 0x36, 0x1c, 0xa8, 0x08, 0xf5, 0x25, 0xfa, 0xc0, 0x2d, 0x7c, 0xfc, 0x2d, 0xb4, + 0x0d, 0xda, 0xa5, 0x49, 0x1a, 0x8a, 0x6d, 0xbc, 0xe8, 0x2e, 0x34, 0x26, 0x24, 0x0c, 0x99, 0x90, + 0x83, 0xd9, 0x1c, 0x75, 0x1c, 0xa3, 0x73, 0x15, 0x76, 0xf5, 0x31, 0xee, 0xc0, 0x91, 0x54, 0x08, + 0xd1, 0x53, 0x81, 0x29, 0xec, 0x49, 0x0f, 0xdd, 0x83, 0xeb, 0x66, 0x5e, 0x72, 0xc5, 0x7e, 0xc9, + 0x7c, 0xaa, 0x9b, 0x71, 0x29, 0x9e, 0xab, 0xbf, 0x1c, 0x63, 0xa9, 0x90, 0xf0, 0x1d, 0x09, 0xdf, + 0x76, 0x84, 0xef, 0xca, 0xba, 0x72, 0x2f, 0xa8, 0xff, 0xdc, 0x85, 0xc6, 0xac, 0xd2, 0x71, 0xe5, + 0x8d, 0xfe, 0xdd, 0xd3, 0xa3, 0x85, 0x46, 0xd0, 0x50, 0xbb, 0x09, 0xbd, 0xb9, 0x79, 0xce, 0xd2, + 0xb6, 0xb2, 0x6f, 0xe4, 0x61, 0x47, 0x75, 0x45, 0x23, 0xc7, 0x00, 0x9b, 0x25, 0x83, 0xde, 0xde, + 0xe4, 0xd5, 0x56, 0x8f, 0xdd, 0x72, 0xf2, 0x7d, 0x69, 0x80, 0x5f, 0xc8, 0x34, 0xad, 0xc7, 0x5a, + 0x5a, 0x79, 0x9b, 0xd8, 0xdd, 0xf2, 0x4d, 0x4a, 0xea, 0xfd, 0x0c, 0x5a, 0x65, 0xc1, 0xa1, 0x5b, + 0x1b, 0xdc, 0x25, 0x21, 0x56, 0x6b, 0x0f, 0x2d, 0x34, 0x80, 0x7d, 0x2d, 0x41, 0xd4, 0xad, 0x94, + 0x2e, 0x54, 0x69, 0xb7, 0x1c, 0xb5, 0xeb, 0xbf, 0x8a, 0x04, 0xcf, 0xd0, 0x18, 0x0e, 0x0b, 0x3d, + 0xa2, 0x5e, 0xb5, 0xd4, 0x46, 0xa4, 0xd5, 0xa4, 0xa1, 0x85, 0xce, 0xe4, 0x76, 0xac, 0xcc, 0x7d, + 0xbf, 0x52, 0xef, 0x92, 0x72, 0xed, 0x57, 0x08, 0x09, 0xfd, 0x04, 0xdd, 0xed, 0x8a, 0x46, 0xef, + 0xbd, 0x92, 0xb1, 0xac, 0x79, 0xfb, 0xdd, 0xed, 0xc4, 0x86, 0xe5, 0x53, 0x68, 0x96, 0xf4, 0x84, + 0xec, 0x0a, 0x69, 0x45, 0x66, 0x76, 0x7d, 0xd4, 0xd1, 0x19, 0x1c, 0x55, 0x34, 0x86, 0xde, 0xa9, + 0x76, 0xa8, 0x2a, 0x3e, 0xbb, 0xd4, 0xbf, 0xaa, 0xd0, 0x86, 0x16, 0x7a, 0x00, 0x07, 0x46, 0x2d, + 0xe8, 0xad, 0xda, 0x54, 0x18, 0x05, 0xd9, 0x9d, 0xea, 0x74, 0x26, 0xe8, 0x13, 0x68, 0x9b, 0x59, + 0x9f, 0x51, 0xe2, 0x53, 0x5e, 0xcb, 0xdd, 0xa8, 0xc0, 0x3e, 0x72, 0xd4, 0x07, 0x5d, 0xe1, 0x26, + 0x9f, 0xff, 0xf5, 0xa2, 0x6f, 0xfd, 0xf3, 0xa2, 0x6f, 0xfd, 0xf9, 0xb2, 0x6f, 0x3d, 0x7b, 0xd9, + 0xb7, 0x7e, 0xb8, 0xf7, 0xfa, 0xfd, 0xcf, 0x63, 0x6f, 0x60, 0xa8, 0x17, 0x0d, 0xf9, 0x4d, 0xff, + 0xf0, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbe, 0x6e, 0x9f, 0x8e, 0x9a, 0x08, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1225,6 +1073,47 @@ type QueryServer interface { GetBlockHeader(context.Context, *GetBlockParam) (*types.Header, error) } +// UnimplementedQueryServer can be embedded to have forward compatible implementations. 
+type UnimplementedQueryServer struct { +} + +func (*UnimplementedQueryServer) Status(ctx context.Context, req *StatusParam) (*rpc.ResultStatus, error) { + return nil, status.Errorf(codes.Unimplemented, "method Status not implemented") +} +func (*UnimplementedQueryServer) GetAccount(ctx context.Context, req *GetAccountParam) (*acm.Account, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetAccount not implemented") +} +func (*UnimplementedQueryServer) GetStorage(ctx context.Context, req *GetStorageParam) (*StorageValue, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStorage not implemented") +} +func (*UnimplementedQueryServer) ListAccounts(req *ListAccountsParam, srv Query_ListAccountsServer) error { + return status.Errorf(codes.Unimplemented, "method ListAccounts not implemented") +} +func (*UnimplementedQueryServer) GetName(ctx context.Context, req *GetNameParam) (*names.Entry, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetName not implemented") +} +func (*UnimplementedQueryServer) ListNames(req *ListNamesParam, srv Query_ListNamesServer) error { + return status.Errorf(codes.Unimplemented, "method ListNames not implemented") +} +func (*UnimplementedQueryServer) GetValidatorSet(ctx context.Context, req *GetValidatorSetParam) (*ValidatorSet, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSet not implemented") +} +func (*UnimplementedQueryServer) GetValidatorSetHistory(ctx context.Context, req *GetValidatorSetHistoryParam) (*ValidatorSetHistory, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSetHistory not implemented") +} +func (*UnimplementedQueryServer) GetProposal(ctx context.Context, req *GetProposalParam) (*payload.Ballot, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetProposal not implemented") +} +func (*UnimplementedQueryServer) ListProposals(req *ListProposalsParam, srv Query_ListProposalsServer) error { + return status.Errorf(codes.Unimplemented, "method ListProposals not implemented") +} +func (*UnimplementedQueryServer) GetStats(ctx context.Context, req *GetStatsParam) (*Stats, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStats not implemented") +} +func (*UnimplementedQueryServer) GetBlockHeader(ctx context.Context, req *GetBlockParam) (*types.Header, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetBlockHeader not implemented") +} + func RegisterQueryServer(s *grpc.Server, srv QueryServer) { s.RegisterService(&_Query_serviceDesc, srv) } @@ -1515,515 +1404,6 @@ var _Query_serviceDesc = grpc.ServiceDesc{ Metadata: "rpcquery.proto", } -func (m *StatusParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *StatusParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.BlockTimeWithin) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.BlockTimeWithin))) - i += copy(dAtA[i:], m.BlockTimeWithin) - } - if len(m.BlockSeenTimeWithin) > 0 { - dAtA[i] = 0x12 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.BlockSeenTimeWithin))) - i += copy(dAtA[i:], m.BlockSeenTimeWithin) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetAccountParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, 
err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAccountParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n1 - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetStorageParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetStorageParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Address.Size())) - n2, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n2 - dAtA[i] = 0x12 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Key.Size())) - n3, err := m.Key.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n3 - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *StorageValue) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *StorageValue) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Value.Size())) - n4, err := m.Value.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n4 - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ListAccountsParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ListAccountsParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Query) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.Query))) - i += copy(dAtA[i:], m.Query) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetNameParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetNameParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Name) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.Name))) - i += copy(dAtA[i:], m.Name) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ListNamesParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ListNamesParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Query) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.Query))) - i += copy(dAtA[i:], m.Query) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetValidatorSetParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - 
dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetValidatorSetParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetValidatorSetHistoryParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetValidatorSetHistoryParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.IncludePrevious != 0 { - dAtA[i] = 0x8 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.IncludePrevious)) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ValidatorSetHistory) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ValidatorSetHistory) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.History) > 0 { - for _, msg := range m.History { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(msg.Size())) - n, err := msg.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n - } - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ValidatorSet) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ValidatorSet) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Height != 0 { - dAtA[i] = 0x8 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Height)) - } - if len(m.Set) > 0 { - for _, msg := range m.Set { - dAtA[i] = 0x12 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(msg.Size())) - n, err := msg.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n - } - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetProposalParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetProposalParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Hash) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.Hash))) - i += copy(dAtA[i:], m.Hash) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ListProposalsParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ListProposalsParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Proposed { - dAtA[i] = 0x8 - i++ - if m.Proposed { - dAtA[i] = 1 - } else { - dAtA[i] = 0 - } - i++ - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *ProposalResult) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], 
nil -} - -func (m *ProposalResult) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.Hash) > 0 { - dAtA[i] = 0xa - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(len(m.Hash))) - i += copy(dAtA[i:], m.Hash) - } - if m.Ballot != nil { - dAtA[i] = 0x12 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Ballot.Size())) - n5, err := m.Ballot.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n5 - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetStatsParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetStatsParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *Stats) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Stats) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.AccountsWithCode != 0 { - dAtA[i] = 0x8 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.AccountsWithCode)) - } - if m.AccountsWithoutCode != 0 { - dAtA[i] = 0x10 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.AccountsWithoutCode)) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func (m *GetBlockParam) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalTo(dAtA) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetBlockParam) MarshalTo(dAtA []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Height != 0 { - dAtA[i] = 0x8 - i++ - i = encodeVarintRpcquery(dAtA, i, uint64(m.Height)) - } - if m.XXX_unrecognized != nil { - i += copy(dAtA[i:], m.XXX_unrecognized) - } - return i, nil -} - -func encodeVarintRpcquery(dAtA []byte, offset int, v uint64) int { - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return offset + 1 -} func (m *StatusParam) Size() (n int) { if m == nil { return 0 @@ -2299,1622 +1679,8 @@ func (m *GetBlockParam) Size() (n int) { } func sovRpcquery(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcquery(x uint64) (n int) { return sovRpcquery(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *StatusParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: StatusParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: StatusParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field BlockTimeWithin", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if 
shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.BlockTimeWithin = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field BlockSeenTimeWithin", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.BlockSeenTimeWithin = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAccountParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAccountParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAccountParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.Address.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetStorageParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetStorageParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetStorageParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.Address.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.Key.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *StorageValue) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: StorageValue: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: StorageValue: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.Value.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ListAccountsParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ListAccountsParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListAccountsParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Query = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetNameParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetNameParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetNameParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ListNamesParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ListNamesParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListNamesParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Query = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetValidatorSetParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetValidatorSetParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetValidatorSetParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetValidatorSetHistoryParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetValidatorSetHistoryParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetValidatorSetHistoryParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field IncludePrevious", wireType) - } - m.IncludePrevious = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.IncludePrevious |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ValidatorSetHistory) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ValidatorSetHistory: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ValidatorSetHistory: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field History", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.History = append(m.History, &ValidatorSet{}) - if err := m.History[len(m.History)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ValidatorSet) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ValidatorSet: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ValidatorSet: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) - } - m.Height = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Height |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Set", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Set = append(m.Set, &validator.Validator{}) - if err := m.Set[len(m.Set)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetProposalParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetProposalParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetProposalParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Hash = append(m.Hash[:0], dAtA[iNdEx:postIndex]...) - if m.Hash == nil { - m.Hash = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ListProposalsParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ListProposalsParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListProposalsParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Proposed", wireType) - } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.Proposed = bool(v != 0) - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ProposalResult) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ProposalResult: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ProposalResult: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Hash = append(m.Hash[:0], dAtA[iNdEx:postIndex]...) - if m.Hash == nil { - m.Hash = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Ballot", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthRpcquery - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthRpcquery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Ballot == nil { - m.Ballot = &payload.Ballot{} - } - if err := m.Ballot.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetStatsParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetStatsParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetStatsParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *Stats) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Stats: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Stats: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field AccountsWithCode", wireType) - } - m.AccountsWithCode = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.AccountsWithCode |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field AccountsWithoutCode", wireType) - } - m.AccountsWithoutCode = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.AccountsWithoutCode |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetBlockParam) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetBlockParam: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetBlockParam: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) - } - m.Height = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpcquery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Height |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipRpcquery(dAtA[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) < 0 { - return ErrInvalidLengthRpcquery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipRpcquery(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcquery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcquery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcquery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthRpcquery - } - iNdEx += length - if iNdEx < 0 { - return 0, ErrInvalidLengthRpcquery - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowRpcquery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipRpcquery(dAtA[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - if iNdEx < 0 { - return 0, ErrInvalidLengthRpcquery - } - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthRpcquery = 
fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowRpcquery = fmt.Errorf("proto: integer overflow") -) diff --git a/rpc/rpctransact/rpctransact.pb.go b/rpc/rpctransact/rpctransact.pb.go index ea6523360..a11f42d3a 100644 --- a/rpc/rpctransact/rpctransact.pb.go +++ b/rpc/rpctransact/rpctransact.pb.go @@ -8,6 +8,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" time "time" _ "github.com/gogo/protobuf/gogoproto" @@ -21,6 +22,8 @@ import ( txs "github.com/hyperledger/burrow/txs" payload "github.com/hyperledger/burrow/txs/payload" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -441,6 +444,47 @@ type TransactServer interface { NameTxAsync(context.Context, *payload.NameTx) (*txs.Receipt, error) } +// UnimplementedTransactServer can be embedded to have forward compatible implementations. +type UnimplementedTransactServer struct { +} + +func (*UnimplementedTransactServer) BroadcastTxSync(ctx context.Context, req *TxEnvelopeParam) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxSync not implemented") +} +func (*UnimplementedTransactServer) BroadcastTxAsync(ctx context.Context, req *TxEnvelopeParam) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxAsync not implemented") +} +func (*UnimplementedTransactServer) SignTx(ctx context.Context, req *TxEnvelopeParam) (*TxEnvelope, error) { + return nil, status.Errorf(codes.Unimplemented, "method SignTx not implemented") +} +func (*UnimplementedTransactServer) FormulateTx(ctx context.Context, req *payload.Any) (*TxEnvelope, error) { + return nil, status.Errorf(codes.Unimplemented, "method FormulateTx not implemented") +} +func (*UnimplementedTransactServer) CallTxSync(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxSync not implemented") +} +func (*UnimplementedTransactServer) CallTxAsync(ctx context.Context, req *payload.CallTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxAsync not implemented") +} +func (*UnimplementedTransactServer) CallTxSim(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxSim not implemented") +} +func (*UnimplementedTransactServer) CallCodeSim(ctx context.Context, req *CallCodeParam) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallCodeSim not implemented") +} +func (*UnimplementedTransactServer) SendTxSync(ctx context.Context, req *payload.SendTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method SendTxSync not implemented") +} +func (*UnimplementedTransactServer) SendTxAsync(ctx context.Context, req *payload.SendTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method SendTxAsync not implemented") +} +func (*UnimplementedTransactServer) NameTxSync(ctx context.Context, req *payload.NameTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method NameTxSync not implemented") +} +func (*UnimplementedTransactServer) NameTxAsync(ctx context.Context, req *payload.NameTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method NameTxAsync not implemented") +} + func RegisterTransactServer(s *grpc.Server, 
srv TransactServer) { s.RegisterService(&_Transact_serviceDesc, srv) } @@ -736,9 +780,9 @@ func (m *CallCodeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.FromAddress.Size())) - n1, err := m.FromAddress.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.FromAddress.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Code) > 0 { @@ -778,9 +822,9 @@ func (m *TxEnvelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Envelope.Size())) - n2, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Envelope.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -809,9 +853,9 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Envelope.Size())) - n3, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -819,18 +863,18 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Payload.Size())) - n4, err := m.Payload.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Payload.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } dAtA[i] = 0x1a i++ i = encodeVarintRpctransact(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.Timeout))) - n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.XXX_unrecognized != nil { @@ -909,14 +953,7 @@ func (m *TxEnvelopeParam) Size() (n int) { } func sovRpctransact(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpctransact(x uint64) (n int) { return sovRpctransact(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/storage/forest.go b/storage/forest.go index 63823ede7..d99b3a407 100644 --- a/storage/forest.go +++ b/storage/forest.go @@ -3,9 +3,9 @@ package storage import ( "fmt" + "github.com/golang/protobuf/proto" + lru "github.com/hashicorp/golang-lru" - "github.com/hyperledger/burrow/binary" - "github.com/tendermint/go-amino" dbm "github.com/tendermint/tendermint/libs/db" "github.com/xlab/treeprint" ) @@ -88,13 +88,6 @@ type ImmutableForest struct { overwriting bool } -// This is the object that is stored in the leaves of the commitsTree - it captures the sub-tree hashes so that the -// commitsTree's hash becomes a mixture of the hashes of all the sub-trees. 
-type CommitID struct { - Hash binary.HexBytes - Version int64 -} - type ForestOption func(*ImmutableForest) var WithOverwriting ForestOption = func(imf *ImmutableForest) { imf.overwriting = true } @@ -307,14 +300,18 @@ func (imf *ImmutableForest) newTree(prefix []byte) *RWTree { // CommitID serialisation -var codec = amino.NewCodec() - func (cid *CommitID) UnmarshalBinary(data []byte) error { - return codec.UnmarshalBinaryBare(data, cid) + buf := proto.NewBuffer(data) + return buf.Unmarshal(cid) } -func (cid *CommitID) MarshalBinary() (data []byte, err error) { - return codec.MarshalBinaryBare(cid) +func (cid *CommitID) MarshalBinary() ([]byte, error) { + buf := proto.NewBuffer(nil) + err := buf.Marshal(cid) + if err != nil { + return nil, err + } + return buf.Bytes(), nil } func (cid CommitID) String() string { diff --git a/storage/forest_test.go b/storage/forest_test.go index 97ccad8d4..aaf2bdd8a 100644 --- a/storage/forest_test.go +++ b/storage/forest_test.go @@ -50,7 +50,7 @@ func TestMutableForest_Save(t *testing.T) { assertDump(t, forest, ` . ├── "Commits" - │   └── "fooos" -> "\n ym.\xb8fw\xdcIK\xe8QQ\xb6\x8a\x1fT\x15\xff\x80\xd5\xd91\xf6YKf\x12wx\x16l\xf5\x10\x01" + │   └── "fooos" -> "\b\x01\x12 ym.\xb8fw\xdcIK\xe8QQ\xb6\x8a\x1fT\x15\xff\x80\xd5\xd91\xf6YKf\x12wx\x16l\xf5" └── "fooos" └── "bar" -> "nog" `) @@ -70,8 +70,8 @@ func TestMutableForest_Save(t *testing.T) { assertDump(t, forest, ` . ├── "Commits" - │   ├── "fooos" -> "\n ym.\xb8fw\xdcIK\xe8QQ\xb6\x8a\x1fT\x15\xff\x80\xd5\xd91\xf6YKf\x12wx\x16l\xf5\x10\x01" - │   └── "prefixo" -> "\n E\xb2\xa4{аA\xddf\xcc\x02ȭ\xfa\xd1\xceZ\xa0nP\xe0\xd3\\X\x9c\x16M\xc1\x88t\x15\x8c\x10\x01" + │   ├── "fooos" -> "\b\x01\x12 ym.\xb8fw\xdcIK\xe8QQ\xb6\x8a\x1fT\x15\xff\x80\xd5\xd91\xf6YKf\x12wx\x16l\xf5" + │   └── "prefixo" -> "\b\x01\x12 E\xb2\xa4{аA\xddf\xcc\x02ȭ\xfa\xd1\xceZ\xa0nP\xe0\xd3\\X\x9c\x16M\xc1\x88t\x15\x8c" ├── "fooos" │   └── "bar" -> "nog" └── "prefixo" @@ -133,9 +133,9 @@ func TestSorted(t *testing.T) { assertDump(t, forest, ` . ├── "Commits" - │   ├── "age" -> "\n \x1dwd_\xbaRB\xf5\xa6\xf0\n\xab\x9aWY\xf7\t\x16t웿\xb6\x89O\n\xcf&\xf7\xe6\xcd\n\x10\x01" - │   ├── "balances" -> "\n \x9f\xab\xd3s\x18{\xbc\xe8\x98\xdai\xf5\x9f\x16\xden\xac(\xc9ԷU\x99\x17\xda'\xfa3-\x98\xd4\xc9\x10\x02" - │   └── "names" -> "\n \xbf\xf8\xf9vt>\xbc\x06@C\xe9I\x01C\xa3\xc3O \xbc\xaf\xbf\xb3\b\xb2UHh\xe8TM\xb3\xba\x10\x01" + │   ├── "age" -> "\b\x01\x12 \x1dwd_\xbaRB\xf5\xa6\xf0\n\xab\x9aWY\xf7\t\x16t웿\xb6\x89O\n\xcf&\xf7\xe6\xcd\n" + │   ├── "balances" -> "\b\x02\x12 \x9f\xab\xd3s\x18{\xbc\xe8\x98\xdai\xf5\x9f\x16\xden\xac(\xc9ԷU\x99\x17\xda'\xfa3-\x98\xd4\xc9" + │   └── "names" -> "\b\x01\x12 \xbf\xf8\xf9vt>\xbc\x06@C\xe9I\x01C\xa3\xc3O \xbc\xaf\xbf\xb3\b\xb2UHh\xe8TM\xb3\xba" ├── "age" │   ├── "Cora" -> "1" │   └── "Lindsay" -> "34" diff --git a/storage/storage.pb.go b/storage/storage.pb.go new file mode 100644 index 000000000..b29a673a8 --- /dev/null +++ b/storage/storage.pb.go @@ -0,0 +1,390 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: storage.proto + +package storage + +import ( + fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + golang_proto "github.com/golang/protobuf/proto" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = golang_proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package + +// This is the object that is stored in the leaves of the commitsTree - it captures the sub-tree hashes so that the +// commitsTree's hash becomes a mixture of the hashes of all the sub-trees. +type CommitID struct { + Version int64 `protobuf:"varint,1,opt,name=Version,proto3" json:"Version,omitempty"` + Hash []byte `protobuf:"bytes,2,opt,name=Hash,proto3" json:"Hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitID) Reset() { *m = CommitID{} } +func (*CommitID) ProtoMessage() {} +func (*CommitID) Descriptor() ([]byte, []int) { + return fileDescriptor_0d2c4ccf1453ffdb, []int{0} +} +func (m *CommitID) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CommitID) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *CommitID) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitID.Merge(m, src) +} +func (m *CommitID) XXX_Size() int { + return m.Size() +} +func (m *CommitID) XXX_DiscardUnknown() { + xxx_messageInfo_CommitID.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitID proto.InternalMessageInfo + +func (m *CommitID) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *CommitID) GetHash() []byte { + if m != nil { + return m.Hash + } + return nil +} + +func (*CommitID) XXX_MessageName() string { + return "storage.CommitID" +} +func init() { + proto.RegisterType((*CommitID)(nil), "storage.CommitID") + golang_proto.RegisterType((*CommitID)(nil), "storage.CommitID") +} + +func init() { proto.RegisterFile("storage.proto", fileDescriptor_0d2c4ccf1453ffdb) } +func init() { golang_proto.RegisterFile("storage.proto", fileDescriptor_0d2c4ccf1453ffdb) } + +var fileDescriptor_0d2c4ccf1453ffdb = []byte{ + // 186 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2d, 0x2e, 0xc9, 0x2f, + 0x4a, 0x4c, 0x4f, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x87, 0x72, 0xa5, 0x74, 0xd3, + 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5, 0xc1, + 0xf2, 0x49, 0xa5, 0x69, 0x60, 0x1e, 0x98, 0x03, 0x66, 0x41, 0xf4, 0x29, 0xd9, 0x71, 0x71, 0x38, + 0xe7, 0xe7, 0xe6, 0x66, 0x96, 0x78, 0xba, 0x08, 0x49, 0x70, 0xb1, 0x87, 0xa5, 0x16, 0x15, 0x67, + 0xe6, 0xe7, 0x49, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x42, 0x5c, 0x2c, 0x1e, + 0x89, 0xc5, 0x19, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0x3c, 0x41, 0x60, 0xb6, 0x15, 0xcb, 0x8c, 0x05, + 0xf2, 0x0c, 0x4e, 0xf6, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0x78, 0xe3, 0x91, 0x1c, + 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x07, 0x1e, 0xcb, 0x31, 0x9e, 0x78, 0x2c, 0xc7, 0x18, 0xa5, 0x8a, + 0xe4, 0x90, 0x8c, 0xca, 0x82, 0xd4, 0xa2, 0x9c, 0xd4, 0x94, 0xf4, 0xd4, 0x22, 0xfd, 0xa4, 0xd2, + 0xa2, 0xa2, 0xfc, 0x72, 0x7d, 0xa8, 0x7b, 0x93, 0xd8, 0xc0, 0xee, 0x30, 0x06, 0x04, 0x00, 0x00, + 0xff, 0xff, 0xc6, 0xa6, 0xc1, 0x7f, 0xd0, 0x00, 0x00, 0x00, +} + +func (m *CommitID) Marshal() (dAtA 
[]byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *CommitID) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + if m.Version != 0 { + dAtA[i] = 0x8 + i++ + i = encodeVarintStorage(dAtA, i, uint64(m.Version)) + } + if len(m.Hash) > 0 { + dAtA[i] = 0x12 + i++ + i = encodeVarintStorage(dAtA, i, uint64(len(m.Hash))) + i += copy(dAtA[i:], m.Hash) + } + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil +} + +func encodeVarintStorage(dAtA []byte, offset int, v uint64) int { + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return offset + 1 +} +func (m *CommitID) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Version != 0 { + n += 1 + sovStorage(uint64(m.Version)) + } + l = len(m.Hash) + if l > 0 { + n += 1 + l + sovStorage(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func sovStorage(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozStorage(x uint64) (n int) { + return sovStorage(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *CommitID) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStorage + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CommitID: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CommitID: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType) + } + m.Version = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStorage + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Version |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStorage + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthStorage + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthStorage + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Hash = append(m.Hash[:0], dAtA[iNdEx:postIndex]...) + if m.Hash == nil { + m.Hash = []byte{} + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipStorage(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthStorage + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthStorage + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipStorage(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStorage + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStorage + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + return iNdEx, nil + case 1: + iNdEx += 8 + return iNdEx, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStorage + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthStorage + } + iNdEx += length + if iNdEx < 0 { + return 0, ErrInvalidLengthStorage + } + return iNdEx, nil + case 3: + for { + var innerWire uint64 + var start int = iNdEx + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStorage + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipStorage(dAtA[start:]) + if err != nil { + return 0, err + } + iNdEx = start + next + if iNdEx < 0 { + return 0, ErrInvalidLengthStorage + } + } + return iNdEx, nil + case 4: + return iNdEx, nil + case 5: + iNdEx += 4 + return iNdEx, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +var ( + ErrInvalidLengthStorage = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowStorage = fmt.Errorf("proto: integer overflow") +) diff --git a/txs/amino_codec.go b/txs/amino_codec.go deleted file mode 100644 index bd561ac49..000000000 --- a/txs/amino_codec.go +++ /dev/null @@ -1,43 +0,0 @@ -package txs - -import ( - "fmt" - - "github.com/hyperledger/burrow/txs/payload" - amino "github.com/tendermint/go-amino" -) - -type aminoCodec struct { - *amino.Codec -} - -func NewAminoCodec() *aminoCodec { - cdc := amino.NewCodec() - cdc.RegisterInterface((*payload.Payload)(nil), nil) - registerTx(cdc, &payload.SendTx{}) - registerTx(cdc, &payload.CallTx{}) - registerTx(cdc, &payload.BondTx{}) - registerTx(cdc, &payload.UnbondTx{}) - registerTx(cdc, &payload.PermsTx{}) - registerTx(cdc, &payload.NameTx{}) - registerTx(cdc, &payload.GovTx{}) - registerTx(cdc, &payload.ProposalTx{}) - return &aminoCodec{cdc} -} - -func (gwc *aminoCodec) EncodeTx(env *Envelope) ([]byte, error) { - return gwc.MarshalBinaryBare(env) -} - -func (gwc *aminoCodec) DecodeTx(txBytes []byte) (*Envelope, error) { - env := new(Envelope) - err := gwc.UnmarshalBinaryBare(txBytes, env) - if err != nil { - return nil, err - } - return env, nil -} - -func registerTx(cdc *amino.Codec, tx payload.Payload) { - cdc.RegisterConcrete(tx, fmt.Sprintf("burrow/txs/payload/%v", tx.Type()), nil) -} diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index c99de74e1..400f266b2 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -7,6 
+7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -1056,9 +1057,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.CallTx.Size())) - n1, err := m.CallTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.CallTx.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -1066,9 +1067,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.SendTx.Size())) - n2, err := m.SendTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.SendTx.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1076,9 +1077,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.NameTx.Size())) - n3, err := m.NameTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.NameTx.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1086,9 +1087,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermsTx.Size())) - n4, err := m.PermsTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.PermsTx.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1096,9 +1097,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.GovTx.Size())) - n5, err := m.GovTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.GovTx.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -1106,9 +1107,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BondTx.Size())) - n6, err := m.BondTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.BondTx.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -1116,9 +1117,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintPayload(dAtA, i, uint64(m.UnbondTx.Size())) - n7, err := m.UnbondTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.UnbondTx.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -1126,9 +1127,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n8, err := m.BatchTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.BatchTx.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -1136,9 +1137,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalTx.Size())) - n9, err := m.ProposalTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.ProposalTx.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -1166,9 +1167,9 @@ func (m *TxInput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n10, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n10, err10 := m.Address.MarshalTo(dAtA[i:]) + if err10 != nil { + return 0, err10 } i += n10 if m.Amount != 0 { @@ -1205,9 +1206,9 @@ func (m *TxOutput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n11, err := 
m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n11, err11 := m.Address.MarshalTo(dAtA[i:]) + if err11 != nil { + return 0, err11 } i += n11 if m.Amount != 0 { @@ -1240,9 +1241,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n12, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n12, err12 := m.Input.MarshalTo(dAtA[i:]) + if err12 != nil { + return 0, err12 } i += n12 } @@ -1250,9 +1251,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n13, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n13, err13 := m.Address.MarshalTo(dAtA[i:]) + if err13 != nil { + return 0, err13 } i += n13 } @@ -1269,17 +1270,17 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.Data.Size())) - n14, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n14, err14 := m.Data.MarshalTo(dAtA[i:]) + if err14 != nil { + return 0, err14 } i += n14 dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.WASM.Size())) - n15, err := m.WASM.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n15, err15 := m.WASM.MarshalTo(dAtA[i:]) + if err15 != nil { + return 0, err15 } i += n15 if m.XXX_unrecognized != nil { @@ -1352,18 +1353,18 @@ func (m *PermsTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n16, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n16, err16 := m.Input.MarshalTo(dAtA[i:]) + if err16 != nil { + return 0, err16 } i += n16 } dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermArgs.Size())) - n17, err := m.PermArgs.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n17, err17 := m.PermArgs.MarshalTo(dAtA[i:]) + if err17 != nil { + return 0, err17 } i += n17 if m.XXX_unrecognized != nil { @@ -1391,9 +1392,9 @@ func (m *NameTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n18, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n18, err18 := m.Input.MarshalTo(dAtA[i:]) + if err18 != nil { + return 0, err18 } i += n18 } @@ -1484,18 +1485,18 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n19, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n19, err19 := m.Input.MarshalTo(dAtA[i:]) + if err19 != nil { + return 0, err19 } i += n19 } dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n20, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n20, err20 := m.Address.MarshalTo(dAtA[i:]) + if err20 != nil { + return 0, err20 } i += n20 if m.Height != 0 { @@ -1573,9 +1574,9 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n21, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n21, err21 := m.Input.MarshalTo(dAtA[i:]) + if err21 != nil { + return 0, err21 } i += n21 } @@ -1588,9 +1589,9 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalHash.Size())) - n22, err := m.ProposalHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n22, err22 := 
m.ProposalHash.MarshalTo(dAtA[i:]) + if err22 != nil { + return 0, err22 } i += n22 } @@ -1598,9 +1599,9 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n23, err := m.Proposal.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n23, err23 := m.Proposal.MarshalTo(dAtA[i:]) + if err23 != nil { + return 0, err23 } i += n23 } @@ -1673,9 +1674,9 @@ func (m *Vote) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n24, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n24, err24 := m.Address.MarshalTo(dAtA[i:]) + if err24 != nil { + return 0, err24 } i += n24 if m.VotingWeight != 0 { @@ -1720,9 +1721,9 @@ func (m *Proposal) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n25, err := m.BatchTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n25, err25 := m.BatchTx.MarshalTo(dAtA[i:]) + if err25 != nil { + return 0, err25 } i += n25 } @@ -1751,9 +1752,9 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n26, err := m.Proposal.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n26, err26 := m.Proposal.MarshalTo(dAtA[i:]) + if err26 != nil { + return 0, err26 } i += n26 } @@ -1761,9 +1762,9 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.FinalizingTx.Size())) - n27, err := m.FinalizingTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n27, err27 := m.FinalizingTx.MarshalTo(dAtA[i:]) + if err27 != nil { + return 0, err27 } i += n27 } @@ -2174,14 +2175,7 @@ func (m *Ballot) Size() (n int) { } func sovPayload(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozPayload(x uint64) (n int) { return sovPayload(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/txs/payload/proposal_tx.go b/txs/payload/proposal_tx.go index b0b8d0935..99c13f112 100644 --- a/txs/payload/proposal_tx.go +++ b/txs/payload/proposal_tx.go @@ -4,11 +4,10 @@ import ( "crypto/sha256" "fmt" - amino "github.com/tendermint/go-amino" + "github.com/golang/protobuf/proto" + "github.com/hyperledger/burrow/encoding" ) -var cdc = amino.NewCodec() - func NewProposalTx(propsal *Proposal) *ProposalTx { return &ProposalTx{ Proposal: propsal, @@ -34,8 +33,9 @@ func (tx *ProposalTx) Any() *Any { } func DecodeProposal(proposalBytes []byte) (*Proposal, error) { + buf := proto.NewBuffer(proposalBytes) proposal := new(Proposal) - err := cdc.UnmarshalBinaryBare(proposalBytes, proposal) + err := buf.Unmarshal(proposal) if err != nil { return nil, err } @@ -43,7 +43,12 @@ func DecodeProposal(proposalBytes []byte) (*Proposal, error) { } func (p *Proposal) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(p) + buf := proto.NewBuffer(nil) + err := buf.Marshal(p) + if err != nil { + return nil, err + } + return buf.Bytes(), nil } func (p *Proposal) Hash() []byte { @@ -67,7 +72,7 @@ func (v *Vote) String() string { func DecodeBallot(ballotBytes []byte) (*Ballot, error) { ballot := new(Ballot) - err := cdc.UnmarshalBinaryBare(ballotBytes, ballot) + err := encoding.Decode(ballotBytes, ballot) if err != nil { return nil, err } @@ -75,5 +80,5 @@ func DecodeBallot(ballotBytes []byte) (*Ballot, error) { } func (p *Ballot) Encode() ([]byte, 
error) { - return cdc.MarshalBinaryBare(p) + return encoding.Encode(p) } diff --git a/txs/protobuf_codec.go b/txs/protobuf_codec.go new file mode 100644 index 000000000..dd2d09eb7 --- /dev/null +++ b/txs/protobuf_codec.go @@ -0,0 +1,25 @@ +package txs + +import ( + "github.com/hyperledger/burrow/encoding" +) + +type protobufCodec struct { +} + +func NewProtobufCodec() *protobufCodec { + return &protobufCodec{} +} + +func (gwc *protobufCodec) EncodeTx(env *Envelope) ([]byte, error) { + return encoding.Encode(env) +} + +func (gwc *protobufCodec) DecodeTx(txBytes []byte) (*Envelope, error) { + env := new(Envelope) + err := encoding.Decode(txBytes, env) + if err != nil { + return nil, err + } + return env, nil +} diff --git a/txs/amino_codec_test.go b/txs/protobuf_codec_test.go similarity index 95% rename from txs/amino_codec_test.go rename to txs/protobuf_codec_test.go index 6adbc3748..07de91b3e 100644 --- a/txs/amino_codec_test.go +++ b/txs/protobuf_codec_test.go @@ -11,7 +11,7 @@ import ( ) func TestAminoEncodeTxDecodeTx(t *testing.T) { - codec := NewAminoCodec() + codec := NewProtobufCodec() inputAddress := crypto.Address{1, 2, 3, 4, 5} outputAddress := crypto.Address{5, 4, 3, 2, 1} amount := uint64(2) @@ -38,7 +38,7 @@ func TestAminoEncodeTxDecodeTx(t *testing.T) { } func TestAminoEncodeTxDecodeTx_CallTx(t *testing.T) { - codec := NewAminoCodec() + codec := NewProtobufCodec() inputAccount := acm.GeneratePrivateAccountFromSecret("fooo") amount := uint64(2) sequence := uint64(3) @@ -65,7 +65,7 @@ func TestAminoEncodeTxDecodeTx_CallTx(t *testing.T) { } func TestAminoTxEnvelope(t *testing.T) { - codec := NewAminoCodec() + codec := NewProtobufCodec() privAccFrom := acm.GeneratePrivateAccountFromSecret("foo") privAccTo := acm.GeneratePrivateAccountFromSecret("bar") toAddress := privAccTo.GetAddress() diff --git a/txs/tx.go b/txs/tx.go index 1e9e17a7c..34d3f7e6f 100644 --- a/txs/tx.go +++ b/txs/tx.go @@ -22,6 +22,7 @@ import ( "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/encoding" "github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/txs/payload" ) @@ -200,19 +201,18 @@ func (tx *Tx) GenerateReceipt() *Receipt { return receipt } -var cdc = NewAminoCodec() - func DecodeReceipt(bs []byte) (*Receipt, error) { receipt := new(Receipt) - err := cdc.UnmarshalBinaryBare(bs, receipt) + err := encoding.Decode(bs, receipt) if err != nil { return nil, err } + return receipt, nil } func (receipt *Receipt) Encode() ([]byte, error) { - return cdc.MarshalBinaryBare(receipt) + return encoding.Encode(receipt) } func EnvelopeFromAny(chainID string, p *payload.Any) *Envelope { diff --git a/txs/txs.pb.go b/txs/txs.pb.go index 0e9ac84c0..bebf3056a 100644 --- a/txs/txs.pb.go +++ b/txs/txs.pb.go @@ -7,6 +7,7 @@ import ( fmt "fmt" io "io" math "math" + math_bits "math/bits" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" @@ -282,9 +283,9 @@ func (m *Envelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Tx.Size())) - n1, err := m.Tx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Tx.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -313,9 +314,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintTxs(dAtA, i, uint64(m.Address.Size())) - n2, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := 
m.Address.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -323,9 +324,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.PublicKey.Size())) - n3, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.PublicKey.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -333,9 +334,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Signature.Size())) - n4, err := m.Signature.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Signature.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -368,9 +369,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.TxHash.Size())) - n5, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.TxHash.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.CreatesContract { @@ -386,9 +387,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.ContractAddress.Size())) - n6, err := m.ContractAddress.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.ContractAddress.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 if m.XXX_unrecognized != nil { @@ -475,14 +476,7 @@ func (m *Receipt) Size() (n int) { } func sovTxs(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozTxs(x uint64) (n int) { return sovTxs(uint64((x << 1) ^ uint64((int64(x) >> 63)))) From 89b2b32caea09f63f14060a22a196bfe6b0a2284 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Sat, 29 Jun 2019 21:36:56 +0100 Subject: [PATCH 03/70] Update CI image to newer Go Signed-off-by: Silas Davis --- .circleci/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile index 5097163cc..240363bed 100644 --- a/.circleci/Dockerfile +++ b/.circleci/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.12.1-alpine3.9 +FROM golang:1.12.6-alpine3.10 MAINTAINER Monax ENV DOCKER_VERSION "17.12.1-ce" From b45eb6a8338e752767c2fd80c3b9557e0dbb57fb Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 2 Jul 2019 12:39:07 +0100 Subject: [PATCH 04/70] make test fails in circleci due to out of memory Reducing parallelism solves the problem. The problem can be reproduced by running make test in cgroups, see: https://stackoverflow.com/questions/26860822/how-limit-memory-usage-for-a-single-linux-process-and-not-kill-the-process Signed-off-by: Sean Young --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 13a272240..abdc013aa 100644 --- a/Makefile +++ b/Makefile @@ -164,7 +164,8 @@ solang: $(SOLANG_GO_FILES) .PHONY: test test: check bin/solc - @tests/scripts/bin_wrapper.sh go test ./... ${GOPACKAGES_NOVENDOR} +# limit parallelism with -p to prevent OOM on circleci + @tests/scripts/bin_wrapper.sh go test ./... 
-p 2 .PHONY: test_keys test_keys: build_burrow From cf7a8e9a612c2bf57fc3c59e6c45ac1d3bfc1a63 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 2 Jul 2019 17:20:54 +0100 Subject: [PATCH 05/70] add test coverage to ci Signed-off-by: Gregory Hill --- .circleci/config.yml | 12 ++++++++++++ README.md | 2 ++ 2 files changed, 14 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c03db0be..1f7117d3b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -13,9 +13,21 @@ tag_filters: &tags_filters setup_docker: &setup_docker version: 17.11.0-ce +orbs: + codecov: codecov/codecov@1.0.4 + # Start of CircleCI 2.0 config version: 2 jobs: + cover: + <<: *defaults + steps: + - checkout + - run: go test -coverprofile=c.out + - run: go tool cover -html=c.out -o coverage.html + - codecov/upload: + file: coverage.html + test: <<: *defaults steps: diff --git a/README.md b/README.md index fb9fc1b46..fc5ba7ac5 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,8 @@ [![GoDoc](https://godoc.org/github.com/burrow?status.png)](https://godoc.org/github.com/hyperledger/burrow) [![license](https://img.shields.io/github/license/hyperledger/burrow.svg)](LICENSE.md) [![LoC](https://tokei.rs/b1/github/hyperledger/burrow?category=lines)](https://github.com/hyperledger/burrow) +[![codecov](https://codecov.io/gh/hyperledger/burrow/branch/develop/graph/badge.svg)](https://codecov.io/gh/hyperledger/burrow) + Branch | Linux ----------|------ From ee42ddee072ae0a15039af43be21d2b1edc7bb04 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 2 Jul 2019 18:32:32 +0100 Subject: [PATCH 06/70] add cover test to workflow Signed-off-by: Gregory Hill --- .circleci/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1f7117d3b..3c07c3f35 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -79,6 +79,10 @@ workflows: test_and_release: jobs: + - cover: + filters: + <<: *tags_filters + - test: filters: <<: *tags_filters From c026774e11835bdbb1e4ecf994bbf536686def93 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 24 Jun 2019 16:43:02 +0100 Subject: [PATCH 07/70] refactor forensics Signed-off-by: Gregory Hill --- cmd/burrow/commands/examine.go | 100 -------- cmd/burrow/commands/explore.go | 178 ++++++++++++++ cmd/burrow/main.go | 4 +- forensics/capture.go | 42 ++++ forensics/capture_test.go | 33 +++ forensics/forensics.go | 8 - forensics/replay.go | 220 +++++++++--------- forensics/replay_test.go | 51 +--- forensics/{ => storage}/cache_db.go | 2 +- forensics/{ => storage}/cache_db_test.go | 2 +- forensics/{ => storage}/channel_iterator.go | 2 +- .../{ => storage}/channel_iterator_test.go | 2 +- forensics/{ => storage}/kvcache.go | 2 +- forensics/{ => storage}/kvcache_test.go | 2 +- forensics/{ => storage}/multi_iterator.go | 2 +- .../{ => storage}/multi_iterator_test.go | 2 +- forensics/{ => storage}/unique_iterator.go | 2 +- .../{ => storage}/unique_iterator_test.go | 2 +- forensics/{ => storage}/util_test.go | 2 +- go.mod | 2 + go.sum | 7 + storage/prefix_db_test.go | 29 +-- 22 files changed, 408 insertions(+), 288 deletions(-) delete mode 100644 cmd/burrow/commands/examine.go create mode 100644 cmd/burrow/commands/explore.go create mode 100644 forensics/capture.go create mode 100644 forensics/capture_test.go delete mode 100644 forensics/forensics.go rename forensics/{ => storage}/cache_db.go (99%) rename forensics/{ => storage}/cache_db_test.go (98%) rename forensics/{ => storage}/channel_iterator.go (99%) rename 
forensics/{ => storage}/channel_iterator_test.go (98%) rename forensics/{ => storage}/kvcache.go (99%) rename forensics/{ => storage}/kvcache_test.go (99%) rename forensics/{ => storage}/multi_iterator.go (99%) rename forensics/{ => storage}/multi_iterator_test.go (99%) rename forensics/{ => storage}/unique_iterator.go (97%) rename forensics/{ => storage}/unique_iterator_test.go (94%) rename forensics/{ => storage}/util_test.go (98%) diff --git a/cmd/burrow/commands/examine.go b/cmd/burrow/commands/examine.go deleted file mode 100644 index f2351eb47..000000000 --- a/cmd/burrow/commands/examine.go +++ /dev/null @@ -1,100 +0,0 @@ -package commands - -import ( - "encoding/json" - - "github.com/hyperledger/burrow/bcm" - - "github.com/hyperledger/burrow/txs" - cli "github.com/jawher/mow.cli" - "github.com/tendermint/tendermint/libs/db" -) - -func Examine(output Output) func(cmd *cli.Cmd) { - return func(dump *cli.Cmd) { - configOpts := addConfigOptions(dump) - - var explorer *bcm.BlockStore - - dump.Before = func() { - conf, err := configOpts.obtainBurrowConfig() - if err != nil { - output.Fatalf("Could not obtain config: %v", err) - } - tmConf, err := conf.TendermintConfig() - if err != nil { - output.Fatalf("Could not build Tendermint config:", err) - } - - explorer = bcm.NewBlockExplorer(db.DBBackendType(tmConf.DBBackend), tmConf.DBDir()) - } - - dump.Command("blocks", "dump blocks to stdout", func(cmd *cli.Cmd) { - rangeArg := cmd.StringArg("RANGE", "", "Range as START_HEIGHT:END_HEIGHT where omitting "+ - "either endpoint implicitly describes the start/end and a negative index counts back from the last block") - - cmd.Spec = "[RANGE]" - - cmd.Action = func() { - start, end, err := parseRange(*rangeArg) - if err != nil { - output.Fatalf("could not parse range '%s': %v", *rangeArg, err) - } - - err = explorer.Blocks(start, end, - func(block *bcm.Block) error { - bs, err := json.Marshal(block) - if err != nil { - output.Fatalf("Could not serialise block: %v", err) - } - output.Printf(string(bs)) - return nil - }) - if err != nil { - output.Fatalf("Error iterating over blocks: %v", err) - } - } - }) - - dump.Command("txs", "dump transactions to stdout", func(cmd *cli.Cmd) { - rangeArg := cmd.StringArg("RANGE", "", "Range as START_HEIGHT:END_HEIGHT where omitting "+ - "either endpoint implicitly describes the start/end and a negative index counts back from the last block") - - cmd.Spec = "[RANGE]" - - cmd.Action = func() { - start, end, err := parseRange(*rangeArg) - if err != nil { - output.Fatalf("could not parse range '%s': %v", *rangeArg, err) - } - - err = explorer.Blocks(start, end, - func(block *bcm.Block) error { - err := block.Transactions(func(txEnv *txs.Envelope) error { - wrapper := struct { - Height int64 - Tx *txs.Envelope - }{ - Height: block.Height, - Tx: txEnv, - } - bs, err := json.Marshal(wrapper) - if err != nil { - output.Fatalf("Could not deserialise transaction: %v", err) - } - output.Printf(string(bs)) - return nil - }) - if err != nil { - output.Fatalf("Error iterating over transactions: %v", err) - } - // If we stopped transactions stop everything - return nil - }) - if err != nil { - output.Fatalf("Error iterating over blocks: %v", err) - } - } - }) - } -} diff --git a/cmd/burrow/commands/explore.go b/cmd/burrow/commands/explore.go new file mode 100644 index 000000000..9d3503a69 --- /dev/null +++ b/cmd/burrow/commands/explore.go @@ -0,0 +1,178 @@ +package commands + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/hyperledger/burrow/config" + 
"github.com/hyperledger/burrow/forensics" + "github.com/hyperledger/burrow/logging" + + "github.com/hyperledger/burrow/bcm" + + "github.com/hyperledger/burrow/txs" + cli "github.com/jawher/mow.cli" + "github.com/tendermint/tendermint/libs/db" +) + +func Explore(output Output) func(cmd *cli.Cmd) { + return func(cmd *cli.Cmd) { + configOpts := addConfigOptions(cmd) + var conf *config.BurrowConfig + var explorer *bcm.BlockStore + var err error + + cmd.Before = func() { + conf, err = configOpts.obtainBurrowConfig() + if err != nil { + output.Fatalf("could not obtain config: %v", err) + } + tmConf, err := conf.TendermintConfig() + if err != nil { + output.Fatalf("could not build Tendermint config:", err) + } + + if conf.GenesisDoc == nil { + output.Fatalf("genesis doc is required") + } + + explorer = bcm.NewBlockExplorer(db.DBBackendType(tmConf.DBBackend), tmConf.DBDir()) + } + + cmd.Command("compare", "granularly compare the state from two .burrow directories", func(cmd *cli.Cmd) { + goodDir := cmd.StringArg("GOOD", "", "Directory containing expected state") + badDir := cmd.StringArg("BAD", "", "Directory containing invalid state") + cmd.Spec = "[GOOD] [BAD]" + + cmd.Before = func() { + if err := isDir(*goodDir); err != nil { + output.Fatalf("could not obtain state: %v", err) + } + if err := isDir(*badDir); err != nil { + output.Fatalf("could not obtain state: %v", err) + } + } + + cmd.Action = func() { + logger := logging.NewNoopLogger() + replay1 := forensics.NewReplay(logger, conf.GenesisDoc, *goodDir) + replay2 := forensics.NewReplay(logger, conf.GenesisDoc, *badDir) + + h1, err := replay1.LatestHeight() + if err != nil { + output.Fatalf("could not get height for first replay: %v", err) + } + h2, err := replay2.LatestHeight() + if err != nil { + output.Fatalf("could not get height for second replay: %v", err) + } + + height := h1 + if h2 < h1 { + height = h2 + output.Printf("States do not agree on last height, using min: %d", h2) + } else { + output.Printf("Using last height: %d", h1) + } + + recap1, err := replay1.Blocks(1, height) + if err != nil { + output.Fatalf("could not replay first state: %v", err) + } + + recap2, err := replay2.Blocks(1, height) + if err != nil { + output.Fatalf("could not replay second state: %v", err) + } + + if height, err := forensics.CompareCaptures(recap1, recap2); err != nil { + output.Printf("difference in capture: %v", err) + if err := forensics.CompareState(replay1, replay2, height); err != nil { + output.Fatalf("difference in state: %v", err) + } + } + + output.Printf("States match!") + } + }) + + cmd.Command("blocks", "dump blocks to stdout", func(cmd *cli.Cmd) { + rangeArg := cmd.StringArg("RANGE", "", "Range as START_HEIGHT:END_HEIGHT where omitting "+ + "either endpoint implicitly describes the start/end and a negative index counts back from the last block") + + cmd.Spec = "[RANGE]" + + cmd.Action = func() { + start, end, err := parseRange(*rangeArg) + if err != nil { + output.Fatalf("could not parse range '%s': %v", *rangeArg, err) + } + + err = explorer.Blocks(start, end, + func(block *bcm.Block) error { + bs, err := json.Marshal(block) + if err != nil { + output.Fatalf("Could not serialise block: %v", err) + } + output.Printf(string(bs)) + return nil + }) + if err != nil { + output.Fatalf("Error iterating over blocks: %v", err) + } + } + }) + + cmd.Command("txs", "dump transactions to stdout", func(cmd *cli.Cmd) { + rangeArg := cmd.StringArg("RANGE", "", "Range as START_HEIGHT:END_HEIGHT where omitting "+ + "either endpoint implicitly describes 
the start/end and a negative index counts back from the last block") + + cmd.Spec = "[RANGE]" + + cmd.Action = func() { + start, end, err := parseRange(*rangeArg) + if err != nil { + output.Fatalf("could not parse range '%s': %v", *rangeArg, err) + } + + err = explorer.Blocks(start, end, + func(block *bcm.Block) error { + err := block.Transactions(func(txEnv *txs.Envelope) error { + wrapper := struct { + Height int64 + Tx *txs.Envelope + }{ + Height: block.Height, + Tx: txEnv, + } + bs, err := json.Marshal(wrapper) + if err != nil { + output.Fatalf("Could not deserialise transaction: %v", err) + } + output.Printf(string(bs)) + return nil + }) + if err != nil { + output.Fatalf("Error iterating over transactions: %v", err) + } + // If we stopped transactions stop everything + return nil + }) + if err != nil { + output.Fatalf("Error iterating over blocks: %v", err) + } + } + }) + } +} + +func isDir(path string) error { + file, err := os.Stat(path) + if err != nil { + return fmt.Errorf("could not read state directory: %v", err) + } else if !file.IsDir() { + return fmt.Errorf("%s is not a directory", path) + } + return nil +} diff --git a/cmd/burrow/main.go b/cmd/burrow/main.go index 189d22b50..208433ab3 100644 --- a/cmd/burrow/main.go +++ b/cmd/burrow/main.go @@ -57,8 +57,8 @@ func burrow(output commands.Output) *cli.Cli { app.Command("keys", "A tool for doing a bunch of cool stuff with keys", commands.Keys(output)) - app.Command("examine", "Dump objects from an offline Burrow .burrow directory", - commands.Examine(output)) + app.Command("explore", "Dump objects from an offline Burrow .burrow directory", + commands.Explore(output)) app.Command("deploy", "Deploy and test contracts", commands.Deploy(output)) diff --git a/forensics/capture.go b/forensics/capture.go new file mode 100644 index 000000000..672c43e1b --- /dev/null +++ b/forensics/capture.go @@ -0,0 +1,42 @@ +package forensics + +import ( + "fmt" + + "github.com/hyperledger/burrow/binary" + "github.com/hyperledger/burrow/execution/exec" + "github.com/pkg/errors" +) + +type ReplayCapture struct { + Height uint64 + AppHashBefore binary.HexBytes + AppHashAfter binary.HexBytes + TxExecutions []*exec.TxExecution +} + +func (rc *ReplayCapture) String() string { + return fmt.Sprintf("ReplayCapture[Height %d; AppHash: %v -> %v]", + rc.Height, rc.AppHashBefore, rc.AppHashAfter) +} + +// Compare the app hashes of two block replays +func (exp *ReplayCapture) Compare(act *ReplayCapture) error { + if exp.AppHashBefore.String() != act.AppHashBefore.String() { + return fmt.Errorf("app hashes before do not match") + } else if exp.AppHashAfter.String() != act.AppHashAfter.String() { + return fmt.Errorf("app hashes after do not match") + } + + return nil +} + +// CompareCaptures of two independent replays +func CompareCaptures(exp, act []*ReplayCapture) (uint64, error) { + for i, rc := range exp { + if err := rc.Compare(act[i]); err != nil { + return rc.Height, errors.Wrapf(err, "mismatch at height %d", rc.Height) + } + } + return 0, nil +} diff --git a/forensics/capture_test.go b/forensics/capture_test.go new file mode 100644 index 000000000..b0565dedc --- /dev/null +++ b/forensics/capture_test.go @@ -0,0 +1,33 @@ +package forensics + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/hyperledger/burrow/binary" +) + +func TestCompareCapture(t *testing.T) { + exp := []*ReplayCapture{{ + Height: 0, + AppHashBefore: binary.HexBytes("00000000000000000000"), + AppHashAfter: binary.HexBytes("00000000000000000000"), + }, { + 
Height: 1, + AppHashBefore: binary.HexBytes("00000000000000000000"), + AppHashAfter: binary.HexBytes("00000000000000000000"), + }} + act := []*ReplayCapture{{ + Height: 0, + AppHashBefore: binary.HexBytes("00000000000000000000"), + AppHashAfter: binary.HexBytes("00000000000000000000"), + }, { + Height: 1, + AppHashBefore: binary.HexBytes("00000000000000000000"), + AppHashAfter: binary.HexBytes("11111111111111111111"), + }} + height, err := CompareCaptures(exp, act) + require.Error(t, err) + require.Equal(t, uint64(1), height) +} diff --git a/forensics/forensics.go b/forensics/forensics.go deleted file mode 100644 index f7e1b440b..000000000 --- a/forensics/forensics.go +++ /dev/null @@ -1,8 +0,0 @@ -// +build forensics - -// This package contains tools for examining, replaying, and debugging Tendermint-side and Burrow-side blockchain state. -// Some code is quick and dirty from particular investigations and some is better extracted, encapsulated and generalised. -// The sketchy code is included so that useful tools can be progressively put together as the generality of the types of -// forensic debugging needed in the wild are determined. - -package forensics diff --git a/forensics/replay.go b/forensics/replay.go index b5de6bd83..d004931b8 100644 --- a/forensics/replay.go +++ b/forensics/replay.go @@ -1,53 +1,49 @@ -// +build forensics +// This package contains tools for examining, replaying, and debugging Tendermint-side and Burrow-side blockchain state. +// Some code is quick and dirty from particular investigations and some is better extracted, encapsulated and generalised. +// The sketchy code is included so that useful tools can be progressively put together as the generality of the types of +// forensic debugging needed in the wild are determined. 
package forensics import ( "bytes" + "encoding/hex" "fmt" "path" + "github.com/fatih/color" "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/core" "github.com/hyperledger/burrow/event" "github.com/hyperledger/burrow/execution" - "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/execution/state" + "github.com/hyperledger/burrow/forensics/storage" "github.com/hyperledger/burrow/genesis" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/txs" "github.com/pkg/errors" dbm "github.com/tendermint/tendermint/libs/db" "github.com/tendermint/tendermint/types" + "github.com/xlab/treeprint" ) +// Replay is a kernel for state replaying type Replay struct { Explorer *bcm.BlockStore db dbm.DB cacheDB dbm.DB blockchain *bcm.Blockchain genesisDoc *genesis.GenesisDoc + committer execution.BatchCommitter + state *state.State logger *logging.Logger } -type ReplayCapture struct { - Height uint64 - AppHashBefore binary.HexBytes - AppHashAfter binary.HexBytes - TxExecutions []*exec.TxExecution -} - -func (recap *ReplayCapture) String() string { - return fmt.Sprintf("ReplayCapture[Height %d; AppHash: %v -> %v]", - recap.Height, recap.AppHashBefore, recap.AppHashAfter) -} - -func NewReplay(dbDir string, genesisDoc *genesis.GenesisDoc, logger *logging.Logger) *Replay { - // burrowDB := core.NewBurrowDB(dbDir) +func NewReplay(logger *logging.Logger, genesisDoc *genesis.GenesisDoc, dbDir string) *Replay { // Avoid writing through to underlying DB db := dbm.NewDB(core.BurrowDBName, dbm.GoLevelDBBackend, dbDir) - cacheDB := NewCacheDB(db) + cacheDB := storage.NewCacheDB(db) return &Replay{ Explorer: bcm.NewBlockExplorer(dbm.LevelDBBackend, path.Join(dbDir, "data")), db: db, @@ -58,6 +54,14 @@ func NewReplay(dbDir string, genesisDoc *genesis.GenesisDoc, logger *logging.Log } } +func (re *Replay) LatestHeight() (uint64, error) { + blockchain, _, err := bcm.LoadOrNewBlockchain(re.db, re.genesisDoc, re.logger) + if err != nil { + return 0, err + } + return blockchain.LastBlockHeight(), nil +} + func (re *Replay) LatestBlockchain() (*bcm.Blockchain, error) { blockchain, _, err := bcm.LoadOrNewBlockchain(re.db, re.genesisDoc, re.logger) if err != nil { @@ -67,129 +71,137 @@ func (re *Replay) LatestBlockchain() (*bcm.Blockchain, error) { return blockchain, nil } -func (re *Replay) State(height uint64) (*state.State, error) { - return state.LoadState(re.cacheDB, execution.VersionAtHeight(height)) -} - +// Block loads and commits a block func (re *Replay) Block(height uint64) (*ReplayCapture, error) { - recap := new(ReplayCapture) - // Load and commit previous block - block, err := re.Explorer.Block(int64(height - 1)) - if err != nil { - return nil, err - } - err = re.blockchain.CommitBlockAtHeight(block.Time, block.Hash(), block.Header.AppHash, uint64(block.Height)) - if err != nil { - return nil, err - } // block.AppHash is hash after txs from previous block have been applied - it's the state we want to load on top // of which we will reapply this block txs - st, err := re.State(height - 1) - if err != nil { - return nil, err - } - // Load block for replay - block, err = re.Explorer.Block(int64(height)) - if err != nil { + if err := re.LoadAt(height - 1); err != nil { return nil, err } - if !bytes.Equal(st.Hash(), block.AppHash) { - return nil, fmt.Errorf("state hash (%X) retrieved for block AppHash (%X) do not match", - st.Hash(), block.AppHash) + return re.Commit(height) +} + +// Blocks iterates through the given range 
+func (re *Replay) Blocks(startHeight, endHeight uint64) ([]*ReplayCapture, error) { + if err := re.LoadAt(startHeight - 1); err != nil { + return nil, errors.Wrap(err, "State()") } - recap.AppHashBefore = binary.HexBytes(block.AppHash) // Get our commit machinery - committer := execution.NewBatchCommitter(st, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, + re.committer = execution.NewBatchCommitter(re.state, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, event.NewEmitter(), re.logger) + recaps := make([]*ReplayCapture, 0, endHeight-startHeight+1) + for height := startHeight; height < endHeight; height++ { + recap, err := re.Commit(height) + if err != nil { + return nil, err + } + recaps = append(recaps, recap) + } + return recaps, nil +} + +// Commit block at height to state cache, saving a capture +func (re *Replay) Commit(height uint64) (*ReplayCapture, error) { + recap := &ReplayCapture{ + Height: height, + } + + block, err := re.Explorer.Block(int64(height)) + if err != nil { + return nil, errors.Wrap(err, "explorer.Block()") + } + if uint64(block.Height) != height { + return nil, errors.Errorf("Tendermint block height %d != requested block height %d", + block.Height, height) + } + if height > 1 && !bytes.Equal(re.state.Hash(), block.AppHash) { + return nil, errors.Errorf("state hash %X does not match AppHash %X at height %d", + re.state.Hash(), block.AppHash[:], height) + } + + recap.AppHashBefore = binary.HexBytes(block.AppHash) err = block.Transactions(func(txEnv *txs.Envelope) error { - txe, err := committer.Execute(txEnv) + txe, err := re.committer.Execute(txEnv) if err != nil { - return err + return errors.Wrap(err, "committer.Execute()") } recap.TxExecutions = append(recap.TxExecutions, txe) return nil }) if err != nil { - return nil, err + return nil, errors.Wrap(err, "block.Transactions()") } + abciHeader := types.TM2PB.Header(&block.Header) - recap.AppHashAfter, err = committer.Commit(&abciHeader) + recap.AppHashAfter, err = re.committer.Commit(&abciHeader) if err != nil { - return nil, err + return nil, errors.Wrap(err, "committer.Commit()") } - block, err = re.Explorer.Block(int64(height + 1)) - if err != nil { - return nil, err - } - fmt.Println(block.AppHash) - return recap, nil -} -func (re *Replay) Blocks(startHeight, endHeight uint64) ([]*ReplayCapture, error) { - var err error - var st *state.State + return recap, err +} - if startHeight > 1 { +// LoadAt height +func (re *Replay) LoadAt(height uint64) (err error) { + if height >= 1 { // Load and commit previous block - block, err := re.Explorer.Block(int64(startHeight - 1)) + block, err := re.Explorer.Block(int64(height)) if err != nil { - return nil, errors.Wrap(err, "explorer.Block()") + return err } err = re.blockchain.CommitBlockAtHeight(block.Time, block.Hash(), block.Header.AppHash, uint64(block.Height)) if err != nil { - return nil, errors.Wrap(err, "blockchain.CommitBlockAtHeight()") + return err } } - // block.AppHash is hash after txs from previous block have been applied - it's the state we want to load on - // top of which we will reapply this block txs - st, err = re.State(startHeight - 1) + re.state, err = state.LoadState(re.cacheDB, execution.VersionAtHeight(height)) + return err +} + +func iterateTrees(exp, act *state.State, tree treeprint.Tree, prefix string) error { + reader1, err := exp.Forest.Reader([]byte(prefix)) if err != nil { - return nil, errors.Wrap(err, "State()") + return err } - // Get our commit machinery - committer := execution.NewBatchCommitter(st, 
execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, - event.NewEmitter(), re.logger) - recaps := make([]*ReplayCapture, 0, endHeight-startHeight+1) - for height := startHeight; height < endHeight; height++ { - recap := &ReplayCapture{ - Height: height, - } - // Load block for replay - block, err := re.Explorer.Block(int64(height)) - if err != nil { - return nil, errors.Wrap(err, "explorer.Block()") - } - if uint64(block.Height) != height { - return nil, errors.Errorf("Tendermint block height %d != requested block height %d", - block.Height, height) - - } - if height > 1 && !bytes.Equal(st.Hash(), block.AppHash) { - return nil, errors.Errorf("state hash %X does not match AppHash %X at height %d", - st.Hash(), block.AppHash[:], height) - } - recap.AppHashBefore = binary.HexBytes(block.AppHash) + reader2, err := act.Forest.Reader([]byte(prefix)) + if err != nil { + return err + } - err = block.Transactions(func(txEnv *txs.Envelope) error { - txe, err := committer.Execute(txEnv) - if err != nil { - return errors.Wrap(err, "committer.Execute()") + branch := tree.AddBranch(prefix) + err = reader1.Iterate(nil, nil, true, + func(key, value []byte) error { + actual := reader2.Get(key) + if !bytes.Equal(actual, value) { + branch.AddNode(color.GreenString("%q -> %q", hex.EncodeToString(key), hex.EncodeToString(value))) + branch.AddNode(color.RedString("%q -> %q", hex.EncodeToString(key), hex.EncodeToString(actual))) } - recap.TxExecutions = append(recap.TxExecutions, txe) return nil }) - if err != nil { - return nil, errors.Wrap(err, "block.Transactions()") - } - abciHeader := types.TM2PB.Header(&block.Header) - recap.AppHashAfter, err = committer.Commit(&abciHeader) - if err != nil { - return nil, errors.Wrap(err, "committer.Commit()") - } - recaps = append(recaps, recap) + return err +} + +// CompareState of two replays at given height +func CompareState(exp, act *Replay, height uint64) error { + if err := exp.LoadAt(height); err != nil { + return errors.Wrap(err, "could not load expected state") } - return recaps, nil + if err := act.LoadAt(height); err != nil { + return errors.Wrap(err, "could not load actual state") + } + + tree := treeprint.New() + err := iterateTrees(exp.state, act.state, tree, "a") + err = iterateTrees(exp.state, act.state, tree, "s") + err = iterateTrees(exp.state, act.state, tree, "n") + err = iterateTrees(exp.state, act.state, tree, "p") + err = iterateTrees(exp.state, act.state, tree, "v") + err = iterateTrees(exp.state, act.state, tree, "e") + err = iterateTrees(exp.state, act.state, tree, "th") + + fmt.Println(tree.String()) + return err } diff --git a/forensics/replay_test.go b/forensics/replay_test.go index c12d9aba4..7b7b89883 100644 --- a/forensics/replay_test.go +++ b/forensics/replay_test.go @@ -11,12 +11,9 @@ import ( "testing" "github.com/hyperledger/burrow/txs" - "github.com/hyperledger/burrow/integration/rpctest" "github.com/hyperledger/burrow/rpc/rpcevents" - "github.com/magiconair/properties/assert" - "github.com/hyperledger/burrow/config/source" "github.com/hyperledger/burrow/execution/state" "github.com/hyperledger/burrow/genesis" @@ -24,48 +21,11 @@ import ( "github.com/stretchr/testify/require" ) -// TODO: repackage the ad-hoc work in this test into a sausage machine for diagnosing -// AppHash errors by comparing dumps from nodes that disagree on state - // This serves as a testbed for looking at non-deterministic burrow instances capture from the wild // Put the path to 'good' and 'bad' burrow directories here (containing the config files and 
.burrow dir) -//const goodDir = "/home/silas/test-chain" -//const goodDir = "/home/silas/burrows/production-t9/burrow-t9-studio-001-good" -//const badDir = "/home/silas/burrows/production-t9/burrow-t9-studio-000-bad" -//const criticalBlock uint64 = 6 -// -const goodDir = "/home/silas/burrows/production-t9/dealspace/002" -const badDir = "/home/silas/burrows/production-t9/dealspace/001" -const criticalBlock uint64 = 38 - -//const criticalBlock uint64 = 52 - -func TestState(t *testing.T) { - goodReplay := newReplay(t, goodDir) - stGood, err := goodReplay.State(criticalBlock) - require.NoError(t, err) - hashGood := stGood.Hash() - - //badReplay := newReplay(t, badDir) - //stBad, err := badReplay.State(criticalBlock) - //require.NoError(t, err) - //hashBad := stBad.Hash() - - var hashBad []byte - fmt.Printf("AppHash,\nGood: %X\n Bad: %X\n", hashGood, hashBad) - fmt.Println() - goodBlock, err := goodReplay.Explorer.Block(int64(criticalBlock)) - require.NoError(t, err) - err = goodBlock.Transactions(func(envelope *txs.Envelope) error { - fmt.Printf("%v\n", envelope) - return nil - }) - - //err = ioutil.WriteFile("txeGood.json", []byte(source.JSONString(txeGood)), 0600) - //require.NoError(t, err) - //err = ioutil.WriteFile("txeBad.json", []byte(source.JSONString(txeBad)), 0600) - //require.NoError(t, err) +func TestStateComp(t *testing.T) { + CompareState() } func TestReplay_Critical(t *testing.T) { @@ -125,13 +85,6 @@ func TestReplay_Compare(t *testing.T) { fmt.Println(txeRemote) } -func TestDecipher(t *testing.T) { - hexmsg := "7B22436861696E4944223A2270726F64756374696F6E2D74392D73747564696F2D627572726F772D364337333335222C2254797065223A2243616C6C5478222C225061796C6F6164223A7B22496E707574223A7B2241646472657373223A2236354139334431443333423633453932453942454335463938444633313638303033384530303431222C2253657175656E6365223A34307D2C2241646472657373223A2242413544333042313031393233363033444331333133313231334431334633443939354138344142222C224761734C696D6974223A393939393939392C2244617461223A224636373138374143303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303032303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030304534343635363136343643363936453635344637323631363336433635303030303030303030303030303030303030303030303030303030303030303030303030222C225741534D223A22227D7D" - bs, err := hex.DecodeString(hexmsg) - require.NoError(t, err) - fmt.Println(string(bs)) -} - func TestReplay_Good(t *testing.T) { replay := newReplay(t, goodDir) recaps, err := replay.Blocks(1, criticalBlock+1) diff --git a/forensics/cache_db.go b/forensics/storage/cache_db.go similarity index 99% rename from forensics/cache_db.go rename to forensics/storage/cache_db.go index 293f0ed2e..a8df207d1 100644 --- a/forensics/cache_db.go +++ b/forensics/storage/cache_db.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "github.com/hyperledger/burrow/storage" diff --git a/forensics/cache_db_test.go b/forensics/storage/cache_db_test.go similarity index 98% rename from forensics/cache_db_test.go rename to forensics/storage/cache_db_test.go index 08b93ef3b..5ff183a2d 100644 --- a/forensics/cache_db_test.go +++ b/forensics/storage/cache_db_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "fmt" diff --git a/forensics/channel_iterator.go b/forensics/storage/channel_iterator.go similarity index 99% rename from forensics/channel_iterator.go rename to 
forensics/storage/channel_iterator.go index 897e86b4f..a59eb737e 100644 --- a/forensics/channel_iterator.go +++ b/forensics/storage/channel_iterator.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "bytes" diff --git a/forensics/channel_iterator_test.go b/forensics/storage/channel_iterator_test.go similarity index 98% rename from forensics/channel_iterator_test.go rename to forensics/storage/channel_iterator_test.go index 998d2bcb9..9787c9917 100644 --- a/forensics/channel_iterator_test.go +++ b/forensics/storage/channel_iterator_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "testing" diff --git a/forensics/kvcache.go b/forensics/storage/kvcache.go similarity index 99% rename from forensics/kvcache.go rename to forensics/storage/kvcache.go index 16cafb532..d549038a2 100644 --- a/forensics/kvcache.go +++ b/forensics/storage/kvcache.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "bytes" diff --git a/forensics/kvcache_test.go b/forensics/storage/kvcache_test.go similarity index 99% rename from forensics/kvcache_test.go rename to forensics/storage/kvcache_test.go index 6c33d80a3..b5aabcefc 100644 --- a/forensics/kvcache_test.go +++ b/forensics/storage/kvcache_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( bin "encoding/binary" diff --git a/forensics/multi_iterator.go b/forensics/storage/multi_iterator.go similarity index 99% rename from forensics/multi_iterator.go rename to forensics/storage/multi_iterator.go index a74cec68d..9a1c8ba7a 100644 --- a/forensics/multi_iterator.go +++ b/forensics/storage/multi_iterator.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "bytes" diff --git a/forensics/multi_iterator_test.go b/forensics/storage/multi_iterator_test.go similarity index 99% rename from forensics/multi_iterator_test.go rename to forensics/storage/multi_iterator_test.go index 57f1f6a19..dff304a3f 100644 --- a/forensics/multi_iterator_test.go +++ b/forensics/storage/multi_iterator_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "sort" diff --git a/forensics/unique_iterator.go b/forensics/storage/unique_iterator.go similarity index 97% rename from forensics/unique_iterator.go rename to forensics/storage/unique_iterator.go index 281ebdbc9..d5904315e 100644 --- a/forensics/unique_iterator.go +++ b/forensics/storage/unique_iterator.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "bytes" diff --git a/forensics/unique_iterator_test.go b/forensics/storage/unique_iterator_test.go similarity index 94% rename from forensics/unique_iterator_test.go rename to forensics/storage/unique_iterator_test.go index 3db8300fa..bf82bccab 100644 --- a/forensics/unique_iterator_test.go +++ b/forensics/storage/unique_iterator_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "testing" diff --git a/forensics/util_test.go b/forensics/storage/util_test.go similarity index 98% rename from forensics/util_test.go rename to forensics/storage/util_test.go index ecb56b37a..90e45a947 100644 --- a/forensics/util_test.go +++ b/forensics/storage/util_test.go @@ -1,4 +1,4 @@ -package forensics +package storage import ( "strings" diff --git a/go.mod b/go.mod index 3669e6a43..f4429cfa0 100644 --- a/go.mod +++ b/go.mod @@ -18,6 +18,7 @@ require ( github.com/eapache/queue v1.1.0 // indirect github.com/elgs/gojq v0.0.0-20160421194050-81fa9a608a13 github.com/elgs/gosplitargs v0.0.0-20161028071935-a491c5eeb3c8 // indirect + github.com/fatih/color v1.7.0 github.com/fortytw2/leaktest v1.3.0 // 
indirect github.com/go-kit/kit v0.8.0 github.com/go-logfmt/logfmt v0.4.0 // indirect @@ -37,6 +38,7 @@ require ( github.com/kr/pretty v0.1.0 // indirect github.com/lib/pq v1.1.1 github.com/magiconair/properties v1.8.0 + github.com/mattn/go-colorable v0.1.2 // indirect github.com/mattn/go-sqlite3 v1.10.0 github.com/monax/relic v2.0.0+incompatible github.com/perlin-network/life v0.0.0-20190521143330-57f3819c2df0 diff --git a/go.sum b/go.sum index 9ace375e2..7aaab95b6 100644 --- a/go.sum +++ b/go.sum @@ -45,6 +45,8 @@ github.com/elgs/gojq v0.0.0-20160421194050-81fa9a608a13 h1:/voSflvo4UvPT0XZy+YQM github.com/elgs/gojq v0.0.0-20160421194050-81fa9a608a13/go.mod h1:rQELVIqRXpraeUryHOBadz99ePvEVQmTVpGr8M9QQ4Q= github.com/elgs/gosplitargs v0.0.0-20161028071935-a491c5eeb3c8 h1:bD2/rCXwgXJm2vgoSSSCM9IPjVFfEoQFFblzg7HHABI= github.com/elgs/gosplitargs v0.0.0-20161028071935-a491c5eeb3c8/go.mod h1:o4DgpccPNAQAlPSxo7I4L/LWNh2oyr/BBGSynrLTmZM= +github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= @@ -111,6 +113,10 @@ github.com/lib/pq v1.1.1 h1:sJZmqHoEaY7f+NPP8pgLB/WxulyR3fewgCM2qaSlBb4= github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o= github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= @@ -207,6 +213,7 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/storage/prefix_db_test.go b/storage/prefix_db_test.go index 23b1771a2..c8cd8023f 100644 --- a/storage/prefix_db_test.go +++ b/storage/prefix_db_test.go @@ -3,6 +3,7 @@ package storage import ( "fmt" "testing" + "testing/quick" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -26,21 +27,21 @@ func mockDBWithStuff() dbm.DB { } 
func TestPrefixDBSimple(t *testing.T) { - db := mockDBWithStuff() - pdb := NewPrefixDB(db, "key") + db := NewPrefixDB(dbm.NewMemDB(), "key") + + set := func(key []byte, value []byte) interface{} { + db.Set(key, value) + return value + } + + get := func(key []byte, value []byte) interface{} { + act := db.Get(key) + return act + } - checkValue(t, pdb, []byte("key"), nil) - checkValue(t, pdb, []byte(""), []byte("value")) - checkValue(t, pdb, []byte("key1"), nil) - checkValue(t, pdb, []byte("1"), []byte("value1")) - checkValue(t, pdb, []byte("key2"), nil) - checkValue(t, pdb, []byte("2"), []byte("value2")) - checkValue(t, pdb, []byte("key3"), nil) - checkValue(t, pdb, []byte("3"), []byte("value3")) - checkValue(t, pdb, []byte("something"), nil) - checkValue(t, pdb, []byte("k"), nil) - checkValue(t, pdb, []byte("ke"), nil) - checkValue(t, pdb, []byte("kee"), nil) + if err := quick.CheckEqual(set, get, nil); err != nil { + t.Error(err) + } } func TestPrefixDBIterator1(t *testing.T) { From 870490c6fd75019f0215ffe94dbfdf7a0b8fb83a Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 24 Jun 2019 19:45:27 +0100 Subject: [PATCH 08/70] replay test Signed-off-by: Gregory Hill --- cmd/burrow/commands/explore.go | 2 +- forensics/replay.go | 53 ++++++----- forensics/replay_test.go | 159 +++------------------------------ 3 files changed, 45 insertions(+), 169 deletions(-) diff --git a/cmd/burrow/commands/explore.go b/cmd/burrow/commands/explore.go index 9d3503a69..37a137e6f 100644 --- a/cmd/burrow/commands/explore.go +++ b/cmd/burrow/commands/explore.go @@ -88,7 +88,7 @@ func Explore(output Output) func(cmd *cli.Cmd) { if height, err := forensics.CompareCaptures(recap1, recap2); err != nil { output.Printf("difference in capture: %v", err) - if err := forensics.CompareState(replay1, replay2, height); err != nil { + if err := forensics.CompareState(replay1.State, replay2.State, height); err != nil { output.Fatalf("difference in state: %v", err) } } diff --git a/forensics/replay.go b/forensics/replay.go index d004931b8..64121bcd9 100644 --- a/forensics/replay.go +++ b/forensics/replay.go @@ -31,12 +31,12 @@ import ( // Replay is a kernel for state replaying type Replay struct { Explorer *bcm.BlockStore + State *state.State db dbm.DB cacheDB dbm.DB blockchain *bcm.Blockchain genesisDoc *genesis.GenesisDoc committer execution.BatchCommitter - state *state.State logger *logging.Logger } @@ -88,7 +88,7 @@ func (re *Replay) Blocks(startHeight, endHeight uint64) ([]*ReplayCapture, error } // Get our commit machinery - re.committer = execution.NewBatchCommitter(re.state, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, + re.committer = execution.NewBatchCommitter(re.State, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, event.NewEmitter(), re.logger) recaps := make([]*ReplayCapture, 0, endHeight-startHeight+1) @@ -116,9 +116,9 @@ func (re *Replay) Commit(height uint64) (*ReplayCapture, error) { return nil, errors.Errorf("Tendermint block height %d != requested block height %d", block.Height, height) } - if height > 1 && !bytes.Equal(re.state.Hash(), block.AppHash) { + if height > 1 && !bytes.Equal(re.State.Hash(), block.AppHash) { return nil, errors.Errorf("state hash %X does not match AppHash %X at height %d", - re.state.Hash(), block.AppHash[:], height) + re.State.Hash(), block.AppHash[:], height) } recap.AppHashBefore = binary.HexBytes(block.AppHash) @@ -156,52 +156,59 @@ func (re *Replay) LoadAt(height uint64) (err error) { return err } } - re.state, err = state.LoadState(re.cacheDB, 
execution.VersionAtHeight(height)) + re.State, err = state.LoadState(re.cacheDB, execution.VersionAtHeight(height)) return err } -func iterateTrees(exp, act *state.State, tree treeprint.Tree, prefix string) error { +func iterComp(exp, act *state.ReadState, tree treeprint.Tree, prefix string) (uint, error) { reader1, err := exp.Forest.Reader([]byte(prefix)) if err != nil { - return err + return 0, err } reader2, err := act.Forest.Reader([]byte(prefix)) if err != nil { - return err + return 0, err } + var diffs uint branch := tree.AddBranch(prefix) - err = reader1.Iterate(nil, nil, true, + return diffs, reader1.Iterate(nil, nil, true, func(key, value []byte) error { actual := reader2.Get(key) if !bytes.Equal(actual, value) { + diffs++ branch.AddNode(color.GreenString("%q -> %q", hex.EncodeToString(key), hex.EncodeToString(value))) branch.AddNode(color.RedString("%q -> %q", hex.EncodeToString(key), hex.EncodeToString(actual))) } return nil }) - return err } // CompareState of two replays at given height -func CompareState(exp, act *Replay, height uint64) error { - if err := exp.LoadAt(height); err != nil { +func CompareState(exp, act *state.State, height uint64) error { + rs1, err := exp.LoadHeight(height) + if err != nil { return errors.Wrap(err, "could not load expected state") } - if err := act.LoadAt(height); err != nil { + rs2, err := act.LoadHeight(height) + if err != nil { return errors.Wrap(err, "could not load actual state") } + var diffs uint tree := treeprint.New() - err := iterateTrees(exp.state, act.state, tree, "a") - err = iterateTrees(exp.state, act.state, tree, "s") - err = iterateTrees(exp.state, act.state, tree, "n") - err = iterateTrees(exp.state, act.state, tree, "p") - err = iterateTrees(exp.state, act.state, tree, "v") - err = iterateTrees(exp.state, act.state, tree, "e") - err = iterateTrees(exp.state, act.state, tree, "th") - - fmt.Println(tree.String()) - return err + prefixes := []string{"a", "s", "n", "p", "v", "e", "th"} + for _, p := range prefixes { + n, err := iterComp(rs1, rs2, tree, p) + if err != nil { + return err + } + diffs += n + } + + if diffs > 0 { + return fmt.Errorf("found %d difference(s): \n%v", diffs, tree.String()) + } + return nil } diff --git a/forensics/replay_test.go b/forensics/replay_test.go index 7b7b89883..7ecf49231 100644 --- a/forensics/replay_test.go +++ b/forensics/replay_test.go @@ -1,167 +1,36 @@ -// +build forensics - package forensics import ( - "context" - "encoding/hex" - "fmt" - "io/ioutil" - "path" "testing" - "github.com/hyperledger/burrow/txs" - "github.com/hyperledger/burrow/integration/rpctest" - "github.com/hyperledger/burrow/rpc/rpcevents" - "github.com/magiconair/properties/assert" - "github.com/hyperledger/burrow/config/source" + "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/execution/state" - "github.com/hyperledger/burrow/genesis" - "github.com/hyperledger/burrow/logging" "github.com/stretchr/testify/require" + dbm "github.com/tendermint/tendermint/libs/db" ) // This serves as a testbed for looking at non-deterministic burrow instances capture from the wild // Put the path to 'good' and 'bad' burrow directories here (containing the config files and .burrow dir) func TestStateComp(t *testing.T) { - CompareState() -} - -func TestReplay_Critical(t *testing.T) { - badReplay := newReplay(t, badDir) - goodReplay := newReplay(t, goodDir) - startHeight := uint64(1) - goodRecaps, err := goodReplay.Blocks(startHeight, criticalBlock+1) - require.NoError(t, err) - badRecaps, err := 
badReplay.Blocks(startHeight, criticalBlock+1) - require.NoError(t, err) - for i, goodRecap := range goodRecaps { - fmt.Printf("Good: %v\n", goodRecap) - fmt.Printf("Bad: %v\n", badRecaps[i]) - assert.Equal(t, goodRecap, badRecaps[i]) - fmt.Println() - for i, txe := range goodRecap.TxExecutions { - fmt.Printf("Tx %d: %v\n", i, txe.TxHash) - fmt.Println(txe.Envelope) - } - fmt.Println() - fmt.Println() - } -} - -func TestReplay_Compare(t *testing.T) { - badReplay := newReplay(t, badDir) - goodReplay := newReplay(t, goodDir) - badRecaps, err := badReplay.Blocks(2, criticalBlock+1) - require.NoError(t, err) - goodRecaps, err := goodReplay.Blocks(2, criticalBlock+1) - require.NoError(t, err) - for i, goodRecap := range goodRecaps { - fmt.Printf("Good: %v\n", goodRecap) - fmt.Printf("Bad: %v\n", badRecaps[i]) - assert.Equal(t, goodRecap, badRecaps[i]) - for i, txe := range goodRecap.TxExecutions { - fmt.Printf("Tx %d: %v\n", i, txe.TxHash) - fmt.Println(txe.Envelope) - } - fmt.Println() - } - - txe := goodRecaps[5].TxExecutions[0] - assert.Equal(t, badRecaps[5].TxExecutions[0], txe) - fmt.Printf("%v \n\n", txe) - - cli := rpctest.NewExecutionEventsClient(t, "localhost:10997") - txeRemote, err := cli.Tx(context.Background(), &rpcevents.TxRequest{ - TxHash: txe.TxHash, + st1 := state.NewState(dbm.NewMemDB()) + _, _, err := st1.Update(func(ws state.Updatable) error { + return ws.UpdateAccount(acm.NewAccountFromSecret("1")) }) require.NoError(t, err) - err = ioutil.WriteFile("txe.json", []byte(source.JSONString(txe)), 0600) - require.NoError(t, err) - err = ioutil.WriteFile("txeRemote.json", []byte(source.JSONString(txeRemote)), 0600) - require.NoError(t, err) - - fmt.Println(txeRemote) -} - -func TestReplay_Good(t *testing.T) { - replay := newReplay(t, goodDir) - recaps, err := replay.Blocks(1, criticalBlock+1) - require.NoError(t, err) - for _, recap := range recaps { - fmt.Println(recap.String()) - } -} - -func TestReplay_Bad(t *testing.T) { - replay := newReplay(t, badDir) - recaps, err := replay.Blocks(1, criticalBlock+1) - require.NoError(t, err) - for _, recap := range recaps { - fmt.Println(recap.String()) - } -} - -func TestStateHashes_Bad(t *testing.T) { - badReplay := newReplay(t, badDir) - goodReplay := newReplay(t, goodDir) - for i := uint64(0); i <= criticalBlock+1; i++ { - fmt.Println("Good") - goodSt, err := goodReplay.State(i) - require.NoError(t, err) - fmt.Printf("Good: Version: %d, Hash: %X\n", goodSt.Version(), goodSt.Hash()) - fmt.Println("Bad") - badSt, err := badReplay.State(i) - require.NoError(t, err) - fmt.Printf("Bad: Version: %d, Hash: %X\n", badSt.Version(), badSt.Hash()) - fmt.Println() - } -} - -func TestReplay_Good_Block(t *testing.T) { - replayBlock(t, goodDir, criticalBlock) -} - -func TestReplay_Bad_Block(t *testing.T) { - replayBlock(t, badDir, criticalBlock) -} - -func TestCriticalBlock(t *testing.T) { - badState := getState(t, badDir, criticalBlock) - goodState := getState(t, goodDir, criticalBlock) - require.Equal(t, goodState.Hash(), badState.Hash()) - fmt.Printf("good: %X, bad: %X\n", goodState.Hash(), badState.Hash()) - _, _, err := badState.Update(func(up state.Updatable) error { - return nil - }) - require.NoError(t, err) - _, _, err = goodState.Update(func(up state.Updatable) error { - return nil + _, _, err = st1.Update(func(ws state.Updatable) error { + return ws.UpdateAccount(acm.NewAccountFromSecret("2")) }) require.NoError(t, err) - fmt.Printf("good: %X, bad: %X\n", goodState.Hash(), badState.Hash()) -} - -func replayBlock(t *testing.T, burrowDir 
string, height uint64) { - replay := newReplay(t, burrowDir) - //replay.State() - recap, err := replay.Block(height) + db2 := dbm.NewMemDB() + st2, err := st1.Copy(db2) require.NoError(t, err) - recap.TxExecutions = nil - fmt.Println(recap) -} - -func getState(t *testing.T, burrowDir string, height uint64) *state.State { - st, err := newReplay(t, burrowDir).State(height) + _, _, err = st2.Update(func(ws state.Updatable) error { + return ws.UpdateAccount(acm.NewAccountFromSecret("3")) + }) require.NoError(t, err) - return st -} -func newReplay(t *testing.T, burrowDir string) *Replay { - genesisDoc := new(genesis.GenesisDoc) - err := source.FromFile(path.Join(burrowDir, "genesis.json"), genesisDoc) - require.NoError(t, err) - return NewReplay(path.Join(burrowDir, ".burrow"), genesisDoc, logging.NewNoopLogger()) + err = CompareState(st2, st1, 1) + require.Error(t, err) } From fa0db50ccd74682c9e68c1e10cf4c034222cd8a2 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Thu, 27 Jun 2019 11:20:12 +0100 Subject: [PATCH 09/70] replay test harness Signed-off-by: Gregory Hill --- cmd/burrow/commands/explore.go | 8 +-- forensics/replay.go | 71 +++++++++++-------- forensics/replay_test.go | 113 ++++++++++++++++++++++++++++++- genesis/deterministic_genesis.go | 23 ++++--- 4 files changed, 170 insertions(+), 45 deletions(-) diff --git a/cmd/burrow/commands/explore.go b/cmd/burrow/commands/explore.go index 37a137e6f..02e42ebe7 100644 --- a/cmd/burrow/commands/explore.go +++ b/cmd/burrow/commands/explore.go @@ -7,7 +7,6 @@ import ( "github.com/hyperledger/burrow/config" "github.com/hyperledger/burrow/forensics" - "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/bcm" @@ -55,9 +54,8 @@ func Explore(output Output) func(cmd *cli.Cmd) { } cmd.Action = func() { - logger := logging.NewNoopLogger() - replay1 := forensics.NewReplay(logger, conf.GenesisDoc, *goodDir) - replay2 := forensics.NewReplay(logger, conf.GenesisDoc, *badDir) + replay1 := forensics.NewReplayFromDir(conf.GenesisDoc, *goodDir) + replay2 := forensics.NewReplayFromDir(conf.GenesisDoc, *badDir) h1, err := replay1.LatestHeight() if err != nil { @@ -88,7 +86,7 @@ func Explore(output Output) func(cmd *cli.Cmd) { if height, err := forensics.CompareCaptures(recap1, recap2); err != nil { output.Printf("difference in capture: %v", err) - if err := forensics.CompareState(replay1.State, replay2.State, height); err != nil { + if err := forensics.CompareStateAtHeight(replay1.State, replay2.State, height); err != nil { output.Fatalf("difference in state: %v", err) } } diff --git a/forensics/replay.go b/forensics/replay.go index 64121bcd9..54bd60d96 100644 --- a/forensics/replay.go +++ b/forensics/replay.go @@ -23,6 +23,8 @@ import ( "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/txs" "github.com/pkg/errors" + "github.com/tendermint/tendermint/blockchain" + "github.com/tendermint/tendermint/libs/db" dbm "github.com/tendermint/tendermint/libs/db" "github.com/tendermint/tendermint/types" "github.com/xlab/treeprint" @@ -40,20 +42,49 @@ type Replay struct { logger *logging.Logger } -func NewReplay(logger *logging.Logger, genesisDoc *genesis.GenesisDoc, dbDir string) *Replay { +func NewReplay(burrowDB, tmDB dbm.DB, genesisDoc *genesis.GenesisDoc) *Replay { // Avoid writing through to underlying DB - db := dbm.NewDB(core.BurrowDBName, dbm.GoLevelDBBackend, dbDir) - cacheDB := storage.NewCacheDB(db) + cacheDB := storage.NewCacheDB(burrowDB) return &Replay{ - Explorer: bcm.NewBlockExplorer(dbm.LevelDBBackend, 
path.Join(dbDir, "data")), - db: db, + Explorer: bcm.NewBlockStore(blockchain.NewBlockStore(tmDB)), + db: burrowDB, cacheDB: cacheDB, blockchain: bcm.NewBlockchain(cacheDB, genesisDoc), genesisDoc: genesisDoc, - logger: logger, + logger: logging.NewNoopLogger(), } } +func NewReplayFromDir(genesisDoc *genesis.GenesisDoc, dbDir string) *Replay { + burrowDB := dbm.NewDB(core.BurrowDBName, dbm.GoLevelDBBackend, dbDir) + tmDB := db.NewDB("blockstore", dbm.LevelDBBackend, path.Join(dbDir, "data")) + return NewReplay(burrowDB, tmDB, genesisDoc) +} + +// LoadAt height +func (re *Replay) LoadAt(height uint64) (err error) { + if height >= 1 { + // Load and commit previous block + block, err := re.Explorer.Block(int64(height)) + if err != nil { + return err + } + err = re.blockchain.CommitBlockAtHeight(block.Time, block.Hash(), block.Header.AppHash, uint64(block.Height)) + if err != nil { + return err + } + } + re.State, err = state.LoadState(re.cacheDB, execution.VersionAtHeight(height)) + if err != nil { + return err + } + + // Get our commit machinery + re.committer = execution.NewBatchCommitter(re.State, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, + event.NewEmitter(), re.logger) + return nil +} + func (re *Replay) LatestHeight() (uint64, error) { blockchain, _, err := bcm.LoadOrNewBlockchain(re.db, re.genesisDoc, re.logger) if err != nil { @@ -87,10 +118,6 @@ func (re *Replay) Blocks(startHeight, endHeight uint64) ([]*ReplayCapture, error return nil, errors.Wrap(err, "State()") } - // Get our commit machinery - re.committer = execution.NewBatchCommitter(re.State, execution.ParamsFromGenesis(re.genesisDoc), re.blockchain, - event.NewEmitter(), re.logger) - recaps := make([]*ReplayCapture, 0, endHeight-startHeight+1) for height := startHeight; height < endHeight; height++ { recap, err := re.Commit(height) @@ -143,23 +170,6 @@ func (re *Replay) Commit(height uint64) (*ReplayCapture, error) { return recap, err } -// LoadAt height -func (re *Replay) LoadAt(height uint64) (err error) { - if height >= 1 { - // Load and commit previous block - block, err := re.Explorer.Block(int64(height)) - if err != nil { - return err - } - err = re.blockchain.CommitBlockAtHeight(block.Time, block.Hash(), block.Header.AppHash, uint64(block.Height)) - if err != nil { - return err - } - } - re.State, err = state.LoadState(re.cacheDB, execution.VersionAtHeight(height)) - return err -} - func iterComp(exp, act *state.ReadState, tree treeprint.Tree, prefix string) (uint, error) { reader1, err := exp.Forest.Reader([]byte(prefix)) if err != nil { @@ -185,8 +195,8 @@ func iterComp(exp, act *state.ReadState, tree treeprint.Tree, prefix string) (ui }) } -// CompareState of two replays at given height -func CompareState(exp, act *state.State, height uint64) error { +// CompareStateAtHeight of two replays +func CompareStateAtHeight(exp, act *state.State, height uint64) error { rs1, err := exp.LoadHeight(height) if err != nil { return errors.Wrap(err, "could not load expected state") @@ -208,7 +218,8 @@ func CompareState(exp, act *state.State, height uint64) error { } if diffs > 0 { - return fmt.Errorf("found %d difference(s): \n%v", diffs, tree.String()) + return fmt.Errorf("found %d difference(s): \n%v", + diffs, tree.String()) } return nil } diff --git a/forensics/replay_test.go b/forensics/replay_test.go index 7ecf49231..e94a43026 100644 --- a/forensics/replay_test.go +++ b/forensics/replay_test.go @@ -1,12 +1,28 @@ package forensics import ( + "fmt" "testing" + "time" + + "github.com/hyperledger/burrow/bcm" + 
"github.com/hyperledger/burrow/event" + "github.com/hyperledger/burrow/execution" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/txs" + + "github.com/hyperledger/burrow/txs/payload" + + "github.com/hyperledger/burrow/consensus/tendermint" "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/execution/state" + "github.com/hyperledger/burrow/genesis" "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/blockchain" dbm "github.com/tendermint/tendermint/libs/db" + sm "github.com/tendermint/tendermint/state" + "github.com/tendermint/tendermint/types" ) // This serves as a testbed for looking at non-deterministic burrow instances capture from the wild @@ -26,11 +42,106 @@ func TestStateComp(t *testing.T) { db2 := dbm.NewMemDB() st2, err := st1.Copy(db2) require.NoError(t, err) + err = CompareStateAtHeight(st2, st1, 0) + require.Error(t, err) + _, _, err = st2.Update(func(ws state.Updatable) error { return ws.UpdateAccount(acm.NewAccountFromSecret("3")) }) require.NoError(t, err) - err = CompareState(st2, st1, 1) + err = CompareStateAtHeight(st2, st1, 1) require.Error(t, err) } + +func TestReplay(t *testing.T) { + var height uint64 = 10 + genesisDoc, tmDB, burrowDB := makeChain(t, height) + + re := NewReplay(burrowDB, tmDB, genesisDoc) + rc, err := re.Blocks(1, height) + require.NoError(t, err) + require.Len(t, rc, int(height-1)) +} + +func initBurrow(t *testing.T, gd *genesis.GenesisDoc) (dbm.DB, *state.State, *bcm.Blockchain) { + db := dbm.NewMemDB() + st, err := state.MakeGenesisState(db, gd) + require.NoError(t, err) + err = st.InitialCommit() + require.NoError(t, err) + chain := bcm.NewBlockchain(db, gd) + return db, st, chain +} + +func makeChain(t *testing.T, max uint64) (*genesis.GenesisDoc, dbm.DB, dbm.DB) { + genesisDoc, _, validators := genesis.NewDeterministicGenesis(0).GenesisDoc(0, 1) + + tmDB := dbm.NewMemDB() + bs := blockchain.NewBlockStore(tmDB) + gd := tendermint.DeriveGenesisDoc(genesisDoc, nil) + st, err := sm.MakeGenesisState(&types.GenesisDoc{ + ChainID: gd.ChainID, + Validators: gd.Validators, + AppHash: gd.AppHash, + }) + require.NoError(t, err) + + burrowDB, burrowState, burrowChain := initBurrow(t, genesisDoc) + + committer := execution.NewBatchCommitter(burrowState, execution.ParamsFromGenesis(genesisDoc), + burrowChain, event.NewEmitter(), logging.NewNoopLogger()) + + var stateHash []byte + for i := uint64(1); i < max; i++ { + makeBlock(t, st, bs, func(block *types.Block) { + + decoder := txs.NewAminoCodec() + err = bcm.NewBlock(decoder, block).Transactions(func(txEnv *txs.Envelope) error { + _, err := committer.Execute(txEnv) + require.NoError(t, err) + return nil + }) + // empty if height == 1 + block.AppHash = stateHash + // we need app hash in the abci header + abciHeader := types.TM2PB.Header(&block.Header) + stateHash, err = committer.Commit(&abciHeader) + require.NoError(t, err) + + }, validators[0]) + require.Equal(t, int64(i), bs.Height()) + } + return genesisDoc, tmDB, burrowDB +} + +func makeBlock(t *testing.T, st sm.State, bs *blockchain.BlockStore, commit func(*types.Block), val *acm.PrivateAccount) { + height := bs.Height() + 1 + tx := makeTx(t, st.ChainID, height, val) + block, _ := st.MakeBlock(height, []types.Tx{tx}, new(types.Commit), nil, + st.Validators.GetProposer().Address) + + commit(block) + partSet := block.MakePartSet(2) + commitSigs := []*types.CommitSig{{Height: height, Timestamp: time.Time{}}} + seenCommit := types.NewCommit(types.BlockID{ + Hash: block.Hash(), + PartsHeader: 
partSet.Header(), + }, commitSigs) + bs.SaveBlock(block, partSet, seenCommit) +} + +func makeTx(t *testing.T, chainID string, height int64, val *acm.PrivateAccount) (tx types.Tx) { + sendTx := payload.NewSendTx() + amount := uint64(height) + acc := acm.NewAccountFromSecret(fmt.Sprintf("%d", height)) + sendTx.AddInputWithSequence(val.GetPublicKey(), amount, uint64(height)) + sendTx.AddOutput(acc.GetAddress(), amount) + txEnv := txs.Enclose(chainID, sendTx) + err := txEnv.Sign(val) + require.NoError(t, err) + + data, err := txs.NewAminoCodec().EncodeTx(txEnv) + require.NoError(t, err) + return types.Tx(data) +} diff --git a/genesis/deterministic_genesis.go b/genesis/deterministic_genesis.go index cc231bead..5604b2db9 100644 --- a/genesis/deterministic_genesis.go +++ b/genesis/deterministic_genesis.go @@ -22,9 +22,8 @@ func NewDeterministicGenesis(seed int64) *deterministicGenesis { } func (dg *deterministicGenesis) GenesisDoc(numAccounts int, numValidators int) (*GenesisDoc, []*acm.PrivateAccount, []*acm.PrivateAccount) { - - accounts := make([]Account, numAccounts) - privAccounts := make([]*acm.PrivateAccount, numAccounts) + accounts := make([]Account, numAccounts+numValidators) + privAccounts := make([]*acm.PrivateAccount, numAccounts+numValidators) defaultPerms := permission.DefaultAccountPermissions for i := 0; i < numAccounts; i++ { account, privAccount := dg.Account(9999999) @@ -44,13 +43,19 @@ func (dg *deterministicGenesis) GenesisDoc(numAccounts int, numValidators int) ( for i := 0; i < numValidators; i++ { validator := acm.GeneratePrivateAccountFromSecret(fmt.Sprintf("val_%v", i)) privValidators[i] = validator + basicAcc := BasicAccount{ + Address: validator.GetAddress(), + PublicKey: validator.GetPublicKey(), + // Avoid max validator cap + Amount: uint64(dg.random.Int63()/16 + 1), + } + fullAcc := Account{ + BasicAccount: basicAcc, + Permissions: defaultPerms.Clone(), + } + accounts[numAccounts+i] = fullAcc validators[i] = Validator{ - BasicAccount: BasicAccount{ - Address: validator.GetAddress(), - PublicKey: validator.GetPublicKey(), - // Avoid max validator cap - Amount: uint64(dg.random.Int63()/16 + 1), - }, + BasicAccount: basicAcc, UnbondTo: []BasicAccount{ { Address: validator.GetAddress(), From 4745d2a4a0b45935ced0f33dbee0e9b7583dbca6 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Thu, 27 Jun 2019 16:30:44 +0100 Subject: [PATCH 10/70] tidyup explore cli Signed-off-by: Gregory Hill --- cmd/burrow/commands/explore.go | 42 ++++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/cmd/burrow/commands/explore.go b/cmd/burrow/commands/explore.go index 02e42ebe7..4d37cbbb6 100644 --- a/cmd/burrow/commands/explore.go +++ b/cmd/burrow/commands/explore.go @@ -15,6 +15,7 @@ import ( "github.com/tendermint/tendermint/libs/db" ) +// Explore chain state(s) func Explore(output Output) func(cmd *cli.Cmd) { return func(cmd *cli.Cmd) { configOpts := addConfigOptions(cmd) @@ -39,10 +40,40 @@ func Explore(output Output) func(cmd *cli.Cmd) { explorer = bcm.NewBlockExplorer(db.DBBackendType(tmConf.DBBackend), tmConf.DBDir()) } - cmd.Command("compare", "granularly compare the state from two .burrow directories", func(cmd *cli.Cmd) { + cmd.Command("dump", "pretty print the state tree at the given height", func(cmd *cli.Cmd) { + heightOpt := cmd.IntOpt("height", 0, "The height to read, defaults to latest") + stateDir := cmd.StringArg("STATE", "", "Directory containing burrow state") + cmd.Spec = "[--height] [STATE]" + + cmd.Before = func() { + if err := 
isDir(*stateDir); err != nil { + output.Fatalf("could not obtain state: %v", err) + } + } + + cmd.Action = func() { + replay := forensics.NewReplayFromDir(conf.GenesisDoc, *stateDir) + height := uint64(*heightOpt) + if height == 0 { + height, err = replay.LatestHeight() + if err != nil { + output.Fatalf("could not read latest height: %v", err) + } + } + err := replay.LoadAt(height) + if err != nil { + output.Fatalf("could not load state: %v", err) + } + + fmt.Println(replay.State.Dump()) + } + }) + + cmd.Command("compare", "diff the state of two .burrow directories", func(cmd *cli.Cmd) { goodDir := cmd.StringArg("GOOD", "", "Directory containing expected state") badDir := cmd.StringArg("BAD", "", "Directory containing invalid state") - cmd.Spec = "[GOOD] [BAD]" + heightOpt := cmd.IntOpt("height", 0, "The height to read, defaults to latest") + cmd.Spec = "[--height] [GOOD] [BAD]" cmd.Before = func() { if err := isDir(*goodDir); err != nil { @@ -67,11 +98,13 @@ func Explore(output Output) func(cmd *cli.Cmd) { } height := h1 - if h2 < h1 { + if *heightOpt != 0 { + height = uint64(*heightOpt) + } else if h2 < h1 { height = h2 output.Printf("States do not agree on last height, using min: %d", h2) } else { - output.Printf("Using last height: %d", h1) + output.Printf("Using default last height: %d", h1) } recap1, err := replay1.Blocks(1, height) @@ -86,6 +119,7 @@ func Explore(output Output) func(cmd *cli.Cmd) { if height, err := forensics.CompareCaptures(recap1, recap2); err != nil { output.Printf("difference in capture: %v", err) + // TODO: compare at every height? if err := forensics.CompareStateAtHeight(replay1.State, replay2.State, height); err != nil { output.Fatalf("difference in state: %v", err) } From 5c84f6478050d1d4fcb98bc89e7899317e52fcb1 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 2 Jul 2019 09:43:19 +0100 Subject: [PATCH 11/70] export state prefixes Signed-off-by: Gregory Hill --- execution/state/state.go | 5 ++++- forensics/replay.go | 10 +++++----- forensics/replay_test.go | 4 ++-- storage/key_format.go | 10 ++++++---- storage/key_format_test.go | 18 +++++++++--------- 5 files changed, 26 insertions(+), 21 deletions(-) diff --git a/execution/state/state.go b/execution/state/state.go index 15f4a948f..9eee23e10 100644 --- a/execution/state/state.go +++ b/execution/state/state.go @@ -78,8 +78,11 @@ var keys = KeyFormatStore{ TxHash: storage.NewMustKeyFormat("th", txs.HashLength), } +var Prefixes [][]byte + func init() { - err := storage.EnsureKeyFormatStore(keys) + var err error + Prefixes, err = storage.EnsureKeyFormatStore(keys) if err != nil { panic(fmt.Errorf("KeyFormatStore is invalid: %v", err)) } diff --git a/forensics/replay.go b/forensics/replay.go index 54bd60d96..72fcab816 100644 --- a/forensics/replay.go +++ b/forensics/replay.go @@ -170,13 +170,13 @@ func (re *Replay) Commit(height uint64) (*ReplayCapture, error) { return recap, err } -func iterComp(exp, act *state.ReadState, tree treeprint.Tree, prefix string) (uint, error) { - reader1, err := exp.Forest.Reader([]byte(prefix)) +func iterComp(exp, act *state.ReadState, tree treeprint.Tree, prefix []byte) (uint, error) { + reader1, err := exp.Forest.Reader(prefix) if err != nil { return 0, err } - reader2, err := act.Forest.Reader([]byte(prefix)) + reader2, err := act.Forest.Reader(prefix) if err != nil { return 0, err } @@ -208,8 +208,8 @@ func CompareStateAtHeight(exp, act *state.State, height uint64) error { var diffs uint tree := treeprint.New() - prefixes := []string{"a", "s", "n", "p", "v", "e", "th"} - 
for _, p := range prefixes { + + for _, p := range state.Prefixes { n, err := iterComp(rs1, rs2, tree, p) if err != nil { return err diff --git a/forensics/replay_test.go b/forensics/replay_test.go index e94a43026..0996a0fd1 100644 --- a/forensics/replay_test.go +++ b/forensics/replay_test.go @@ -96,7 +96,7 @@ func makeChain(t *testing.T, max uint64) (*genesis.GenesisDoc, dbm.DB, dbm.DB) { for i := uint64(1); i < max; i++ { makeBlock(t, st, bs, func(block *types.Block) { - decoder := txs.NewAminoCodec() + decoder := txs.NewProtobufCodec() err = bcm.NewBlock(decoder, block).Transactions(func(txEnv *txs.Envelope) error { _, err := committer.Execute(txEnv) require.NoError(t, err) @@ -141,7 +141,7 @@ func makeTx(t *testing.T, chainID string, height int64, val *acm.PrivateAccount) err := txEnv.Sign(val) require.NoError(t, err) - data, err := txs.NewAminoCodec().EncodeTx(txEnv) + data, err := txs.NewProtobufCodec().EncodeTx(txEnv) require.NoError(t, err) return types.Tx(data) } diff --git a/storage/key_format.go b/storage/key_format.go index dc3201153..f716cd01c 100644 --- a/storage/key_format.go +++ b/storage/key_format.go @@ -64,34 +64,36 @@ func NewKeyFormat(prefix string, layout ...int) (*KeyFormat, error) { var expectedKeyFormatType = reflect.TypeOf(MustKeyFormat{}) // Checks that a struct containing KeyFormat fields has no collisions on prefix and so acts as a sane 'KeyFormatStore' -func EnsureKeyFormatStore(ks interface{}) error { +func EnsureKeyFormatStore(ks interface{}) ([][]byte, error) { rv := reflect.ValueOf(ks) if rv.Kind() == reflect.Ptr { rv = rv.Elem() } rt := rv.Type() + prefixes := make([][]byte, rt.NumField()) keyFormats := make(map[string]MustKeyFormat) for i := 0; i < rt.NumField(); i++ { fv := rv.Field(i) if fv.Kind() == reflect.Ptr { if fv.IsNil() { - return fmt.Errorf("key format field '%s' is nil", rt.Field(i).Name) + return nil, fmt.Errorf("key format field '%s' is nil", rt.Field(i).Name) } fv = fv.Elem() } ft := fv.Type() if ft == expectedKeyFormatType { kf := fv.Interface().(MustKeyFormat) + prefixes = append(prefixes, kf.Prefix()) prefix := kf.Prefix().String() if kfDuplicate, ok := keyFormats[prefix]; ok { - return fmt.Errorf("duplicate prefix %q between key format %v and %v", + return nil, fmt.Errorf("duplicate prefix %q between key format %v and %v", prefix, kfDuplicate, kf) } keyFormats[prefix] = kf } } - return nil + return prefixes, nil } // Format the byte segments into the key format - will panic if the segment lengths do not match the layout. 
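// Illustrative sketch (not part of the patch series): with EnsureKeyFormatStore now returning the
// collected prefixes and state.Prefixes exported in the init() above, a caller can walk every
// key-format domain of a ReadState generically. countKeys is a hypothetical helper shown only to
// demonstrate that intended use of state.Prefixes; it relies solely on Forest.Reader and Iterate,
// the same calls used by iterComp in forensics/replay.go, and would live alongside that code.
package forensics

import "github.com/hyperledger/burrow/execution/state"

// countKeys tallies how many keys live under each exported state prefix.
func countKeys(rs *state.ReadState) (map[string]int, error) {
	counts := make(map[string]int)
	for _, prefix := range state.Prefixes {
		reader, err := rs.Forest.Reader(prefix)
		if err != nil {
			return nil, err
		}
		err = reader.Iterate(nil, nil, true, func(key, value []byte) error {
			counts[string(prefix)]++
			return nil
		})
		if err != nil {
			return nil, err
		}
	}
	return counts, nil
}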
diff --git a/storage/key_format_test.go b/storage/key_format_test.go index bc7d61ee1..d20b63762 100644 --- a/storage/key_format_test.go +++ b/storage/key_format_test.go @@ -121,28 +121,28 @@ func TestEnsureKeyStore(t *testing.T) { Accounts: NewMustKeyFormat("foo", 4, 5, 6), Storage: NewMustKeyFormat("foos", 4, 5, 6), } - err := EnsureKeyFormatStore(keyStore) + _, err := EnsureKeyFormatStore(keyStore) require.NoError(t, err) - err = EnsureKeyFormatStore(&keyStore) + _, err = EnsureKeyFormatStore(&keyStore) require.NoError(t, err, "pointer to keystore should work") keyStore = testKeyStore{ Accounts: NewMustKeyFormat("foo", 4, 5, 6), Storage: NewMustKeyFormat("foo", 4, 5, 6), } - err = EnsureKeyFormatStore(&keyStore) + _, err = EnsureKeyFormatStore(&keyStore) require.Error(t, err, "duplicate prefixes should be detected") // Test missing formats keyStore = testKeyStore{} - err = EnsureKeyFormatStore(&keyStore) + _, err = EnsureKeyFormatStore(&keyStore) require.Error(t, err, "all formats should be set") keyStore = testKeyStore{ Accounts: NewMustKeyFormat("foo", 4, 5, 6), } - err = EnsureKeyFormatStore(&keyStore) + _, err = EnsureKeyFormatStore(&keyStore) require.Error(t, err, "all formats should be set") keyStore2 := struct { @@ -153,7 +153,7 @@ func TestEnsureKeyStore(t *testing.T) { Storage: NewMustKeyFormat("foo2", 1, 2), } - err = EnsureKeyFormatStore(keyStore2) + _, err = EnsureKeyFormatStore(keyStore2) require.NoError(t, err) keyStore2 = struct { @@ -162,10 +162,10 @@ func TestEnsureKeyStore(t *testing.T) { }{ Storage: NewMustKeyFormat("foo2", 1, 2), } - err = EnsureKeyFormatStore(keyStore2) + _, err = EnsureKeyFormatStore(keyStore2) require.NoError(t, err) - err = EnsureKeyFormatStore(keyStore2) + _, err = EnsureKeyFormatStore(keyStore2) require.NoError(t, err) keyStore2 = struct { @@ -176,6 +176,6 @@ func TestEnsureKeyStore(t *testing.T) { Storage: NewMustKeyFormat("foo", 1, 2), } - err = EnsureKeyFormatStore(keyStore2) + _, err = EnsureKeyFormatStore(keyStore2) require.Error(t, err, "duplicate prefixes should be detected") } From d59c15f304bcbd21cc00bf852532cc4109c25f7f Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 2 Jul 2019 18:46:48 +0100 Subject: [PATCH 12/70] fix nil element in det genesis accounts Signed-off-by: Gregory Hill --- genesis/deterministic_genesis.go | 2 +- integration/core/kernel_test.go | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/genesis/deterministic_genesis.go b/genesis/deterministic_genesis.go index 5604b2db9..2e0e666ce 100644 --- a/genesis/deterministic_genesis.go +++ b/genesis/deterministic_genesis.go @@ -23,7 +23,7 @@ func NewDeterministicGenesis(seed int64) *deterministicGenesis { func (dg *deterministicGenesis) GenesisDoc(numAccounts int, numValidators int) (*GenesisDoc, []*acm.PrivateAccount, []*acm.PrivateAccount) { accounts := make([]Account, numAccounts+numValidators) - privAccounts := make([]*acm.PrivateAccount, numAccounts+numValidators) + privAccounts := make([]*acm.PrivateAccount, numAccounts) defaultPerms := permission.DefaultAccountPermissions for i := 0; i < numAccounts; i++ { account, privAccount := dg.Account(9999999) diff --git a/integration/core/kernel_test.go b/integration/core/kernel_test.go index d80abfd9c..f495cff4b 100644 --- a/integration/core/kernel_test.go +++ b/integration/core/kernel_test.go @@ -52,7 +52,8 @@ func testKernel(t *testing.T, opts ...func(*config.BurrowConfig)) { t.Run("BootThenShutdown", func(t *testing.T) { conf, cleanup := integration.NewTestConfig(genesisDoc, opts...) 
defer cleanup() - //logger, _ := lifecycle.NewStdErrLogger() + require.NotNil(t, privateAccounts) + require.NotNil(t, privateValidators) assert.NoError(t, bootWaitBlocksShutdown(t, privateValidators[0], privateAccounts, conf, nil, nil)) }) From 34e1ecc6381f4f2068a94f124804dbe129af6085 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 3 Jul 2019 09:59:24 +0100 Subject: [PATCH 13/70] circleci 2.1 Signed-off-by: Gregory Hill --- .circleci/config.yml | 29 +++++++++++++---------------- Makefile | 5 +++++ 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3c07c3f35..13b6f0067 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,17 +16,8 @@ setup_docker: &setup_docker orbs: codecov: codecov/codecov@1.0.4 -# Start of CircleCI 2.0 config -version: 2 +version: 2.1 jobs: - cover: - <<: *defaults - steps: - - checkout - - run: go test -coverprofile=c.out - - run: go tool cover -html=c.out -o coverage.html - - codecov/upload: - file: coverage.html test: <<: *defaults @@ -38,10 +29,16 @@ jobs: # In case we miss compile errors not pulled into test paths - run: make build + test_cover: + <<: *defaults + steps: + - checkout + - run: make test_cover + - codecov/upload: + file: coverage.html + test_integration: machine: - working_directory: /go/src/github.com/hyperledger/burrow - enabled: true image: circleci/classic:201808-01 steps: - checkout @@ -75,17 +72,17 @@ jobs: workflows: - version: 2 test_and_release: jobs: - - cover: + - test: filters: <<: *tags_filters - - test: + - test_cover: filters: - <<: *tags_filters + branches: + only: develop - test_integration: filters: diff --git a/Makefile b/Makefile index abdc013aa..7f74d6fdf 100644 --- a/Makefile +++ b/Makefile @@ -167,6 +167,11 @@ test: check bin/solc # limit parallelism with -p to prevent OOM on circleci @tests/scripts/bin_wrapper.sh go test ./... -p 2 +.PHONY: test_cover +test_cover: check bin/solc + @tests/scripts/bin_wrapper.sh go test -coverprofile=c.out ./... -p 2 + @tests/scripts/bin_wrapper.sh go tool cover -html=c.out -o coverage.html + .PHONY: test_keys test_keys: build_burrow burrow_bin="${REPO}/bin/burrow" tests/keys_server/test.sh From 207399c154ff662f614488e3719c410d4a663164 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 2 Jul 2019 13:32:01 +0100 Subject: [PATCH 14/70] Allow -p 2 to be set for circleci, and for not for local testing This makes sure we locally test faster and test more parallelism. Signed-off-by: Sean Young --- .circleci/config.yml | 2 ++ Makefile | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 13b6f0067..50c3d5e72 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -28,6 +28,8 @@ jobs: - run: make test # In case we miss compile errors not pulled into test paths - run: make build + environment: + - GO_TEST_ARGS: -p 2 test_cover: <<: *defaults diff --git a/Makefile b/Makefile index 7f74d6fdf..795704eef 100644 --- a/Makefile +++ b/Makefile @@ -164,12 +164,12 @@ solang: $(SOLANG_GO_FILES) .PHONY: test test: check bin/solc -# limit parallelism with -p to prevent OOM on circleci - @tests/scripts/bin_wrapper.sh go test ./... -p 2 +# on circleci we might want to limit memory usage through GO_TEST_ARGS + @tests/scripts/bin_wrapper.sh go test ./... ${GO_TEST_ARGS} .PHONY: test_cover test_cover: check bin/solc - @tests/scripts/bin_wrapper.sh go test -coverprofile=c.out ./... -p 2 + @tests/scripts/bin_wrapper.sh go test -coverprofile=c.out ./... 
${GO_TEST_ARGS} @tests/scripts/bin_wrapper.sh go tool cover -html=c.out -o coverage.html .PHONY: test_keys From 4a862222fb1f1e6463d3c2db9a2e0ada15d07646 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 3 Jul 2019 11:29:03 +0100 Subject: [PATCH 15/70] make release_dev also requires GO_TEST_ARGS The release_dev target depends on test, which needs GO_TEST_ARGS to run successfully in a default circleci container. Signed-off-by: Sean Young --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 50c3d5e72..8e7245369 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -5,6 +5,8 @@ defaults: &defaults working_directory: /go/src/github.com/hyperledger/burrow docker: - image: hyperledger/burrow:ci + environment: + - GO_TEST_ARGS: -p 2 tag_filters: &tags_filters tags: @@ -28,8 +30,6 @@ jobs: - run: make test # In case we miss compile errors not pulled into test paths - run: make build - environment: - - GO_TEST_ARGS: -p 2 test_cover: <<: *defaults From e7d0b725ab52a85327ebf0c48b57c1b517c158bf Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Mon, 1 Jul 2019 16:13:38 +0100 Subject: [PATCH 16/70] Refactor dump and add tests. Fix events not served at dump end height Dump in protobuf not amino and make JSON default dump format Misc fixes to state. Signed-off-by: Silas Davis --- cmd/burrow/commands/dump.go | 26 ++-- dump/dump.go | 140 ++++++++++++------ dump/dump_test.go | 82 ++++++++++ dump/load.go | 12 +- dump/load_test.go | 30 ++-- dump/mock.go | 131 ++++++++++++++++ dump/mock_reader_test.go | 82 ---------- dump/mock_test.go | 28 ++++ dump/pipe.go | 10 +- dump/reader.go | 53 ------- dump/sink.go | 7 + dump/source.go | 89 +++++++++++ dump/source_test.go | 41 +++++ execution/execution_test.go | 2 +- execution/state/events.go | 16 +- execution/state/events_test.go | 4 +- execution/state/state.go | 19 +-- genesis/genesis.go | 20 ++- genesis/spec/genesis_spec.go | 3 +- go.mod | 2 +- .../rpcevents/execution_events_server_test.go | 3 +- project/history.go | 13 ++ rpc/rpcdump/dump_server.go | 2 +- rpc/rpcevents/blocks.go | 3 +- rpc/rpcevents/blocks_test.go | 2 +- rpc/rpcevents/execution_events_server.go | 28 ++-- tests/dump/test.sh | 6 +- 27 files changed, 571 insertions(+), 283 deletions(-) create mode 100644 dump/dump_test.go create mode 100644 dump/mock.go delete mode 100644 dump/mock_reader_test.go create mode 100644 dump/mock_test.go delete mode 100644 dump/reader.go create mode 100644 dump/sink.go create mode 100644 dump/source.go create mode 100644 dump/source_test.go diff --git a/cmd/burrow/commands/dump.go b/cmd/burrow/commands/dump.go index 1975bc6b8..63369fdb0 100644 --- a/cmd/burrow/commands/dump.go +++ b/cmd/burrow/commands/dump.go @@ -16,21 +16,21 @@ import ( ) type dumpOptions struct { - height *int - filename *string - useJSON *bool + height *int + filename *string + useBinaryEncoding *bool } func addDumpOptions(cmd *cli.Cmd, specOptions ...string) *dumpOptions { - cmd.Spec += "[--height=] [--json]" + cmd.Spec += "[--height=] [--binary]" for _, spec := range specOptions { cmd.Spec += " " + spec } cmd.Spec += " FILE" return &dumpOptions{ - height: cmd.IntOpt("h height", 0, "Block height to dump to, defaults to latest block height"), - useJSON: cmd.BoolOpt("j json", false, "Output in json"), - filename: cmd.StringArg("FILE", "", "Save dump here"), + height: cmd.IntOpt("h height", 0, "Block height to dump to, defaults to latest block height"), + useBinaryEncoding: cmd.BoolOpt("b binary", false, 
"Output in binary encoding (default is JSON)"), + filename: cmd.StringArg("FILE", "", "Save dump here"), } } @@ -66,10 +66,10 @@ func Dump(output Output) func(cmd *cli.Cmd) { output.Fatalf("could not make logger: %v", err) } - receiver := dump.NewDumper(kern.State, kern.Blockchain, logger). - Pipe(0, uint64(*dumpOpts.height), dump.All) + source := dump.NewDumper(kern.State, kern.Blockchain).WithLogger(logger). + Source(0, uint64(*dumpOpts.height), dump.All) - err = dumpToFile(receiver, *dumpOpts.filename, *dumpOpts.useJSON) + err = dumpToFile(*dumpOpts.filename, source, *dumpOpts.useBinaryEncoding) if err != nil { output.Fatalf("could not dump to file %s': %v", *dumpOpts.filename, err) } @@ -117,7 +117,7 @@ func Dump(output Output) func(cmd *cli.Cmd) { output.Fatalf("failed to retrieve dump: %v", err) } - err = dumpToFile(receiver, *dumpOpts.filename, *dumpOpts.useJSON) + err = dumpToFile(*dumpOpts.filename, receiver, *dumpOpts.useBinaryEncoding) if err != nil { output.Fatalf("could not dump to file %s': %v", *dumpOpts.filename, err) } @@ -128,14 +128,14 @@ func Dump(output Output) func(cmd *cli.Cmd) { } } -func dumpToFile(receiver dump.Receiver, filename string, useJSON bool) error { +func dumpToFile(filename string, source dump.Source, useBinaryEncoding bool) error { f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) if err != nil { return err } // Receive - err = dump.Write(receiver, f, useJSON, dump.All) + err = dump.Write(f, source, useBinaryEncoding, dump.All) if err != nil { return err } diff --git a/dump/dump.go b/dump/dump.go index 929593bbd..7a1f9c210 100644 --- a/dump/dump.go +++ b/dump/dump.go @@ -7,20 +7,15 @@ import ( "time" "github.com/hyperledger/burrow/acm" - "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/binary" + "github.com/hyperledger/burrow/encoding" "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/execution/names" "github.com/hyperledger/burrow/execution/state" "github.com/hyperledger/burrow/logging" - "github.com/tendermint/go-amino" "github.com/tendermint/tendermint/libs/db" ) -var cdc = amino.NewCodec() - -type Option uint64 - const ( // Whether to send/receive these classes of data Accounts Option = 1 << iota @@ -28,30 +23,47 @@ const ( Events ) -const ( - None Option = 0 - All = Accounts | Names | Events -) +// Chunk account storage into rows that are less than 1 MiB +const thresholdAccountStorageBytesPerRow = 1 << 20 -func (options Option) Enabled(option Option) bool { - return options&option > 0 +type Sink interface { + Send(*Dump) error +} + +type Blockchain interface { + ChainID() string + LastBlockHeight() uint64 } type Dumper struct { state *state.State - blockchain bcm.BlockchainInfo + blockchain Blockchain logger *logging.Logger } -func NewDumper(state *state.State, blockchain bcm.BlockchainInfo, logger *logging.Logger) *Dumper { +// Return a Dumper that can Transmit Dump rows to a Sink by pulling them out of the the provided State +func NewDumper(state *state.State, blockchain Blockchain) *Dumper { return &Dumper{ state: state, blockchain: blockchain, - logger: logger, + logger: logging.NewNoopLogger(), } } -func (ds *Dumper) Transmit(stream Sender, startHeight, endHeight uint64, options Option) error { +type Option uint64 + +const ( + None Option = 0 + All = Accounts | Names | Events +) + +func (options Option) Enabled(option Option) bool { + return options&option > 0 +} + +// Transmit Dump rows to the provided Sink over the inclusive range of heights provided, if endHeight is 0 
the latest +// height is used. +func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Option) error { height := endHeight if height == 0 { height = ds.blockchain.LastBlockHeight() @@ -64,30 +76,56 @@ func (ds *Dumper) Transmit(stream Sender, startHeight, endHeight uint64, options if options.Enabled(Accounts) { ds.logger.InfoMsg("Dumping accounts") err = st.IterateAccounts(func(acc *acm.Account) error { - err = stream.Send(&Dump{Height: height, Account: acc}) - if err != nil { - return err - } - - storage := AccountStorage{ - Address: acc.Address, - Storage: make([]*Storage, 0), + // Since we tend to want to handle accounts and their storage as a single unit we multiplex account + // and storage within the same row. If the storage gets too large we chunk it and send in separate rows + // (so that we stay well below the 4MiB GRPC message size limit and generally maintain stream-ability) + row := &Dump{ + Height: height, + Account: acc, + AccountStorage: &AccountStorage{ + Address: acc.Address, + Storage: make([]*Storage, 0), + }, } + var storageBytes int err = st.IterateStorage(acc.Address, func(key binary.Word256, value []byte) error { - storage.Storage = append(storage.Storage, &Storage{Key: key, Value: value}) + if storageBytes > thresholdAccountStorageBytesPerRow { + // Send the current row + err = sink.Send(row) + if err != nil { + return err + } + // Start a new pure storage row + row = &Dump{ + Height: height, + AccountStorage: &AccountStorage{ + Address: acc.Address, + Storage: make([]*Storage, 0), + }, + } + } + row.AccountStorage.Storage = append(row.AccountStorage.Storage, &Storage{Key: key, Value: value}) + storageBytes += len(key) + len(value) return nil }) - if err != nil { return err } - if len(storage.Storage) > 0 { - return stream.Send(&Dump{ - Height: height, - AccountStorage: &storage, - }) + // Don't send empty storage + if len(row.AccountStorage.Storage) == 0 { + row.AccountStorage = nil + // Don't send an empty row + if row.Account == nil { + // We started a new storage row, but there was no subsequent storage to go in it + return nil + } + } + + err = sink.Send(row) + if err != nil { + return err } return nil @@ -101,7 +139,7 @@ func (ds *Dumper) Transmit(stream Sender, startHeight, endHeight uint64, options if options.Enabled(Names) { ds.logger.InfoMsg("Dumping names") err = st.IterateNames(func(entry *names.Entry) error { - return stream.Send(&Dump{Height: height, Name: entry}) + return sink.Send(&Dump{Height: height, Name: entry}) }) if err != nil { return err @@ -133,7 +171,7 @@ func (ds *Dumper) Transmit(stream Sender, startHeight, endHeight uint64, options evmevent.ChainID = ds.blockchain.ChainID() evmevent.Time = blockTime } - err := stream.Send(&Dump{Height: ev.Event.Header.Height, EVMEvent: &evmevent}) + err := sink.Send(&Dump{Height: ev.Event.Header.Height, EVMEvent: &evmevent}) if err != nil { return err } @@ -150,7 +188,8 @@ func (ds *Dumper) Transmit(stream Sender, startHeight, endHeight uint64, options return nil } -func (ds *Dumper) Pipe(startHeight, endHeight uint64, options Option) Pipe { +// Return a Source that is a Pipe fed from this Dumper's Transmit function +func (ds *Dumper) Source(startHeight, endHeight uint64, options Option) Source { p := make(Pipe) go func() { err := ds.Transmit(p, startHeight, endHeight, options) @@ -162,11 +201,17 @@ func (ds *Dumper) Pipe(startHeight, endHeight uint64, options Option) Pipe { return p } -func Write(stream Receiver, out io.Writer, useJSON bool, options Option) error { +func (ds 
*Dumper) WithLogger(logger *logging.Logger) *Dumper { + ds.logger = logger + return ds +} + +// Write a dump to the Writer out by pulling rows from stream +func Write(out io.Writer, source Source, useBinaryEncoding bool, options Option) error { st := state.NewState(db.NewMemDB()) _, _, err := st.Update(func(ws state.Updatable) error { for { - resp, err := stream.Recv() + resp, err := source.Recv() if err == io.EOF { break } @@ -201,22 +246,25 @@ func Write(stream Receiver, out io.Writer, useJSON bool, options Option) error { } } - var bs []byte - if useJSON { - bs, err = json.Marshal(resp) - if bs != nil { - bs = append(bs, []byte("\n")...) + if useBinaryEncoding { + _, err := encoding.WriteMessage(out, resp) + if err != nil { + return fmt.Errorf("failed write to binary dump message: %v", err) } - } else { - bs, err = cdc.MarshalBinaryLengthPrefixed(resp) + return nil } + + bs, err := json.Marshal(resp) if err != nil { return fmt.Errorf("failed to marshall dump: %v", err) } - n, err := out.Write(bs) - if err == nil && n < len(bs) { - return fmt.Errorf("failed to write dump: %v", err) + if len(bs) > 0 { + bs = append(bs, []byte("\n")...) + n, err := out.Write(bs) + if err == nil && n < len(bs) { + return fmt.Errorf("failed to write dump: %v", err) + } } } diff --git a/dump/dump_test.go b/dump/dump_test.go new file mode 100644 index 000000000..b72eda5be --- /dev/null +++ b/dump/dump_test.go @@ -0,0 +1,82 @@ +package dump + +import ( + "bytes" + "io/ioutil" + "log" + "net/http" + _ "net/http/pprof" + "os" + "sort" + "strings" + "testing" + + "github.com/hyperledger/burrow/bcm" + "github.com/hyperledger/burrow/execution/state" + "github.com/stretchr/testify/require" + "github.com/syndtr/goleveldb/leveldb/opt" + dbm "github.com/tendermint/tendermint/libs/db" +) + +// The tests in this package are quite a good starting point for investigating the inadequacies of IAVL... 
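// A minimal profiling sketch, assuming the package lives at ./dump and relying on the pprof
// listener started in TestMain below; the exact flags are illustrative, not taken from this patch:
//
//   go test -run=NONE -bench=BenchmarkDump ./dump
//   go tool pprof http://localhost:6060/debug/pprof/profile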
+func TestMain(m *testing.M) { + // For pprof + go func() { + log.Println(http.ListenAndServe("localhost:6060", nil)) + }() + code := m.Run() + os.Exit(code) +} + +func BenchmarkDump(b *testing.B) { + b.StopTimer() + st := testLoad(b, NewMockSource(1000, 1000, 100, 10000)) + dumper := NewDumper(st, &bcm.Blockchain{}) + b.StartTimer() + for n := 0; n < b.N; n++ { + testDump(b, dumper) + } +} + +func TestDump(t *testing.T) { + st := testLoad(t, NewMockSource(1000, 1000, 100, 10000)) + dumper := NewDumper(st, &bcm.Blockchain{}) + testDump(t, dumper) +} + +func testDump(t testing.TB, dumper *Dumper) { + err := dumper.Transmit(NullSink{}, 0, 0, All) + require.NoError(t, err) +} + +// Test util + +func normaliseDump(dump string) string { + rows := strings.Split(dump, "\n") + sort.Stable(sort.StringSlice(rows)) + return strings.Join(rows, "\n") +} + +func dumpToJSONString(t *testing.T, st *state.State, blockchain Blockchain) string { + buf := new(bytes.Buffer) + receiver := NewDumper(st, blockchain).Source(0, 0, All) + err := Write(buf, receiver, false, All) + require.NoError(t, err) + return string(buf.Bytes()) +} + +func loadDumpFromJSONString(t *testing.T, st *state.State, jsonDump string) { + reader, err := NewJSONReader(bytes.NewBufferString(jsonDump)) + require.NoError(t, err) + err = Load(reader, st) + require.NoError(t, err) +} + +func testDB(t testing.TB) dbm.DB { + testDir, err := ioutil.TempDir("", "TestDump") + require.NoError(t, err) + var options *opt.Options + db, err := dbm.NewGoLevelDBWithOpts("TestDumpDB", testDir, options) + require.NoError(t, err) + return db +} diff --git a/dump/load.go b/dump/load.go index cadce858b..f42178f0a 100644 --- a/dump/load.go +++ b/dump/load.go @@ -3,6 +3,7 @@ package dump import ( "crypto/sha256" bin "encoding/binary" + "io" "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/binary" @@ -12,22 +13,21 @@ import ( ) // Load a dump into state -func Load(reader Reader, st *state.State) error { +func Load(source Source, st *state.State) error { _, _, err := st.Update(func(s state.Updatable) error { txs := make([]*exec.TxExecution, 0) var tx *exec.TxExecution for { - row, err := reader.Next() + row, err := source.Recv() + if err == io.EOF { + break + } if err != nil { return err } - if row == nil { - break - } - if row.Account != nil { if row.Account.Address != acm.GlobalPermissionsAddress { err := s.UpdateAccount(row.Account) diff --git a/dump/load_test.go b/dump/load_test.go index 45def2412..f4d459418 100644 --- a/dump/load_test.go +++ b/dump/load_test.go @@ -1,33 +1,33 @@ package dump import ( + "fmt" "testing" "github.com/hyperledger/burrow/execution/state" "github.com/hyperledger/burrow/genesis" "github.com/stretchr/testify/require" - dbm "github.com/tendermint/tendermint/libs/db" ) func TestLoad(t *testing.T) { - testLoad(t) + testLoad(t, NewMockSource(100, 10, 20, 1000)) } func BenchmarkLoad(b *testing.B) { - for n := 0; n < b.N; n++ { - testLoad(b) + for f := 1; f <= 64; f *= 2 { + b.Run(fmt.Sprintf("factor/%d", f), func(b *testing.B) { + fmt.Println(f, b.N) + for n := 0; n < b.N; n++ { + testLoad(b, NewMockSource(10*f, f, f, 100*f)) + } + }) } } -func testLoad(tb testing.TB) { - mock := MockDumpReader{ - accounts: 2000, - storage: 1000, - names: 100, - events: 100000, - } - st, err := state.MakeGenesisState(dbm.NewMemDB(), &genesis.GenesisDoc{}) - require.NoError(tb, err) - err = Load(&mock, st) - require.NoError(tb, err) +func testLoad(t testing.TB, mock *MockSource) *state.State { + st, err := state.MakeGenesisState(testDB(t), 
&genesis.GenesisDoc{}) + require.NoError(t, err) + err = Load(mock, st) + require.NoError(t, err) + return st } diff --git a/dump/mock.go b/dump/mock.go new file mode 100644 index 000000000..7793b0c54 --- /dev/null +++ b/dump/mock.go @@ -0,0 +1,131 @@ +package dump + +import ( + bin "encoding/binary" + "fmt" + "io" + "math/rand" + + "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/binary" + "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/execution/names" + "github.com/hyperledger/burrow/genesis" +) + +type MockSource struct { + Accounts int + MaxStorage int + Names int + Events int + *Mockchain + rand *rand.Rand +} + +var _ Source = &MockSource{} + +func NewMockSource(accounts, maxStorage, names, events int) *MockSource { + return &MockSource{ + Accounts: accounts, + MaxStorage: maxStorage, + Names: names, + Events: events, + Mockchain: NewMockchain("Mockchain", 999999), + rand: rand.New(rand.NewSource(2323524)), + } +} + +func (m *MockSource) Recv() (*Dump, error) { + row := Dump{Height: m.LastBlockHeight()} + + // In order to create the same state as from a real dump we need to honour the dump order: + // [accounts[storage...]...][names...][events...] + if m.Accounts > 0 { + var addr crypto.Address + bin.BigEndian.PutUint64(addr[:], uint64(m.Accounts)) + + row.Account = &acm.Account{ + Address: addr, + Balance: rand.Uint64(), + } + + if m.Accounts%2 > 0 { + row.Account.EVMCode = make([]byte, m.rand.Intn(10000)) + m.rand.Read(row.Account.EVMCode) + } else { + row.Account.PublicKey = crypto.PublicKey{} + } + m.Accounts-- + if m.MaxStorage > 0 { + // We don't send empty storage + storagelen := 1 + m.rand.Intn(m.MaxStorage) + + row.AccountStorage = &AccountStorage{ + Address: addr, + Storage: make([]*Storage, storagelen), + } + + for i := 0; i < storagelen; i++ { + var key binary.Word256 + // Put account index in first 8 bytes + copy(key[:8], addr[:8]) + // Put storage index in last 8 bytes + bin.BigEndian.PutUint64(key[24:], uint64(i)) + row.AccountStorage.Storage[i] = &Storage{Key: key, Value: key[:]} + } + } + } else if m.Accounts == 0 { + // Finally send the global permissions account (makes for easier equality checks with genesis state dump) + row.Account = genesis.DefaultPermissionsAccount + m.Accounts-- + } else if m.Names > 0 { + row.Name = &names.Entry{ + Name: fmt.Sprintf("name%d", m.Names), + Data: fmt.Sprintf("data%x", m.Names), + Owner: crypto.ZeroAddress, + Expires: 1337, + } + m.Names-- + } else if m.Events > 0 { + datalen := 1 + m.rand.Intn(10) + data := make([]byte, datalen*32) + topiclen := 1 + m.rand.Intn(5) + topics := make([]binary.Word256, topiclen) + row.EVMEvent = &EVMEvent{ + ChainID: m.ChainID(), + Event: &exec.LogEvent{ + Address: crypto.ZeroAddress, + Data: data, + Topics: topics, + }, + } + m.Events-- + } else { + return nil, io.EOF + } + + return &row, nil +} + +type Mockchain struct { + chainID string + lastBlockHeight uint64 +} + +var _ Blockchain = &Mockchain{} + +func NewMockchain(chainID string, lastBlockHeight uint64) *Mockchain { + return &Mockchain{ + chainID: chainID, + lastBlockHeight: lastBlockHeight, + } +} + +func (mc *Mockchain) ChainID() string { + return mc.chainID +} + +func (mc *Mockchain) LastBlockHeight() uint64 { + return mc.lastBlockHeight +} diff --git a/dump/mock_reader_test.go b/dump/mock_reader_test.go deleted file mode 100644 index 8eecb732b..000000000 --- a/dump/mock_reader_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package dump - -import ( - bin 
"encoding/binary" - "fmt" - "math/rand" - - "github.com/hyperledger/burrow/acm" - "github.com/hyperledger/burrow/binary" - "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/execution/exec" - "github.com/hyperledger/burrow/execution/names" -) - -type MockDumpReader struct { - accounts int - storage int - names int - events int -} - -func (m *MockDumpReader) Next() (*Dump, error) { - row := Dump{Height: 102} - - if m.accounts > 0 { - var addr crypto.Address - bin.BigEndian.PutUint64(addr.Bytes(), uint64(m.accounts)) - - row.Account = &acm.Account{ - Address: addr, - Balance: 102, - } - - if m.accounts%2 > 0 { - row.Account.EVMCode = make([]byte, rand.Int()%10000) - } else { - row.Account.PublicKey = crypto.PublicKey{} - } - m.accounts-- - } else if m.storage > 0 { - var addr crypto.Address - bin.BigEndian.PutUint64(addr.Bytes(), uint64(m.storage)) - storagelen := rand.Int() % 25 - - row.AccountStorage = &AccountStorage{ - Address: addr, - Storage: make([]*Storage, storagelen), - } - - for i := 0; i < storagelen; i++ { - row.AccountStorage.Storage[i] = &Storage{} - } - - m.storage-- - } else if m.names > 0 { - row.Name = &names.Entry{ - Name: fmt.Sprintf("name%d", m.names), - Data: fmt.Sprintf("data%x", m.names), - Owner: crypto.ZeroAddress, - Expires: 1337, - } - m.names-- - } else if m.events > 0 { - datalen := rand.Int() % 10 - data := make([]byte, datalen*32) - topiclen := rand.Int() % 5 - topics := make([]binary.Word256, topiclen) - row.EVMEvent = &EVMEvent{ - ChainID: "MockChain", - Event: &exec.LogEvent{ - Address: crypto.ZeroAddress, - Data: data, - Topics: topics, - }, - } - m.events-- - } else { - return nil, nil - } - - return &row, nil -} diff --git a/dump/mock_test.go b/dump/mock_test.go new file mode 100644 index 000000000..6df2a7614 --- /dev/null +++ b/dump/mock_test.go @@ -0,0 +1,28 @@ +package dump + +import ( + "bytes" + "testing" + + "github.com/hyperledger/burrow/execution/state" + "github.com/hyperledger/burrow/genesis" + "github.com/hyperledger/burrow/permission" + "github.com/stretchr/testify/require" +) + +func TestMockReader(t *testing.T) { + mock := NewMockSource(100, 5, 20, 1000) + mock.Mockchain = NewMockchain("TestChain", 0) + buf := new(bytes.Buffer) + err := Write(buf, mock, false, All) + require.NoError(t, err) + dump := normaliseDump(buf.String()) + + st, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{GlobalPermissions: permission.DefaultAccountPermissions}) + require.NoError(t, err) + loadDumpFromJSONString(t, st, dump) + + dumpOut := normaliseDump(dumpToJSONString(t, st, mock)) + require.True(t, dump == dumpOut) + require.Equal(t, dump, dumpOut) +} diff --git a/dump/pipe.go b/dump/pipe.go index 96fc631fe..26b196693 100644 --- a/dump/pipe.go +++ b/dump/pipe.go @@ -2,7 +2,7 @@ package dump import "io" -// Implements both Sender and Receiver +// Implements both Sink and Source type Pipe chan msg type msg struct { @@ -10,14 +10,6 @@ type msg struct { err error } -type Sender interface { - Send(*Dump) error -} - -type Receiver interface { - Recv() (*Dump, error) -} - func (p Pipe) Recv() (*Dump, error) { msg, ok := <-p if !ok { diff --git a/dump/reader.go b/dump/reader.go deleted file mode 100644 index 89a2a636c..000000000 --- a/dump/reader.go +++ /dev/null @@ -1,53 +0,0 @@ -package dump - -import ( - "encoding/json" - "io" - "os" -) - -type Reader interface { - Next() (*Dump, error) -} - -type FileReader struct { - file *os.File - decoder *json.Decoder -} - -func NewFileReader(filename string) (Reader, error) { - f, err := 
os.OpenFile(filename, os.O_RDONLY, 0644) - if err != nil { - return nil, err - } - - return &FileReader{file: f}, nil -} - -func (f *FileReader) Next() (*Dump, error) { - var row Dump - var err error - - if f.decoder != nil { - err = f.decoder.Decode(&row) - } else { - _, err = cdc.UnmarshalBinaryLengthPrefixedReader(f.file, &row, 0) - - if err != nil && err != io.EOF && f.decoder == nil { - f.file.Seek(0, 0) - - f.decoder = json.NewDecoder(f.file) - - return f.Next() - } - } - - if err == io.EOF { - return nil, nil - } - if err != nil { - return nil, err - } - - return &row, err -} diff --git a/dump/sink.go b/dump/sink.go new file mode 100644 index 000000000..237805098 --- /dev/null +++ b/dump/sink.go @@ -0,0 +1,7 @@ +package dump + +type NullSink struct{} + +func (NullSink) Send(*Dump) error { + return nil +} diff --git a/dump/source.go b/dump/source.go new file mode 100644 index 000000000..97355aab8 --- /dev/null +++ b/dump/source.go @@ -0,0 +1,89 @@ +package dump + +import ( + "encoding/json" + "fmt" + "io" + "os" + + "github.com/hyperledger/burrow/encoding" +) + +type Source interface { + Recv() (*Dump, error) +} + +type StreamReader struct { + reader io.Reader + decode func(*Dump) error +} + +func NewFileReader(filename string) (Source, error) { + f, err := os.OpenFile(filename, os.O_RDONLY, 0644) + if err != nil { + return nil, err + } + decoder, err := decoderFor(f) + if err != nil { + return nil, err + } + + return NewStreamReader(f, decoder) +} + +func NewProtobufReader(reader io.Reader) (*StreamReader, error) { + return NewStreamReader(reader, protobufDecoder(reader)) +} + +func NewJSONReader(reader io.Reader) (*StreamReader, error) { + return NewStreamReader(reader, jsonDecoder(reader)) +} + +func NewStreamReader(reader io.Reader, decode func(*Dump) error) (*StreamReader, error) { + return &StreamReader{ + reader: reader, + decode: decode, + }, nil +} + +func (sr *StreamReader) Recv() (*Dump, error) { + row := new(Dump) + + err := sr.decode(row) + if err != nil { + return nil, err + } + + return row, nil +} + +func protobufDecoder(r io.Reader) func(*Dump) error { + return func(row *Dump) error { + _, err := encoding.ReadMessage(r, row) + return err + } +} +func jsonDecoder(r io.Reader) func(*Dump) error { + decoder := json.NewDecoder(r) + return func(dump *Dump) error { + return decoder.Decode(dump) + } +} + +// Detects whether dump file appears to be protobuf or JSON encoded by trying to decode the first row with each +func decoderFor(f *os.File) (func(*Dump) error, error) { + defer f.Seek(0, 0) + + jsonErr := json.NewDecoder(f).Decode(&Dump{}) + if jsonErr == nil || jsonErr == io.EOF { + return jsonDecoder(f), nil + } + + _, binErr := encoding.ReadMessage(f, &Dump{}) + if binErr != nil && binErr != io.EOF { + return nil, fmt.Errorf("could decode first row of dump file as protobuf (%v) or JSON (%v)", + binErr, jsonErr) + } + + return protobufDecoder(f), nil +} diff --git a/dump/source_test.go b/dump/source_test.go new file mode 100644 index 000000000..fb3e8ec25 --- /dev/null +++ b/dump/source_test.go @@ -0,0 +1,41 @@ +package dump + +import ( + "testing" + + "github.com/hyperledger/burrow/bcm" + "github.com/hyperledger/burrow/execution/state" + "github.com/hyperledger/burrow/genesis" + "github.com/stretchr/testify/require" +) + +func TestDumpLoadCycle(t *testing.T) { + // Get some initial test data from a mock state + mock := NewMockSource(100, 1, 20, 10) + st, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{}) + err = Load(mock, st) + require.NoError(t, err) + + 
// We want to check we get the same state after a dump restore, but we cannot compare with the intial loaded state + // st because mock source does not give dump in same order and IAVL is order-dependent, so we'll chain 2 dump/restores + // and compare the two resultant states + + // Fresh states to load back into + stOut1, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{}) + require.NoError(t, err) + + stOut2, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{}) + require.NoError(t, err) + + // First dump from st and load stOut1 + dump := dumpToJSONString(t, st, &bcm.Blockchain{}) + loadDumpFromJSONString(t, stOut1, dump) + + // Now dump from stOut1 and load to stOut2 + dump2 := dumpToJSONString(t, stOut1, &bcm.Blockchain{}) + loadDumpFromJSONString(t, stOut2, dump2) + + require.Equal(t, dump, dump2) + require.Equal(t, stOut1.Version(), stOut2.Version()) + require.Equal(t, stOut1.Hash(), stOut2.Hash()) +} diff --git a/execution/execution_test.go b/execution/execution_test.go index 9b80710c4..535a5b51d 100644 --- a/execution/execution_test.go +++ b/execution/execution_test.go @@ -1509,7 +1509,7 @@ func makeUsers(n int) []acm.AddressableSigner { func newBlockchain(genesisDoc *genesis.GenesisDoc) *bcm.Blockchain { testDB := dbm.NewDB("test", dbBackend, ".") - blockchain, _, _ := bcm.LoadOrNewBlockchain(testDB, testGenesisDoc, logger) + blockchain, _, _ := bcm.LoadOrNewBlockchain(testDB, genesisDoc, logger) return blockchain } diff --git a/execution/state/events.go b/execution/state/events.go index 3b4029334..265499dd7 100644 --- a/execution/state/events.go +++ b/execution/state/events.go @@ -46,17 +46,19 @@ func (ws *writeState) AddBlock(be *exec.BlockExecution) error { return nil } -func (s *ReadState) IterateStreamEvents(start, end *uint64, consumer func(*exec.StreamEvent) error) error { +// Iterate SteamEvents over the closed interval [startHeight, endHeight] - i.e. startHeight and endHeight inclusive +func (s *ReadState) IterateStreamEvents(startHeight, endHeight *uint64, consumer func(*exec.StreamEvent) error) error { tree, err := s.Forest.Reader(keys.Event.Prefix()) if err != nil { return err } var startKey, endKey []byte - if start != nil { - startKey = keys.Event.KeyNoPrefix(*start) + if startHeight != nil { + startKey = keys.Event.KeyNoPrefix(*startHeight) } - if end != nil { - endKey = keys.Event.KeyNoPrefix(*end) + if endHeight != nil { + // Convert to inclusive end bounds since this generally makes more sense for block height + endKey = keys.Event.KeyNoPrefix(*endHeight + 1) } return tree.Iterate(startKey, endKey, true, func(_, value []byte) error { buf := bytes.NewBuffer(value) @@ -82,9 +84,7 @@ func (s *ReadState) IterateStreamEvents(start, end *uint64, consumer func(*exec. 
func (s *ReadState) TxsAtHeight(height uint64) ([]*exec.TxExecution, error) { var stack exec.TxStack var txExecutions []*exec.TxExecution - start := height - end := height + 1 - err := s.IterateStreamEvents(&start, &end, + err := s.IterateStreamEvents(&height, &height, func(ev *exec.StreamEvent) error { // Keep trying to consume TxExecutions at from events at this height txe := stack.Consume(ev) diff --git a/execution/state/events_test.go b/execution/state/events_test.go index b927da6e0..5ba55cd23 100644 --- a/execution/state/events_test.go +++ b/execution/state/events_test.go @@ -27,9 +27,7 @@ func TestWriteState_AddBlock(t *testing.T) { txIndex := uint64(0) eventIndex := uint64(0) - start := height - end := height + 1 - err = s.IterateStreamEvents(&start, &end, + err = s.IterateStreamEvents(&height, &height, func(ev *exec.StreamEvent) error { switch { case ev.BeginTx != nil: diff --git a/execution/state/state.go b/execution/state/state.go index 9eee23e10..ae643eb2f 100644 --- a/execution/state/state.go +++ b/execution/state/state.go @@ -29,7 +29,6 @@ import ( "github.com/hyperledger/burrow/execution/proposal" "github.com/hyperledger/burrow/genesis" "github.com/hyperledger/burrow/logging" - "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/storage" "github.com/hyperledger/burrow/txs" dbm "github.com/tendermint/tendermint/libs/db" @@ -72,7 +71,7 @@ var keys = KeyFormatStore{ Proposal: storage.NewMustKeyFormat("p", sha256.Size), // ValidatorAddress -> Power Validator: storage.NewMustKeyFormat("v", crypto.AddressLength), - // Height, EventIndex -> StreamEvent + // Height -> StreamEvent Event: storage.NewMustKeyFormat("e", uint64Length), // TxHash -> TxHeight, TxIndex TxHash: storage.NewMustKeyFormat("th", txs.HashLength), @@ -167,20 +166,8 @@ func MakeGenesisState(db dbm.DB, genesisDoc *genesis.GenesisDoc) (*State, error) if err != nil { return nil, fmt.Errorf("%s %v", errHeader, err) } - // global permissions are saved as the 0 address - // so they are included in the accounts tree - globalPerms := permission.DefaultAccountPermissions - globalPerms = genesisDoc.GlobalPermissions - // XXX: make sure the set bits are all true - // Without it the HasPermission() functions will fail - globalPerms.Base.SetBit = permission.AllPermFlags - - permsAcc := &acm.Account{ - Address: acm.GlobalPermissionsAddress, - Balance: 1337, - Permissions: globalPerms, - } - err = s.writeState.UpdateAccount(permsAcc) + // Set up fallback global permissions + err = s.writeState.UpdateAccount(genesisDoc.GlobalPermissionsAccount()) if err != nil { return nil, fmt.Errorf("%s %v", errHeader, err) } diff --git a/genesis/genesis.go b/genesis/genesis.go index 170b1248f..0ffee843f 100644 --- a/genesis/genesis.go +++ b/genesis/genesis.go @@ -56,11 +56,10 @@ type Validator struct { UnbondTo []BasicAccount } -//------------------------------------------------------------ -// GenesisDoc is stored in the state database - const DefaultProposalThreshold uint64 = 3 +var DefaultPermissionsAccount = PermissionsAccount(permission.DefaultAccountPermissions) + type params struct { ProposalThreshold uint64 } @@ -79,6 +78,21 @@ type GenesisDoc struct { chainID string } +func (genesisDoc *GenesisDoc) GlobalPermissionsAccount() *acm.Account { + return PermissionsAccount(genesisDoc.GlobalPermissions) +} + +func PermissionsAccount(globalPerms permission.AccountPermissions) *acm.Account { + // Ensure the set bits are all true otherwise the HasPermission() functions will fail + globalPerms.Base.SetBit = 
permission.AllPermFlags + + return &acm.Account{ + Address: acm.GlobalPermissionsAddress, + Balance: 1337, + Permissions: globalPerms, + } +} + func (genesisDoc *GenesisDoc) JSONString() string { bs, err := genesisDoc.JSONBytes() if err != nil { diff --git a/genesis/spec/genesis_spec.go b/genesis/spec/genesis_spec.go index db13d1928..7ea37e6f5 100644 --- a/genesis/spec/genesis_spec.go +++ b/genesis/spec/genesis_spec.go @@ -15,7 +15,6 @@ import ( const DefaultAmount uint64 = 1000000 const DefaultPower uint64 = 10000 -const DefaultProposalThreshold uint64 = 3 // A GenesisSpec is schematic representation of a genesis state, that is it is a template // for a GenesisDoc excluding that which needs to be instantiated at the point of genesis @@ -62,7 +61,7 @@ func (gs *GenesisSpec) GenesisDoc(keyClient keys.KeyClient) (*genesis.GenesisDoc } if gs.Params.ProposalThreshold != 0 { - genesisDoc.Params.ProposalThreshold = DefaultProposalThreshold + genesisDoc.Params.ProposalThreshold = genesis.DefaultProposalThreshold } if len(gs.GlobalPermissions) == 0 { diff --git a/go.mod b/go.mod index f4429cfa0..1c4e30d4d 100644 --- a/go.mod +++ b/go.mod @@ -52,7 +52,7 @@ require ( github.com/spf13/viper v1.3.2 github.com/streadway/simpleuuid v0.0.0-20130420165545-6617b501e485 github.com/stretchr/testify v1.3.0 - github.com/syndtr/goleveldb v1.0.0 // indirect + github.com/syndtr/goleveldb v1.0.0 github.com/tendermint/go-amino v0.14.1 github.com/tendermint/iavl v0.12.2 github.com/tendermint/tendermint v0.31.5 diff --git a/integration/rpcevents/execution_events_server_test.go b/integration/rpcevents/execution_events_server_test.go index 67f80ec7a..6c52f9908 100644 --- a/integration/rpcevents/execution_events_server_test.go +++ b/integration/rpcevents/execution_events_server_test.go @@ -131,8 +131,7 @@ func TestExecutionEventsTest(t *testing.T) { assert.Contains(t, strconv.FormatUint(be.Height, 10), "2") return nil }) - // should record blocks 2 and 12 - require.Len(t, blocks, 2) + require.Len(t, blocks, 2, "should record blocks 2 and 12") assert.Equal(t, uint64(2), blocks[0].Height) assert.Equal(t, uint64(12), blocks[1].Height) diff --git a/project/history.go b/project/history.go index f9d484cf3..8dcf32a30 100644 --- a/project/history.go +++ b/project/history.go @@ -48,6 +48,19 @@ func FullVersion() string { // release tagging script: ./scripts/tag_release.sh var History relic.ImmutableHistory = relic.NewHistory("Hyperledger Burrow", "https://github.com/hyperledger/burrow"). 
MustDeclareReleases( + "", + `### Fixed +- [Dump] Fix dump missing events emitted at end height provided + +### Changed +- [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) avoid bug-prone conversion +- [Dump] Improved structure and API +- [Dump] Default to JSON output and use protobuf for binary output + +### Added +- [Dump] Better tests, mock, and benchmarks - suitable for profiling IAVL + +`, "0.27.0 - 2019-06-23", `### Added - [WASM] Support for WASM contracts written in Solidity compiled using solang diff --git a/rpc/rpcdump/dump_server.go b/rpc/rpcdump/dump_server.go index b2488052b..d72172c62 100644 --- a/rpc/rpcdump/dump_server.go +++ b/rpc/rpcdump/dump_server.go @@ -15,7 +15,7 @@ var _ DumpServer = &dumpServer{} func NewDumpServer(state *state.State, blockchain bcm.BlockchainInfo, logger *logging.Logger) *dumpServer { return &dumpServer{ - dumper: dump.NewDumper(state, blockchain, logger), + dumper: dump.NewDumper(state, blockchain).WithLogger(logger), } } diff --git a/rpc/rpcevents/blocks.go b/rpc/rpcevents/blocks.go index db6c303ef..761a63711 100644 --- a/rpc/rpcevents/blocks.go +++ b/rpc/rpcevents/blocks.go @@ -6,8 +6,7 @@ import ( // Get bounds suitable for events.Provider func (br *BlockRange) Bounds(latestBlockHeight uint64) (startHeight, endHeight uint64, streaming bool) { - // End bound is exclusive in state.GetEvents so we increment the height - return br.GetStart().Bound(latestBlockHeight), br.GetEnd().Bound(latestBlockHeight) + 1, + return br.GetStart().Bound(latestBlockHeight), br.GetEnd().Bound(latestBlockHeight), br.GetEnd().GetType() == Bound_STREAM } diff --git a/rpc/rpcevents/blocks_test.go b/rpc/rpcevents/blocks_test.go index e773a84e7..f1a3437b3 100644 --- a/rpc/rpcevents/blocks_test.go +++ b/rpc/rpcevents/blocks_test.go @@ -11,6 +11,6 @@ func TestBlockRange_Bounds(t *testing.T) { br := &BlockRange{} start, end, streaming := br.Bounds(latestHeight) assert.Equal(t, latestHeight, start) - assert.Equal(t, latestHeight+1, end) + assert.Equal(t, latestHeight, end) assert.False(t, streaming) } diff --git a/rpc/rpcevents/execution_events_server.go b/rpc/rpcevents/execution_events_server.go index f773acda0..f4b468db5 100644 --- a/rpc/rpcevents/execution_events_server.go +++ b/rpc/rpcevents/execution_events_server.go @@ -116,7 +116,6 @@ func (ees *executionEventsServer) Events(request *BlocksRequest, stream Executio func (ees *executionEventsServer) streamEvents(ctx context.Context, blockRange *BlockRange, consumer func(execution *exec.StreamEvent) error) error { - // Converts the bounds to half-open interval needed start, end, streaming := blockRange.Bounds(ees.tip.LastBlockHeight()) ees.logger.TraceMsg("Streaming blocks", "start", start, "end", end, "streaming", streaming) @@ -125,32 +124,29 @@ func (ees *executionEventsServer) streamEvents(ctx context.Context, blockRange * start, err := ees.iterateStreamEvents(start, end, consumer) // If we are not streaming and all blocks requested were retrieved from state then we are done - if !streaming && start >= end { + if !streaming && start > end { return err } return ees.subscribeBlockExecution(ctx, func(block *exec.BlockExecution) error { - streamEnd := block.Height - if streamEnd < start { + if block.Height < start { // We've managed to receive a block event we already processed directly from state above - wait for next block return nil } - - finished := !streaming && streamEnd >= end - if finished { - // Truncate streamEnd to final end to get exactly the blocks we want from 
state - streamEnd = end - } - if start < streamEnd { - // This implies there are some blocks between the previous batchEnd (now start) and the current BlockExecution that - // we have not emitted so we will pull them from state. This can occur if a block is emitted during/after - // the initial streaming but before we have subscribed to block events or if we spill BlockExecutions - // when streaming them and need to catch up - _, err := ees.iterateStreamEvents(start, streamEnd, consumer) + // Check if we have missed blocks we need to catch up on + if start < block.Height { + // We expect start == block.Height when processing consecutive blocks but we may have missed a block by + // dropping an event - if so we can fill in here + catchupEnd := block.Height + if catchupEnd > end { + catchupEnd = end + } + start, err = ees.iterateStreamEvents(start, catchupEnd, consumer) if err != nil { return err } } + finished := !streaming && block.Height > end if finished { return io.EOF } diff --git a/tests/dump/test.sh b/tests/dump/test.sh index a69a4f290..d3ab11da5 100755 --- a/tests/dump/test.sh +++ b/tests/dump/test.sh @@ -46,8 +46,8 @@ $burrow_bin deploy -o '' -a Validator_0 --dir $burrow_dump deploy.yaml title="Dumping chain..." echo -e "${title//?/-}\n${title}\n${title//?/-}\n" -$burrow_bin dump remote dump.bin -$burrow_bin dump remote -j dump.json +$burrow_bin dump remote -b dump.bin +$burrow_bin dump remote dump.json height=$(head -1 dump.json | jq .Height) kill $burrow_pid @@ -70,7 +70,7 @@ sleep 13 title="Dumping restored chain for comparison..." echo -e "\n${title//?/-}\n${title}\n${title//?/-}\n" -$burrow_bin dump remote -j --height $height dump-after-restore.json +$burrow_bin dump remote --height $height dump-after-restore.json kill $burrow_pid From 5e2c845eaf4365eebff872f056ed1df65c3d7752 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 9 Jul 2019 14:22:15 +0100 Subject: [PATCH 17/70] burrow dump remote without height does not dump any events If the height is 0 it should be set to the latest height, but this is not true for the EVM events. Signed-off-by: Sean Young --- dump/dump.go | 15 ++++++++------- project/history.go | 1 + 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/dump/dump.go b/dump/dump.go index 7a1f9c210..61fc10a30 100644 --- a/dump/dump.go +++ b/dump/dump.go @@ -63,12 +63,13 @@ func (options Option) Enabled(option Option) bool { // Transmit Dump rows to the provided Sink over the inclusive range of heights provided, if endHeight is 0 the latest // height is used. + func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Option) error { - height := endHeight - if height == 0 { - height = ds.blockchain.LastBlockHeight() + lastHeight := ds.blockchain.LastBlockHeight() + if endHeight == 0 || endHeight > lastHeight { + endHeight = lastHeight } - st, err := ds.state.LoadHeight(height) + st, err := ds.state.LoadHeight(endHeight) if err != nil { return err } @@ -80,7 +81,7 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt // and storage within the same row. 
If the storage gets too large we chunk it and send in separate rows // (so that we stay well below the 4MiB GRPC message size limit and generally maintain stream-ability) row := &Dump{ - Height: height, + Height: endHeight, Account: acc, AccountStorage: &AccountStorage{ Address: acc.Address, @@ -98,7 +99,7 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt } // Start a new pure storage row row = &Dump{ - Height: height, + Height: endHeight, AccountStorage: &AccountStorage{ Address: acc.Address, Storage: make([]*Storage, 0), @@ -139,7 +140,7 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt if options.Enabled(Names) { ds.logger.InfoMsg("Dumping names") err = st.IterateNames(func(entry *names.Entry) error { - return sink.Send(&Dump{Height: height, Name: entry}) + return sink.Send(&Dump{Height: endHeight, Name: entry}) }) if err != nil { return err diff --git a/project/history.go b/project/history.go index 8dcf32a30..1c49f30ab 100644 --- a/project/history.go +++ b/project/history.go @@ -51,6 +51,7 @@ var History relic.ImmutableHistory = relic.NewHistory("Hyperledger Burrow", "htt "", `### Fixed - [Dump] Fix dump missing events emitted at end height provided +- [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline ### Changed - [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) avoid bug-prone conversion From 74d2ec7bdcca2afea8ff95dcc75259cc6fcded3d Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 9 Jul 2019 14:55:08 +0100 Subject: [PATCH 18/70] Ensure test checks for presence of EVM events and code Signed-off-by: Sean Young --- tests/dump/test.sh | 6 ++++++ tests/dump/undeclared.sol | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/dump/test.sh b/tests/dump/test.sh index d3ab11da5..f4bb869f0 100755 --- a/tests/dump/test.sh +++ b/tests/dump/test.sh @@ -74,6 +74,12 @@ $burrow_bin dump remote --height $height dump-after-restore.json kill $burrow_pid +deadcafe=$(grep DEADCAFE dump.json | wc -l) +if [[ $deadcafe -ne 2 ]]; then + echo "DUMP FAILURE -- missing DEADCAFE" + exit 1 +fi + if cmp dump.json dump-after-restore.json then title="Done." 
diff --git a/tests/dump/undeclared.sol b/tests/dump/undeclared.sol index 66817bc06..a7e72078c 100644 --- a/tests/dump/undeclared.sol +++ b/tests/dump/undeclared.sol @@ -1,7 +1,7 @@ pragma solidity ^0.5.4; contract test { - event Bar(string a, int b); + event Bar(bytes32 a, int b); constructor() public { emit Bar("constructor", 0); @@ -23,7 +23,7 @@ contract test { int c = a * b; - emit Bar("result", c); + emit Bar(hex"DEADCAFE", c); return c; } From fe67bd16838fe2185d5d0863c4cbd539af6975bc Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 10 Jul 2019 11:08:42 +0100 Subject: [PATCH 19/70] Compare restored dump against original Signed-off-by: Sean Young --- dump/dump_test.go | 37 +++++++++++++++++++++++++++++-------- dump/load_test.go | 3 ++- dump/mock.go | 4 ++-- dump/sink.go | 16 ++++++++++++++++ 4 files changed, 49 insertions(+), 11 deletions(-) diff --git a/dump/dump_test.go b/dump/dump_test.go index b72eda5be..8c47ade6f 100644 --- a/dump/dump_test.go +++ b/dump/dump_test.go @@ -2,6 +2,8 @@ package dump import ( "bytes" + "encoding/json" + "io" "io/ioutil" "log" "net/http" @@ -34,19 +36,38 @@ func BenchmarkDump(b *testing.B) { dumper := NewDumper(st, &bcm.Blockchain{}) b.StartTimer() for n := 0; n < b.N; n++ { - testDump(b, dumper) + err := dumper.Transmit(NullSink{}, 0, 0, All) + require.NoError(b, err) } } func TestDump(t *testing.T) { - st := testLoad(t, NewMockSource(1000, 1000, 100, 10000)) - dumper := NewDumper(st, &bcm.Blockchain{}) - testDump(t, dumper) -} - -func testDump(t testing.TB, dumper *Dumper) { - err := dumper.Transmit(NullSink{}, 0, 0, All) + mockSource := NewMockSource(50, 50, 100, 100) + st := testLoad(t, mockSource) + dumper := NewDumper(st, mockSource) + sink := CollectSink{ + Rows: make([]string, 0), + } + err := dumper.Transmit(&sink, 0, 0, All) require.NoError(t, err) + + sort.Strings(sink.Rows) + + m := NewMockSource(50, 50, 100, 100) + data := make([]string, 0) + + for { + row, err := m.Recv() + if err == io.EOF { + break + } + bs, _ := json.Marshal(row) + data = append(data, string(bs)) + } + + sort.Strings(data) + + require.Equal(t, sink.Rows, data) } // Test util diff --git a/dump/load_test.go b/dump/load_test.go index f4d459418..7715efd50 100644 --- a/dump/load_test.go +++ b/dump/load_test.go @@ -6,6 +6,7 @@ import ( "github.com/hyperledger/burrow/execution/state" "github.com/hyperledger/burrow/genesis" + "github.com/hyperledger/burrow/permission" "github.com/stretchr/testify/require" ) @@ -25,7 +26,7 @@ func BenchmarkLoad(b *testing.B) { } func testLoad(t testing.TB, mock *MockSource) *state.State { - st, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{}) + st, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{GlobalPermissions: permission.DefaultAccountPermissions}) require.NoError(t, err) err = Load(mock, st) require.NoError(t, err) diff --git a/dump/mock.go b/dump/mock.go index 7793b0c54..06547bbc5 100644 --- a/dump/mock.go +++ b/dump/mock.go @@ -31,7 +31,7 @@ func NewMockSource(accounts, maxStorage, names, events int) *MockSource { MaxStorage: maxStorage, Names: names, Events: events, - Mockchain: NewMockchain("Mockchain", 999999), + Mockchain: NewMockchain("Mockchain", 0), rand: rand.New(rand.NewSource(2323524)), } } @@ -47,7 +47,7 @@ func (m *MockSource) Recv() (*Dump, error) { row.Account = &acm.Account{ Address: addr, - Balance: rand.Uint64(), + Balance: m.rand.Uint64(), } if m.Accounts%2 > 0 { diff --git a/dump/sink.go b/dump/sink.go index 237805098..9b4fa9ef5 100644 --- a/dump/sink.go +++ b/dump/sink.go @@ -1,7 +1,23 
@@ package dump +import ( + "encoding/json" +) + type NullSink struct{} func (NullSink) Send(*Dump) error { return nil } + +type CollectSink struct { + Rows []string +} + +func (c *CollectSink) Send(d *Dump) error { + bs, _ := json.Marshal(d) + + c.Rows = append(c.Rows, string(bs)) + + return nil +} From 643da271372d98bb959ed455ac550e8a68995e08 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Thu, 11 Jul 2019 11:19:33 +0100 Subject: [PATCH 20/70] Explain why we need two of these Signed-off-by: Sean Young --- tests/dump/test.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/dump/test.sh b/tests/dump/test.sh index f4bb869f0..2f40fcee6 100755 --- a/tests/dump/test.sh +++ b/tests/dump/test.sh @@ -74,6 +74,11 @@ $burrow_bin dump remote --height $height dump-after-restore.json kill $burrow_pid +# +# The contract emits an event which contains the hex string DEADCAFE. So, +# this string should be present both in contract code and as an emitted +# event. We should have two in our dump. +# deadcafe=$(grep DEADCAFE dump.json | wc -l) if [[ $deadcafe -ne 2 ]]; then echo "DUMP FAILURE -- missing DEADCAFE" From 871b1a5260480a236b20a2cd699035ab8681c66b Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Wed, 17 Jul 2019 16:55:20 +0100 Subject: [PATCH 21/70] Remove superflous logging and improve config API Signed-off-by: Silas Davis --- cmd/burrow/commands/dump.go | 4 +- cmd/burrow/commands/vent.go | 7 +- core/config.go | 3 +- logging/adapters/stdlib/capture.go | 39 ---------- logging/lifecycle/lifecycle.go | 99 ------------------------- logging/lifecycle/lifecycle_test.go | 36 --------- logging/logconfig/config.go | 53 ++++++++++++- logging/logconfig/format.go | 11 +++ logging/loggers/burrow_format_logger.go | 4 +- logging/loggers/format.go | 8 ++ logging/loggers/stream_logger.go | 9 +-- logging/structure/structure.go | 2 +- rpc/lib/rpc_test.go | 9 ++- 13 files changed, 88 insertions(+), 196 deletions(-) delete mode 100644 logging/adapters/stdlib/capture.go delete mode 100644 logging/lifecycle/lifecycle.go delete mode 100644 logging/lifecycle/lifecycle_test.go create mode 100644 logging/logconfig/format.go create mode 100644 logging/loggers/format.go diff --git a/cmd/burrow/commands/dump.go b/cmd/burrow/commands/dump.go index 63369fdb0..f7fcf208d 100644 --- a/cmd/burrow/commands/dump.go +++ b/cmd/burrow/commands/dump.go @@ -8,7 +8,7 @@ import ( "github.com/hyperledger/burrow/core" "github.com/hyperledger/burrow/dump" - "github.com/hyperledger/burrow/logging/lifecycle" + "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/rpc/rpcdump" "github.com/hyperledger/burrow/rpc/rpcquery" cli "github.com/jawher/mow.cli" @@ -61,7 +61,7 @@ func Dump(output Output) func(cmd *cli.Cmd) { } // Include all logging by default - logger, err := lifecycle.NewStdErrLogger() + logger, err := logconfig.New().NewLogger() if err != nil { output.Fatalf("could not make logger: %v", err) } diff --git a/cmd/burrow/commands/vent.go b/cmd/burrow/commands/vent.go index 5dda1038f..bceab7cfd 100644 --- a/cmd/burrow/commands/vent.go +++ b/cmd/burrow/commands/vent.go @@ -8,10 +8,9 @@ import ( "syscall" "time" - "github.com/hyperledger/burrow/logging/lifecycle" - "github.com/hyperledger/burrow/config/source" "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/vent/config" "github.com/hyperledger/burrow/vent/service" "github.com/hyperledger/burrow/vent/sqldb" @@ -69,7 +68,7 @@ func Vent(output Output) func(cmd *cli.Cmd) { 
"[--blocks] [--txs] [--grpc-addr] [--http-addr] [--log-level] [--announce-every=]" cmd.Action = func() { - log, err := lifecycle.NewStdErrLogger() + log, err := logconfig.New().NewLogger() if err != nil { output.Fatalf("failed to load logger: %v", err) } @@ -161,7 +160,7 @@ func Vent(output Output) func(cmd *cli.Cmd) { } cmd.Action = func() { - log, err := lifecycle.NewStdErrLogger() + log, err := logconfig.New().NewLogger() if err != nil { output.Fatalf("failed to load logger: %v", err) } diff --git a/core/config.go b/core/config.go index ae38b4b93..225945622 100644 --- a/core/config.go +++ b/core/config.go @@ -9,7 +9,6 @@ import ( "github.com/hyperledger/burrow/consensus/tendermint" "github.com/hyperledger/burrow/execution" "github.com/hyperledger/burrow/keys" - "github.com/hyperledger/burrow/logging/lifecycle" "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/logging/structure" "github.com/hyperledger/burrow/project" @@ -34,7 +33,7 @@ func (kern *Kernel) LoadKeysFromConfig(conf *keys.KeysConfig) (err error) { // LoadLoggerFromConfig adds a logging configuration to the kernel func (kern *Kernel) LoadLoggerFromConfig(conf *logconfig.LoggingConfig) error { - logger, err := lifecycle.NewLoggerFromLoggingConfig(conf) + logger, err := conf.NewLogger() kern.SetLogger(logger) return err } diff --git a/logging/adapters/stdlib/capture.go b/logging/adapters/stdlib/capture.go deleted file mode 100644 index a9e214752..000000000 --- a/logging/adapters/stdlib/capture.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2017 Monax Industries Limited -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package stdlib - -import ( - "io" - stdlog "log" - - "github.com/go-kit/kit/log" - "github.com/hyperledger/burrow/logging" -) - -func Capture(stdLibLogger stdlog.Logger, logger *logging.Logger) io.Writer { - adapter := newAdapter(logger) - stdLibLogger.SetOutput(adapter) - return adapter -} - -func CaptureRootLogger(logger *logging.Logger) io.Writer { - adapter := newAdapter(logger) - stdlog.SetOutput(adapter) - return adapter -} - -func newAdapter(logger *logging.Logger) io.Writer { - return log.NewStdlibAdapter(logger.Trace) -} diff --git a/logging/lifecycle/lifecycle.go b/logging/lifecycle/lifecycle.go deleted file mode 100644 index d9eb037c2..000000000 --- a/logging/lifecycle/lifecycle.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2017 Monax Industries Limited -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package lifecycle - -// No package in ./logging/... should depend on lifecycle -import ( - "os" - - "github.com/hyperledger/burrow/logging/adapters/stdlib" - "github.com/hyperledger/burrow/logging/logconfig" - "github.com/hyperledger/burrow/logging/loggers" - "github.com/hyperledger/burrow/logging/structure" - - "fmt" - - "github.com/eapache/channels" - "github.com/go-kit/kit/log" - "github.com/hyperledger/burrow/logging" -) - -// Lifecycle provides a canonical source for burrow loggers. Components should use the functions here -// to set up their root logger and capture any other logging output. - -// Obtain a logger from a LoggingConfig -func NewLoggerFromLoggingConfig(loggingConfig *logconfig.LoggingConfig) (*logging.Logger, error) { - if loggingConfig == nil { - return NewStdErrLogger() - } else { - outputLogger, errCh, err := loggerFromLoggingConfig(loggingConfig) - if err != nil { - return nil, err - } - logger := logging.NewLogger(outputLogger) - if !loggingConfig.Trace { - logger.Trace = log.NewNopLogger() - } - go func() { - err := <-errCh.Out() - if err != nil { - fmt.Printf("Logging error: %v", err) - } - }() - return logger, nil - } -} - -// Hot swap logging config by replacing output loggers of passed InfoTraceLogger -// with those built from loggingConfig -func SwapOutputLoggersFromLoggingConfig(logger *logging.Logger, loggingConfig *logconfig.LoggingConfig) (channels.Channel, error) { - outputLogger, errCh, err := loggerFromLoggingConfig(loggingConfig) - if err != nil { - return channels.NewDeadChannel(), err - } - logger.SwapOutput(outputLogger) - return errCh, nil -} - -func NewStdErrLogger() (*logging.Logger, error) { - outputLogger, err := loggers.NewStreamLogger(os.Stderr, loggers.TerminalFormat) - if err != nil { - return nil, err - } - return logging.NewLogger(outputLogger), nil -} - -func JustLogger(logger *logging.Logger, _ channels.Channel) *logging.Logger { - return logger -} - -func CaptureStdlibLogOutput(logger *logging.Logger) { - stdlib.CaptureRootLogger(logger.With(structure.CapturedLoggingSourceKey, "stdlib_log")) -} - -// Helpers -func loggerFromLoggingConfig(loggingConfig *logconfig.LoggingConfig) (log.Logger, channels.Channel, error) { - outputLogger, _, err := loggingConfig.RootSink.BuildLogger() - if err != nil { - return nil, nil, err - } - var errCh channels.Channel = channels.NewDeadChannel() - var logger log.Logger = loggers.BurrowFormatLogger(outputLogger) - if loggingConfig.NonBlocking { - logger, errCh = loggers.NonBlockingLogger(logger) - return logger, errCh, nil - } - return logger, errCh, err -} diff --git a/logging/lifecycle/lifecycle_test.go b/logging/lifecycle/lifecycle_test.go deleted file mode 100644 index 91ae8a16c..000000000 --- a/logging/lifecycle/lifecycle_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package lifecycle - -import ( - "os" - "testing" - - "bufio" - - "github.com/stretchr/testify/assert" -) - -func TestNewLoggerFromLoggingConfig(t *testing.T) { - reader := CaptureStderr(t, func() { - logger, err := NewLoggerFromLoggingConfig(nil) - assert.NoError(t, err) - logger.Info.Log("Quick", "Test") - }) - line, _, err := reader.ReadLine() - assert.NoError(t, err) - lineString := string(line) - assert.NotEmpty(t, lineString) -} - -func CaptureStderr(t *testing.T, runner func()) *bufio.Reader { - stderr := os.Stderr - defer func() { - os.Stderr = stderr - }() - r, w, err := os.Pipe() - assert.NoError(t, err, "Couldn't make fifo") - os.Stderr = w - - runner() - - return bufio.NewReader(r) -} diff --git a/logging/logconfig/config.go 
b/logging/logconfig/config.go index 88a0b543c..cb0739668 100644 --- a/logging/logconfig/config.go +++ b/logging/logconfig/config.go @@ -4,6 +4,9 @@ import ( "bytes" "fmt" + "github.com/eapache/channels" + "github.com/go-kit/kit/log" + "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/logging/structure" "encoding/json" @@ -35,8 +38,12 @@ func DefaultNodeLoggingConfig() *LoggingConfig { } } +// Provide a defeault logging config func New() *LoggingConfig { - return &LoggingConfig{} + return &LoggingConfig{ + NonBlocking: false, + RootSink: Sink().SetOutput(StderrOutput().SetFormat(JSONFormat)), + } } func (lc *LoggingConfig) Root(configure func(sink *SinkConfig) *SinkConfig) *LoggingConfig { @@ -61,6 +68,50 @@ func (lc *LoggingConfig) JSONString() string { return JSONString(lc) } +// Obtain a logger from this LoggingConfig +func (lc *LoggingConfig) NewLogger() (*logging.Logger, error) { + outputLogger, errCh, err := newLogger(lc) + if err != nil { + return nil, err + } + logger := logging.NewLogger(outputLogger) + if !lc.Trace { + logger.Trace = log.NewNopLogger() + } + go func() { + err := <-errCh.Out() + if err != nil { + fmt.Printf("Logging error: %v", err) + } + }() + return logger, nil +} + +// Hot swap logging config by replacing output loggers built from this LoggingConfig +func (lc *LoggingConfig) UpdateLogger(logger *logging.Logger) (channels.Channel, error) { + outputLogger, errCh, err := newLogger(lc) + if err != nil { + return channels.NewDeadChannel(), err + } + logger.SwapOutput(outputLogger) + return errCh, nil +} + +// Helpers +func newLogger(loggingConfig *LoggingConfig) (log.Logger, channels.Channel, error) { + outputLogger, _, err := loggingConfig.RootSink.BuildLogger() + if err != nil { + return nil, nil, err + } + var errCh channels.Channel = channels.NewDeadChannel() + var logger log.Logger = loggers.BurrowFormatLogger(outputLogger) + if loggingConfig.NonBlocking { + logger, errCh = loggers.NonBlockingLogger(logger) + return logger, errCh, nil + } + return logger, errCh, err +} + func TOMLString(v interface{}) string { buf := new(bytes.Buffer) encoder := toml.NewEncoder(buf) diff --git a/logging/logconfig/format.go b/logging/logconfig/format.go new file mode 100644 index 000000000..6c29d295f --- /dev/null +++ b/logging/logconfig/format.go @@ -0,0 +1,11 @@ +package logconfig + +import "github.com/hyperledger/burrow/logging/loggers" + +// Keep these in sync +const ( + JSONFormat = loggers.JSONFormat + LogfmtFormat = loggers.LogfmtFormat + TerminalFormat = loggers.TerminalFormat + DefaultFormat = loggers.DefaultFormat +) diff --git a/logging/loggers/burrow_format_logger.go b/logging/loggers/burrow_format_logger.go index cf02702fd..f0b45c48b 100644 --- a/logging/loggers/burrow_format_logger.go +++ b/logging/loggers/burrow_format_logger.go @@ -15,6 +15,8 @@ package loggers import ( + "encoding" + "encoding/json" "fmt" "sync" "time" @@ -47,7 +49,7 @@ func (bfl *burrowFormatLogger) Log(keyvals ...interface{}) error { keyvals = structure.MapKeyValues(keyvals, func(key interface{}, value interface{}) (interface{}, interface{}) { switch v := value.(type) { - case string: + case string, json.Marshaler, encoding.TextMarshaler: case time.Time: value = v.Format(time.RFC3339Nano) case fmt.Stringer: diff --git a/logging/loggers/format.go b/logging/loggers/format.go new file mode 100644 index 000000000..b5beeef07 --- /dev/null +++ b/logging/loggers/format.go @@ -0,0 +1,8 @@ +package loggers + +const ( + JSONFormat = "json" + LogfmtFormat = "logfmt" + TerminalFormat = 
"terminal" + DefaultFormat = TerminalFormat +) diff --git a/logging/loggers/stream_logger.go b/logging/loggers/stream_logger.go index 70ba74b57..7e3c3fad7 100644 --- a/logging/loggers/stream_logger.go +++ b/logging/loggers/stream_logger.go @@ -10,13 +10,6 @@ import ( "github.com/hyperledger/burrow/logging/structure" ) -const ( - JSONFormat = "json" - LogfmtFormat = "logfmt" - TerminalFormat = "terminal" - defaultFormatName = TerminalFormat -) - type Syncable interface { Sync() error } @@ -26,7 +19,7 @@ func NewStreamLogger(writer io.Writer, format string) (log.Logger, error) { var err error switch format { case "": - return NewStreamLogger(writer, defaultFormatName) + return NewStreamLogger(writer, DefaultFormat) case JSONFormat: logger = log.NewJSONLogger(writer) case LogfmtFormat: diff --git a/logging/structure/structure.go b/logging/structure/structure.go index f9e196ec3..c1024696f 100644 --- a/logging/structure/structure.go +++ b/logging/structure/structure.go @@ -250,7 +250,7 @@ func StringifyKey(key interface{}) string { case fmt.Stringer: return k.String() default: - return fmt.Sprintf("%v", key) + return fmt.Sprint(key) } } } diff --git a/rpc/lib/rpc_test.go b/rpc/lib/rpc_test.go index 9d2840bbe..d66727439 100644 --- a/rpc/lib/rpc_test.go +++ b/rpc/lib/rpc_test.go @@ -13,9 +13,9 @@ import ( "testing" "time" + "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/process" - "github.com/hyperledger/burrow/logging/lifecycle" "github.com/hyperledger/burrow/rpc/lib/client" "github.com/hyperledger/burrow/rpc/lib/server" "github.com/hyperledger/burrow/rpc/lib/types" @@ -88,10 +88,13 @@ func TestMain(m *testing.M) { // launch unix and tcp servers func setup() { - logger, _ := lifecycle.NewStdErrLogger() + logger, err := logconfig.New().NewLogger() + if err != nil { + panic(err) + } cmd := exec.Command("rm", "-f", unixSocket) - err := cmd.Start() + err = cmd.Start() if err != nil { panic(err) } From 24d2f44f69dd6204928e8b3e3da755558e87023a Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 15 Jul 2019 16:33:47 +0100 Subject: [PATCH 22/70] vent: generate tables spec from ABI for log mode When there are no primary keys in the table spec, then chain, height, index become the primary key. Signed-off-by: Sean Young --- cmd/burrow/commands/vent.go | 24 ++++++++ vent/service/decoder.go | 1 + vent/sqlsol/generate.go | 55 ++++++++++++++++++ vent/sqlsol/projection.go | 69 ++++++++++++++++++++-- vent/sqlsol/projection_test.go | 102 +++++++++++++++++++++++++++++++++ vent/test/sqlsol_example.json | 2 +- vent/types/sql_table.go | 1 + 7 files changed, 248 insertions(+), 6 deletions(-) create mode 100644 vent/sqlsol/generate.go diff --git a/cmd/burrow/commands/vent.go b/cmd/burrow/commands/vent.go index bceab7cfd..0bbf56613 100644 --- a/cmd/burrow/commands/vent.go +++ b/cmd/burrow/commands/vent.go @@ -2,6 +2,7 @@ package commands import ( "fmt" + "io/ioutil" "os" "os/signal" "sync" @@ -133,6 +134,29 @@ func Vent(output Output) func(cmd *cli.Cmd) { } }) + cmd.Command("spec", "Generate SQLSOL specification from ABIs", + func(cmd *cli.Cmd) { + abiFileOpt := cmd.StringsOpt("abi", nil, "EVM Contract ABI file or folder") + dest := cmd.StringArg("SPEC", "", "Write resulting spec to this json file") + + cmd.Action = func() { + abiSpec, err := abi.LoadPath(*abiFileOpt...) 
+ if err != nil { + output.Fatalf("ABI loader error: %v", err) + } + + spec, err := sqlsol.GenerateSpecFromAbis(abiSpec) + if err != nil { + output.Fatalf("error generating spec: %s\n", err) + } + + err = ioutil.WriteFile(*dest, []byte(source.JSONString(spec)), 0644) + if err != nil { + output.Fatalf("error writing file: %v\n", err) + } + } + }) + cmd.Command("restore", "Restore the mapped tables from the _vent_log table", func(cmd *cli.Cmd) { const timeLayout = "2006-01-02 15:04:05" diff --git a/vent/service/decoder.go b/vent/service/decoder.go index 28c462e5b..267bcf20f 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -28,6 +28,7 @@ func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, a data[types.EventNameLabel] = evAbi.Name data[types.ChainIDLabel] = origin.ChainID data[types.BlockHeightLabel] = fmt.Sprintf("%v", origin.GetHeight()) + data[types.BlockIndexLabel] = fmt.Sprintf("%v", origin.GetIndex()) data[types.EventTypeLabel] = header.GetEventType().String() data[types.TxTxHashLabel] = header.TxHash.String() diff --git a/vent/sqlsol/generate.go b/vent/sqlsol/generate.go new file mode 100644 index 000000000..1597d1f9d --- /dev/null +++ b/vent/sqlsol/generate.go @@ -0,0 +1,55 @@ +package sqlsol + +import ( + "fmt" + + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/vent/types" +) + +// GenerateSpecFromAbis creates a simple spec which just logs all events +func GenerateSpecFromAbis(spec *abi.AbiSpec) ([]*types.EventClass, error) { + type field struct { + Type abi.EVMType + Events []string + } + + fields := make(map[string]field) + + for _, ev := range spec.EventsById { + for _, in := range ev.Inputs { + field, ok := fields[in.Name] + if ok { + if field.Type != in.EVM { + fmt.Printf("WARNING: field %s in event %s has different definitions in events %v (%s rather than %s)\n", in.Name, ev.Name, field.Events, field.Type, ty) + } else { + field.Events = append(field.Events, ev.Name) + } + } else { + field.Type = in.EVM + field.Events = []string{ev.Name} + } + fields[in.Name] = field + } + } + + ev := types.EventClass{ + TableName: "event", + Filter: "EventType = 'LogEvent'", + FieldMappings: make([]*types.EventFieldMapping, len(fields)), + } + + i := 0 + + for name, field := range fields { + ev.FieldMappings[i] = &types.EventFieldMapping{ + Field: name, + ColumnName: name, + Type: field.Type.GetSignature(), + Primary: false, + } + i++ + } + + return []*types.EventClass{&ev}, nil +} diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index d8b2a6d7a..5955702c1 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -88,9 +88,6 @@ func NewProjectionFromEventSpec(eventSpec types.EventSpec) (*Projection, error) // builds abi information from specification tables := make(types.EventTables) - // obtain global field mappings to add to table definitions - globalFieldMappings := getGlobalFieldMappings() - for _, eventClass := range eventSpec { // validate json structure if err := eventClass.Validate(); err != nil { @@ -101,8 +98,25 @@ func NewProjectionFromEventSpec(eventSpec types.EventSpec) (*Projection, error) var columns []*types.SQLTableColumn channels := make(map[string][]string) + // do we have a primary key + primary := false + for _, mapping := range eventClass.FieldMappings { + if mapping.Primary { + primary = true + break + } + } + + if !primary && eventClass.DeleteMarkerField != "" { + return nil, fmt.Errorf("no DeleteMarkerField allowed if no primary key on %v", eventClass) + 
} + // Add the global mappings - eventClass.FieldMappings = append(globalFieldMappings, eventClass.FieldMappings...) + if primary { + eventClass.FieldMappings = append(getGlobalFieldMappings(), eventClass.FieldMappings...) + } else { + eventClass.FieldMappings = append(getGlobalFieldMappingsLogMode(), eventClass.FieldMappings...) + } i := 0 for _, mapping := range eventClass.FieldMappings { @@ -272,8 +286,53 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { { ColumnName: columns.Height, Field: types.BlockHeightLabel, + Type: types.EventFieldTypeUInt, + }, + { + ColumnName: columns.Index, + Field: types.BlockIndexLabel, + Type: types.EventFieldTypeUInt, + }, + { + ColumnName: columns.TxHash, + Field: types.TxTxHashLabel, + Type: types.EventFieldTypeString, + }, + { + ColumnName: columns.EventType, + Field: types.EventTypeLabel, + Type: types.EventFieldTypeString, + }, + { + ColumnName: columns.EventName, + Field: types.EventNameLabel, Type: types.EventFieldTypeString, }, + } +} + +// getGlobalColumns returns global columns for event table structures, +// these columns will be part of every SQL event table to relate data with source events +func getGlobalFieldMappingsLogMode() []*types.EventFieldMapping { + return []*types.EventFieldMapping{ + { + ColumnName: columns.ChainID, + Field: types.ChainIDLabel, + Type: types.EventFieldTypeString, + Primary: true, + }, + { + ColumnName: columns.Height, + Field: types.BlockHeightLabel, + Type: types.EventFieldTypeUInt, + Primary: true, + }, + { + ColumnName: columns.Index, + Field: types.BlockIndexLabel, + Type: types.EventFieldTypeUInt, + Primary: true, + }, { ColumnName: columns.TxHash, Field: types.TxTxHashLabel, @@ -308,7 +367,7 @@ func mergeTables(tables ...*types.SQLTable) (*types.SQLTable, error) { if columnA, ok := columns[columnB.Name]; ok { if !columnA.Equals(columnB) { return nil, fmt.Errorf("cannot merge event class tables for %s because of "+ - "conflicting columns: %v and %v", t.Name, columnB, columnB) + "conflicting columns: %v and %v", t.Name, columnA, columnB) } // Just keep existing column from A - they match } else { diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index 40a5c2530..6774655b2 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -148,6 +148,7 @@ func TestNewProjectionFromEventSpec(t *testing.T) { Type: types.EventFieldTypeString, ColumnName: "name", Notify: []string{"burn"}, + Primary: true, }, { Field: "burn", @@ -178,6 +179,7 @@ func TestNewProjectionFromEventSpec(t *testing.T) { Type: types.EventFieldTypeString, ColumnName: "name", Notify: []string{"burn"}, + Primary: true, }, { Field: "unreliable", @@ -225,3 +227,103 @@ func TestNewProjectionFromEventSpec(t *testing.T) { _, err = sqlsol.NewProjectionFromEventSpec(eventSpec) require.Error(t, err) } + +func TestWithNoPrimaryKey(t *testing.T) { + tableName := "BurnNotices" + eventSpec := types.EventSpec{ + { + TableName: tableName, + Filter: "LOG1Text = 'CIA/burn'", + DeleteMarkerField: "__DELETE__", + FieldMappings: []*types.EventFieldMapping{ + { + Field: "codename", + Type: types.EventFieldTypeString, + ColumnName: "name", + Notify: []string{"burn"}, + }, + { + Field: "burn", + Type: types.EventFieldTypeBool, + ColumnName: "burnt", + Notify: []string{"burn"}, + }, + { + Field: "dairy", + Type: types.EventFieldTypeString, + ColumnName: "coffee_milk", + Notify: []string{"mrs_doyle"}, + }, + { + Field: "datetime", + Type: types.EventFieldTypeInt, + ColumnName: "time_changed", + Notify: 
[]string{"last_heard", "mrs_doyle"}, + }, + }, + }, + } + + _, err := sqlsol.NewProjectionFromEventSpec(eventSpec) + require.Error(t, err, "no DeleteMarkerField allowed if no primary key on") + + // Try again and now check that the right fields are primary + eventSpec[0].DeleteMarkerField = "" + + projection, err := sqlsol.NewProjectionFromEventSpec(eventSpec) + require.NoError(t, err, "projection with no primary key should be allowed") + + for _, c := range projection.Tables[tableName].Columns { + switch c.Name { + case "_chainid": + require.Equal(t, true, c.Primary) + case "_height": + require.Equal(t, true, c.Primary) + case "_index": + require.Equal(t, true, c.Primary) + default: + require.Equal(t, false, c.Primary) + } + } + + eventSpec = types.EventSpec{ + { + TableName: tableName, + Filter: "LOG1Text = 'CIA/burn'", + FieldMappings: []*types.EventFieldMapping{ + { + Field: "codename", + Type: types.EventFieldTypeString, + ColumnName: "name", + Notify: []string{"burn"}, + Primary: true, + }, + { + Field: "burn", + Type: types.EventFieldTypeBool, + ColumnName: "burnt", + Notify: []string{"burn"}, + }, + { + Field: "dairy", + Type: types.EventFieldTypeString, + ColumnName: "coffee_milk", + Notify: []string{"mrs_doyle"}, + }, + { + Field: "datetime", + Type: types.EventFieldTypeInt, + ColumnName: "time_changed", + Notify: []string{"last_heard", "mrs_doyle"}, + }, + }, + }, + } + + projection, err = sqlsol.NewProjectionFromEventSpec(eventSpec) + require.NoError(t, err, "projection with primary key should be allowed") + + for _, c := range projection.Tables[tableName].Columns { + require.Equal(t, c.Name == "name", c.Primary) + } +} diff --git a/vent/test/sqlsol_example.json b/vent/test/sqlsol_example.json index d11e604a1..24a0c119b 100644 --- a/vent/test/sqlsol_example.json +++ b/vent/test/sqlsol_example.json @@ -35,7 +35,7 @@ { "Field": "height", "ColumnName": "_height", - "Type": "string", + "Type": "uint", "Notify": ["meta", "keyed_meta"] }, { diff --git a/vent/types/sql_table.go b/vent/types/sql_table.go index 1567dcbc3..ec96015a1 100644 --- a/vent/types/sql_table.go +++ b/vent/types/sql_table.go @@ -163,6 +163,7 @@ const ( // block related ChainIDLabel = "chainid" BlockHeightLabel = "height" + BlockIndexLabel = "index" // transaction related TxTxHashLabel = "txHash" From a726498617420ba62513a36c054603004f10f429 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 17 Jul 2019 10:13:06 +0100 Subject: [PATCH 23/70] burrow vent spec should automatically upcast conflicting types Signed-off-by: Sean Young --- execution/evm/abi/abi.go | 31 +++++++++++++++ execution/solidity/event_emitter.sol | 12 ++++++ execution/solidity/event_emitter.sol.go | 4 +- vent/sqlsol/generate.go | 6 ++- vent/sqlsol/generate_test.go | 53 +++++++++++++++++++++++++ 5 files changed, 103 insertions(+), 3 deletions(-) create mode 100644 vent/sqlsol/generate_test.go diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index c12013312..04054b150 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -27,6 +27,7 @@ type EVMType interface { pack(v interface{}) ([]byte, error) unpack(data []byte, offset int, v interface{}) (int, error) Dynamic() bool + ImplicitCast(o EVMType) bool } var _ EVMType = (*EVMBool)(nil) @@ -112,6 +113,10 @@ func (e EVMBool) Dynamic() bool { return false } +func (e EVMBool) ImplicitCast(o EVMType) bool { + return false +} + var _ EVMType = (*EVMUint)(nil) type EVMUint struct { @@ -293,6 +298,11 @@ func (e EVMInt) getGoType() interface{} { } } +func (e EVMInt) 
ImplicitCast(o EVMType) bool { + i, ok := o.(EVMInt) + return ok && i.M >= e.M +} + func (e EVMInt) GetSignature() string { return fmt.Sprintf("int%d", e.M) } @@ -455,6 +465,11 @@ func (e EVMInt) Dynamic() bool { return false } +func (e EVMUint) ImplicitCast(o EVMType) bool { + u, ok := o.(EVMUint) + return ok && u.M >= e.M +} + var _ EVMType = (*EVMAddress)(nil) type EVMAddress struct { @@ -517,6 +532,10 @@ func (e EVMAddress) Dynamic() bool { return false } +func (e EVMAddress) ImplicitCast(o EVMType) bool { + return false +} + var _ EVMType = (*EVMBytes)(nil) type EVMBytes struct { @@ -605,6 +624,10 @@ func (e EVMBytes) GetSignature() string { } } +func (e EVMBytes) ImplicitCast(o EVMType) bool { + return false +} + var _ EVMType = (*EVMString)(nil) type EVMString struct { @@ -649,6 +672,10 @@ func (e EVMString) Dynamic() bool { return true } +func (e EVMString) ImplicitCast(o EVMType) bool { + return false +} + var _ EVMType = (*EVMFixed)(nil) type EVMFixed struct { @@ -685,6 +712,10 @@ func (e EVMFixed) Dynamic() bool { return false } +func (e EVMFixed) ImplicitCast(o EVMType) bool { + return false +} + type Argument struct { Name string EVM EVMType diff --git a/execution/solidity/event_emitter.sol b/execution/solidity/event_emitter.sol index 8c06f1882..0af29d9f7 100644 --- a/execution/solidity/event_emitter.sol +++ b/execution/solidity/event_emitter.sol @@ -10,7 +10,19 @@ contract EventEmitter { int bignum, string indexed hash); + event ManyTypes2( + bytes32 indexed direction, + bool trueism, + string german , + int128 indexed newDepth, + int8 bignum, + string indexed hash); + function EmitOne() public { emit ManyTypes("Downsie!", true, "Donaudampfschifffahrtselektrizitätenhauptbetriebswerkbauunterbeamtengesellschaft", 102, 42, "hash"); } + + function EmitTwo() public { + emit ManyTypes2("Downsie!", true, "Donaudampfschifffahrtselektrizitätenhauptbetriebswerkbauunterbeamtengesellschaft", 102, 42, "hash"); + } } \ No newline at end of file diff --git a/execution/solidity/event_emitter.sol.go b/execution/solidity/event_emitter.sol.go index c42288dec..3fc42b7f9 100644 --- a/execution/solidity/event_emitter.sol.go +++ b/execution/solidity/event_emitter.sol.go @@ -2,5 +2,5 @@ package solidity import hex "github.com/tmthrgd/go-hex" -var Bytecode_EventEmitter = hex.MustDecodeString("6080604052348015600f57600080fd5b506101908061001f6000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063e8e49a711461004d575b600080fd5b610055610057565b005b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f20aec2a3bcd8050a3a9e852e9d424805bad75ba33b57077464c73ae98d0582696001602a6040518083151515158152602001806020018381526020018281038252605181526020018061011460519139606001935050505060405180910390a456fe446f6e617564616d7066736368696666666168727473656c656b7472697a6974c3a474656e686175707462657472696562737765726b626175756e7465726265616d74656e676573656c6c736368616674a165627a7a7230582043472c03b2946767b21150a9f581b7cee0c585db6817446b4fa045bff32809450029") -var Abi_EventEmitter = 
[]byte(`[{"constant":false,"inputs":[],"name":"EmitOne","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":false,"name":"german","type":"string"},{"indexed":true,"name":"newDepth","type":"int64"},{"indexed":false,"name":"bignum","type":"int256"},{"indexed":true,"name":"hash","type":"string"}],"name":"ManyTypes","type":"event"}]`) +var Bytecode_EventEmitter = hex.MustDecodeString("608060405234801561001057600080fd5b50610264806100206000396000f3fe608060405234801561001057600080fd5b5060043610610053576000357c010000000000000000000000000000000000000000000000000000000090048063508ed79914610058578063e8e49a7114610062575b600080fd5b61006061006c565b005b61006a61012b565b005b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f2d989eca8871e173291c8e287f34adebef09917027f9e904c22ce459a2cff0ca6001602a6040518083151515158152602001806020018360000b8152602001828103825260518152602001806101e860519139606001935050505060405180910390a4565b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f20aec2a3bcd8050a3a9e852e9d424805bad75ba33b57077464c73ae98d0582696001602a604051808315151515815260200180602001838152602001828103825260518152602001806101e860519139606001935050505060405180910390a456fe446f6e617564616d7066736368696666666168727473656c656b7472697a6974c3a474656e686175707462657472696562737765726b626175756e7465726265616d74656e676573656c6c736368616674a165627a7a72305820b11ba06d97e4448c1d8bf72e259b4d14daf0355f8aae146b40a708406fe037dd0029") +var Abi_EventEmitter = []byte(`[{"constant":false,"inputs":[],"name":"EmitTwo","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"EmitOne","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":false,"name":"german","type":"string"},{"indexed":true,"name":"newDepth","type":"int64"},{"indexed":false,"name":"bignum","type":"int256"},{"indexed":true,"name":"hash","type":"string"}],"name":"ManyTypes","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":false,"name":"german","type":"string"},{"indexed":true,"name":"newDepth","type":"int128"},{"indexed":false,"name":"bignum","type":"int8"},{"indexed":true,"name":"hash","type":"string"}],"name":"ManyTypes2","type":"event"}]`) diff --git a/vent/sqlsol/generate.go b/vent/sqlsol/generate.go index 1597d1f9d..14a424831 100644 --- a/vent/sqlsol/generate.go +++ b/vent/sqlsol/generate.go @@ -21,7 +21,11 @@ func GenerateSpecFromAbis(spec *abi.AbiSpec) ([]*types.EventClass, error) { field, ok := fields[in.Name] if ok { if field.Type != in.EVM { - fmt.Printf("WARNING: field %s in event %s has different definitions in events %v (%s rather than %s)\n", in.Name, ev.Name, field.Events, field.Type, ty) + if field.Type.ImplicitCast(in.EVM) { + field.Type = in.EVM + } else if !in.EVM.ImplicitCast(field.Type) { + fmt.Printf("WARNING: field %s in event %s has different definitions in events %v (%s rather than %s)\n", in.Name, 
ev.Name, field.Events, field.Type, in.EVM) + } } else { field.Events = append(field.Events, ev.Name) } diff --git a/vent/sqlsol/generate_test.go b/vent/sqlsol/generate_test.go new file mode 100644 index 000000000..e60b4b6d8 --- /dev/null +++ b/vent/sqlsol/generate_test.go @@ -0,0 +1,53 @@ +package sqlsol_test + +import ( + "testing" + + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/execution/solidity" + "github.com/hyperledger/burrow/vent/sqlsol" + "github.com/hyperledger/burrow/vent/types" + "github.com/stretchr/testify/require" +) + +func TestGenerateSpecFromAbis(t *testing.T) { + spec, err := abi.ReadAbiSpec(solidity.Abi_EventEmitter) + require.NoError(t, err) + + project, err := sqlsol.GenerateSpecFromAbis(spec) + require.NoError(t, err) + + require.ElementsMatch(t, project[0].FieldMappings, + []*types.EventFieldMapping{ + &types.EventFieldMapping{ + Field: "trueism", + ColumnName: "trueism", + Type: "bool", + }, + &types.EventFieldMapping{ + Field: "german", + ColumnName: "german", + Type: "string", + }, + &types.EventFieldMapping{ + Field: "newDepth", + ColumnName: "newDepth", + Type: "int128", + }, + &types.EventFieldMapping{ + Field: "bignum", + ColumnName: "bignum", + Type: "int256", + }, + &types.EventFieldMapping{ + Field: "hash", + ColumnName: "hash", + Type: "bytes32", + }, + &types.EventFieldMapping{ + Field: "direction", + ColumnName: "direction", + Type: "bytes32", + }, + }) +} From 89e6f681c097e37bec0b0dff04fe7dc65ff1e0dc Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 17 Jul 2019 12:41:12 +0100 Subject: [PATCH 24/70] Rename AbiSpec -> Spec Signed-off-by: Sean Young --- deploy/def/client.go | 2 +- deploy/jobs/jobs_contracts.go | 6 ++-- execution/evm/abi/abi.go | 28 +++++++++---------- execution/evm/abi/core.go | 20 ++++++------- execution/wasm/wasm_test.go | 2 +- .../rpcevents/execution_events_server_test.go | 2 +- integration/rpctransact/call_test.go | 14 +++++----- vent/service/consumer.go | 4 +-- vent/service/decoder.go | 2 +- vent/service/rowbuilder.go | 2 +- vent/sqlsol/generate.go | 2 +- vent/sqlsol/generate_test.go | 2 +- vent/test/events.go | 2 +- 13 files changed, 44 insertions(+), 44 deletions(-) diff --git a/deploy/def/client.go b/deploy/def/client.go index 1670680fc..15db245a7 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -41,7 +41,7 @@ type Client struct { queryClient rpcquery.QueryClient executionEventsClient rpcevents.ExecutionEventsClient keyClient keys.KeyClient - AllSpecs *abi.AbiSpec + AllSpecs *abi.Spec } func NewClient(chain, keysClientAddress string, mempoolSigning bool, timeout time.Duration) *Client { diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index e8bc54065..0c5250ee6 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -376,7 +376,7 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook data = data + callData } else { // No constructor arguments were provided. Did the constructor want any? 
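// abi.ReadSpec (the renamed ReadAbiSpec, changed in the hunk below) parses the
// compiled contract's JSON ABI so the deploy job can inspect the constructor's
// declared inputs.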
- spec, err := abi.ReadAbiSpec(compilersResponse.Contract.Abi) + spec, err := abi.ReadSpec(compilersResponse.Contract.Abi) if err != nil { return nil, err } @@ -593,8 +593,8 @@ func logEvents(txe *exec.TxExecution, client *def.Client, logger *logging.Logger } func mergeAbiSpecBytes(client *def.Client, bs []byte) { - spec, err := abi.ReadAbiSpec(bs) + spec, err := abi.ReadSpec(bs) if err == nil { - client.AllSpecs = abi.MergeAbiSpec([]*abi.AbiSpec{client.AllSpecs, spec}) + client.AllSpecs = abi.MergeSpec([]*abi.Spec{client.AllSpecs, spec}) } } diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index 04054b150..7baa08fe7 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -747,7 +747,7 @@ type EventSpec struct { Anonymous bool } -type AbiSpec struct { +type Spec struct { Constructor FunctionSpec Fallback FunctionSpec Functions map[string]FunctionSpec @@ -762,7 +762,7 @@ type ArgumentJSON struct { Indexed bool } -type AbiSpecJSON struct { +type SpecJSON struct { Name string Type string Inputs []ArgumentJSON @@ -875,13 +875,13 @@ func readArgSpec(argsJ []ArgumentJSON) ([]Argument, error) { return args, nil } -func ReadAbiSpec(specBytes []byte) (*AbiSpec, error) { - var specJ []AbiSpecJSON +func ReadSpec(specBytes []byte) (*Spec, error) { + var specJ []SpecJSON err := json.Unmarshal(specBytes, &specJ) if err != nil { // The abi spec file might a bin file, with the Abi under the Abi field in json var binFile struct { - Abi []AbiSpecJSON + Abi []SpecJSON } err = json.Unmarshal(specBytes, &binFile) if err != nil { @@ -890,7 +890,7 @@ func ReadAbiSpec(specBytes []byte) (*AbiSpec, error) { specJ = binFile.Abi } - abiSpec := AbiSpec{ + abiSpec := Spec{ Events: make(map[string]EventSpec), EventsById: make(map[EventID]EventSpec), Functions: make(map[string]FunctionSpec), @@ -941,20 +941,20 @@ func ReadAbiSpec(specBytes []byte) (*AbiSpec, error) { return &abiSpec, nil } -func ReadAbiSpecFile(filename string) (*AbiSpec, error) { +func ReadSpecFile(filename string) (*Spec, error) { specBytes, err := ioutil.ReadFile(filename) if err != nil { return nil, err } - return ReadAbiSpec(specBytes) + return ReadSpec(specBytes) } -// MergeAbiSpec takes multiple AbiSpecs and merges them into once structure. Note that +// MergeSpec takes multiple Specs and merges them into once structure. Note that // the same function name or event name can occur in different abis, so there might be // some information loss. -func MergeAbiSpec(abiSpec []*AbiSpec) *AbiSpec { - newSpec := AbiSpec{ +func MergeSpec(abiSpec []*Spec) *Spec { + newSpec := Spec{ Events: make(map[string]EventSpec), EventsById: make(map[EventID]EventSpec), Functions: make(map[string]FunctionSpec), @@ -1141,7 +1141,7 @@ func UnpackEvent(eventSpec *EventSpec, topics []burrow_binary.Word256, data []by }) } -func (abiSpec *AbiSpec) Unpack(data []byte, fname string, args ...interface{}) error { +func (abiSpec *Spec) Unpack(data []byte, fname string, args ...interface{}) error { var funcSpec FunctionSpec var argSpec []Argument if fname != "" { @@ -1165,7 +1165,7 @@ func (abiSpec *AbiSpec) Unpack(data []byte, fname string, args ...interface{}) e }) } -func (abiSpec *AbiSpec) UnpackWithID(data []byte, args ...interface{}) error { +func (abiSpec *Spec) UnpackWithID(data []byte, args ...interface{}) error { var argSpec []Argument var id FunctionID @@ -1191,7 +1191,7 @@ func (abiSpec *AbiSpec) UnpackWithID(data []byte, args ...interface{}) error { // must match the function being called. 
// Returns the ABI encoded function call, whether the function is constant according // to the ABI (which means it does not modified contract state) -func (abiSpec *AbiSpec) Pack(fname string, args ...interface{}) ([]byte, *FunctionSpec, error) { +func (abiSpec *Spec) Pack(fname string, args ...interface{}) ([]byte, *FunctionSpec, error) { var funcSpec FunctionSpec var argSpec []Argument if fname != "" { diff --git a/execution/evm/abi/core.go b/execution/evm/abi/core.go index eb5dd9e32..0e9b8a7a6 100644 --- a/execution/evm/abi/core.go +++ b/execution/evm/abi/core.go @@ -20,7 +20,7 @@ type Variable struct { func init() { var err error - RevertAbi, err = ReadAbiSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) + RevertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) if err != nil { panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) } @@ -28,7 +28,7 @@ func init() { // RevertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). // If a function exits this way, the this hardcoded ABI will be used. -var RevertAbi *AbiSpec +var RevertAbi *Spec // EncodeFunctionCallFromFile ABI encodes a function call based on ABI in file, and the // arguments specified as strings. @@ -63,7 +63,7 @@ func EncodeFunctionCall(abiData, funcName string, logger *logging.Logger, args . "arguments", fmt.Sprintf("%v", args), ) - abiSpec, err := ReadAbiSpec([]byte(abiData)) + abiSpec, err := ReadSpec([]byte(abiData)) if err != nil { logger.InfoMsg("Failed to decode abi spec", "abi", abiData, @@ -97,7 +97,7 @@ func DecodeFunctionReturnFromFile(abiLocation, binPath, funcName string, resultR } func DecodeFunctionReturn(abiData, name string, data []byte) ([]*Variable, error) { - abiSpec, err := ReadAbiSpec([]byte(abiData)) + abiSpec, err := ReadSpec([]byte(abiData)) if err != nil { return nil, err } @@ -162,12 +162,12 @@ func readAbi(root, contract string, logger *logging.Logger) (string, error) { } // LoadPath loads one abi file or finds all files in a directory -func LoadPath(abiFileOrDirs ...string) (*AbiSpec, error) { +func LoadPath(abiFileOrDirs ...string) (*Spec, error) { if len(abiFileOrDirs) == 0 { - return &AbiSpec{}, fmt.Errorf("no ABI file or directory provided") + return &Spec{}, fmt.Errorf("no ABI file or directory provided") } - specs := make([]*AbiSpec, 0) + specs := make([]*Spec, 0) for _, dir := range abiFileOrDirs { err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { @@ -179,7 +179,7 @@ func LoadPath(abiFileOrDirs ...string) (*AbiSpec, error) { return nil } if err == nil { - abiSpc, err := ReadAbiSpecFile(path) + abiSpc, err := ReadSpecFile(path) if err != nil { return errors.Wrap(err, "Error parsing abi file "+path) } @@ -188,10 +188,10 @@ func LoadPath(abiFileOrDirs ...string) (*AbiSpec, error) { return nil }) if err != nil { - return &AbiSpec{}, err + return &Spec{}, err } } - return MergeAbiSpec(specs), nil + return MergeSpec(specs), nil } func stripHex(s string) string { diff --git a/execution/wasm/wasm_test.go b/execution/wasm/wasm_test.go index 7e9a29293..2bfce5330 100644 --- a/execution/wasm/wasm_test.go +++ b/execution/wasm/wasm_test.go @@ -23,7 +23,7 @@ func TestStaticCallWithValue(t *testing.T) { require.NoError(t, cerr) // run getFooPlus2 - spec, err := abi.ReadAbiSpec(Abi_storage_test) + spec, err := abi.ReadSpec(Abi_storage_test) require.NoError(t, err) calldata, 
_, err := spec.Pack("getFooPlus2") diff --git a/integration/rpcevents/execution_events_server_test.go b/integration/rpcevents/execution_events_server_test.go index 6c52f9908..94be456c4 100644 --- a/integration/rpcevents/execution_events_server_test.go +++ b/integration/rpcevents/execution_events_server_test.go @@ -174,7 +174,7 @@ func TestExecutionEventsTest(t *testing.T) { t.Run("Revert", func(t *testing.T) { txe, err := rpctest.CreateContract(tcli, inputAddress0, solidity.Bytecode_Revert) require.NoError(t, err) - spec, err := abi.ReadAbiSpec(solidity.Abi_Revert) + spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) data, _, err := spec.Pack("RevertAt", 4) require.NoError(t, err) diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index c56748be6..fc3e52adc 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -127,7 +127,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Parallel() numGoroutines := 40 numRuns := 5 - spec, err := abi.ReadAbiSpec(solidity.Abi_StrangeLoop) + spec, err := abi.ReadSpec(solidity.Abi_StrangeLoop) require.NoError(t, err) data, _, err := spec.Pack("UpsieDownsie") require.NoError(t, err) @@ -282,7 +282,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee - spec, err := abi.ReadAbiSpec(solidity.Abi_StrangeLoop) + spec, err := abi.ReadSpec(solidity.Abi_StrangeLoop) require.NoError(t, err) data, _, err := spec.Pack("UpsieDownsie") require.NoError(t, err) @@ -301,7 +301,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee - spec, err := abi.ReadAbiSpec(solidity.Abi_StrangeLoop) + spec, err := abi.ReadSpec(solidity.Abi_StrangeLoop) require.NoError(t, err) data, _, err := spec.Pack("UpsieDownsie") require.NoError(t, err) @@ -326,7 +326,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee - spec, err := abi.ReadAbiSpec(solidity.Abi_EventEmitter) + spec, err := abi.ReadSpec(solidity.Abi_EventEmitter) require.NoError(t, err) calldata, _, err := spec.Pack("EmitOne") require.NoError(t, err) @@ -365,7 +365,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee - spec, err := abi.ReadAbiSpec(solidity.Abi_EventEmitter) + spec, err := abi.ReadSpec(solidity.Abi_EventEmitter) require.NoError(t, err) calldata, _, err := spec.Pack("EmitOne") require.NoError(t, err) @@ -400,7 +400,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Parallel() txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert) require.NoError(t, err) - spec, err := abi.ReadAbiSpec(solidity.Abi_Revert) + spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) data, _, err := spec.Pack("RevertAt", 4) 
require.NoError(t, err) @@ -417,7 +417,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Parallel() txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert) require.NoError(t, err) - spec, err := abi.ReadAbiSpec(solidity.Abi_Revert) + spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) data, _, err := spec.Pack("RevertNoReason") require.NoError(t, err) diff --git a/vent/service/consumer.go b/vent/service/consumer.go index d1282929b..044fdb8b0 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -55,7 +55,7 @@ func NewConsumer(cfg *config.VentConfig, log *logging.Logger, eventChannel chan // Run connects to a grpc service and subscribes to log events, // then gets tables structures, maps them & parse event data. // Store data in SQL event tables, it runs forever -func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stream bool) error { +func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.Spec, stream bool) error { var err error c.Log.InfoMsg("Connecting to Burrow gRPC server") @@ -211,7 +211,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, stre } } -func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi.AbiSpec, +func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi.Spec, eventCh chan<- types.EventData) func(blockExecution *exec.BlockExecution) error { return func(blockExecution *exec.BlockExecution) error { diff --git a/vent/service/decoder.go b/vent/service/decoder.go index 267bcf20f..c237a6648 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -12,7 +12,7 @@ import ( ) // decodeEvent unpacks & decodes event data -func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, abiSpec *abi.AbiSpec) (map[string]interface{}, error) { +func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, abiSpec *abi.Spec) (map[string]interface{}, error) { // to prepare decoded data and map to event item name data := make(map[string]interface{}) diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index 3c0b726a4..dde4a5170 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -16,7 +16,7 @@ import ( ) // buildEventData builds event data from transactions -func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiSpec *abi.AbiSpec, +func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiSpec *abi.Spec, l *logging.Logger) (types.EventDataRow, error) { // a fresh new row to store column/value data diff --git a/vent/sqlsol/generate.go b/vent/sqlsol/generate.go index 14a424831..12644aafa 100644 --- a/vent/sqlsol/generate.go +++ b/vent/sqlsol/generate.go @@ -8,7 +8,7 @@ import ( ) // GenerateSpecFromAbis creates a simple spec which just logs all events -func GenerateSpecFromAbis(spec *abi.AbiSpec) ([]*types.EventClass, error) { +func GenerateSpecFromAbis(spec *abi.Spec) ([]*types.EventClass, error) { type field struct { Type abi.EVMType Events []string diff --git a/vent/sqlsol/generate_test.go b/vent/sqlsol/generate_test.go index e60b4b6d8..7a87561ef 100644 --- a/vent/sqlsol/generate_test.go +++ b/vent/sqlsol/generate_test.go @@ -11,7 +11,7 @@ import ( ) func TestGenerateSpecFromAbis(t *testing.T) { - spec, err := abi.ReadAbiSpec(solidity.Abi_EventEmitter) + spec, 
err := abi.ReadSpec(solidity.Abi_EventEmitter) require.NoError(t, err) project, err := sqlsol.GenerateSpecFromAbis(spec) diff --git a/vent/test/events.go b/vent/test/events.go index f36b14eba..51148226e 100644 --- a/vent/test/events.go +++ b/vent/test/events.go @@ -58,7 +58,7 @@ func Call(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAd functionName string, args ...interface{}) *exec.TxExecution { t.Helper() - spec, err := abi.ReadAbiSpec(Abi_EventsTest) + spec, err := abi.ReadSpec(Abi_EventsTest) require.NoError(t, err) data, _, err := spec.Pack(functionName, args...) From 750a0d20f700c7907adecbf0eb64b0b7bf46dedf Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 17 Jul 2019 12:48:52 +0100 Subject: [PATCH 25/70] abi: rename source files to sensible names Signed-off-by: Sean Young --- .../evm/abi/{core_test.go => ab_test.go} | 3 +- execution/evm/abi/abi.go | 793 ++++-------------- execution/evm/abi/core.go | 208 ----- execution/evm/abi/primitives.go | 711 ++++++++++++++++ 4 files changed, 861 insertions(+), 854 deletions(-) rename execution/evm/abi/{core_test.go => ab_test.go} (99%) delete mode 100644 execution/evm/abi/core.go create mode 100644 execution/evm/abi/primitives.go diff --git a/execution/evm/abi/core_test.go b/execution/evm/abi/ab_test.go similarity index 99% rename from execution/evm/abi/core_test.go rename to execution/evm/abi/ab_test.go index b88f13e7c..a904a21bf 100644 --- a/execution/evm/abi/core_test.go +++ b/execution/evm/abi/ab_test.go @@ -2,11 +2,12 @@ package abi import ( "bytes" - "encoding/hex" "math/big" "strings" "testing" + hex "github.com/tmthrgd/go-hex" + "github.com/hyperledger/burrow/logging" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index 7baa08fe7..cec94f435 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -1,7 +1,6 @@ package abi import ( - "encoding/binary" "encoding/json" "fmt" "io/ioutil" @@ -10,710 +9,214 @@ import ( "regexp" "strconv" "strings" - "unsafe" // just for Sizeof burrow_binary "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/crypto/sha3" -) - -// EVM Solidity calls and return values are packed into -// pieces of 32 bytes, including a bool (wasting 255 out of 256 bits) -const ElementSize = 32 - -type EVMType interface { - GetSignature() string - getGoType() interface{} - pack(v interface{}) ([]byte, error) - unpack(data []byte, offset int, v interface{}) (int, error) - Dynamic() bool - ImplicitCast(o EVMType) bool -} - -var _ EVMType = (*EVMBool)(nil) - -type EVMBool struct { -} - -func (e EVMBool) GetSignature() string { - return "bool" -} - -func (e EVMBool) getGoType() interface{} { - return new(bool) -} - -func (e EVMBool) pack(v interface{}) ([]byte, error) { - var b bool - arg := reflect.ValueOf(v) - if arg.Kind() == reflect.String { - val := arg.String() - if strings.EqualFold(val, "true") || val == "1" { - b = true - } else if strings.EqualFold(val, "false") || val == "0" { - b = false - } else { - return nil, fmt.Errorf("%s is not a valid value for EVM Bool type", val) - } - } else if arg.Kind() == reflect.Bool { - b = arg.Bool() - } else { - return nil, fmt.Errorf("%s cannot be converted to EVM Bool type", arg.Kind().String()) - } - res := make([]byte, ElementSize) - if b { - res[ElementSize-1] = 1 - } - return res, nil -} -func (e EVMBool) unpack(data []byte, offset int, v interface{}) (int, error) { - if len(data)-offset < 32 { 
- return 0, fmt.Errorf("not enough data") - } - data = data[offset:] - switch v := v.(type) { - case *string: - if data[ElementSize-1] == 1 { - *v = "true" - } else if data[ElementSize-1] == 0 { - *v = "false" - } else { - return 0, fmt.Errorf("unexpected value for EVM bool") - } - case *int8: - *v = int8(data[ElementSize-1]) - case *int16: - *v = int16(data[ElementSize-1]) - case *int32: - *v = int32(data[ElementSize-1]) - case *int64: - *v = int64(data[ElementSize-1]) - case *int: - *v = int(data[ElementSize-1]) - case *uint8: - *v = uint8(data[ElementSize-1]) - case *uint16: - *v = uint16(data[ElementSize-1]) - case *uint32: - *v = uint32(data[ElementSize-1]) - case *uint64: - *v = uint64(data[ElementSize-1]) - case *uint: - *v = uint(data[ElementSize-1]) - case *bool: - *v = data[ElementSize-1] == 1 - default: - return 0, fmt.Errorf("cannot set type %s for EVM bool", reflect.ValueOf(v).Kind().String()) - } - return 32, nil -} - -func (e EVMBool) Dynamic() bool { - return false -} - -func (e EVMBool) ImplicitCast(o EVMType) bool { - return false -} + "os" + "path" + "path/filepath" -var _ EVMType = (*EVMUint)(nil) - -type EVMUint struct { - M uint64 -} + "github.com/hyperledger/burrow/deploy/compile" + "github.com/hyperledger/burrow/execution/errors" + "github.com/hyperledger/burrow/logging" +) -func (e EVMUint) GetSignature() string { - return fmt.Sprintf("uint%d", e.M) +// Variable exist to unpack return values into, so have both the return +// value and its name +type Variable struct { + Name string + Value string } -func (e EVMUint) getGoType() interface{} { - switch e.M { - case 8: - return new(uint8) - case 16: - return new(uint16) - case 32: - return new(uint32) - case 64: - return new(uint64) - default: - return new(big.Int) +func init() { + var err error + RevertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) + if err != nil { + panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) } } -func (e EVMUint) pack(v interface{}) ([]byte, error) { - n := new(big.Int) +// RevertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). +// If a function exits this way, the this hardcoded ABI will be used. +var RevertAbi *Spec - arg := reflect.ValueOf(v) - switch arg.Kind() { - case reflect.String: - _, ok := n.SetString(arg.String(), 0) - if !ok { - return nil, fmt.Errorf("Failed to parse `%s", arg.String()) - } - if n.Sign() < 0 { - return nil, fmt.Errorf("negative value not allowed for uint%d", e.M) - } - case reflect.Uint8: - fallthrough - case reflect.Uint16: - fallthrough - case reflect.Uint32: - fallthrough - case reflect.Uint64: - fallthrough - case reflect.Uint: - n.SetUint64(arg.Uint()) - case reflect.Int8: - fallthrough - case reflect.Int16: - fallthrough - case reflect.Int32: - fallthrough - case reflect.Int64: - fallthrough - case reflect.Int: - x := arg.Int() - if x < 0 { - return nil, fmt.Errorf("negative value not allowed for uint%d", e.M) - } - n.SetInt64(x) - default: - t := reflect.TypeOf(new(uint64)) - if reflect.TypeOf(v).ConvertibleTo(t) { - n.SetUint64(reflect.ValueOf(v).Convert(t).Uint()) - } else { - return nil, fmt.Errorf("cannot convert type %s to uint%d", arg.Kind().String(), e.M) - } +// EncodeFunctionCallFromFile ABI encodes a function call based on ABI in file, and the +// arguments specified as strings. +// The abiFileName specifies the name of the ABI file, and abiPath the path where it can be found. 
+// The fname specifies which function should called, if +// it doesn't exist exist the fallback function will be called. If fname is the empty +// string, the constructor is called. The arguments must be specified in args. The count +// must match the function being called. +// Returns the ABI encoded function call, whether the function is constant according +// to the ABI (which means it does not modified contract state) +func EncodeFunctionCallFromFile(abiFileName, abiPath, funcName string, logger *logging.Logger, args ...interface{}) ([]byte, *FunctionSpec, error) { + abiSpecBytes, err := readAbi(abiPath, abiFileName, logger) + if err != nil { + return []byte{}, nil, err } - b := n.Bytes() - if uint64(len(b)) > e.M { - return nil, fmt.Errorf("value to large for int%d", e.M) - } - return pad(b, ElementSize, true), nil + return EncodeFunctionCall(abiSpecBytes, funcName, logger, args...) } -func (e EVMUint) unpack(data []byte, offset int, v interface{}) (int, error) { - if len(data)-offset < ElementSize { - return 0, fmt.Errorf("not enough data") - } - - data = data[offset:] - empty := 0 - for empty = 0; empty < ElementSize; empty++ { - if data[empty] != 0 { - break - } +// EncodeFunctionCall ABI encodes a function call based on ABI in string abiData +// and the arguments specified as strings. +// The fname specifies which function should called, if +// it doesn't exist exist the fallback function will be called. If fname is the empty +// string, the constructor is called. The arguments must be specified in args. The count +// must match the function being called. +// Returns the ABI encoded function call, whether the function is constant according +// to the ABI (which means it does not modified contract state) +func EncodeFunctionCall(abiData, funcName string, logger *logging.Logger, args ...interface{}) ([]byte, *FunctionSpec, error) { + logger.TraceMsg("Packing Call via ABI", + "spec", abiData, + "function", funcName, + "arguments", fmt.Sprintf("%v", args), + ) + + abiSpec, err := ReadSpec([]byte(abiData)) + if err != nil { + logger.InfoMsg("Failed to decode abi spec", + "abi", abiData, + "error", err.Error(), + ) + return nil, nil, err } - length := ElementSize - empty - - switch v := v.(type) { - case *string: - b := new(big.Int) - b.SetBytes(data[empty:ElementSize]) - *v = b.String() - case *big.Int: - b := new(big.Int) - *v = *b.SetBytes(data[0:ElementSize]) - case *uint64: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for uint64") - } - *v = binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize]) - case *uint32: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for uint64") - } - *v = binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize]) - case *uint16: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for uint16") - } - *v = binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize]) - case *uint8: - maxLen := 1 - if length > maxLen { - return 0, fmt.Errorf("value to large for uint8") - } - *v = uint8(data[31]) - case *int64: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int64") - } - *v = int64(binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize])) - case *int32: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int64") - 
} - *v = int32(binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize])) - case *int16: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int16") - } - *v = int16(binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize])) - case *int8: - maxLen := 1 - if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int8") - } - *v = int8(data[ElementSize-1]) - default: - return 0, fmt.Errorf("unable to convert %s to %s", e.GetSignature(), reflect.ValueOf(v).Kind().String()) + packedBytes, funcSpec, err := abiSpec.Pack(funcName, args...) + if err != nil { + logger.InfoMsg("Failed to encode abi spec", + "abi", abiData, + "error", err.Error(), + ) + return nil, nil, err } - return 32, nil -} - -func (e EVMUint) Dynamic() bool { - return false -} - -var _ EVMType = (*EVMInt)(nil) - -type EVMInt struct { - M uint64 + return packedBytes, funcSpec, nil } -func (e EVMInt) getGoType() interface{} { - switch e.M { - case 8: - return new(int8) - case 16: - return new(int16) - case 32: - return new(int32) - case 64: - return new(int64) - default: - return new(big.Int) +// DecodeFunctionReturnFromFile ABI decodes the return value from a contract function call. +func DecodeFunctionReturnFromFile(abiLocation, binPath, funcName string, resultRaw []byte, logger *logging.Logger) ([]*Variable, error) { + abiSpecBytes, err := readAbi(binPath, abiLocation, logger) + if err != nil { + return nil, err } -} + logger.TraceMsg("ABI Specification (Decode)", "spec", abiSpecBytes) -func (e EVMInt) ImplicitCast(o EVMType) bool { - i, ok := o.(EVMInt) - return ok && i.M >= e.M + // Unpack the result + return DecodeFunctionReturn(abiSpecBytes, funcName, resultRaw) } -func (e EVMInt) GetSignature() string { - return fmt.Sprintf("int%d", e.M) -} +func DecodeFunctionReturn(abiData, name string, data []byte) ([]*Variable, error) { + abiSpec, err := ReadSpec([]byte(abiData)) + if err != nil { + return nil, err + } -func (e EVMInt) pack(v interface{}) ([]byte, error) { - n := new(big.Int) + var args []Argument - arg := reflect.ValueOf(v) - switch arg.Kind() { - case reflect.String: - _, ok := n.SetString(arg.String(), 0) - if !ok { - return nil, fmt.Errorf("Failed to parse `%s", arg.String()) - } - case reflect.Uint8: - fallthrough - case reflect.Uint16: - fallthrough - case reflect.Uint32: - fallthrough - case reflect.Uint64: - fallthrough - case reflect.Uint: - n.SetUint64(arg.Uint()) - case reflect.Int8: - fallthrough - case reflect.Int16: - fallthrough - case reflect.Int32: - fallthrough - case reflect.Int64: - fallthrough - case reflect.Int: - n.SetInt64(arg.Int()) - default: - t := reflect.TypeOf(new(int64)) - if reflect.TypeOf(v).ConvertibleTo(t) { - n.SetInt64(reflect.ValueOf(v).Convert(t).Int()) + if name == "" { + args = abiSpec.Constructor.Outputs + } else { + if _, ok := abiSpec.Functions[name]; ok { + args = abiSpec.Functions[name].Outputs } else { - return nil, fmt.Errorf("cannot convert type %s to int%d", arg.Kind().String(), e.M) + args = abiSpec.Fallback.Outputs } } - b := n.Bytes() - if uint64(len(b)) > e.M { - return nil, fmt.Errorf("value to large for int%d", e.M) + if args == nil { + return nil, fmt.Errorf("no such function") } - res := pad(b, ElementSize, true) - if (res[0] & 0x80) != 0 { - return nil, fmt.Errorf("value to large for int%d", e.M) - } - if n.Sign() < 0 { - // One's complement; i.e. 0xffff is -1, not 0. 
- n.Add(n, big.NewInt(1)) - b := n.Bytes() - res = pad(b, ElementSize, true) - for i := 0; i < len(res); i++ { - res[i] = ^res[i] - } - } - return res, nil -} + vars := make([]*Variable, len(args)) -func (e EVMInt) unpack(data []byte, offset int, v interface{}) (int, error) { - if len(data)-offset < ElementSize { - return 0, fmt.Errorf("not enough data") + if len(args) == 0 { + return nil, nil } - data = data[offset:] - sign := (data[0] & 0x80) != 0 - - empty := 0 - for empty = 0; empty < ElementSize; empty++ { - if (sign && data[empty] != 255) || (!sign && data[empty] != 0) { - break - } + vals := make([]interface{}, len(args)) + for i := range vals { + vals[i] = new(string) } - - length := ElementSize - empty - inv := make([]byte, ElementSize) - for i := 0; i < ElementSize; i++ { - if sign { - inv[i] = ^data[i] - } else { - inv[i] = data[i] - } + err = Unpack(args, data, vals...) + if err != nil { + return nil, err } - toType := reflect.ValueOf(v).Kind().String() - - switch v := v.(type) { - case *string: - b := new(big.Int) - b.SetBytes(inv[empty:ElementSize]) - if sign { - *v = b.Sub(big.NewInt(-1), b).String() - } else { - *v = b.String() - } - case *big.Int: - b := new(big.Int) - b.SetBytes(inv[0:ElementSize]) - if sign { - *v = *b.Sub(big.NewInt(-1), b) + + for i, a := range args { + if a.Name != "" { + vars[i] = &Variable{Name: a.Name, Value: *(vals[i].(*string))} } else { - *v = *b - } - case *uint64: - if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) - } - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for uint64") - } - *v = binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize]) - case *uint32: - if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) - } - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for int32") + vars[i] = &Variable{Name: fmt.Sprintf("%d", i), Value: *(vals[i].(*string))} } - *v = binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize]) - case *uint16: - if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) - } - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen { - return 0, fmt.Errorf("value to large for uint16") - } - *v = binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize]) - case *int64: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int64") - } - *v = int64(binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize])) - case *int32: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for uint64") - } - *v = int32(binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize])) - case *int16: - maxLen := int(unsafe.Sizeof(*v)) - if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for uint16") - } - *v = int16(binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize])) - default: - return 0, fmt.Errorf("unable to convert %s to %s", e.GetSignature(), toType) } - return ElementSize, nil + return vars, nil } -func (e EVMInt) Dynamic() bool { - return false -} - -func (e EVMUint) ImplicitCast(o EVMType) bool { - u, ok := o.(EVMUint) - return ok && u.M >= e.M -} - -var _ EVMType = (*EVMAddress)(nil) - -type EVMAddress struct { -} - -func (e EVMAddress) getGoType() interface{} { - return new(crypto.Address) -} - -func (e 
EVMAddress) GetSignature() string { - return "address" -} - -func (e EVMAddress) pack(v interface{}) ([]byte, error) { - var err error - a, ok := v.(crypto.Address) - if !ok { - s, ok := v.(string) - if ok { - a, err = crypto.AddressFromHexString(s) - if err != nil { - return nil, err - } - } - } else { - b, ok := v.([]byte) - if !ok { - return nil, fmt.Errorf("cannot map to %s to EVM address", reflect.ValueOf(v).Kind().String()) - } - - a, err = crypto.AddressFromBytes(b) - if err != nil { - return nil, err +func readAbi(root, contract string, logger *logging.Logger) (string, error) { + p := path.Join(root, stripHex(contract)) + if _, err := os.Stat(p); err != nil { + logger.TraceMsg("abifile not found", "tried", p) + p = path.Join(root, stripHex(contract)+".bin") + if _, err = os.Stat(p); err != nil { + logger.TraceMsg("abifile not found", "tried", p) + return "", fmt.Errorf("abi doesn't exist for =>\t%s", p) } } - - return pad(a[:], ElementSize, true), nil -} - -func (e EVMAddress) unpack(data []byte, offset int, v interface{}) (int, error) { - addr, err := crypto.AddressFromBytes(data[offset+ElementSize-crypto.AddressLength : offset+ElementSize]) + logger.TraceMsg("Found ABI file", "path", p) + sol, err := compile.LoadSolidityContract(p) if err != nil { - return 0, err + return "", err } - switch v := v.(type) { - case *string: - *v = addr.String() - case *crypto.Address: - *v = addr - case *([]byte): - *v = data[offset+ElementSize-crypto.AddressLength : offset+ElementSize] - default: - return 0, fmt.Errorf("cannot map EVM address to %s", reflect.ValueOf(v).Kind().String()) - } - - return ElementSize, nil + return string(sol.Abi), nil } -func (e EVMAddress) Dynamic() bool { - return false -} - -func (e EVMAddress) ImplicitCast(o EVMType) bool { - return false -} - -var _ EVMType = (*EVMBytes)(nil) - -type EVMBytes struct { - M uint64 -} - -func (e EVMBytes) getGoType() interface{} { - v := make([]byte, e.M) - return &v -} - -func (e EVMBytes) pack(v interface{}) ([]byte, error) { - b, ok := v.([]byte) - if !ok { - s, ok := v.(string) - if ok { - b = []byte(s) - } else { - return nil, fmt.Errorf("cannot map to %s to EVM bytes", reflect.ValueOf(v).Kind().String()) - } +// LoadPath loads one abi file or finds all files in a directory +func LoadPath(abiFileOrDirs ...string) (*Spec, error) { + if len(abiFileOrDirs) == 0 { + return &Spec{}, fmt.Errorf("no ABI file or directory provided") } - if e.M > 0 { - if uint64(len(b)) > e.M { - return nil, fmt.Errorf("[%d]byte to long for %s", len(b), e.GetSignature()) - } - return pad(b, ElementSize, false), nil - } else { - length := EVMUint{M: 256} - p, err := length.pack(len(b)) - if err != nil { - return nil, err - } - for i := 0; i < len(b); i += ElementSize { - a := b[i:] - if len(a) == 0 { - break + specs := make([]*Spec, 0) + + for _, dir := range abiFileOrDirs { + err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil { + return fmt.Errorf("error returned while walking abiDir '%s': %v", dir, err) + } + ext := filepath.Ext(path) + if fi.IsDir() || !(ext == ".bin" || ext == ".abi") { + return nil + } + if err == nil { + abiSpc, err := ReadSpecFile(path) + if err != nil { + return errors.Wrap(err, "Error parsing abi file "+path) + } + specs = append(specs, abiSpc) } - p = append(p, pad(a, ElementSize, false)...) 
+ return nil + }) + if err != nil { + return &Spec{}, err } - - return p, nil } + return MergeSpec(specs), nil } -func (e EVMBytes) unpack(data []byte, offset int, v interface{}) (int, error) { - if e.M == 0 { - s := EVMString{} - - return s.unpack(data, offset, v) - } - - v2 := reflect.ValueOf(v).Elem() - switch v2.Type().Kind() { - case reflect.String: - start := 0 - end := int(e.M) - - for start < ElementSize-1 && data[offset+start] == 0 && start < end { - start++ - } - for end > start && data[offset+end-1] == 0 { - end-- +func stripHex(s string) string { + if len(s) > 1 { + if s[:2] == "0x" { + s = s[2:] + if len(s)%2 != 0 { + s = "0" + s + } + return s } - v2.SetString(string(data[offset+start : offset+end])) - case reflect.Array: - fallthrough - case reflect.Slice: - v2.SetBytes(data[offset : offset+int(e.M)]) - default: - return 0, fmt.Errorf("cannot map EVM %s to %s", e.GetSignature(), reflect.ValueOf(v).Kind().String()) - } - - return ElementSize, nil -} - -func (e EVMBytes) Dynamic() bool { - return e.M == 0 -} - -func (e EVMBytes) GetSignature() string { - if e.M > 0 { - return fmt.Sprintf("bytes%d", e.M) - } else { - return "bytes" - } -} - -func (e EVMBytes) ImplicitCast(o EVMType) bool { - return false -} - -var _ EVMType = (*EVMString)(nil) - -type EVMString struct { -} - -func (e EVMString) GetSignature() string { - return "string" -} - -func (e EVMString) getGoType() interface{} { - return new(string) -} - -func (e EVMString) pack(v interface{}) ([]byte, error) { - b := EVMBytes{M: 0} - - return b.pack(v) -} - -func (e EVMString) unpack(data []byte, offset int, v interface{}) (int, error) { - lenType := EVMInt{M: 64} - var len int64 - l, err := lenType.unpack(data, offset, &len) - if err != nil { - return 0, err } - offset += l - - switch v := v.(type) { - case *string: - *v = string(data[offset : offset+int(len)]) - case *[]byte: - *v = data[offset : offset+int(len)] - default: - return 0, fmt.Errorf("cannot map EVM string to %s", reflect.ValueOf(v).Kind().String()) - } - - return ElementSize, nil -} - -func (e EVMString) Dynamic() bool { - return true -} - -func (e EVMString) ImplicitCast(o EVMType) bool { - return false -} - -var _ EVMType = (*EVMFixed)(nil) - -type EVMFixed struct { - N, M uint64 - signed bool -} - -func (e EVMFixed) getGoType() interface{} { - // This is not right, obviously - return new(big.Float) -} - -func (e EVMFixed) GetSignature() string { - if e.signed { - return fmt.Sprintf("fixed%dx%d", e.M, e.N) - } else { - return fmt.Sprintf("ufixed%dx%d", e.M, e.N) - } -} - -func (e EVMFixed) pack(v interface{}) ([]byte, error) { - // The ABI spec does not describe how this should be packed; go-ethereum abi does not implement this - // need to dig in solidity to find out how this is packed - return nil, fmt.Errorf("packing of %s not implemented, patches welcome", e.GetSignature()) -} - -func (e EVMFixed) unpack(data []byte, offset int, v interface{}) (int, error) { - // The ABI spec does not describe how this should be packed; go-ethereum abi does not implement this - // need to dig in solidity to find out how this is packed - return 0, fmt.Errorf("unpacking of %s not implemented, patches welcome", e.GetSignature()) -} - -func (e EVMFixed) Dynamic() bool { - return false -} - -func (e EVMFixed) ImplicitCast(o EVMType) bool { - return false + return s } type Argument struct { diff --git a/execution/evm/abi/core.go b/execution/evm/abi/core.go deleted file mode 100644 index 0e9b8a7a6..000000000 --- a/execution/evm/abi/core.go +++ /dev/null @@ -1,208 +0,0 @@ 
-package abi - -import ( - "fmt" - "os" - "path" - "path/filepath" - - "github.com/hyperledger/burrow/deploy/compile" - "github.com/hyperledger/burrow/execution/errors" - "github.com/hyperledger/burrow/logging" -) - -// Variable exist to unpack return values into, so have both the return -// value and its name -type Variable struct { - Name string - Value string -} - -func init() { - var err error - RevertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) - if err != nil { - panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) - } -} - -// RevertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). -// If a function exits this way, the this hardcoded ABI will be used. -var RevertAbi *Spec - -// EncodeFunctionCallFromFile ABI encodes a function call based on ABI in file, and the -// arguments specified as strings. -// The abiFileName specifies the name of the ABI file, and abiPath the path where it can be found. -// The fname specifies which function should called, if -// it doesn't exist exist the fallback function will be called. If fname is the empty -// string, the constructor is called. The arguments must be specified in args. The count -// must match the function being called. -// Returns the ABI encoded function call, whether the function is constant according -// to the ABI (which means it does not modified contract state) -func EncodeFunctionCallFromFile(abiFileName, abiPath, funcName string, logger *logging.Logger, args ...interface{}) ([]byte, *FunctionSpec, error) { - abiSpecBytes, err := readAbi(abiPath, abiFileName, logger) - if err != nil { - return []byte{}, nil, err - } - - return EncodeFunctionCall(abiSpecBytes, funcName, logger, args...) -} - -// EncodeFunctionCall ABI encodes a function call based on ABI in string abiData -// and the arguments specified as strings. -// The fname specifies which function should called, if -// it doesn't exist exist the fallback function will be called. If fname is the empty -// string, the constructor is called. The arguments must be specified in args. The count -// must match the function being called. -// Returns the ABI encoded function call, whether the function is constant according -// to the ABI (which means it does not modified contract state) -func EncodeFunctionCall(abiData, funcName string, logger *logging.Logger, args ...interface{}) ([]byte, *FunctionSpec, error) { - logger.TraceMsg("Packing Call via ABI", - "spec", abiData, - "function", funcName, - "arguments", fmt.Sprintf("%v", args), - ) - - abiSpec, err := ReadSpec([]byte(abiData)) - if err != nil { - logger.InfoMsg("Failed to decode abi spec", - "abi", abiData, - "error", err.Error(), - ) - return nil, nil, err - } - - packedBytes, funcSpec, err := abiSpec.Pack(funcName, args...) - if err != nil { - logger.InfoMsg("Failed to encode abi spec", - "abi", abiData, - "error", err.Error(), - ) - return nil, nil, err - } - - return packedBytes, funcSpec, nil -} - -// DecodeFunctionReturnFromFile ABI decodes the return value from a contract function call. 
-func DecodeFunctionReturnFromFile(abiLocation, binPath, funcName string, resultRaw []byte, logger *logging.Logger) ([]*Variable, error) { - abiSpecBytes, err := readAbi(binPath, abiLocation, logger) - if err != nil { - return nil, err - } - logger.TraceMsg("ABI Specification (Decode)", "spec", abiSpecBytes) - - // Unpack the result - return DecodeFunctionReturn(abiSpecBytes, funcName, resultRaw) -} - -func DecodeFunctionReturn(abiData, name string, data []byte) ([]*Variable, error) { - abiSpec, err := ReadSpec([]byte(abiData)) - if err != nil { - return nil, err - } - - var args []Argument - - if name == "" { - args = abiSpec.Constructor.Outputs - } else { - if _, ok := abiSpec.Functions[name]; ok { - args = abiSpec.Functions[name].Outputs - } else { - args = abiSpec.Fallback.Outputs - } - } - - if args == nil { - return nil, fmt.Errorf("no such function") - } - vars := make([]*Variable, len(args)) - - if len(args) == 0 { - return nil, nil - } - - vals := make([]interface{}, len(args)) - for i := range vals { - vals[i] = new(string) - } - err = Unpack(args, data, vals...) - if err != nil { - return nil, err - } - - for i, a := range args { - if a.Name != "" { - vars[i] = &Variable{Name: a.Name, Value: *(vals[i].(*string))} - } else { - vars[i] = &Variable{Name: fmt.Sprintf("%d", i), Value: *(vals[i].(*string))} - } - } - - return vars, nil -} - -func readAbi(root, contract string, logger *logging.Logger) (string, error) { - p := path.Join(root, stripHex(contract)) - if _, err := os.Stat(p); err != nil { - logger.TraceMsg("abifile not found", "tried", p) - p = path.Join(root, stripHex(contract)+".bin") - if _, err = os.Stat(p); err != nil { - logger.TraceMsg("abifile not found", "tried", p) - return "", fmt.Errorf("abi doesn't exist for =>\t%s", p) - } - } - logger.TraceMsg("Found ABI file", "path", p) - sol, err := compile.LoadSolidityContract(p) - if err != nil { - return "", err - } - return string(sol.Abi), nil -} - -// LoadPath loads one abi file or finds all files in a directory -func LoadPath(abiFileOrDirs ...string) (*Spec, error) { - if len(abiFileOrDirs) == 0 { - return &Spec{}, fmt.Errorf("no ABI file or directory provided") - } - - specs := make([]*Spec, 0) - - for _, dir := range abiFileOrDirs { - err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil { - return fmt.Errorf("error returned while walking abiDir '%s': %v", dir, err) - } - ext := filepath.Ext(path) - if fi.IsDir() || !(ext == ".bin" || ext == ".abi") { - return nil - } - if err == nil { - abiSpc, err := ReadSpecFile(path) - if err != nil { - return errors.Wrap(err, "Error parsing abi file "+path) - } - specs = append(specs, abiSpc) - } - return nil - }) - if err != nil { - return &Spec{}, err - } - } - return MergeSpec(specs), nil -} - -func stripHex(s string) string { - if len(s) > 1 { - if s[:2] == "0x" { - s = s[2:] - if len(s)%2 != 0 { - s = "0" + s - } - return s - } - } - return s -} diff --git a/execution/evm/abi/primitives.go b/execution/evm/abi/primitives.go new file mode 100644 index 000000000..5051a92e0 --- /dev/null +++ b/execution/evm/abi/primitives.go @@ -0,0 +1,711 @@ +package abi + +import ( + "encoding/binary" + "fmt" + "math/big" + "reflect" + "strings" + "unsafe" // just for Sizeof + + "github.com/hyperledger/burrow/crypto" +) + +// EVM Solidity calls and return values are packed into +// pieces of 32 bytes, including a bool (wasting 255 out of 256 bits) +const ElementSize = 32 + +type EVMType interface { + GetSignature() string + getGoType() interface{} + 
pack(v interface{}) ([]byte, error) + unpack(data []byte, offset int, v interface{}) (int, error) + Dynamic() bool + ImplicitCast(o EVMType) bool +} + +var _ EVMType = (*EVMBool)(nil) + +type EVMBool struct { +} + +func (e EVMBool) GetSignature() string { + return "bool" +} + +func (e EVMBool) getGoType() interface{} { + return new(bool) +} + +func (e EVMBool) pack(v interface{}) ([]byte, error) { + var b bool + arg := reflect.ValueOf(v) + if arg.Kind() == reflect.String { + val := arg.String() + if strings.EqualFold(val, "true") || val == "1" { + b = true + } else if strings.EqualFold(val, "false") || val == "0" { + b = false + } else { + return nil, fmt.Errorf("%s is not a valid value for EVM Bool type", val) + } + } else if arg.Kind() == reflect.Bool { + b = arg.Bool() + } else { + return nil, fmt.Errorf("%s cannot be converted to EVM Bool type", arg.Kind().String()) + } + res := make([]byte, ElementSize) + if b { + res[ElementSize-1] = 1 + } + return res, nil +} + +func (e EVMBool) unpack(data []byte, offset int, v interface{}) (int, error) { + if len(data)-offset < 32 { + return 0, fmt.Errorf("not enough data") + } + data = data[offset:] + switch v := v.(type) { + case *string: + if data[ElementSize-1] == 1 { + *v = "true" + } else if data[ElementSize-1] == 0 { + *v = "false" + } else { + return 0, fmt.Errorf("unexpected value for EVM bool") + } + case *int8: + *v = int8(data[ElementSize-1]) + case *int16: + *v = int16(data[ElementSize-1]) + case *int32: + *v = int32(data[ElementSize-1]) + case *int64: + *v = int64(data[ElementSize-1]) + case *int: + *v = int(data[ElementSize-1]) + case *uint8: + *v = uint8(data[ElementSize-1]) + case *uint16: + *v = uint16(data[ElementSize-1]) + case *uint32: + *v = uint32(data[ElementSize-1]) + case *uint64: + *v = uint64(data[ElementSize-1]) + case *uint: + *v = uint(data[ElementSize-1]) + case *bool: + *v = data[ElementSize-1] == 1 + default: + return 0, fmt.Errorf("cannot set type %s for EVM bool", reflect.ValueOf(v).Kind().String()) + } + return 32, nil +} + +func (e EVMBool) Dynamic() bool { + return false +} + +func (e EVMBool) ImplicitCast(o EVMType) bool { + return false +} + +var _ EVMType = (*EVMUint)(nil) + +type EVMUint struct { + M uint64 +} + +func (e EVMUint) GetSignature() string { + return fmt.Sprintf("uint%d", e.M) +} + +func (e EVMUint) getGoType() interface{} { + switch e.M { + case 8: + return new(uint8) + case 16: + return new(uint16) + case 32: + return new(uint32) + case 64: + return new(uint64) + default: + return new(big.Int) + } +} + +func (e EVMUint) pack(v interface{}) ([]byte, error) { + n := new(big.Int) + + arg := reflect.ValueOf(v) + switch arg.Kind() { + case reflect.String: + _, ok := n.SetString(arg.String(), 0) + if !ok { + return nil, fmt.Errorf("Failed to parse `%s", arg.String()) + } + if n.Sign() < 0 { + return nil, fmt.Errorf("negative value not allowed for uint%d", e.M) + } + case reflect.Uint8: + fallthrough + case reflect.Uint16: + fallthrough + case reflect.Uint32: + fallthrough + case reflect.Uint64: + fallthrough + case reflect.Uint: + n.SetUint64(arg.Uint()) + case reflect.Int8: + fallthrough + case reflect.Int16: + fallthrough + case reflect.Int32: + fallthrough + case reflect.Int64: + fallthrough + case reflect.Int: + x := arg.Int() + if x < 0 { + return nil, fmt.Errorf("negative value not allowed for uint%d", e.M) + } + n.SetInt64(x) + default: + t := reflect.TypeOf(new(uint64)) + if reflect.TypeOf(v).ConvertibleTo(t) { + n.SetUint64(reflect.ValueOf(v).Convert(t).Uint()) + } else { + return nil, 
fmt.Errorf("cannot convert type %s to uint%d", arg.Kind().String(), e.M) + } + } + + b := n.Bytes() + if uint64(len(b)) > e.M { + return nil, fmt.Errorf("value to large for int%d", e.M) + } + return pad(b, ElementSize, true), nil +} + +func (e EVMUint) unpack(data []byte, offset int, v interface{}) (int, error) { + if len(data)-offset < ElementSize { + return 0, fmt.Errorf("not enough data") + } + + data = data[offset:] + empty := 0 + for empty = 0; empty < ElementSize; empty++ { + if data[empty] != 0 { + break + } + } + + length := ElementSize - empty + + switch v := v.(type) { + case *string: + b := new(big.Int) + b.SetBytes(data[empty:ElementSize]) + *v = b.String() + case *big.Int: + b := new(big.Int) + *v = *b.SetBytes(data[0:ElementSize]) + case *uint64: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for uint64") + } + *v = binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize]) + case *uint32: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for uint64") + } + *v = binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize]) + case *uint16: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for uint16") + } + *v = binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize]) + case *uint8: + maxLen := 1 + if length > maxLen { + return 0, fmt.Errorf("value to large for uint8") + } + *v = uint8(data[31]) + case *int64: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for int64") + } + *v = int64(binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize])) + case *int32: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for int64") + } + *v = int32(binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize])) + case *int16: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for int16") + } + *v = int16(binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize])) + case *int8: + maxLen := 1 + if length > maxLen || (data[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for int8") + } + *v = int8(data[ElementSize-1]) + default: + return 0, fmt.Errorf("unable to convert %s to %s", e.GetSignature(), reflect.ValueOf(v).Kind().String()) + } + + return 32, nil +} + +func (e EVMUint) Dynamic() bool { + return false +} + +var _ EVMType = (*EVMInt)(nil) + +type EVMInt struct { + M uint64 +} + +func (e EVMInt) getGoType() interface{} { + switch e.M { + case 8: + return new(int8) + case 16: + return new(int16) + case 32: + return new(int32) + case 64: + return new(int64) + default: + return new(big.Int) + } +} + +func (e EVMInt) ImplicitCast(o EVMType) bool { + i, ok := o.(EVMInt) + return ok && i.M >= e.M +} + +func (e EVMInt) GetSignature() string { + return fmt.Sprintf("int%d", e.M) +} + +func (e EVMInt) pack(v interface{}) ([]byte, error) { + n := new(big.Int) + + arg := reflect.ValueOf(v) + switch arg.Kind() { + case reflect.String: + _, ok := n.SetString(arg.String(), 0) + if !ok { + return nil, fmt.Errorf("Failed to parse `%s", arg.String()) + } + case reflect.Uint8: + fallthrough + case reflect.Uint16: + fallthrough + case reflect.Uint32: + fallthrough + case reflect.Uint64: + fallthrough + case reflect.Uint: + n.SetUint64(arg.Uint()) + case 
reflect.Int8: + fallthrough + case reflect.Int16: + fallthrough + case reflect.Int32: + fallthrough + case reflect.Int64: + fallthrough + case reflect.Int: + n.SetInt64(arg.Int()) + default: + t := reflect.TypeOf(new(int64)) + if reflect.TypeOf(v).ConvertibleTo(t) { + n.SetInt64(reflect.ValueOf(v).Convert(t).Int()) + } else { + return nil, fmt.Errorf("cannot convert type %s to int%d", arg.Kind().String(), e.M) + } + } + + b := n.Bytes() + if uint64(len(b)) > e.M { + return nil, fmt.Errorf("value to large for int%d", e.M) + } + res := pad(b, ElementSize, true) + if (res[0] & 0x80) != 0 { + return nil, fmt.Errorf("value to large for int%d", e.M) + } + if n.Sign() < 0 { + // One's complement; i.e. 0xffff is -1, not 0. + n.Add(n, big.NewInt(1)) + b := n.Bytes() + res = pad(b, ElementSize, true) + for i := 0; i < len(res); i++ { + res[i] = ^res[i] + } + } + return res, nil +} + +func (e EVMInt) unpack(data []byte, offset int, v interface{}) (int, error) { + if len(data)-offset < ElementSize { + return 0, fmt.Errorf("not enough data") + } + + data = data[offset:] + sign := (data[0] & 0x80) != 0 + + empty := 0 + for empty = 0; empty < ElementSize; empty++ { + if (sign && data[empty] != 255) || (!sign && data[empty] != 0) { + break + } + } + + length := ElementSize - empty + inv := make([]byte, ElementSize) + for i := 0; i < ElementSize; i++ { + if sign { + inv[i] = ^data[i] + } else { + inv[i] = data[i] + } + } + toType := reflect.ValueOf(v).Kind().String() + + switch v := v.(type) { + case *string: + b := new(big.Int) + b.SetBytes(inv[empty:ElementSize]) + if sign { + *v = b.Sub(big.NewInt(-1), b).String() + } else { + *v = b.String() + } + case *big.Int: + b := new(big.Int) + b.SetBytes(inv[0:ElementSize]) + if sign { + *v = *b.Sub(big.NewInt(-1), b) + } else { + *v = *b + } + case *uint64: + if sign { + return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + } + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for uint64") + } + *v = binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize]) + case *uint32: + if sign { + return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + } + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for int32") + } + *v = binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize]) + case *uint16: + if sign { + return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + } + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen { + return 0, fmt.Errorf("value to large for uint16") + } + *v = binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize]) + case *int64: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for int64") + } + *v = int64(binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize])) + case *int32: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for uint64") + } + *v = int32(binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize])) + case *int16: + maxLen := int(unsafe.Sizeof(*v)) + if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { + return 0, fmt.Errorf("value to large for uint16") + } + *v = int16(binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize])) + default: + return 0, fmt.Errorf("unable to convert %s to %s", e.GetSignature(), toType) + } + + return ElementSize, nil +} + +func (e EVMInt) 
Dynamic() bool { + return false +} + +func (e EVMUint) ImplicitCast(o EVMType) bool { + u, ok := o.(EVMUint) + return ok && u.M >= e.M +} + +var _ EVMType = (*EVMAddress)(nil) + +type EVMAddress struct { +} + +func (e EVMAddress) getGoType() interface{} { + return new(crypto.Address) +} + +func (e EVMAddress) GetSignature() string { + return "address" +} + +func (e EVMAddress) pack(v interface{}) ([]byte, error) { + var err error + a, ok := v.(crypto.Address) + if !ok { + s, ok := v.(string) + if ok { + a, err = crypto.AddressFromHexString(s) + if err != nil { + return nil, err + } + } + } else { + b, ok := v.([]byte) + if !ok { + return nil, fmt.Errorf("cannot map to %s to EVM address", reflect.ValueOf(v).Kind().String()) + } + + a, err = crypto.AddressFromBytes(b) + if err != nil { + return nil, err + } + } + + return pad(a[:], ElementSize, true), nil +} + +func (e EVMAddress) unpack(data []byte, offset int, v interface{}) (int, error) { + addr, err := crypto.AddressFromBytes(data[offset+ElementSize-crypto.AddressLength : offset+ElementSize]) + if err != nil { + return 0, err + } + switch v := v.(type) { + case *string: + *v = addr.String() + case *crypto.Address: + *v = addr + case *([]byte): + *v = data[offset+ElementSize-crypto.AddressLength : offset+ElementSize] + default: + return 0, fmt.Errorf("cannot map EVM address to %s", reflect.ValueOf(v).Kind().String()) + } + + return ElementSize, nil +} + +func (e EVMAddress) Dynamic() bool { + return false +} + +func (e EVMAddress) ImplicitCast(o EVMType) bool { + return false +} + +var _ EVMType = (*EVMBytes)(nil) + +type EVMBytes struct { + M uint64 +} + +func (e EVMBytes) getGoType() interface{} { + v := make([]byte, e.M) + return &v +} + +func (e EVMBytes) pack(v interface{}) ([]byte, error) { + b, ok := v.([]byte) + if !ok { + s, ok := v.(string) + if ok { + b = []byte(s) + } else { + return nil, fmt.Errorf("cannot map to %s to EVM bytes", reflect.ValueOf(v).Kind().String()) + } + } + + if e.M > 0 { + if uint64(len(b)) > e.M { + return nil, fmt.Errorf("[%d]byte to long for %s", len(b), e.GetSignature()) + } + return pad(b, ElementSize, false), nil + } else { + length := EVMUint{M: 256} + p, err := length.pack(len(b)) + if err != nil { + return nil, err + } + for i := 0; i < len(b); i += ElementSize { + a := b[i:] + if len(a) == 0 { + break + } + p = append(p, pad(a, ElementSize, false)...) 
+ } + + return p, nil + } +} + +func (e EVMBytes) unpack(data []byte, offset int, v interface{}) (int, error) { + if e.M == 0 { + s := EVMString{} + + return s.unpack(data, offset, v) + } + + v2 := reflect.ValueOf(v).Elem() + switch v2.Type().Kind() { + case reflect.String: + start := 0 + end := int(e.M) + + for start < ElementSize-1 && data[offset+start] == 0 && start < end { + start++ + } + for end > start && data[offset+end-1] == 0 { + end-- + } + v2.SetString(string(data[offset+start : offset+end])) + case reflect.Array: + fallthrough + case reflect.Slice: + v2.SetBytes(data[offset : offset+int(e.M)]) + default: + return 0, fmt.Errorf("cannot map EVM %s to %s", e.GetSignature(), reflect.ValueOf(v).Kind().String()) + } + + return ElementSize, nil +} + +func (e EVMBytes) Dynamic() bool { + return e.M == 0 +} + +func (e EVMBytes) GetSignature() string { + if e.M > 0 { + return fmt.Sprintf("bytes%d", e.M) + } else { + return "bytes" + } +} + +func (e EVMBytes) ImplicitCast(o EVMType) bool { + return false +} + +var _ EVMType = (*EVMString)(nil) + +type EVMString struct { +} + +func (e EVMString) GetSignature() string { + return "string" +} + +func (e EVMString) getGoType() interface{} { + return new(string) +} + +func (e EVMString) pack(v interface{}) ([]byte, error) { + b := EVMBytes{M: 0} + + return b.pack(v) +} + +func (e EVMString) unpack(data []byte, offset int, v interface{}) (int, error) { + lenType := EVMInt{M: 64} + var len int64 + l, err := lenType.unpack(data, offset, &len) + if err != nil { + return 0, err + } + offset += l + + switch v := v.(type) { + case *string: + *v = string(data[offset : offset+int(len)]) + case *[]byte: + *v = data[offset : offset+int(len)] + default: + return 0, fmt.Errorf("cannot map EVM string to %s", reflect.ValueOf(v).Kind().String()) + } + + return ElementSize, nil +} + +func (e EVMString) Dynamic() bool { + return true +} + +func (e EVMString) ImplicitCast(o EVMType) bool { + return false +} + +var _ EVMType = (*EVMFixed)(nil) + +type EVMFixed struct { + N, M uint64 + signed bool +} + +func (e EVMFixed) getGoType() interface{} { + // This is not right, obviously + return new(big.Float) +} + +func (e EVMFixed) GetSignature() string { + if e.signed { + return fmt.Sprintf("fixed%dx%d", e.M, e.N) + } else { + return fmt.Sprintf("ufixed%dx%d", e.M, e.N) + } +} + +func (e EVMFixed) pack(v interface{}) ([]byte, error) { + // The ABI spec does not describe how this should be packed; go-ethereum abi does not implement this + // need to dig in solidity to find out how this is packed + return nil, fmt.Errorf("packing of %s not implemented, patches welcome", e.GetSignature()) +} + +func (e EVMFixed) unpack(data []byte, offset int, v interface{}) (int, error) { + // The ABI spec does not describe how this should be packed; go-ethereum abi does not implement this + // need to dig in solidity to find out how this is packed + return 0, fmt.Errorf("unpacking of %s not implemented, patches welcome", e.GetSignature()) +} + +func (e EVMFixed) Dynamic() bool { + return false +} + +func (e EVMFixed) ImplicitCast(o EVMType) bool { + return false +} From 84f03e20482be17f5a4826fd37e47722469b9640 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 17 Jul 2019 13:03:39 +0100 Subject: [PATCH 26/70] abi: add comments or unexport some fields Signed-off-by: Sean Young --- deploy/jobs/jobs_contracts.go | 2 +- execution/evm/abi/abi.go | 77 +++++++++++++++------------- integration/rpctransact/call_test.go | 6 +-- vent/service/decoder.go | 2 +- vent/sqlsol/generate.go | 2 +- 5 
files changed, 47 insertions(+), 42 deletions(-) diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index 0c5250ee6..861aa22a7 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -567,7 +567,7 @@ func logEvents(txe *exec.TxExecution, client *def.Client, logger *logging.Logger var eventID abi.EventID copy(eventID[:], eventLog.GetTopic(0).Bytes()) - evAbi, ok := client.AllSpecs.EventsById[eventID] + evAbi, ok := client.AllSpecs.EventsByID[eventID] if !ok { logger.InfoMsg("Could not find ABI for Event", "Event ID", hex.EncodeUpperToString(eventID[:])) continue diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index cec94f435..5d1f770b0 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -32,15 +32,15 @@ type Variable struct { func init() { var err error - RevertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) + revertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) if err != nil { panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) } } -// RevertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). +// revertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). // If a function exits this way, the this hardcoded ABI will be used. -var RevertAbi *Spec +var revertAbi *Spec // EncodeFunctionCallFromFile ABI encodes a function call based on ABI in file, and the // arguments specified as strings. @@ -219,6 +219,7 @@ func stripHex(s string) string { return s } +// Argument is a decoded function parameter, return or event field type Argument struct { Name string EVM EVMType @@ -228,10 +229,12 @@ type Argument struct { ArrayLength uint64 } +// FunctionIDSize is the length of the function selector const FunctionIDSize = 4 type FunctionID [FunctionIDSize]byte +// EventIDSize is the length of the event selector const EventIDSize = 32 type EventID [EventIDSize]byte @@ -250,33 +253,34 @@ type EventSpec struct { Anonymous bool } +// Spec is the ABI for contract decoded. 
type Spec struct { - Constructor FunctionSpec - Fallback FunctionSpec - Functions map[string]FunctionSpec - Events map[string]EventSpec - EventsById map[EventID]EventSpec + Constructor FunctionSpec + Fallback FunctionSpec + Functions map[string]FunctionSpec + EventsByName map[string]EventSpec + EventsByID map[EventID]EventSpec } -type ArgumentJSON struct { +type argumentJSON struct { Name string Type string - Components []ArgumentJSON + Components []argumentJSON Indexed bool } -type SpecJSON struct { +type specJSON struct { Name string Type string - Inputs []ArgumentJSON - Outputs []ArgumentJSON + Inputs []argumentJSON + Outputs []argumentJSON Constant bool Payable bool StateMutability string Anonymous bool } -func readArgSpec(argsJ []ArgumentJSON) ([]Argument, error) { +func readArgSpec(argsJ []argumentJSON) ([]Argument, error) { args := make([]Argument, len(argsJ)) var err error @@ -378,13 +382,14 @@ func readArgSpec(argsJ []ArgumentJSON) ([]Argument, error) { return args, nil } +// ReadSpec takes an ABI and decodes it for futher use func ReadSpec(specBytes []byte) (*Spec, error) { - var specJ []SpecJSON + var specJ []specJSON err := json.Unmarshal(specBytes, &specJ) if err != nil { // The abi spec file might a bin file, with the Abi under the Abi field in json var binFile struct { - Abi []SpecJSON + Abi []specJSON } err = json.Unmarshal(specBytes, &binFile) if err != nil { @@ -394,9 +399,9 @@ func ReadSpec(specBytes []byte) (*Spec, error) { } abiSpec := Spec{ - Events: make(map[string]EventSpec), - EventsById: make(map[EventID]EventSpec), - Functions: make(map[string]FunctionSpec), + EventsByName: make(map[string]EventSpec), + EventsByID: make(map[EventID]EventSpec), + Functions: make(map[string]FunctionSpec), } for _, s := range specJ { @@ -424,8 +429,8 @@ func ReadSpec(specBytes []byte) (*Spec, error) { } } ev := EventSpec{Name: s.Name, EventID: GetEventID(sig), Inputs: inputs, Anonymous: s.Anonymous} - abiSpec.Events[ev.Name] = ev - abiSpec.EventsById[ev.EventID] = ev + abiSpec.EventsByName[ev.Name] = ev + abiSpec.EventsByID[ev.EventID] = ev case "function": inputs, err := readArgSpec(s.Inputs) if err != nil { @@ -444,6 +449,7 @@ func ReadSpec(specBytes []byte) (*Spec, error) { return &abiSpec, nil } +// ReadSpecFile reads an ABI file from a file func ReadSpecFile(filename string) (*Spec, error) { specBytes, err := ioutil.ReadFile(filename) if err != nil { @@ -458,9 +464,9 @@ func ReadSpecFile(filename string) (*Spec, error) { // some information loss. 
func MergeSpec(abiSpec []*Spec) *Spec { newSpec := Spec{ - Events: make(map[string]EventSpec), - EventsById: make(map[EventID]EventSpec), - Functions: make(map[string]FunctionSpec), + EventsByName: make(map[string]EventSpec), + EventsByID: make(map[EventID]EventSpec), + Functions: make(map[string]FunctionSpec), } for _, s := range abiSpec { @@ -470,16 +476,16 @@ func MergeSpec(abiSpec []*Spec) *Spec { // Different Abis can have the Event name, but with a different signature // Loop over the signatures, as these are less likely to have collisions - for _, e := range s.EventsById { - newSpec.Events[e.Name] = e - newSpec.EventsById[e.EventID] = e + for _, e := range s.EventsByID { + newSpec.EventsByName[e.Name] = e + newSpec.EventsByID[e.EventID] = e } } return &newSpec } -func EVMTypeFromReflect(v reflect.Type) Argument { +func typeFromReflect(v reflect.Type) Argument { arg := Argument{Name: v.Name()} if v == reflect.TypeOf(crypto.Address{}) { @@ -523,13 +529,13 @@ func SpecFromStructReflect(fname string, args reflect.Type, rets reflect.Type) * } for i := 0; i < args.NumField(); i++ { f := args.Field(i) - a := EVMTypeFromReflect(f.Type) + a := typeFromReflect(f.Type) a.Name = f.Name s.Inputs[i] = a } for i := 0; i < rets.NumField(); i++ { f := rets.Field(i) - a := EVMTypeFromReflect(f.Type) + a := typeFromReflect(f.Type) a.Name = f.Name s.Outputs[i] = a } @@ -550,11 +556,11 @@ func SpecFromFunctionReflect(fname string, v reflect.Value, skipIn, skipOut int) s.Outputs = make([]Argument, t.NumOut()-skipOut) for i := range s.Inputs { - s.Inputs[i] = EVMTypeFromReflect(t.In(i + skipIn)) + s.Inputs[i] = typeFromReflect(t.In(i + skipIn)) } for i := range s.Outputs { - s.Outputs[i] = EVMTypeFromReflect(t.Out(i)) + s.Outputs[i] = typeFromReflect(t.Out(i)) } s.SetFunctionID(fname) @@ -612,15 +618,13 @@ func GetEventID(signature string) (id EventID) { func UnpackRevert(data []byte) (message *string, err error) { if len(data) > 0 { var msg string - err = RevertAbi.UnpackWithID(data, &msg) + err = revertAbi.UnpackWithID(data, &msg) message = &msg } return } -/* - * Given a eventSpec, get all the fields (topic fields or not) - */ +// UnpackEvent decodes all the fields in an event (indexed topic fields or not) func UnpackEvent(eventSpec *EventSpec, topics []burrow_binary.Word256, data []byte, args ...interface{}) error { // First unpack the topic fields topicIndex := 0 @@ -644,6 +648,7 @@ func UnpackEvent(eventSpec *EventSpec, topics []burrow_binary.Word256, data []by }) } +// Unpack decodes the return values from a function call func (abiSpec *Spec) Unpack(data []byte, fname string, args ...interface{}) error { var funcSpec FunctionSpec var argSpec []Argument diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index fc3e52adc..84d829219 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -312,7 +312,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) log := evs[0] var direction string var depth int64 - evAbi := spec.Events["ChangeLevel"] + evAbi := spec.EventsByName["ChangeLevel"] err = abi.UnpackEvent(&evAbi, log.Topics, log.Data, &direction, &depth) require.NoError(t, err) assert.Equal(t, evAbi.EventID.Bytes(), log.Topics[0].Bytes()) @@ -334,7 +334,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) require.NoError(t, err) evs := filterLogs(callTxe.Events) log := evs[0] - evAbi := spec.Events["ManyTypes"] + evAbi := spec.EventsByName["ManyTypes"] data := 
abi.GetPackingTypes(evAbi.Inputs) // Check signature assert.Equal(t, evAbi.EventID.Bytes(), log.Topics[0].Bytes()) @@ -373,7 +373,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) require.NoError(t, err) evs := filterLogs(callTxe.Events) log := evs[0] - evAbi := spec.Events["ManyTypes"] + evAbi := spec.EventsByName["ManyTypes"] data := abi.GetPackingTypes(evAbi.Inputs) for i, a := range evAbi.Inputs { if a.Indexed && !a.Hashed && a.EVM.GetSignature() == "bytes32" { diff --git a/vent/service/decoder.go b/vent/service/decoder.go index c237a6648..77f7034fa 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -19,7 +19,7 @@ func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, a var eventID abi.EventID copy(eventID[:], log.Topics[0].Bytes()) - evAbi, ok := abiSpec.EventsById[eventID] + evAbi, ok := abiSpec.EventsByID[eventID] if !ok { return nil, fmt.Errorf("abi spec not found for event %x", eventID) } diff --git a/vent/sqlsol/generate.go b/vent/sqlsol/generate.go index 12644aafa..3a9f2bbc4 100644 --- a/vent/sqlsol/generate.go +++ b/vent/sqlsol/generate.go @@ -16,7 +16,7 @@ func GenerateSpecFromAbis(spec *abi.Spec) ([]*types.EventClass, error) { fields := make(map[string]field) - for _, ev := range spec.EventsById { + for _, ev := range spec.EventsByID { for _, in := range ev.Inputs { field, ok := fields[in.Name] if ok { From 285935f2d7bbc3b4ce259792f7fbde1964ce3ed5 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 17 Jul 2019 16:35:07 +0100 Subject: [PATCH 27/70] vent: ensure json is logged and burrowFormatLogger is used Signed-off-by: Sean Young --- logging/loggers/burrow_format_logger.go | 9 ++++----- logging/structure/structure.go | 27 ++++++++++++++++++------- vent/service/consumer.go | 6 ++++-- vent/service/rowbuilder.go | 4 ++-- vent/sqldb/sqldb.go | 6 +++--- vent/types/event_data.go | 2 +- 6 files changed, 34 insertions(+), 20 deletions(-) diff --git a/logging/loggers/burrow_format_logger.go b/logging/loggers/burrow_format_logger.go index f0b45c48b..c4ccfb497 100644 --- a/logging/loggers/burrow_format_logger.go +++ b/logging/loggers/burrow_format_logger.go @@ -42,11 +42,7 @@ func (bfl *burrowFormatLogger) Log(keyvals ...interface{}) error { if bfl.logger == nil { return nil } - if len(keyvals)%2 != 0 { - return fmt.Errorf("log line contains an odd number of elements so "+ - "was dropped: %v", keyvals) - } - keyvals = structure.MapKeyValues(keyvals, + keyvals, err := structure.MapKeyValues(keyvals, func(key interface{}, value interface{}) (interface{}, interface{}) { switch v := value.(type) { case string, json.Marshaler, encoding.TextMarshaler: @@ -59,6 +55,9 @@ func (bfl *burrowFormatLogger) Log(keyvals ...interface{}) error { } return structure.StringifyKey(key), value }) + if err != nil { + return err + } bfl.Lock() defer bfl.Unlock() return bfl.logger.Log(keyvals...) 
diff --git a/logging/structure/structure.go b/logging/structure/structure.go index c1024696f..a4d5e2461 100644 --- a/logging/structure/structure.go +++ b/logging/structure/structure.go @@ -216,14 +216,27 @@ func Value(keyvals []interface{}, key interface{}) interface{} { } // Maps key values pairs with a function (key, value) -> (new key, new value) -func MapKeyValues(keyvals []interface{}, fn func(interface{}, interface{}) (interface{}, interface{})) []interface{} { - mappedKeyvals := make([]interface{}, len(keyvals)) - for i := 0; i < 2*(len(keyvals)/2); i += 2 { - key := keyvals[i] - val := keyvals[i+1] - mappedKeyvals[i], mappedKeyvals[i+1] = fn(key, val) +func MapKeyValues(keyvals []interface{}, fn func(interface{}, interface{}) (interface{}, interface{})) ([]interface{}, error) { + mappedKeyvals := make([]interface{}, 0) + for i := 0; i < len(keyvals); { + keymap, ok := keyvals[i].(map[string]interface{}) + if ok { + for key, val := range keymap { + k, v := fn(key, val) + mappedKeyvals = append(mappedKeyvals, k, v) + } + i++ + } else { + if i+1 >= len(keyvals) { + return nil, fmt.Errorf("log line contains an odd number of elements so "+ + "was dropped: %v", keyvals) + } + k, v := fn(keyvals[i], keyvals[i+1]) + mappedKeyvals = append(mappedKeyvals, k, v) + i += 2 + } } - return mappedKeyvals + return mappedKeyvals, nil } // Deletes n elements starting with the ith from a slice by splicing. diff --git a/vent/service/consumer.go b/vent/service/consumer.go index 044fdb8b0..430d90956 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -281,7 +281,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi // there's a matching filter, add data to the rows if qry.Matches(taggedEvent) { - c.Log.InfoMsg(fmt.Sprintf("Matched event header: %v", event.Header), + c.Log.InfoMsg("Matched event", "header", event.Header, "filter", eventClass.Filter) // unpack, decode & build event data @@ -304,7 +304,9 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi // gets block data to upsert blk := blockData.Data - c.Log.InfoMsg(fmt.Sprintf("Upserting rows in SQL tables %v", blk), "block", fromBlock) + for name, rows := range blk.Tables { + c.Log.InfoMsg("Upserting rows in SQL table", "height", fromBlock, "table", name, "action", "UPSERT", "rows", rows) + } eventCh <- blk } diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index dde4a5170..942a3b35e 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -32,7 +32,7 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, return types.EventDataRow{}, errors.Wrapf(err, "Error decoding event (filter: %s)", eventClass.Filter) } - l.InfoMsg(fmt.Sprintf("Unpacked data: %v", decodedData), "eventName", decodedData[types.EventNameLabel]) + l.InfoMsg("Decoded event", decodedData) rowAction := types.ActionUpsert @@ -75,7 +75,7 @@ func buildBlkData(tbls types.EventTables, block *exec.BlockExecution) (types.Eve if _, ok := tbls[tables.Block]; ok { blockHeader, err := json.Marshal(block.Header) if err != nil { - return types.EventDataRow{}, fmt.Errorf("couldn not marshal BlockHeader in block %v", block) + return types.EventDataRow{}, fmt.Errorf("could not marshal BlockHeader in block %v", block) } row[columns.Height] = fmt.Sprintf("%v", block.Height) diff --git a/vent/sqldb/sqldb.go b/vent/sqldb/sqldb.go index db45a3058..07bac8953 100644 --- a/vent/sqldb/sqldb.go +++ b/vent/sqldb/sqldb.go @@ -289,7 +289,7 @@ func (db *SQLDB) 
SynchronizeDB(chainID string, eventTables types.EventTables) er // SetBlock inserts or updates multiple rows and stores log info in SQL tables func (db *SQLDB) SetBlock(chainID string, eventTables types.EventTables, eventData types.EventData) error { - db.Log.InfoMsg("Synchronize Block..........") + db.Log.InfoMsg("Synchronize Block", "action", "SYNC") // Begin tx tx, err := db.DB.Beginx() @@ -366,7 +366,7 @@ loop: eventName, _ := row.RowData[db.Columns.EventName].(string) // Insert in log - db.Log.InfoMsg("INSERT LOG", "query", logQuery, "value", + db.Log.InfoMsg("INSERT LOG", "action", "INSERT", "query", logQuery, "value", fmt.Sprintf("chainid = %s tableName = %s eventName = %s block = %d", chainID, safeTable, en, eventData.BlockHeight)) if _, err = logStmt.Exec(chainID, safeTable, eventName, row.EventClass.GetFilter(), eventData.BlockHeight, txHash, @@ -421,7 +421,7 @@ loop: return err } - db.Log.InfoMsg("COMMIT") + db.Log.InfoMsg("COMMIT", "action", "COMMIT") err = db.SetBlockHeight(tx, chainID, eventData.BlockHeight) if err != nil { diff --git a/vent/types/event_data.go b/vent/types/event_data.go index 7acdcf0cb..a4e4cbbb7 100644 --- a/vent/types/event_data.go +++ b/vent/types/event_data.go @@ -29,5 +29,5 @@ type EventDataRow struct { Action DBAction RowData map[string]interface{} // The EventClass that caused this row to be emitted (if it was caused by an specific event) - EventClass *EventClass + EventClass *EventClass `json:"-"` } From 67605b4b84cdc2cb605510fdb8865b28774e3224 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Thu, 20 Jun 2019 15:03:09 +0100 Subject: [PATCH 28/70] reimplement bond / unbond Signed-off-by: Gregory Hill --- acm/account.go | 2 +- acm/acm.pb.go | 45 +- acm/acmstate/state_cache.go | 3 +- acm/balance/balance.pb.go | 15 +- acm/validator/validator.pb.go | 13 +- bcm/bcm.pb.go | 65 +-- cmd/burrow/commands/config_options.go | 2 +- cmd/burrow/commands/configure.go | 2 - cmd/burrow/commands/tx.go | 64 ++- consensus/tendermint/tendermint.pb.go | 13 +- crypto/crypto.pb.go | 21 +- deploy/def/client.go | 60 +++ deploy/def/job.go | 4 + deploy/def/jobs.go | 43 ++ deploy/jobs/job_manager.go | 20 + deploy/jobs/jobs_transact.go | 76 ++++ docs/bonding.md | 52 +++ dump/dump.pb.go | 71 ++-- encoding/encoding.pb.go | 15 +- execution/contexts/bond_context.go | 171 ++++++++ execution/contexts/governance_context.go | 56 +-- execution/errors/errors.pb.go | 15 +- execution/exec/exec.pb.go | 263 ++++++------ execution/execution.go | 10 + execution/names/names.pb.go | 21 +- genesis/genesis.go | 2 +- genesis/spec/spec.pb.go | 33 +- go.sum | 1 + keys/keys.pb.go | 53 +-- permission/permission.pb.go | 27 +- protobuf/payload.proto | 13 +- rpc/rpc.pb.go | 13 +- rpc/rpcdump/rpcdump.pb.go | 23 +- rpc/rpcevents/rpcevents.pb.go | 55 +-- rpc/rpcquery/rpcquery.pb.go | 56 +-- rpc/rpctransact/rpctransact.pb.go | 90 ++-- storage/storage.pb.go | 15 +- tests/test_runner.sh | 2 +- txs/payload/bond_tx.go | 21 +- txs/payload/payload.pb.go | 511 ++++++++++++----------- txs/payload/unbond_tx.go | 6 +- txs/tx.go | 6 + txs/tx_test.go | 35 +- txs/txs.pb.go | 51 ++- 44 files changed, 1306 insertions(+), 829 deletions(-) create mode 100644 docs/bonding.md create mode 100644 execution/contexts/bond_context.go diff --git a/acm/account.go b/acm/account.go index 7a8d19f5d..d1aafda90 100644 --- a/acm/account.go +++ b/acm/account.go @@ -19,12 +19,12 @@ import ( "fmt" "github.com/gogo/protobuf/proto" - "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" 
"github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/execution/errors" "github.com/hyperledger/burrow/permission" + amino "github.com/tendermint/go-amino" ) var GlobalPermissionsAddress = crypto.Address(binary.Zero160) diff --git a/acm/acm.pb.go b/acm/acm.pb.go index dcc36a128..12ab2752b 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -5,16 +5,14 @@ package acm import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" permission "github.com/hyperledger/burrow/permission" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -154,17 +152,17 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintAcm(dAtA, i, uint64(m.Address.Size())) - n1, err1 := m.Address.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintAcm(dAtA, i, uint64(m.PublicKey.Size())) - n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.PublicKey.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 if m.Sequence != 0 { @@ -180,25 +178,25 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintAcm(dAtA, i, uint64(m.EVMCode.Size())) - n3, err3 := m.EVMCode.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.EVMCode.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 dAtA[i] = 0x32 i++ i = encodeVarintAcm(dAtA, i, uint64(m.Permissions.Size())) - n4, err4 := m.Permissions.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.Permissions.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 dAtA[i] = 0x3a i++ i = encodeVarintAcm(dAtA, i, uint64(m.WASMCode.Size())) - n5, err5 := m.WASMCode.MarshalTo(dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := m.WASMCode.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n5 if m.XXX_unrecognized != nil { @@ -245,7 +243,14 @@ func (m *Account) Size() (n int) { } func sovAcm(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozAcm(x uint64) (n int) { return sovAcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/acmstate/state_cache.go b/acm/acmstate/state_cache.go index 52aeb49b0..ee18d83a1 100644 --- a/acm/acmstate/state_cache.go +++ b/acm/acmstate/state_cache.go @@ -19,11 +19,10 @@ import ( "sort" "sync" - "github.com/hyperledger/burrow/execution/errors" - "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/execution/errors" ) type Cache struct { diff --git a/acm/balance/balance.pb.go b/acm/balance/balance.pb.go index 163d4dfa2..30fe3253a 100644 --- a/acm/balance/balance.pb.go +++ b/acm/balance/balance.pb.go @@ -5,13 +5,11 @@ package balance import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -161,7 +159,14 @@ func (m *Balance) Size() (n int) { } func sovBalance(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozBalance(x uint64) (n int) { return sovBalance(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/validator/validator.pb.go b/acm/validator/validator.pb.go index 2caeb7c8a..4ccac95a6 100644 --- a/acm/validator/validator.pb.go +++ b/acm/validator/validator.pb.go @@ -5,14 +5,12 @@ package validator import ( fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -125,7 +123,14 @@ func (m *Validator) Size() (n int) { } func sovValidator(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozValidator(x uint64) (n int) { return sovValidator(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/bcm/bcm.pb.go b/bcm/bcm.pb.go index 4afb005b5..9964a9c63 100644 --- a/bcm/bcm.pb.go +++ b/bcm/bcm.pb.go @@ -5,11 +5,6 @@ package bcm import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - time "time" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -17,6 +12,9 @@ import ( _ "github.com/golang/protobuf/ptypes/duration" _ "github.com/golang/protobuf/ptypes/timestamp" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" + io "io" + math "math" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -229,41 +227,41 @@ func (m *SyncInfo) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestBlockHash.Size())) - n1, err1 := m.LatestBlockHash.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.LatestBlockHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 dAtA[i] = 0x1a i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestAppHash.Size())) - n2, err2 := m.LatestAppHash.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.LatestAppHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 dAtA[i] = 0x22 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockTime))) - n3, err3 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) + if err != nil { + return 0, err } i += n3 dAtA[i] = 0x2a i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockSeenTime))) - n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) + if err != nil { + return 0, err } i += n4 dAtA[i] = 0x32 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.LatestBlockDuration))) - n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) + if err != nil { + return 0, err } i += n5 if m.XXX_unrecognized != nil { @@ -290,17 +288,17 @@ func (m *PersistedState) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintBcm(dAtA, i, uint64(m.AppHashAfterLastBlock.Size())) - n6, err6 := m.AppHashAfterLastBlock.MarshalTo(dAtA[i:]) - if err6 != nil { - return 0, err6 + n6, err := m.AppHashAfterLastBlock.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n6 dAtA[i] = 0x12 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LastBlockTime))) - n7, err7 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastBlockTime, dAtA[i:]) - if err7 != nil { - return 0, err7 + n7, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastBlockTime, dAtA[i:]) + if err != nil { + return 0, err } i += n7 if m.LastBlockHeight != 0 { @@ -311,9 +309,9 @@ func (m *PersistedState) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintBcm(dAtA, i, uint64(m.GenesisHash.Size())) - n8, err8 := m.GenesisHash.MarshalTo(dAtA[i:]) - if err8 != nil { - return 0, err8 + n8, err := m.GenesisHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n8 if m.XXX_unrecognized != nil { @@ -378,7 +376,14 @@ func (m *PersistedState) Size() (n int) { } func sovBcm(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozBcm(x uint64) (n int) { return sovBcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/cmd/burrow/commands/config_options.go b/cmd/burrow/commands/config_options.go index 31a30d713..e7c695f4b 100644 --- a/cmd/burrow/commands/config_options.go +++ b/cmd/burrow/commands/config_options.go @@ -58,7 +58,7 @@ func addConfigOptions(cmd *cli.Cmd) *configOptions { Name: "v validator", 
Desc: "Validator index (in validators list - GenesisSpec or GenesisDoc) from which to set Address", Value: -1, - EnvVar: "BURROW_VALIDATOR_INDEX", + EnvVar: "BURROW_NODE_INDEX", }), initAddressOpt: cmd.String(cli.StringOpt{ diff --git a/cmd/burrow/commands/configure.go b/cmd/burrow/commands/configure.go index e8a57b8f2..295646a07 100644 --- a/cmd/burrow/commands/configure.go +++ b/cmd/burrow/commands/configure.go @@ -46,8 +46,6 @@ func Configure(output Output) func(cmd *cli.Cmd) { configTemplateOut := cmd.StringsOpt("config-out", nil, "Go text/template output filename. Template filename specified with --config-template-in") - separateGenesisDoc := cmd.StringOpt("w separate-genesis-doc", "", "Emit a separate genesis doc as JSON or TOML") - loggingOpt := cmd.StringOpt("l logging", "", "Comma separated list of logging instructions which form a 'program' which is a depth-first "+ "pre-order of instructions that will build the root logging sink. See 'burrow help' for more information.") diff --git a/cmd/burrow/commands/tx.go b/cmd/burrow/commands/tx.go index 946296104..aeb37b10a 100644 --- a/cmd/burrow/commands/tx.go +++ b/cmd/burrow/commands/tx.go @@ -69,15 +69,71 @@ func Tx(output Output) func(cmd *cli.Cmd) { })) } }) + + cmd.Command("bond", "bond a new validator", func(cmd *cli.Cmd) { + sourceOpt := cmd.StringOpt("source", "", "Account with bonding perm, if not set config is used") + targetOpt := cmd.StringOpt("target", "", "Validator account to bond, created if doesn't exist") + powerOpt := cmd.StringOpt("power", "", "Amount of value to bond, required") + nodeOpt := cmd.StringOpt("node", "", "Optional Tendermint node address") + urlOpt := cmd.StringOpt("url", "", "Optional network address for validator") + cmd.Spec += "[--source=
<address>] [--target=<address>] [--power=<power>] [--node=<address>
] [--url=]" + + cmd.Action = func() { + bond := &def.Bond{ + Source: jobs.FirstOf(*sourceOpt, address), + Target: jobs.FirstOf(*targetOpt, address), + Power: *powerOpt, + Node: *nodeOpt, + Network: *urlOpt, + } + + if err := bond.Validate(); err != nil { + output.Fatalf("could not validate BondTx: %v", err) + } + + tx, err := jobs.FormulateBondJob(bond, address, client, logger) + if err != nil { + output.Fatalf("could not formulate BondTx: %v", err) + } + + output.Printf("%s", source.JSONString(payload.Any{ + BondTx: tx, + })) + } + }) + + cmd.Command("unbond", "unbond an existing validator", func(cmd *cli.Cmd) { + sourceOpt := cmd.StringOpt("source", "", "Validator to unbond, if not set config is used") + targetOpt := cmd.StringOpt("target", "", "Account to receive tokens, created if doesn't exist") + cmd.Spec += "[--source=
<address>] [--target=<address>
]" + + cmd.Action = func() { + unbond := &def.Unbond{ + Source: jobs.FirstOf(*sourceOpt, address), + Target: jobs.FirstOf(*targetOpt, address), + } + + if err := unbond.Validate(); err != nil { + output.Fatalf("could not validate UnbondTx: %v", err) + } + + tx, err := jobs.FormulateUnbondJob(unbond, address, client, logger) + if err != nil { + output.Fatalf("could not formulate UnbondTx: %v", err) + } + + output.Printf("%s", source.JSONString(payload.Any{ + UnbondTx: tx, + })) + } + }) }) cmd.Command("commit", "read and send a tx to mempool", func(cmd *cli.Cmd) { - configOpts := addConfigOptions(cmd) conf, err := configOpts.obtainBurrowConfig() if err != nil { output.Fatalf("could not set up config: %v", err) } - fileOpt := cmd.StringOpt("f file", "", "Read the tx spec from a file") cmd.Spec += "[--file=]" @@ -105,6 +161,10 @@ func Tx(output Output) func(cmd *cli.Cmd) { switch tx := rawTx.GetValue().(type) { case *payload.SendTx: hash, err = makeTx(client, tx) + case *payload.BondTx: + hash, err = makeTx(client, tx) + case *payload.UnbondTx: + hash, err = makeTx(client, tx) default: output.Fatalf("payload type not recognized") } diff --git a/consensus/tendermint/tendermint.pb.go b/consensus/tendermint/tendermint.pb.go index 7031992bb..e69ccf726 100644 --- a/consensus/tendermint/tendermint.pb.go +++ b/consensus/tendermint/tendermint.pb.go @@ -5,14 +5,12 @@ package tendermint import ( fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -184,7 +182,14 @@ func (m *NodeInfo) Size() (n int) { } func sovTendermint(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozTendermint(x uint64) (n int) { return sovTendermint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/crypto/crypto.pb.go b/crypto/crypto.pb.go index 161cb490d..aa7490042 100644 --- a/crypto/crypto.pb.go +++ b/crypto/crypto.pb.go @@ -5,14 +5,12 @@ package crypto import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -226,9 +224,9 @@ func (m *PublicKey) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintCrypto(dAtA, i, uint64(m.PublicKey.Size())) - n1, err1 := m.PublicKey.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.PublicKey.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 if m.XXX_unrecognized != nil { @@ -376,7 +374,14 @@ func (m *Signature) Size() (n int) { } func sovCrypto(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozCrypto(x uint64) (n int) { return sovCrypto(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/deploy/def/client.go b/deploy/def/client.go index 15db245a7..4fdaab3b1 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -504,6 +504,66 @@ func (c *Client) Send(arg *SendArg, logger *logging.Logger) (*payload.SendTx, er return tx, nil } +type BondArg struct { + Input string + Amount string + Sequence string + Address string + PublicKey string + NodeAddress string + NetAddress string +} + +func (c *Client) Bond(arg *BondArg, logger *logging.Logger) (*payload.BondTx, error) { + logger.InfoMsg("BondTx", "account", arg) + err := c.dial(logger) + if err != nil { + return nil, err + } + // TODO: disable mempool signing + input, err := c.TxInput(arg.Input, arg.Amount, arg.Sequence, true, logger) + if err != nil { + return nil, err + } + val := &spec.TemplateAccount{} + err = c.getIdentity(val, arg.Address, arg.PublicKey, logger) + if err != nil { + return nil, err + } + return &payload.BondTx{ + Input: input, + Validator: val, + }, nil +} + +type UnbondArg struct { + Input string + Output string + Sequence string +} + +func (c *Client) Unbond(arg *UnbondArg, logger *logging.Logger) (*payload.UnbondTx, error) { + logger.InfoMsg("UnbondTx", "account", arg) + if err := c.dial(logger); err != nil { + return nil, err + } + input, err := c.TxInput(arg.Input, "", arg.Sequence, true, logger) + if err != nil { + return nil, err + } + addr, err := c.GetKeyAddress(arg.Output, logger) + if err != nil { + return nil, fmt.Errorf("could not parse address: %v", err) + } + output := &payload.TxOutput{ + Address: addr, + } + return &payload.UnbondTx{ + Input: input, + Output: output, + }, nil +} + type NameArg struct { Input string Amount string diff --git a/deploy/def/job.go b/deploy/def/job.go index f1ab75d57..3368d5c29 100644 --- a/deploy/def/job.go +++ b/deploy/def/job.go @@ -39,6 +39,10 @@ type Job struct { Build *Build `mapstructure:"build,omitempty" json:"build,omitempty" yaml:"build,omitempty" toml:"build"` // Send tokens from one account to another Send *Send `mapstructure:"send,omitempty" json:"send,omitempty" yaml:"send,omitempty" toml:"send"` + // Bond tokens from an account + Bond *Bond `mapstructure:"bond,omitempty" json:"bond,omitempty" yaml:"bond,omitempty" toml:"bond"` + // Unbond tokens from an account + Unbond *Unbond `mapstructure:"unbond,omitempty" json:"unbond,omitempty" yaml:"unbond,omitempty" toml:"unbond"` // Utilize monax:db's native name registry to register a name RegisterName *RegisterName `mapstructure:"register,omitempty" json:"register,omitempty" yaml:"register,omitempty" toml:"register"` // Sends a transaction which will update the permissions of an account. 
Must be sent from an account which diff --git a/deploy/def/jobs.go b/deploy/def/jobs.go index 4ca2f8347..c3068fa75 100644 --- a/deploy/def/jobs.go +++ b/deploy/def/jobs.go @@ -175,6 +175,49 @@ func (job *Send) Validate() error { ) } +type Bond struct { + // (Optional, if account job or global account set) address of the account from which to bond (the + // public key for the account must be available to burrow keys) + Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` + // (Required) the identity of the bonding validator + Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` + // (Required) the Tendermint validator power to claim + Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` + // (Optional) Tendermint node address of the validator + Node string `mapstructure:"node" json:"node" yaml:"node" toml:"node"` + // (Optional) network ip address of the validator + Network string `mapstructure:"network" json:"network" yaml:"network" toml:"network"` + // (Optional, advanced only) sequence to use when burrow keys signs the transaction + // (do not use unless you know what you're doing) + Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` +} + +func (job *Bond) Validate() error { + return validation.ValidateStruct(job, + validation.Field(&job.Target, validation.Required), + validation.Field(&job.Power, validation.Required), + validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), + ) +} + +type Unbond struct { + // (Optional, if account job or global account set) address of the validator to unbond (the + // public key for the validator must be available to burrow keys) + Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` + // (Required) the identity of the unbonding validator + Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` + // (Optional, advanced only) sequence to use when burrow keys signs the transaction (do not use unless you + // know what you're doing) + Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` +} + +func (job *Unbond) Validate() error { + return validation.ValidateStruct(job, + validation.Field(&job.Target, validation.Required), + validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), + ) +} + type RegisterName struct { // (Optional, if account job or global account set) address of the account from which to send (the // public key for the account must be available to burrow keys) diff --git a/deploy/jobs/job_manager.go b/deploy/jobs/job_manager.go index 10fe303b0..5396c7317 100644 --- a/deploy/jobs/job_manager.go +++ b/deploy/jobs/job_manager.go @@ -176,6 +176,26 @@ func doJobs(playbook *def.Playbook, args *def.DeployArgs, client *def.Client, lo if err != nil { return err } + case *def.Bond: + announce(job.Name, "Bond", logger) + tx, err := FormulateBondJob(job.Bond, playbook.Account, client, logger) + if err != nil { + return err + } + job.Result, err = BondJob(job.Bond, tx, playbook.Account, client, logger) + if err != nil { + return err + } + case *def.Unbond: + announce(job.Name, "Unbond", logger) + tx, err := FormulateUnbondJob(job.Unbond, playbook.Account, client, logger) + if err != nil { + return err + } + job.Result, err = UnbondJob(job.Unbond, tx, playbook.Account, client, logger) + if err != nil { + return err + } case *def.RegisterName: announce(job.Name, "RegisterName", logger) txs, err := FormulateRegisterNameJob(job.RegisterName, args, 
playbook, client, logger) diff --git a/deploy/jobs/jobs_transact.go index 0b052407c..ada2da1c9 100644 --- a/deploy/jobs/jobs_transact.go +++ b/deploy/jobs/jobs_transact.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" + "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/txs/payload" @@ -47,6 +48,81 @@ func SendJob(send *def.Send, tx *payload.SendTx, account string, client *def.Cli return txe.Receipt.TxHash.String(), nil } +func FormulateBondJob(bond *def.Bond, account string, client *def.Client, logger *logging.Logger) (*payload.BondTx, error) { + // Use Default + bond.Source = FirstOf(bond.Source, account) + + // Formulate tx + logger.InfoMsg("Bonding Transaction", + "source", bond.Source, + "target", bond.Target, + "power", bond.Power) + + arg := &def.BondArg{ + Input: bond.Source, + Amount: bond.Power, + Sequence: bond.Sequence, + NodeAddress: bond.Node, + NetAddress: bond.Network, + } + + if len(bond.Source) == crypto.AddressHexLength { + arg.Address = bond.Target + } else { + arg.PublicKey = bond.Target + } + + return client.Bond(arg, logger) +} + +func BondJob(bond *def.Bond, tx *payload.BondTx, account string, client *def.Client, logger *logging.Logger) (string, error) { + // Sign, broadcast, display + txe, err := client.SignAndBroadcast(tx, logger) + if err != nil { + return "", util.ChainErrorHandler(account, err, logger) + } + + util.ReadTxSignAndBroadcast(txe, err, logger) + if err != nil { + return "", err + } + + return txe.Receipt.TxHash.String(), nil +} + +func FormulateUnbondJob(unbond *def.Unbond, account string, client *def.Client, logger *logging.Logger) (*payload.UnbondTx, error) { + // Use Default + unbond.Source = FirstOf(unbond.Source, account) + + // Formulate tx + logger.InfoMsg("Unbonding Transaction", + "source", unbond.Source, + "target", unbond.Target) + + arg := &def.UnbondArg{ + Input: unbond.Source, + Output: unbond.Target, + Sequence: unbond.Sequence, + } + + return client.Unbond(arg, logger) +} + +func UnbondJob(bond *def.Unbond, tx *payload.UnbondTx, account string, client *def.Client, logger *logging.Logger) (string, error) { + // Sign, broadcast, display + txe, err := client.SignAndBroadcast(tx, logger) + if err != nil { + return "", util.ChainErrorHandler(account, err, logger) + } + + util.ReadTxSignAndBroadcast(txe, err, logger) + if err != nil { + return "", err + } + + return txe.Receipt.TxHash.String(), nil +} + func FormulateRegisterNameJob(name *def.RegisterName, do *def.DeployArgs, playbook *def.Playbook, client *def.Client, logger *logging.Logger) ([]*payload.NameTx, error) { txs := make([]*payload.NameTx, 0) diff --git a/docs/bonding.md b/docs/bonding.md new file mode 100644 index 000000000..8a907f93a --- /dev/null +++ b/docs/bonding.md @@ -0,0 +1,52 @@ +# Bonding Validators + +As Burrow runs on Tendermint, it supports the notion of bonding validators. + +## Example + +We need at least one validator to start the chain, so run the following to construct +a genesis of two accounts with the `Bond` permission, one of which is pre-bonded: + +```bash +burrow spec -v1 -r1 | burrow configure -s- --pool +``` + +Let's start both nodes: + +```bash +burrow start --config burrow000.toml & +burrow start --config burrow001.toml & +``` + +Query the JSON RPC for all validators in the active set: + +```bash +curl -s "localhost:26758/validators" +``` + +This will return the pre-bonded validator, defined in our pool.
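The same check can be made over GRPC using Burrow's `rpcquery` service. The snippet below is only a sketch and is not part of this patch: the GRPC address is an assumption based on Burrow's usual default, so use whatever address the generated `burrow000.toml` actually specifies.

```go
package main

import (
	"context"
	"fmt"

	"github.com/hyperledger/burrow/rpc/rpcquery"
	"google.golang.org/grpc"
)

func main() {
	// Assumed GRPC listen address; check the GRPC section of the generated config.
	conn, err := grpc.Dial("localhost:10997", grpc.WithInsecure())
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	query := rpcquery.NewQueryClient(conn)
	set, err := query.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{})
	if err != nil {
		panic(err)
	}
	// Print the whole ValidatorSet message rather than individual fields so the
	// sketch does not depend on its exact layout.
	fmt.Println(set)
}
```

At this stage, either view should show only the single pre-bonded validator.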
+ +To have the second node bond and produce blocks: + +```bash +burrow tx --config burrow001.toml formulate bond --power 10000 | burrow tx commit +``` + +Note that this will bond the current account. To bond an alternate account (which is created if it doesn't exist), +simply specify the `--target=
` flag in formulation: + +```bash +burrow tx --config burrow001.toml formulate bond --target 8A468CC3A28A6E84ED52E433DA21D6E9ED7C1577 --power 10000 +``` + +It should now be in the validator set: + +```bash +curl -s "localhost:26759/validators" +``` + +To unbond this validator: + +```bash +burrow tx formulate unbond | burrow tx commit +``` \ No newline at end of file diff --git a/dump/dump.pb.go b/dump/dump.pb.go index a0990f643..75e01e226 100644 --- a/dump/dump.pb.go +++ b/dump/dump.pb.go @@ -5,11 +5,6 @@ package dump import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - time "time" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -20,6 +15,9 @@ import ( github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" exec "github.com/hyperledger/burrow/execution/exec" names "github.com/hyperledger/burrow/execution/names" + io "io" + math "math" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. @@ -334,17 +332,17 @@ func (m *Storage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Key.Size())) - n1, err1 := m.Key.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Key.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Value.Size())) - n2, err2 := m.Value.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.Value.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 if m.XXX_unrecognized != nil { @@ -371,9 +369,9 @@ func (m *AccountStorage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Address.Size())) - n3, err3 := m.Address.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 if len(m.Storage) > 0 { @@ -418,18 +416,18 @@ func (m *EVMEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err != nil { + return 0, err } i += n4 if m.Event != nil { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.Event.Size())) - n5, err5 := m.Event.MarshalTo(dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := m.Event.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n5 } @@ -463,9 +461,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Account.Size())) - n6, err6 := m.Account.MarshalTo(dAtA[i:]) - if err6 != nil { - return 0, err6 + n6, err := m.Account.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n6 } @@ -473,9 +471,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.AccountStorage.Size())) - n7, err7 := m.AccountStorage.MarshalTo(dAtA[i:]) - if err7 != nil { - return 0, err7 + n7, err := m.AccountStorage.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n7 } @@ -483,9 +481,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintDump(dAtA, i, uint64(m.EVMEvent.Size())) - n8, err8 := m.EVMEvent.MarshalTo(dAtA[i:]) - if err8 != nil { - 
return 0, err8 + n8, err := m.EVMEvent.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n8 } @@ -493,9 +491,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintDump(dAtA, i, uint64(m.Name.Size())) - n9, err9 := m.Name.MarshalTo(dAtA[i:]) - if err9 != nil { - return 0, err9 + n9, err := m.Name.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n9 } @@ -604,7 +602,14 @@ func (m *Dump) Size() (n int) { } func sovDump(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozDump(x uint64) (n int) { return sovDump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/encoding/encoding.pb.go b/encoding/encoding.pb.go index 11209f200..19b1f18ab 100644 --- a/encoding/encoding.pb.go +++ b/encoding/encoding.pb.go @@ -5,13 +5,11 @@ package encoding import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -163,7 +161,14 @@ func (m *TestMessage) Size() (n int) { } func sovEncoding(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozEncoding(x uint64) (n int) { return sovEncoding(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go new file mode 100644 index 000000000..15fb79dc2 --- /dev/null +++ b/execution/contexts/bond_context.go @@ -0,0 +1,171 @@ +package contexts + +import ( + "fmt" + "math/big" + + "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/acm/acmstate" + "github.com/hyperledger/burrow/acm/validator" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/permission" + "github.com/hyperledger/burrow/txs/payload" +) + +type BondContext struct { + StateWriter acmstate.ReaderWriter + ValidatorSet validator.ReaderWriter + Logger *logging.Logger + tx *payload.BondTx +} + +// Execute a BondTx to add a new validator +func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error { + var ok bool + ctx.tx, ok = p.(*payload.BondTx) + if !ok { + return fmt.Errorf("payload must be BondTx, but is: %v", txe.Envelope.Tx.Payload) + } + + // the account initiating the bond (may be validator) + account, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) + if err != nil { + return err + } + + // check if account is validator + power, err := ctx.ValidatorSet.Power(account.Address) + if err != nil { + return err + } else if power != nil && power.Cmp(big.NewInt(0)) == 1 { + // TODO: something with nodekey + // ctx.tx.NetAddress + } + + // account is not validator, can it bond someone? 
+ if !hasBondPermission(ctx.StateWriter, account, ctx.Logger) { + return fmt.Errorf("account '%s' lacks bond permission", account.Address) + } + + // check account has enough to bond + amount := ctx.tx.Input.GetAmount() + if amount == 0 { + return fmt.Errorf("nothing to bond") + } else if account.Balance < amount { + return fmt.Errorf("insufficient funds, account %s only has balance %v and "+ + "we are deducting %v", account.Address, account.Balance, amount) + } + + // ensure pubKey of validator is set + val := ctx.tx.Validator + err = GetIdentity(ctx.StateWriter, val) + if err != nil { + return fmt.Errorf("BondTx: %v", err) + } + + // can power be added? + power = new(big.Int).SetUint64(amount) + if !power.IsInt64() { + return fmt.Errorf("power supplied by %v does not fit into int64 and "+ + "so is not supported by Tendermint", account.Address) + } + priorPow, err := ctx.ValidatorSet.Power(*val.Address) + if err != nil { + return err + } + postPow := big.NewInt(0).Add(priorPow, power) + if !postPow.IsInt64() { + return fmt.Errorf("power supplied in update to validator power for %v does not fit into int64 and "+ + "so is not supported by Tendermint", *val.Address) + } + + // create the account if not bonder + if *val.Address != account.Address { + valAcc, err := ctx.StateWriter.GetAccount(*val.Address) + if err != nil { + return err + } else if valAcc == nil { + // validator account doesn't exist + valAcc = &acm.Account{ + Address: *val.Address, + Sequence: 0, + Balance: 0, + Permissions: permission.ZeroAccountPermissions, + } + } + // pk must be known later to unbond + valAcc.PublicKey = *val.PublicKey + err = ctx.StateWriter.UpdateAccount(valAcc) + if err != nil { + return err + } + } + + // we're good to go + err = account.SubtractFromBalance(amount) + if err != nil { + return err + } + err = validator.AddPower(ctx.ValidatorSet, *val.PublicKey, power) + if err != nil { + return err + } + + return ctx.StateWriter.UpdateAccount(account) +} + +type UnbondContext struct { + StateWriter acmstate.ReaderWriter + ValidatorSet validator.ReaderWriter + Logger *logging.Logger + tx *payload.UnbondTx +} + +// Execute an UnbondTx to remove a validator +func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) error { + var ok bool + ctx.tx, ok = p.(*payload.UnbondTx) + if !ok { + return fmt.Errorf("payload must be UnbondTx, but is: %v", txe.Envelope.Tx.Payload) + } + + // the unbonding validator + sender, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) + if err != nil { + return err + } + + recipient, err := ctx.StateWriter.GetAccount(ctx.tx.Output.Address) + if err != nil { + return err + } + + // make sure that the validator has power to remove + power, err := ctx.ValidatorSet.Power(sender.Address) + if err != nil { + return err + } else if power == nil || power.Cmp(big.NewInt(0)) == 0 { + return fmt.Errorf("nothing bonded for validator '%s'", sender.Address) + } + + publicKey, err := MaybeGetPublicKey(ctx.StateWriter, sender.Address) + if err != nil { + return err + } else if publicKey == nil { + return fmt.Errorf("need public key to unbond '%s'", sender.Address) + } + + // remove power and transfer to output + err = validator.SubtractPower(ctx.ValidatorSet, *publicKey, power) + if err != nil { + return err + } + + err = recipient.AddToBalance(power.Uint64()) + if err != nil { + return err + } + + return ctx.StateWriter.UpdateAccount(recipient) +} diff --git a/execution/contexts/governance_context.go b/execution/contexts/governance_context.go index 3847edd36..544c4e3ea 
100644 --- a/execution/contexts/governance_context.go +++ b/execution/contexts/governance_context.go @@ -50,28 +50,9 @@ func (ctx *GovernanceContext) Execute(txe *exec.TxExecution, p payload.Payload) } for _, update := range ctx.tx.AccountUpdates { - if update.Address == nil && update.PublicKey == nil { - // We do not want to generate a key - return fmt.Errorf("could not execution GovTx since account template %v contains neither "+ - "address or public key", update) - } - if update.PublicKey == nil { - update.PublicKey, err = ctx.MaybeGetPublicKey(*update.Address) - if err != nil { - return err - } - } - // Check address - if update.PublicKey != nil { - address := update.PublicKey.GetAddress() - if update.Address != nil && address != *update.Address { - return fmt.Errorf("supplied public key %v whose address %v does not match %v provided by"+ - "GovTx", update.PublicKey, address, update.Address) - } - update.Address = &address - } else if update.Balances().HasPower() { - // If we are updating power we will need the key - return fmt.Errorf("GovTx must be provided with public key when updating validator power") + err := GetIdentity(ctx.StateWriter, update) + if err != nil { + return fmt.Errorf("GovTx: %v", err) } account, err := getOrMakeOutput(ctx.StateWriter, accounts, *update.Address, ctx.Logger) if err != nil { @@ -134,9 +115,36 @@ func (ctx *GovernanceContext) UpdateAccount(account *acm.Account, update *spec.T return } -func (ctx *GovernanceContext) MaybeGetPublicKey(address crypto.Address) (*crypto.PublicKey, error) { +func GetIdentity(sw acmstate.ReaderWriter, account *spec.TemplateAccount) (err error) { + if account.Address == nil && account.PublicKey == nil { + // We do not want to generate a key + return fmt.Errorf("could not execute Tx since account template %v contains neither "+ + "address or public key", account) + } + if account.PublicKey == nil { + account.PublicKey, err = MaybeGetPublicKey(sw, *account.Address) + if err != nil { + return err + } + } + // Check address + if account.PublicKey != nil { + address := account.PublicKey.GetAddress() + if account.Address != nil && address != *account.Address { + return fmt.Errorf("supplied public key %v whose address %v does not match %v provided by"+ + "GovTx", account.PublicKey, address, account.Address) + } + account.Address = &address + } else if account.Balances().HasPower() { + // If we are updating power we will need the key + return fmt.Errorf("must be provided with public key when updating validator power") + } + return nil +} + +func MaybeGetPublicKey(sw acmstate.ReaderWriter, address crypto.Address) (*crypto.PublicKey, error) { // First try state in case chain has received input previously - acc, err := ctx.StateWriter.GetAccount(address) + acc, err := sw.GetAccount(address) if err != nil { return nil, err } diff --git a/execution/errors/errors.pb.go b/execution/errors/errors.pb.go index f88cc6abe..5eafb4e87 100644 --- a/execution/errors/errors.pb.go +++ b/execution/errors/errors.pb.go @@ -5,13 +5,11 @@ package errors import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -165,7 +163,14 @@ func (m *Exception) Size() (n int) { } func sovErrors(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozErrors(x uint64) (n int) { return sovErrors(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/exec/exec.pb.go b/execution/exec/exec.pb.go index 30c9a7708..c0e6e4d92 100644 --- a/execution/exec/exec.pb.go +++ b/execution/exec/exec.pb.go @@ -5,11 +5,6 @@ package exec import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - time "time" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -25,6 +20,9 @@ import ( txs "github.com/hyperledger/burrow/txs" github_com_hyperledger_burrow_txs_payload "github.com/hyperledger/burrow/txs/payload" types "github.com/tendermint/tendermint/abci/types" + io "io" + math "math" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. @@ -1466,9 +1464,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginBlock.Size())) - n1, err1 := m.BeginBlock.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.BeginBlock.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 } @@ -1476,9 +1474,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginTx.Size())) - n2, err2 := m.BeginTx.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.BeginTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -1486,9 +1484,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.Envelope.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -1496,9 +1494,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Event.Size())) - n4, err4 := m.Event.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.Event.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 } @@ -1506,9 +1504,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.EndTx.Size())) - n5, err5 := m.EndTx.MarshalTo(dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := m.EndTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n5 } @@ -1516,9 +1514,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.EndBlock.Size())) - n6, err6 := m.EndBlock.MarshalTo(dAtA[i:]) - if err6 != nil { - return 0, err6 + n6, err := m.EndBlock.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n6 } @@ -1552,9 +1550,9 @@ func (m *BeginBlock) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n7, err7 := m.Header.MarshalTo(dAtA[i:]) - if err7 != nil { - return 0, err7 + n7, err := m.Header.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n7 } @@ -1609,9 +1607,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n8, err8 := m.TxHeader.MarshalTo(dAtA[i:]) - if err8 != nil { - return 0, err8 + 
n8, err := m.TxHeader.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n8 } @@ -1619,9 +1617,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n9, err9 := m.Result.MarshalTo(dAtA[i:]) - if err9 != nil { - return 0, err9 + n9, err := m.Result.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n9 } @@ -1629,9 +1627,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n10, err10 := m.Exception.MarshalTo(dAtA[i:]) - if err10 != nil { - return 0, err10 + n10, err := m.Exception.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n10 } @@ -1659,9 +1657,9 @@ func (m *EndTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n11, err11 := m.TxHash.MarshalTo(dAtA[i:]) - if err11 != nil { - return 0, err11 + n11, err := m.TxHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n11 if m.XXX_unrecognized != nil { @@ -1693,9 +1691,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n12, err12 := m.TxHash.MarshalTo(dAtA[i:]) - if err12 != nil { - return 0, err12 + n12, err := m.TxHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n12 if m.Height != 0 { @@ -1712,9 +1710,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n13, err13 := m.Origin.MarshalTo(dAtA[i:]) - if err13 != nil { - return 0, err13 + n13, err := m.Origin.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n13 } @@ -1748,9 +1746,9 @@ func (m *BlockExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n14, err14 := m.Header.MarshalTo(dAtA[i:]) - if err14 != nil { - return 0, err14 + n14, err := m.Header.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n14 } @@ -1822,9 +1820,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n15, err15 := m.TxHeader.MarshalTo(dAtA[i:]) - if err15 != nil { - return 0, err15 + n15, err := m.TxHeader.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n15 } @@ -1832,9 +1830,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n16, err16 := m.Envelope.MarshalTo(dAtA[i:]) - if err16 != nil { - return 0, err16 + n16, err := m.Envelope.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n16 } @@ -1854,9 +1852,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n17, err17 := m.Result.MarshalTo(dAtA[i:]) - if err17 != nil { - return 0, err17 + n17, err := m.Result.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n17 } @@ -1864,9 +1862,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintExec(dAtA, i, uint64(m.Receipt.Size())) - n18, err18 := m.Receipt.MarshalTo(dAtA[i:]) - if err18 != nil { - return 0, err18 + n18, err := m.Receipt.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n18 } @@ -1874,9 +1872,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x52 i++ i = encodeVarintExec(dAtA, i, 
uint64(m.Exception.Size())) - n19, err19 := m.Exception.MarshalTo(dAtA[i:]) - if err19 != nil { - return 0, err19 + n19, err := m.Exception.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n19 } @@ -1932,9 +1930,9 @@ func (m *Origin) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n20, err20 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err20 != nil { - return 0, err20 + n20, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err != nil { + return 0, err } i += n20 if m.XXX_unrecognized != nil { @@ -1966,9 +1964,9 @@ func (m *Header) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n21, err21 := m.TxHash.MarshalTo(dAtA[i:]) - if err21 != nil { - return 0, err21 + n21, err := m.TxHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n21 if m.EventType != 0 { @@ -1996,9 +1994,9 @@ func (m *Header) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n22, err22 := m.Exception.MarshalTo(dAtA[i:]) - if err22 != nil { - return 0, err22 + n22, err := m.Exception.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n22 } @@ -2027,9 +2025,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n23, err23 := m.Header.MarshalTo(dAtA[i:]) - if err23 != nil { - return 0, err23 + n23, err := m.Header.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n23 } @@ -2037,9 +2035,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Input.Size())) - n24, err24 := m.Input.MarshalTo(dAtA[i:]) - if err24 != nil { - return 0, err24 + n24, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n24 } @@ -2047,9 +2045,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Output.Size())) - n25, err25 := m.Output.MarshalTo(dAtA[i:]) - if err25 != nil { - return 0, err25 + n25, err := m.Output.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n25 } @@ -2057,9 +2055,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Call.Size())) - n26, err26 := m.Call.MarshalTo(dAtA[i:]) - if err26 != nil { - return 0, err26 + n26, err := m.Call.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n26 } @@ -2067,9 +2065,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Log.Size())) - n27, err27 := m.Log.MarshalTo(dAtA[i:]) - if err27 != nil { - return 0, err27 + n27, err := m.Log.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n27 } @@ -2077,9 +2075,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.GovernAccount.Size())) - n28, err28 := m.GovernAccount.MarshalTo(dAtA[i:]) - if err28 != nil { - return 0, err28 + n28, err := m.GovernAccount.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n28 } @@ -2119,9 +2117,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.NameEntry.Size())) - n29, err29 := m.NameEntry.MarshalTo(dAtA[i:]) - if err29 != nil { - return 0, err29 + n29, err := 
m.NameEntry.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n29 } @@ -2129,9 +2127,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.PermArgs.Size())) - n30, err30 := m.PermArgs.MarshalTo(dAtA[i:]) - if err30 != nil { - return 0, err30 + n30, err := m.PermArgs.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n30 } @@ -2159,17 +2157,17 @@ func (m *LogEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n31, err31 := m.Address.MarshalTo(dAtA[i:]) - if err31 != nil { - return 0, err31 + n31, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n31 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n32, err32 := m.Data.MarshalTo(dAtA[i:]) - if err32 != nil { - return 0, err32 + n32, err := m.Data.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n32 if len(m.Topics) > 0 { @@ -2209,18 +2207,18 @@ func (m *CallEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.CallData.Size())) - n33, err33 := m.CallData.MarshalTo(dAtA[i:]) - if err33 != nil { - return 0, err33 + n33, err := m.CallData.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n33 } dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n34, err34 := m.Origin.MarshalTo(dAtA[i:]) - if err34 != nil { - return 0, err34 + n34, err := m.Origin.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n34 if m.StackDepth != 0 { @@ -2231,9 +2229,9 @@ func (m *CallEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Return.Size())) - n35, err35 := m.Return.MarshalTo(dAtA[i:]) - if err35 != nil { - return 0, err35 + n35, err := m.Return.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n35 if m.CallType != 0 { @@ -2266,9 +2264,9 @@ func (m *GovernAccountEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.AccountUpdate.Size())) - n36, err36 := m.AccountUpdate.MarshalTo(dAtA[i:]) - if err36 != nil { - return 0, err36 + n36, err := m.AccountUpdate.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n36 } @@ -2296,9 +2294,9 @@ func (m *InputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n37, err37 := m.Address.MarshalTo(dAtA[i:]) - if err37 != nil { - return 0, err37 + n37, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n37 if m.XXX_unrecognized != nil { @@ -2325,9 +2323,9 @@ func (m *OutputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n38, err38 := m.Address.MarshalTo(dAtA[i:]) - if err38 != nil { - return 0, err38 + n38, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n38 if m.XXX_unrecognized != nil { @@ -2354,25 +2352,25 @@ func (m *CallData) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Caller.Size())) - n39, err39 := m.Caller.MarshalTo(dAtA[i:]) - if err39 != nil { - return 0, err39 + n39, err := m.Caller.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n39 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Callee.Size())) - n40, err40 := m.Callee.MarshalTo(dAtA[i:]) - if err40 != nil { - return 0, err40 + n40, err := m.Callee.MarshalTo(dAtA[i:]) + if 
err != nil { + return 0, err } i += n40 dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n41, err41 := m.Data.MarshalTo(dAtA[i:]) - if err41 != nil { - return 0, err41 + n41, err := m.Data.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n41 if m.Value != 0 { @@ -2878,7 +2876,14 @@ func (m *CallData) Size() (n int) { } func sovExec(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozExec(x uint64) (n int) { return sovExec(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/execution.go b/execution/execution.go index d741e253f..ce94b1812 100644 --- a/execution/execution.go +++ b/execution/execution.go @@ -173,6 +173,16 @@ func newExecutor(name string, runCall bool, params Params, backend ExecutorState StateWriter: exe.stateCache, Logger: exe.logger, }, + payload.TypeBond: &contexts.BondContext{ + ValidatorSet: exe.validatorCache, + StateWriter: exe.stateCache, + Logger: exe.logger, + }, + payload.TypeUnbond: &contexts.UnbondContext{ + ValidatorSet: exe.validatorCache, + StateWriter: exe.stateCache, + Logger: exe.logger, + }, } exe.contexts = map[payload.Type]contexts.Context{ diff --git a/execution/names/names.pb.go b/execution/names/names.pb.go index 5a34f0ce9..32b5f872e 100644 --- a/execution/names/names.pb.go +++ b/execution/names/names.pb.go @@ -5,14 +5,12 @@ package names import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -147,9 +145,9 @@ func (m *Entry) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintNames(dAtA, i, uint64(m.Owner.Size())) - n1, err1 := m.Owner.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Owner.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 if len(m.Data) > 0 { @@ -204,7 +202,14 @@ func (m *Entry) Size() (n int) { } func sovNames(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozNames(x uint64) (n int) { return sovNames(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/genesis/genesis.go b/genesis/genesis.go index 0ffee843f..b31b7a8cf 100644 --- a/genesis/genesis.go +++ b/genesis/genesis.go @@ -38,7 +38,7 @@ const ShortHashSuffixBytes = 3 // core types for a genesis definition type BasicAccount struct { - // Address is convenient to have in file for reference, but otherwise ignored since derived from PublicKey + // Address is convenient to have in file for reference, but otherwise ignored since derived from PublicKey Address crypto.Address PublicKey crypto.PublicKey Amount uint64 diff --git a/genesis/spec/spec.pb.go b/genesis/spec/spec.pb.go index 886e49e0f..313653bb6 100644 --- a/genesis/spec/spec.pb.go +++ b/genesis/spec/spec.pb.go @@ -5,10 +5,6 @@ package spec import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -16,6 +12,8 @@ import ( balance "github.com/hyperledger/burrow/acm/balance" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -171,9 +169,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintSpec(dAtA, i, uint64(m.Address.Size())) - n1, err1 := m.Address.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 } @@ -181,9 +179,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintSpec(dAtA, i, uint64(m.PublicKey.Size())) - n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.PublicKey.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -233,9 +231,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintSpec(dAtA, i, uint64(m.Code.Size())) - n3, err3 := m.Code.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.Code.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -301,7 +299,14 @@ func (m *TemplateAccount) Size() (n int) { } func sovSpec(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozSpec(x uint64) (n int) { return sovSpec(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/go.sum b/go.sum index 7aaab95b6..e5c6abbc6 100644 --- a/go.sum +++ b/go.sum @@ -96,6 +96,7 @@ github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= diff --git a/keys/keys.pb.go b/keys/keys.pb.go index 7458fbb32..3bc1c9df1 100644 --- a/keys/keys.pb.go +++ b/keys/keys.pb.go @@ -6,16 +6,12 @@ package keys import ( context "context" fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -1346,44 +1342,6 @@ type KeysServer interface { AddName(context.Context, *AddNameRequest) (*AddNameResponse, error) } -// UnimplementedKeysServer can be embedded to have forward compatible implementations. 
-type UnimplementedKeysServer struct { -} - -func (*UnimplementedKeysServer) GenerateKey(ctx context.Context, req *GenRequest) (*GenResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GenerateKey not implemented") -} -func (*UnimplementedKeysServer) PublicKey(ctx context.Context, req *PubRequest) (*PubResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method PublicKey not implemented") -} -func (*UnimplementedKeysServer) Sign(ctx context.Context, req *SignRequest) (*SignResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") -} -func (*UnimplementedKeysServer) Verify(ctx context.Context, req *VerifyRequest) (*VerifyResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") -} -func (*UnimplementedKeysServer) Import(ctx context.Context, req *ImportRequest) (*ImportResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Import not implemented") -} -func (*UnimplementedKeysServer) ImportJSON(ctx context.Context, req *ImportJSONRequest) (*ImportResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ImportJSON not implemented") -} -func (*UnimplementedKeysServer) Export(ctx context.Context, req *ExportRequest) (*ExportResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Export not implemented") -} -func (*UnimplementedKeysServer) Hash(ctx context.Context, req *HashRequest) (*HashResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Hash not implemented") -} -func (*UnimplementedKeysServer) RemoveName(ctx context.Context, req *RemoveNameRequest) (*RemoveNameResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RemoveName not implemented") -} -func (*UnimplementedKeysServer) List(ctx context.Context, req *ListRequest) (*ListResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method List not implemented") -} -func (*UnimplementedKeysServer) AddName(ctx context.Context, req *AddNameRequest) (*AddNameResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method AddName not implemented") -} - func RegisterKeysServer(s *grpc.Server, srv KeysServer) { s.RegisterService(&_Keys_serviceDesc, srv) } @@ -2068,7 +2026,14 @@ func (m *AddNameRequest) Size() (n int) { } func sovKeys(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozKeys(x uint64) (n int) { return sovKeys(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/permission/permission.pb.go b/permission/permission.pb.go index 41abb94c9..bb07ce156 100644 --- a/permission/permission.pb.go +++ b/permission/permission.pb.go @@ -5,14 +5,12 @@ package permission import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -273,9 +271,9 @@ func (m *AccountPermissions) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPermission(dAtA, i, uint64(m.Base.Size())) - n1, err1 := m.Base.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Base.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 if len(m.Roles) > 0 { @@ -348,9 +346,9 @@ func (m *PermArgs) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPermission(dAtA, i, uint64(m.Target.Size())) - n2, err2 := m.Target.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.Target.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -446,7 +444,14 @@ func (m *PermArgs) Size() (n int) { } func sovPermission(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozPermission(x uint64) (n int) { return sovPermission(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/protobuf/payload.proto b/protobuf/payload.proto index 867787ee5..1667e1778 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -108,17 +108,22 @@ message BondTx { option (gogoproto.goproto_stringer) = false; option (gogoproto.goproto_getters) = false; - repeated TxInput Inputs = 1; - repeated TxOutput UnbondTo = 2; + // Account with bonding permission + TxInput Input = 1; + // The validator to bond, public key must be known + spec.TemplateAccount Validator = 2 [(gogoproto.nullable) = true]; + // Optional network address to identify + string NetAddress = 3; } message UnbondTx { option (gogoproto.goproto_stringer) = false; option (gogoproto.goproto_getters) = false; + // The validator to unbond TxInput Input = 1; - bytes Address = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; - uint64 Height = 3; + // The account to unbond power to + TxOutput Output = 2; } message GovTx { diff --git a/rpc/rpc.pb.go b/rpc/rpc.pb.go index 4e387f401..f8a6ab389 100644 --- a/rpc/rpc.pb.go +++ b/rpc/rpc.pb.go @@ -5,9 +5,6 @@ package rpc import ( fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -15,6 +12,7 @@ import ( bcm "github.com/hyperledger/burrow/bcm" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" tendermint "github.com/hyperledger/burrow/consensus/tendermint" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -197,7 +195,14 @@ func (m *ResultStatus) Size() (n int) { } func sovRpc(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozRpc(x uint64) (n int) { return sovRpc(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcdump/rpcdump.pb.go b/rpc/rpcdump/rpcdump.pb.go index 34bb4554b..11f635b75 100644 --- a/rpc/rpcdump/rpcdump.pb.go +++ b/rpc/rpcdump/rpcdump.pb.go @@ -6,16 +6,12 @@ package rpcdump import ( context "context" fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" dump "github.com/hyperledger/burrow/dump" grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -156,14 +152,6 @@ type DumpServer interface { GetDump(*GetDumpParam, Dump_GetDumpServer) error } -// UnimplementedDumpServer can be embedded to have forward compatible implementations. -type UnimplementedDumpServer struct { -} - -func (*UnimplementedDumpServer) GetDump(req *GetDumpParam, srv Dump_GetDumpServer) error { - return status.Errorf(codes.Unimplemented, "method GetDump not implemented") -} - func RegisterDumpServer(s *grpc.Server, srv DumpServer) { s.RegisterService(&_Dump_serviceDesc, srv) } @@ -219,7 +207,14 @@ func (m *GetDumpParam) Size() (n int) { } func sovRpcdump(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozRpcdump(x uint64) (n int) { return sovRpcdump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcevents/rpcevents.pb.go b/rpc/rpcevents/rpcevents.pb.go index ba120cae2..db293aaea 100644 --- a/rpc/rpcevents/rpcevents.pb.go +++ b/rpc/rpcevents/rpcevents.pb.go @@ -6,18 +6,14 @@ package rpcevents import ( context "context" fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" exec "github.com/hyperledger/burrow/execution/exec" grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -766,20 +762,6 @@ type ExecutionEventsServer interface { Events(*BlocksRequest, ExecutionEvents_EventsServer) error } -// UnimplementedExecutionEventsServer can be embedded to have forward compatible implementations. 
-type UnimplementedExecutionEventsServer struct { -} - -func (*UnimplementedExecutionEventsServer) Stream(req *BlocksRequest, srv ExecutionEvents_StreamServer) error { - return status.Errorf(codes.Unimplemented, "method Stream not implemented") -} -func (*UnimplementedExecutionEventsServer) Tx(ctx context.Context, req *TxRequest) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method Tx not implemented") -} -func (*UnimplementedExecutionEventsServer) Events(req *BlocksRequest, srv ExecutionEvents_EventsServer) error { - return status.Errorf(codes.Unimplemented, "method Events not implemented") -} - func RegisterExecutionEventsServer(s *grpc.Server, srv ExecutionEventsServer) { s.RegisterService(&_ExecutionEvents_serviceDesc, srv) } @@ -922,9 +904,9 @@ func (m *TxRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.TxHash.Size())) - n1, err1 := m.TxHash.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.TxHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 if m.Wait { @@ -962,9 +944,9 @@ func (m *BlocksRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.BlockRange.Size())) - n2, err2 := m.BlockRange.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.BlockRange.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -1143,9 +1125,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.Start.Size())) - n3, err3 := m.Start.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.Start.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -1153,9 +1135,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.End.Size())) - n4, err4 := m.End.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.End.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 } @@ -1332,7 +1314,14 @@ func (m *BlockRange) Size() (n int) { } func sovRpcevents(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozRpcevents(x uint64) (n int) { return sovRpcevents(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index 7e44f38e5..5197b6315 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -6,9 +6,6 @@ package rpcquery import ( context "context" fmt "fmt" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -21,8 +18,7 @@ import ( payload "github.com/hyperledger/burrow/txs/payload" types "github.com/tendermint/tendermint/abci/types" grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -1073,47 +1069,6 @@ type QueryServer interface { GetBlockHeader(context.Context, *GetBlockParam) (*types.Header, error) } -// UnimplementedQueryServer can be embedded to have forward compatible implementations. 
-type UnimplementedQueryServer struct { -} - -func (*UnimplementedQueryServer) Status(ctx context.Context, req *StatusParam) (*rpc.ResultStatus, error) { - return nil, status.Errorf(codes.Unimplemented, "method Status not implemented") -} -func (*UnimplementedQueryServer) GetAccount(ctx context.Context, req *GetAccountParam) (*acm.Account, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetAccount not implemented") -} -func (*UnimplementedQueryServer) GetStorage(ctx context.Context, req *GetStorageParam) (*StorageValue, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetStorage not implemented") -} -func (*UnimplementedQueryServer) ListAccounts(req *ListAccountsParam, srv Query_ListAccountsServer) error { - return status.Errorf(codes.Unimplemented, "method ListAccounts not implemented") -} -func (*UnimplementedQueryServer) GetName(ctx context.Context, req *GetNameParam) (*names.Entry, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetName not implemented") -} -func (*UnimplementedQueryServer) ListNames(req *ListNamesParam, srv Query_ListNamesServer) error { - return status.Errorf(codes.Unimplemented, "method ListNames not implemented") -} -func (*UnimplementedQueryServer) GetValidatorSet(ctx context.Context, req *GetValidatorSetParam) (*ValidatorSet, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSet not implemented") -} -func (*UnimplementedQueryServer) GetValidatorSetHistory(ctx context.Context, req *GetValidatorSetHistoryParam) (*ValidatorSetHistory, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSetHistory not implemented") -} -func (*UnimplementedQueryServer) GetProposal(ctx context.Context, req *GetProposalParam) (*payload.Ballot, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetProposal not implemented") -} -func (*UnimplementedQueryServer) ListProposals(req *ListProposalsParam, srv Query_ListProposalsServer) error { - return status.Errorf(codes.Unimplemented, "method ListProposals not implemented") -} -func (*UnimplementedQueryServer) GetStats(ctx context.Context, req *GetStatsParam) (*Stats, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetStats not implemented") -} -func (*UnimplementedQueryServer) GetBlockHeader(ctx context.Context, req *GetBlockParam) (*types.Header, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetBlockHeader not implemented") -} - func RegisterQueryServer(s *grpc.Server, srv QueryServer) { s.RegisterService(&_Query_serviceDesc, srv) } @@ -1679,7 +1634,14 @@ func (m *GetBlockParam) Size() (n int) { } func sovRpcquery(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozRpcquery(x uint64) (n int) { return sovRpcquery(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpctransact/rpctransact.pb.go b/rpc/rpctransact/rpctransact.pb.go index a11f42d3a..915e0c193 100644 --- a/rpc/rpctransact/rpctransact.pb.go +++ b/rpc/rpctransact/rpctransact.pb.go @@ -6,11 +6,6 @@ package rpctransact import ( context "context" fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - time "time" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -22,8 +17,9 @@ import ( txs "github.com/hyperledger/burrow/txs" payload "github.com/hyperledger/burrow/txs/payload" grpc "google.golang.org/grpc" - codes 
"google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" + io "io" + math "math" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. @@ -444,47 +440,6 @@ type TransactServer interface { NameTxAsync(context.Context, *payload.NameTx) (*txs.Receipt, error) } -// UnimplementedTransactServer can be embedded to have forward compatible implementations. -type UnimplementedTransactServer struct { -} - -func (*UnimplementedTransactServer) BroadcastTxSync(ctx context.Context, req *TxEnvelopeParam) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxSync not implemented") -} -func (*UnimplementedTransactServer) BroadcastTxAsync(ctx context.Context, req *TxEnvelopeParam) (*txs.Receipt, error) { - return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxAsync not implemented") -} -func (*UnimplementedTransactServer) SignTx(ctx context.Context, req *TxEnvelopeParam) (*TxEnvelope, error) { - return nil, status.Errorf(codes.Unimplemented, "method SignTx not implemented") -} -func (*UnimplementedTransactServer) FormulateTx(ctx context.Context, req *payload.Any) (*TxEnvelope, error) { - return nil, status.Errorf(codes.Unimplemented, "method FormulateTx not implemented") -} -func (*UnimplementedTransactServer) CallTxSync(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method CallTxSync not implemented") -} -func (*UnimplementedTransactServer) CallTxAsync(ctx context.Context, req *payload.CallTx) (*txs.Receipt, error) { - return nil, status.Errorf(codes.Unimplemented, "method CallTxAsync not implemented") -} -func (*UnimplementedTransactServer) CallTxSim(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method CallTxSim not implemented") -} -func (*UnimplementedTransactServer) CallCodeSim(ctx context.Context, req *CallCodeParam) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method CallCodeSim not implemented") -} -func (*UnimplementedTransactServer) SendTxSync(ctx context.Context, req *payload.SendTx) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method SendTxSync not implemented") -} -func (*UnimplementedTransactServer) SendTxAsync(ctx context.Context, req *payload.SendTx) (*txs.Receipt, error) { - return nil, status.Errorf(codes.Unimplemented, "method SendTxAsync not implemented") -} -func (*UnimplementedTransactServer) NameTxSync(ctx context.Context, req *payload.NameTx) (*exec.TxExecution, error) { - return nil, status.Errorf(codes.Unimplemented, "method NameTxSync not implemented") -} -func (*UnimplementedTransactServer) NameTxAsync(ctx context.Context, req *payload.NameTx) (*txs.Receipt, error) { - return nil, status.Errorf(codes.Unimplemented, "method NameTxAsync not implemented") -} - func RegisterTransactServer(s *grpc.Server, srv TransactServer) { s.RegisterService(&_Transact_serviceDesc, srv) } @@ -780,9 +735,9 @@ func (m *CallCodeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.FromAddress.Size())) - n1, err1 := m.FromAddress.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.FromAddress.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 if len(m.Code) > 0 { @@ -822,9 +777,9 @@ func (m *TxEnvelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, 
uint64(m.Envelope.Size())) - n2, err2 := m.Envelope.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.Envelope.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -853,9 +808,9 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Envelope.Size())) - n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.Envelope.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -863,18 +818,18 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Payload.Size())) - n4, err4 := m.Payload.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.Payload.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 } dAtA[i] = 0x1a i++ i = encodeVarintRpctransact(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.Timeout))) - n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) + if err != nil { + return 0, err } i += n5 if m.XXX_unrecognized != nil { @@ -953,7 +908,14 @@ func (m *TxEnvelopeParam) Size() (n int) { } func sovRpctransact(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozRpctransact(x uint64) (n int) { return sovRpctransact(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/storage/storage.pb.go b/storage/storage.pb.go index b29a673a8..dc3eb476b 100644 --- a/storage/storage.pb.go +++ b/storage/storage.pb.go @@ -5,13 +5,11 @@ package storage import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -166,7 +164,14 @@ func (m *CommitID) Size() (n int) { } func sovStorage(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozStorage(x uint64) (n int) { return sovStorage(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/tests/test_runner.sh b/tests/test_runner.sh index dbfb1be3c..638ac5d9f 100755 --- a/tests/test_runner.sh +++ b/tests/test_runner.sh @@ -77,7 +77,7 @@ test_setup(){ echo rm -rf ${burrow_root} pushd "$chain_dir" - ${burrow_bin} start -v0 2> "$burrow_log"& + ${burrow_bin} start -i0 2> "$burrow_log"& burrow_pid=$! 
popd else diff --git a/txs/payload/bond_tx.go b/txs/payload/bond_tx.go index 1aeb83f38..1fd30b817 100644 --- a/txs/payload/bond_tx.go +++ b/txs/payload/bond_tx.go @@ -5,12 +5,13 @@ import ( "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/genesis/spec" ) func NewBondTx(pubkey crypto.PublicKey) (*BondTx, error) { return &BondTx{ - Inputs: []*TxInput{}, - UnbondTo: []*TxOutput{}, + Input: &TxInput{}, + Validator: &spec.TemplateAccount{}, }, nil } @@ -19,11 +20,11 @@ func (tx *BondTx) Type() Type { } func (tx *BondTx) GetInputs() []*TxInput { - return tx.Inputs + return []*TxInput{tx.Input} } func (tx *BondTx) String() string { - return fmt.Sprintf("BondTx{%v -> %v}", tx.Inputs, tx.UnbondTo) + return fmt.Sprintf("BondTx{%v -> %v}", tx.Input, tx.Validator) } func (tx *BondTx) AddInput(st acmstate.AccountGetter, pubkey crypto.PublicKey, amt uint64) error { @@ -39,19 +40,11 @@ func (tx *BondTx) AddInput(st acmstate.AccountGetter, pubkey crypto.PublicKey, a } func (tx *BondTx) AddInputWithSequence(pubkey crypto.PublicKey, amt uint64, sequence uint64) error { - tx.Inputs = append(tx.Inputs, &TxInput{ + tx.Input = &TxInput{ Address: pubkey.GetAddress(), Amount: amt, Sequence: sequence, - }) - return nil -} - -func (tx *BondTx) AddOutput(addr crypto.Address, amt uint64) error { - tx.UnbondTo = append(tx.UnbondTo, &TxOutput{ - Address: addr, - Amount: amt, - }) + } return nil } diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 400f266b2..6f132d49c 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -5,10 +5,6 @@ package payload import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -16,6 +12,8 @@ import ( github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" spec "github.com/hyperledger/burrow/genesis/spec" permission "github.com/hyperledger/burrow/permission" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -556,11 +554,15 @@ func (*NameTx) XXX_MessageName() string { } type BondTx struct { - Inputs []*TxInput `protobuf:"bytes,1,rep,name=Inputs,proto3" json:"Inputs,omitempty"` - UnbondTo []*TxOutput `protobuf:"bytes,2,rep,name=UnbondTo,proto3" json:"UnbondTo,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + // Account with bonding permission + Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` + // The validator to bond, public key must be known + Validator *spec.TemplateAccount `protobuf:"bytes,2,opt,name=Validator,proto3" json:"Validator,omitempty"` + // Optional network address to identify + NetAddress string `protobuf:"bytes,3,opt,name=NetAddress,proto3" json:"NetAddress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *BondTx) Reset() { *m = BondTx{} } @@ -600,12 +602,13 @@ func (*BondTx) XXX_MessageName() string { } type UnbondTx struct { - Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` - Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,2,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` - Height uint64 `protobuf:"varint,3,opt,name=Height,proto3" json:"Height,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + // The validator to unbond + Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` + // The account to unbond power to + Output *TxOutput `protobuf:"bytes,2,opt,name=Output,proto3" json:"Output,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *UnbondTx) Reset() { *m = UnbondTx{} } @@ -973,69 +976,70 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 991 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4b, 0x6f, 0x23, 0x45, - 0x10, 0xce, 0x78, 0xc6, 0x8f, 0xad, 0x75, 0x82, 0xb7, 0x79, 0xc8, 0x8a, 0x84, 0xbd, 0x32, 0x08, - 0x16, 0xd8, 0xd8, 0xb0, 0xcb, 0x43, 0xca, 0x05, 0x79, 0x62, 0xe7, 0x81, 0x96, 0x24, 0x6a, 0x4f, - 0x76, 0x11, 0x88, 0x43, 0xdb, 0x6e, 0xec, 0x11, 0x9e, 0xe9, 0x61, 0xa6, 0xbd, 0x8c, 0x39, 0x71, - 0xe0, 0xc0, 0x15, 0x71, 0xe1, 0x98, 0x03, 0x7f, 0x80, 0x7f, 0xc0, 0x31, 0x47, 0x8e, 0x88, 0x43, - 0x84, 0xb2, 0x17, 0xc4, 0xaf, 0x40, 0xdd, 0xd3, 0x3d, 0x1e, 0x7b, 0x61, 0xd7, 0x09, 0x88, 0xdb, - 0x54, 0xd5, 0xd7, 0x55, 0xd5, 0x5f, 0x3d, 0x7a, 0x60, 0x3d, 0x20, 0xb3, 0x09, 0x23, 0xc3, 0x66, - 0x10, 0x32, 0xce, 0x50, 0x51, 0x89, 0x9b, 0x5b, 0x23, 0x97, 0x8f, 0xa7, 0xfd, 0xe6, 0x80, 0x79, - 0xad, 0x11, 0x1b, 0xb1, 0x96, 0xb4, 0xf7, 0xa7, 0x9f, 0x49, 0x49, 0x0a, 0xf2, 0x2b, 0x39, 0xb7, - 0x59, 0x09, 0x68, 0xe8, 0xb9, 0x51, 0xe4, 0x32, 0x5f, 0x69, 0x20, 0x0a, 0xe8, 0x20, 0xf9, 0x6e, - 0x7c, 0x67, 0x82, 0xd9, 0xf6, 0x67, 0xe8, 0x55, 0x28, 0xec, 0x90, 0xc9, 0xc4, 0x89, 0xab, 0xc6, - 0x4d, 0xe3, 0xd6, 0xf5, 0x3b, 0xcf, 0x34, 0x75, 0xf4, 0x44, 0x8d, 0x95, 0x59, 0x00, 0x7b, 0xd4, - 0x1f, 0x3a, 0x71, 0x35, 0xb7, 0x04, 0x4c, 0xd4, 0x58, 0x99, 0x05, 0xf0, 0x90, 0x78, 0xd4, 0x89, - 0xab, 0xe6, 0x12, 0x30, 0x51, 0x63, 0x65, 0x46, 0xaf, 0x43, 0xf1, 0x98, 0x86, 0x5e, 0xe4, 
0xc4, - 0x55, 0x4b, 0x22, 0x2b, 0x29, 0x52, 0xe9, 0xb1, 0x06, 0xa0, 0x97, 0x21, 0xbf, 0xc7, 0x1e, 0x3a, - 0x71, 0x35, 0x2f, 0x91, 0x1b, 0x29, 0x52, 0x6a, 0x71, 0x62, 0x14, 0xa1, 0x6d, 0x26, 0x73, 0x2c, - 0x2c, 0x85, 0x4e, 0xd4, 0x58, 0x99, 0xd1, 0x16, 0x94, 0x4e, 0xfc, 0x7e, 0x02, 0x2d, 0x4a, 0xe8, - 0x8d, 0x14, 0xaa, 0x0d, 0x38, 0x85, 0x88, 0x4c, 0x6d, 0xc2, 0x07, 0x63, 0x27, 0xae, 0x96, 0x96, - 0x32, 0x55, 0x7a, 0xac, 0x01, 0xe8, 0x2e, 0xc0, 0x71, 0xc8, 0x02, 0x16, 0x11, 0x41, 0xea, 0x35, - 0x09, 0x7f, 0x76, 0x7e, 0xb1, 0xd4, 0x84, 0x33, 0xb0, 0x6d, 0xeb, 0xec, 0xb4, 0x6e, 0x34, 0xbe, - 0x37, 0xa0, 0xe8, 0xc4, 0x07, 0x7e, 0x30, 0xe5, 0xe8, 0x10, 0x8a, 0xed, 0xe1, 0x30, 0xa4, 0x51, - 0x24, 0x0b, 0x53, 0xb6, 0xdf, 0x3e, 0x3b, 0xaf, 0xaf, 0xfd, 0x76, 0x5e, 0xbf, 0x9d, 0xe9, 0x82, - 0xf1, 0x2c, 0xa0, 0xe1, 0x84, 0x0e, 0x47, 0x34, 0x6c, 0xf5, 0xa7, 0x61, 0xc8, 0xbe, 0x6c, 0x0d, - 0xc2, 0x59, 0xc0, 0x59, 0x53, 0x9d, 0xc5, 0xda, 0x09, 0x7a, 0x01, 0x0a, 0x6d, 0x8f, 0x4d, 0x7d, - 0x2e, 0xcb, 0x67, 0x61, 0x25, 0xa1, 0x4d, 0x28, 0xf5, 0xe8, 0x17, 0x53, 0xea, 0x0f, 0xa8, 0xac, - 0x97, 0x85, 0x53, 0x79, 0xdb, 0xfa, 0xe1, 0xb4, 0xbe, 0xd6, 0x88, 0xa1, 0xe4, 0xc4, 0x47, 0x53, - 0xfe, 0x3f, 0x66, 0xa5, 0x22, 0xff, 0x9a, 0xd3, 0xcd, 0x89, 0x5e, 0x81, 0xbc, 0xe4, 0x45, 0x75, - 0xe9, 0x9c, 0x7f, 0xc5, 0x17, 0x4e, 0xcc, 0xe8, 0x83, 0x79, 0x82, 0x39, 0x99, 0xe0, 0x9b, 0x57, - 0x4f, 0x6e, 0x13, 0x4a, 0x7b, 0x24, 0xba, 0xe7, 0x7a, 0x2e, 0xd7, 0xd4, 0x68, 0x19, 0x55, 0xc0, - 0xdc, 0xa5, 0x54, 0xf6, 0xad, 0x85, 0xc5, 0x27, 0x3a, 0x00, 0xab, 0x43, 0x38, 0x91, 0x0d, 0x5a, - 0xb6, 0xdf, 0x51, 0xbc, 0x6c, 0x3d, 0x39, 0x74, 0xdf, 0xf5, 0x49, 0x38, 0x6b, 0xee, 0xd3, 0xd8, - 0x9e, 0x71, 0x1a, 0x61, 0xe9, 0x02, 0x7d, 0x02, 0xd6, 0x83, 0x76, 0xef, 0x43, 0xd9, 0xc4, 0x65, - 0x7b, 0xef, 0x4a, 0xae, 0xfe, 0x3c, 0xaf, 0x6f, 0x70, 0x32, 0x8a, 0x6e, 0x33, 0xcf, 0xe5, 0xd4, - 0x0b, 0xf8, 0x0c, 0x4b, 0xa7, 0x8a, 0x5a, 0x57, 0x4f, 0x33, 0xba, 0x05, 0x05, 0x49, 0x9d, 0xa8, - 0xa8, 0xf9, 0xb7, 0xd4, 0x2a, 0x3b, 0x7a, 0x03, 0x8a, 0x49, 0x1b, 0x08, 0x6e, 0xcd, 0x85, 0x99, - 0xd1, 0x0d, 0x82, 0x35, 0x62, 0xbb, 0xf4, 0xed, 0x69, 0x7d, 0x4d, 0x86, 0x62, 0xe9, 0x98, 0xaf, - 0x5c, 0xc5, 0x77, 0xa1, 0x24, 0x8e, 0xb4, 0xc3, 0x51, 0xa4, 0xb6, 0xcd, 0x73, 0xcd, 0xcc, 0x36, - 0xd3, 0x36, 0xdb, 0x12, 0xd4, 0xe0, 0x14, 0xab, 0xee, 0x16, 0xe8, 0x05, 0xb4, 0x72, 0x3c, 0x04, - 0x96, 0x38, 0x21, 0x63, 0x5d, 0xc3, 0xf2, 0x5b, 0xe8, 0x64, 0x3d, 0xcd, 0x44, 0x27, 0x0b, 0xf3, - 0x58, 0xd5, 0x55, 0xc4, 0xcf, 0xf5, 0xde, 0xb9, 0x04, 0x9b, 0xf3, 0x15, 0xc4, 0xfe, 0x99, 0xce, - 0x14, 0x92, 0xe1, 0xf3, 0x47, 0x63, 0xbe, 0xbc, 0x56, 0xbe, 0xe1, 0xe1, 0xf2, 0x5c, 0xfc, 0xfb, - 0xc1, 0xdd, 0xa7, 0xee, 0x68, 0xac, 0x27, 0x43, 0x49, 0x99, 0x34, 0xbf, 0x36, 0xd4, 0xca, 0xbe, - 0x04, 0x27, 0x3b, 0xb0, 0xd1, 0x1e, 0x0c, 0xc4, 0x06, 0x38, 0x09, 0x86, 0x84, 0x53, 0xdd, 0x68, - 0xcf, 0x37, 0xe5, 0xcb, 0xe5, 0x50, 0x2f, 0x98, 0x10, 0x4e, 0x15, 0x46, 0x96, 0xdf, 0xc0, 0x4b, - 0x47, 0x32, 0x29, 0xfc, 0x61, 0x64, 0x77, 0xf1, 0xca, 0x5c, 0x35, 0xa0, 0x7c, 0x9f, 0x71, 0xd7, - 0x1f, 0x3d, 0x48, 0x6e, 0x28, 0x08, 0x33, 0xf1, 0x82, 0x0e, 0x9d, 0x40, 0x59, 0x7b, 0xde, 0x27, - 0xd1, 0x58, 0xb2, 0x50, 0xb6, 0xdf, 0xba, 0xfc, 0xc4, 0x2f, 0xb8, 0x11, 0x4d, 0xa1, 0x65, 0xf5, - 0x26, 0xde, 0x78, 0xec, 0xe9, 0xc0, 0x29, 0x24, 0x73, 0xd5, 0x4f, 0xd3, 0x17, 0xea, 0x12, 0x74, - 0xd7, 0xc0, 0x74, 0x62, 0xcd, 0x71, 0x39, 0x85, 0xb5, 0xfd, 0x19, 0x16, 0x86, 0x8c, 0xfb, 0x6f, - 0x0c, 0xb0, 0xee, 0x33, 0x4e, 0xff, 0xf3, 0x07, 0x60, 0x05, 0xae, 0x33, 0x69, 0x3c, 0x9c, 0xd3, - 0x93, 0xce, 0xac, 
0x91, 0x99, 0xd9, 0x9b, 0x70, 0xbd, 0x43, 0xa3, 0x41, 0xe8, 0x06, 0xdc, 0x65, - 0xbe, 0x1a, 0xe7, 0xac, 0x2a, 0xfb, 0x92, 0x9b, 0x4f, 0x79, 0xc9, 0x33, 0x71, 0x7f, 0xca, 0x41, - 0xc1, 0x26, 0x93, 0x09, 0xe3, 0x0b, 0x15, 0x32, 0x9e, 0x5a, 0x21, 0xd1, 0x27, 0xbb, 0xae, 0x4f, - 0x26, 0xee, 0x57, 0xae, 0x3f, 0x52, 0xff, 0x4e, 0x57, 0xeb, 0x93, 0xac, 0x1b, 0xb4, 0x03, 0xeb, - 0x81, 0x0a, 0xd1, 0xe3, 0x84, 0x27, 0x2b, 0x69, 0xe3, 0xce, 0x8b, 0x99, 0xcb, 0x88, 0x6c, 0xd3, - 0x8c, 0x24, 0x08, 0x2f, 0x9e, 0x41, 0x2f, 0x41, 0x5e, 0xd4, 0x34, 0xaa, 0xe6, 0x65, 0x03, 0xac, - 0xa7, 0x87, 0x85, 0x16, 0x27, 0xb6, 0xc6, 0x7b, 0xb0, 0xbe, 0xe0, 0x04, 0x95, 0xa1, 0x74, 0x8c, - 0x8f, 0x8e, 0x8f, 0x7a, 0xdd, 0x4e, 0x65, 0x4d, 0x48, 0xdd, 0x8f, 0xba, 0x3b, 0x27, 0x4e, 0xb7, - 0x53, 0x31, 0x10, 0x40, 0x61, 0xb7, 0x7d, 0x70, 0xaf, 0xdb, 0xa9, 0xe4, 0xec, 0xf7, 0xcf, 0x2e, - 0x6a, 0xc6, 0x2f, 0x17, 0x35, 0xe3, 0xf7, 0x8b, 0x9a, 0xf1, 0xf3, 0xa3, 0x9a, 0x71, 0xf6, 0xa8, - 0x66, 0x7c, 0xfc, 0xda, 0x93, 0x6f, 0xcd, 0xe3, 0xa8, 0xa5, 0xb2, 0xe8, 0x17, 0xe4, 0x8f, 0xea, - 0xdd, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x02, 0x7a, 0xba, 0xca, 0x0f, 0x0b, 0x00, 0x00, + // 1006 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcb, 0x6f, 0xe3, 0x44, + 0x18, 0xaf, 0x6b, 0xe7, 0xd1, 0x6f, 0xd3, 0x92, 0x1d, 0x1e, 0x8a, 0x2a, 0x91, 0xac, 0x02, 0x82, + 0x5d, 0xd8, 0x26, 0xb0, 0xcb, 0x43, 0xf4, 0x82, 0xe2, 0x26, 0xed, 0x16, 0x2d, 0x6d, 0x35, 0x71, + 0xbb, 0x08, 0xc4, 0x61, 0x92, 0x0c, 0x8e, 0xa5, 0xc4, 0x63, 0xec, 0xc9, 0xe2, 0x70, 0xe2, 0xc0, + 0x81, 0x2b, 0x42, 0x48, 0x1c, 0xfb, 0x2f, 0xf0, 0x1f, 0x70, 0xec, 0x91, 0x23, 0xe2, 0x50, 0xa1, + 0xee, 0x05, 0xf1, 0x57, 0xa0, 0x19, 0xcf, 0x38, 0x4e, 0x76, 0xd5, 0x4d, 0x2b, 0xc4, 0xcd, 0xf3, + 0x7d, 0xbf, 0xf9, 0x1e, 0xbf, 0xef, 0x31, 0x86, 0xf5, 0x80, 0x4c, 0x47, 0x8c, 0x0c, 0x1a, 0x41, + 0xc8, 0x38, 0x43, 0x05, 0x75, 0xdc, 0xdc, 0x72, 0x3d, 0x3e, 0x9c, 0xf4, 0x1a, 0x7d, 0x36, 0x6e, + 0xba, 0xcc, 0x65, 0x4d, 0xa9, 0xef, 0x4d, 0xbe, 0x92, 0x27, 0x79, 0x90, 0x5f, 0xc9, 0xbd, 0xcd, + 0x72, 0x40, 0xc3, 0xb1, 0x17, 0x45, 0x1e, 0xf3, 0x95, 0x04, 0xa2, 0x80, 0xf6, 0x93, 0xef, 0xfa, + 0x8f, 0x26, 0x98, 0x2d, 0x7f, 0x8a, 0xde, 0x84, 0xfc, 0x0e, 0x19, 0x8d, 0x9c, 0xb8, 0x62, 0xdc, + 0x32, 0x6e, 0xdf, 0xb8, 0xf7, 0x42, 0x43, 0x7b, 0x4f, 0xc4, 0x58, 0xa9, 0x05, 0xb0, 0x4b, 0xfd, + 0x81, 0x13, 0x57, 0x56, 0x17, 0x80, 0x89, 0x18, 0x2b, 0xb5, 0x00, 0x1e, 0x90, 0x31, 0x75, 0xe2, + 0x8a, 0xb9, 0x00, 0x4c, 0xc4, 0x58, 0xa9, 0xd1, 0x5b, 0x50, 0x38, 0xa2, 0xe1, 0x38, 0x72, 0xe2, + 0x8a, 0x25, 0x91, 0xe5, 0x14, 0xa9, 0xe4, 0x58, 0x03, 0xd0, 0xeb, 0x90, 0xdb, 0x63, 0x8f, 0x9d, + 0xb8, 0x92, 0x93, 0xc8, 0x8d, 0x14, 0x29, 0xa5, 0x38, 0x51, 0x0a, 0xd7, 0x36, 0x93, 0x31, 0xe6, + 0x17, 0x5c, 0x27, 0x62, 0xac, 0xd4, 0x68, 0x0b, 0x8a, 0xc7, 0x7e, 0x2f, 0x81, 0x16, 0x24, 0xf4, + 0x66, 0x0a, 0xd5, 0x0a, 0x9c, 0x42, 0x44, 0xa4, 0x36, 0xe1, 0xfd, 0xa1, 0x13, 0x57, 0x8a, 0x0b, + 0x91, 0x2a, 0x39, 0xd6, 0x00, 0x74, 0x1f, 0xe0, 0x28, 0x64, 0x01, 0x8b, 0x88, 0x20, 0x75, 0x4d, + 0xc2, 0x5f, 0x9c, 0x25, 0x96, 0xaa, 0x70, 0x06, 0xb6, 0x6d, 0x9d, 0x9d, 0xd6, 0x8c, 0xfa, 0x4f, + 0x06, 0x14, 0x9c, 0x78, 0xdf, 0x0f, 0x26, 0x1c, 0x1d, 0x40, 0xa1, 0x35, 0x18, 0x84, 0x34, 0x8a, + 0x64, 0x61, 0x4a, 0xf6, 0x7b, 0x67, 0xe7, 0xb5, 0x95, 0x3f, 0xcf, 0x6b, 0x77, 0x33, 0x5d, 0x30, + 0x9c, 0x06, 0x34, 0x1c, 0xd1, 0x81, 0x4b, 0xc3, 0x66, 0x6f, 0x12, 0x86, 0xec, 0x9b, 0x66, 0x3f, + 0x9c, 0x06, 0x9c, 0x35, 0xd4, 0x5d, 0xac, 0x8d, 0xa0, 0x57, 0x20, 0xdf, 0x1a, 0xb3, 0x89, 0xcf, + 
0x65, 0xf9, 0x2c, 0xac, 0x4e, 0x68, 0x13, 0x8a, 0x5d, 0xfa, 0xf5, 0x84, 0xfa, 0x7d, 0x2a, 0xeb, + 0x65, 0xe1, 0xf4, 0xbc, 0x6d, 0xfd, 0x72, 0x5a, 0x5b, 0xa9, 0xc7, 0x50, 0x74, 0xe2, 0xc3, 0x09, + 0xff, 0x1f, 0xa3, 0x52, 0x9e, 0xff, 0x58, 0xd5, 0xcd, 0x89, 0xde, 0x80, 0x9c, 0xe4, 0x45, 0x75, + 0xe9, 0x8c, 0x7f, 0xc5, 0x17, 0x4e, 0xd4, 0xe8, 0x93, 0x59, 0x80, 0xab, 0x32, 0xc0, 0x77, 0xae, + 0x1f, 0xdc, 0x26, 0x14, 0xf7, 0x48, 0xf4, 0xd0, 0x1b, 0x7b, 0x5c, 0x53, 0xa3, 0xcf, 0xa8, 0x0c, + 0xe6, 0x2e, 0xa5, 0xb2, 0x6f, 0x2d, 0x2c, 0x3e, 0xd1, 0x3e, 0x58, 0x6d, 0xc2, 0x89, 0x6c, 0xd0, + 0x92, 0xfd, 0xbe, 0xe2, 0x65, 0xeb, 0x72, 0xd7, 0x3d, 0xcf, 0x27, 0xe1, 0xb4, 0xf1, 0x80, 0xc6, + 0xf6, 0x94, 0xd3, 0x08, 0x4b, 0x13, 0xe8, 0x0b, 0xb0, 0x1e, 0xb5, 0xba, 0x9f, 0xca, 0x26, 0x2e, + 0xd9, 0x7b, 0xd7, 0x32, 0xf5, 0xcf, 0x79, 0x6d, 0x83, 0x13, 0x37, 0xba, 0xcb, 0xc6, 0x1e, 0xa7, + 0xe3, 0x80, 0x4f, 0xb1, 0x34, 0xaa, 0xa8, 0xf5, 0xf4, 0x34, 0xa3, 0xdb, 0x90, 0x97, 0xd4, 0x89, + 0x8a, 0x9a, 0xcf, 0xa4, 0x56, 0xe9, 0xd1, 0xdb, 0x50, 0x48, 0xda, 0x40, 0x70, 0x6b, 0xce, 0xcd, + 0x8c, 0x6e, 0x10, 0xac, 0x11, 0xdb, 0xc5, 0x1f, 0x4e, 0x6b, 0x2b, 0xd2, 0x15, 0x4b, 0xc7, 0x7c, + 0xe9, 0x2a, 0x7e, 0x00, 0x45, 0x71, 0xa5, 0x15, 0xba, 0x91, 0xda, 0x36, 0x2f, 0x35, 0x32, 0xdb, + 0x4c, 0xeb, 0x6c, 0x4b, 0x50, 0x83, 0x53, 0xac, 0xca, 0x2d, 0xd0, 0x0b, 0x68, 0x69, 0x7f, 0x08, + 0x2c, 0x71, 0x43, 0xfa, 0x5a, 0xc3, 0xf2, 0x5b, 0xc8, 0x64, 0x3d, 0xcd, 0x44, 0x26, 0x0b, 0xf3, + 0x54, 0xd5, 0x95, 0xc7, 0x9f, 0x0d, 0xbd, 0x78, 0x96, 0x76, 0xf9, 0x11, 0xac, 0x9d, 0x90, 0x91, + 0x37, 0x20, 0x9c, 0x85, 0x2a, 0xc7, 0x97, 0x1b, 0x72, 0x3f, 0x3b, 0x74, 0x1c, 0x8c, 0x08, 0xa7, + 0xad, 0x7e, 0x5f, 0xcc, 0x82, 0x4c, 0xd2, 0xc0, 0x33, 0x34, 0xaa, 0x02, 0x1c, 0x50, 0xae, 0xdb, + 0x3c, 0x89, 0x2f, 0x23, 0xc9, 0x50, 0xef, 0xce, 0xd6, 0xdc, 0xd2, 0x81, 0xdd, 0x81, 0x7c, 0x52, + 0x43, 0x15, 0xd5, 0x33, 0x8a, 0xac, 0x00, 0x19, 0x47, 0xdf, 0x19, 0x6a, 0x3f, 0x5f, 0xa1, 0x9d, + 0x76, 0x60, 0x43, 0xa5, 0x78, 0x1c, 0x0c, 0x08, 0xa7, 0xba, 0xab, 0x2e, 0xa5, 0x61, 0xe1, 0x4a, + 0x26, 0x84, 0xbf, 0x8d, 0xec, 0xe2, 0x5d, 0x3a, 0xdd, 0x3a, 0x94, 0x4e, 0x18, 0xf7, 0x7c, 0xf7, + 0x11, 0xf5, 0xdc, 0x61, 0x92, 0xb4, 0x89, 0xe7, 0x64, 0xe8, 0x18, 0x4a, 0xda, 0xf2, 0x03, 0x12, + 0x0d, 0x25, 0xe5, 0x25, 0xfb, 0xdd, 0xab, 0x8f, 0xf7, 0x9c, 0x19, 0xf1, 0x08, 0xe9, 0xb3, 0x7a, + 0x00, 0x6f, 0x3e, 0xf5, 0x4e, 0xe0, 0x14, 0x92, 0x49, 0xf5, 0xcb, 0xf4, 0x39, 0xba, 0x02, 0xdd, + 0x55, 0x30, 0x9d, 0x58, 0x73, 0x5c, 0x4a, 0x61, 0x2d, 0x7f, 0x8a, 0x85, 0x22, 0x63, 0xfe, 0x7b, + 0x03, 0xac, 0x13, 0xc6, 0xe9, 0x7f, 0xbe, 0xed, 0x97, 0xe0, 0x3a, 0x13, 0xc6, 0xe3, 0x19, 0x3d, + 0xe9, 0x80, 0x1a, 0x99, 0x01, 0xbd, 0x05, 0x37, 0xda, 0x34, 0xea, 0x87, 0x5e, 0xc0, 0x3d, 0xe6, + 0xab, 0xd9, 0xcd, 0x8a, 0xb2, 0xcf, 0xb6, 0xf9, 0x9c, 0x67, 0x3b, 0xe3, 0xf7, 0xd7, 0x55, 0xc8, + 0xdb, 0x64, 0x34, 0x62, 0x7c, 0xae, 0x42, 0xc6, 0x73, 0x2b, 0x24, 0xfa, 0x64, 0xd7, 0xf3, 0xc9, + 0xc8, 0xfb, 0xd6, 0xf3, 0x5d, 0xf5, 0xa3, 0x74, 0xbd, 0x3e, 0xc9, 0x9a, 0x41, 0x3b, 0xb0, 0x1e, + 0x28, 0x17, 0x5d, 0x4e, 0x78, 0xb2, 0x7f, 0x36, 0xee, 0xbd, 0x9a, 0x49, 0x46, 0x44, 0x9b, 0x46, + 0x24, 0x41, 0x78, 0xfe, 0x0e, 0x7a, 0x0d, 0x72, 0xa2, 0xa6, 0x51, 0x25, 0x27, 0x1b, 0x60, 0x3d, + 0xbd, 0x2c, 0xa4, 0x38, 0xd1, 0xd5, 0x3f, 0x84, 0xf5, 0x39, 0x23, 0xa8, 0x04, 0xc5, 0x23, 0x7c, + 0x78, 0x74, 0xd8, 0xed, 0xb4, 0xcb, 0x2b, 0xe2, 0xd4, 0xf9, 0xac, 0xb3, 0x73, 0xec, 0x74, 0xda, + 0x65, 0x03, 0x01, 0xe4, 0x77, 0x5b, 0xfb, 0x0f, 0x3b, 0xed, 0xf2, 0xaa, 0xfd, 0xf1, 0xd9, 0x45, + 0xd5, 0xf8, 0xfd, 0xa2, 
0x6a, 0xfc, 0x75, 0x51, 0x35, 0x7e, 0x7b, 0x52, 0x35, 0xce, 0x9e, 0x54, + 0x8d, 0xcf, 0xef, 0x5c, 0x9e, 0x35, 0x8f, 0xa3, 0xa6, 0x8a, 0xa2, 0x97, 0x97, 0x7f, 0xa5, 0xf7, + 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x06, 0x89, 0xb8, 0xbf, 0xfc, 0x0a, 0x00, 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1057,9 +1061,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.CallTx.Size())) - n1, err1 := m.CallTx.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.CallTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 } @@ -1067,9 +1071,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.SendTx.Size())) - n2, err2 := m.SendTx.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.SendTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -1077,9 +1081,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.NameTx.Size())) - n3, err3 := m.NameTx.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.NameTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -1087,9 +1091,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermsTx.Size())) - n4, err4 := m.PermsTx.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.PermsTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 } @@ -1097,9 +1101,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.GovTx.Size())) - n5, err5 := m.GovTx.MarshalTo(dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := m.GovTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n5 } @@ -1107,9 +1111,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BondTx.Size())) - n6, err6 := m.BondTx.MarshalTo(dAtA[i:]) - if err6 != nil { - return 0, err6 + n6, err := m.BondTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n6 } @@ -1117,9 +1121,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintPayload(dAtA, i, uint64(m.UnbondTx.Size())) - n7, err7 := m.UnbondTx.MarshalTo(dAtA[i:]) - if err7 != nil { - return 0, err7 + n7, err := m.UnbondTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n7 } @@ -1127,9 +1131,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n8, err8 := m.BatchTx.MarshalTo(dAtA[i:]) - if err8 != nil { - return 0, err8 + n8, err := m.BatchTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n8 } @@ -1137,9 +1141,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalTx.Size())) - n9, err9 := m.ProposalTx.MarshalTo(dAtA[i:]) - if err9 != nil { - return 0, err9 + n9, err := m.ProposalTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n9 } @@ -1167,9 +1171,9 @@ func (m *TxInput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n10, err10 := m.Address.MarshalTo(dAtA[i:]) - if err10 != nil { - return 0, err10 + n10, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n10 if m.Amount != 0 { @@ -1206,9 
+1210,9 @@ func (m *TxOutput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n11, err11 := m.Address.MarshalTo(dAtA[i:]) - if err11 != nil { - return 0, err11 + n11, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n11 if m.Amount != 0 { @@ -1241,9 +1245,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n12, err12 := m.Input.MarshalTo(dAtA[i:]) - if err12 != nil { - return 0, err12 + n12, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n12 } @@ -1251,9 +1255,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n13, err13 := m.Address.MarshalTo(dAtA[i:]) - if err13 != nil { - return 0, err13 + n13, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n13 } @@ -1270,17 +1274,17 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.Data.Size())) - n14, err14 := m.Data.MarshalTo(dAtA[i:]) - if err14 != nil { - return 0, err14 + n14, err := m.Data.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n14 dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.WASM.Size())) - n15, err15 := m.WASM.MarshalTo(dAtA[i:]) - if err15 != nil { - return 0, err15 + n15, err := m.WASM.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n15 if m.XXX_unrecognized != nil { @@ -1353,18 +1357,18 @@ func (m *PermsTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n16, err16 := m.Input.MarshalTo(dAtA[i:]) - if err16 != nil { - return 0, err16 + n16, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n16 } dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermArgs.Size())) - n17, err17 := m.PermArgs.MarshalTo(dAtA[i:]) - if err17 != nil { - return 0, err17 + n17, err := m.PermArgs.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n17 if m.XXX_unrecognized != nil { @@ -1392,9 +1396,9 @@ func (m *NameTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n18, err18 := m.Input.MarshalTo(dAtA[i:]) - if err18 != nil { - return 0, err18 + n18, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n18 } @@ -1436,29 +1440,31 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { _ = i var l int _ = l - if len(m.Inputs) > 0 { - for _, msg := range m.Inputs { - dAtA[i] = 0xa - i++ - i = encodeVarintPayload(dAtA, i, uint64(msg.Size())) - n, err := msg.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n + if m.Input != nil { + dAtA[i] = 0xa + i++ + i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) + n19, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } + i += n19 } - if len(m.UnbondTo) > 0 { - for _, msg := range m.UnbondTo { - dAtA[i] = 0x12 - i++ - i = encodeVarintPayload(dAtA, i, uint64(msg.Size())) - n, err := msg.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n + if m.Validator != nil { + dAtA[i] = 0x12 + i++ + i = encodeVarintPayload(dAtA, i, uint64(m.Validator.Size())) + n20, err := m.Validator.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } + i += n20 + } + if len(m.NetAddress) > 0 { + dAtA[i] = 0x1a + i++ + i = encodeVarintPayload(dAtA, i, 
uint64(len(m.NetAddress))) + i += copy(dAtA[i:], m.NetAddress) } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1485,24 +1491,21 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n19, err19 := m.Input.MarshalTo(dAtA[i:]) - if err19 != nil { - return 0, err19 + n21, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n19 - } - dAtA[i] = 0x12 - i++ - i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n20, err20 := m.Address.MarshalTo(dAtA[i:]) - if err20 != nil { - return 0, err20 + i += n21 } - i += n20 - if m.Height != 0 { - dAtA[i] = 0x18 + if m.Output != nil { + dAtA[i] = 0x12 i++ - i = encodeVarintPayload(dAtA, i, uint64(m.Height)) + i = encodeVarintPayload(dAtA, i, uint64(m.Output.Size())) + n22, err := m.Output.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n22 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1574,11 +1577,11 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n21, err21 := m.Input.MarshalTo(dAtA[i:]) - if err21 != nil { - return 0, err21 + n23, err := m.Input.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n21 + i += n23 } if m.VotingWeight != 0 { dAtA[i] = 0x10 @@ -1589,21 +1592,21 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalHash.Size())) - n22, err22 := m.ProposalHash.MarshalTo(dAtA[i:]) - if err22 != nil { - return 0, err22 + n24, err := m.ProposalHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n22 + i += n24 } if m.Proposal != nil { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n23, err23 := m.Proposal.MarshalTo(dAtA[i:]) - if err23 != nil { - return 0, err23 + n25, err := m.Proposal.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n23 + i += n25 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1674,11 +1677,11 @@ func (m *Vote) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n24, err24 := m.Address.MarshalTo(dAtA[i:]) - if err24 != nil { - return 0, err24 + n26, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n24 + i += n26 if m.VotingWeight != 0 { dAtA[i] = 0x10 i++ @@ -1721,11 +1724,11 @@ func (m *Proposal) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n25, err25 := m.BatchTx.MarshalTo(dAtA[i:]) - if err25 != nil { - return 0, err25 + n27, err := m.BatchTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n25 + i += n27 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1752,21 +1755,21 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n26, err26 := m.Proposal.MarshalTo(dAtA[i:]) - if err26 != nil { - return 0, err26 + n28, err := m.Proposal.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } - i += n26 + i += n28 } if m.FinalizingTx != nil { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.FinalizingTx.Size())) - n27, err27 := m.FinalizingTx.MarshalTo(dAtA[i:]) - if err27 != nil { - return 0, err27 + n29, err := m.FinalizingTx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, 
err } - i += n27 + i += n29 } if m.ProposalState != 0 { dAtA[i] = 0x20 @@ -1990,17 +1993,17 @@ func (m *BondTx) Size() (n int) { } var l int _ = l - if len(m.Inputs) > 0 { - for _, e := range m.Inputs { - l = e.Size() - n += 1 + l + sovPayload(uint64(l)) - } + if m.Input != nil { + l = m.Input.Size() + n += 1 + l + sovPayload(uint64(l)) } - if len(m.UnbondTo) > 0 { - for _, e := range m.UnbondTo { - l = e.Size() - n += 1 + l + sovPayload(uint64(l)) - } + if m.Validator != nil { + l = m.Validator.Size() + n += 1 + l + sovPayload(uint64(l)) + } + l = len(m.NetAddress) + if l > 0 { + n += 1 + l + sovPayload(uint64(l)) } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) @@ -2018,10 +2021,9 @@ func (m *UnbondTx) Size() (n int) { l = m.Input.Size() n += 1 + l + sovPayload(uint64(l)) } - l = m.Address.Size() - n += 1 + l + sovPayload(uint64(l)) - if m.Height != 0 { - n += 1 + sovPayload(uint64(m.Height)) + if m.Output != nil { + l = m.Output.Size() + n += 1 + l + sovPayload(uint64(l)) } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) @@ -2175,7 +2177,14 @@ func (m *Ballot) Size() (n int) { } func sovPayload(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozPayload(x uint64) (n int) { return sovPayload(uint64((x << 1) ^ uint64((int64(x) >> 63)))) @@ -3523,7 +3532,7 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Inputs", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Input", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3550,14 +3559,16 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Inputs = append(m.Inputs, &TxInput{}) - if err := m.Inputs[len(m.Inputs)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.Input == nil { + m.Input = &TxInput{} + } + if err := m.Input.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field UnbondTo", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Validator", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3584,11 +3595,45 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.UnbondTo = append(m.UnbondTo, &TxOutput{}) - if err := m.UnbondTo[len(m.UnbondTo)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.Validator == nil { + m.Validator = &spec.TemplateAccount{} + } + if err := m.Validator.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field NetAddress", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowPayload + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthPayload + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthPayload + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.NetAddress = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipPayload(dAtA[iNdEx:]) @@ -3681,9 +3726,9 @@ func (m *UnbondTx) 
Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Output", wireType) } - var byteLen int + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowPayload @@ -3693,44 +3738,28 @@ func (m *UnbondTx) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - byteLen |= int(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - if byteLen < 0 { + if msglen < 0 { return ErrInvalidLengthPayload } - postIndex := iNdEx + byteLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthPayload } if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.Address.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.Output == nil { + m.Output = &TxOutput{} + } + if err := m.Output.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) - } - m.Height = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPayload - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Height |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } default: iNdEx = preIndex skippy, err := skipPayload(dAtA[iNdEx:]) diff --git a/txs/payload/unbond_tx.go b/txs/payload/unbond_tx.go index c295cfad4..10a058ae9 100644 --- a/txs/payload/unbond_tx.go +++ b/txs/payload/unbond_tx.go @@ -8,8 +8,8 @@ import ( func NewUnbondTx(address crypto.Address, height uint64) *UnbondTx { return &UnbondTx{ - Address: address, - Height: height, + Input: &TxInput{}, + Output: &TxOutput{}, } } @@ -22,7 +22,7 @@ func (tx *UnbondTx) GetInputs() []*TxInput { } func (tx *UnbondTx) String() string { - return fmt.Sprintf("UnbondTx{%v -> %s,%v}", tx.Input, tx.Address, tx.Height) + return fmt.Sprintf("UnbondTx{%v -> %v}", tx.Input.Address, tx.Output.Address) } func (tx *UnbondTx) Any() *Any { diff --git a/txs/tx.go b/txs/tx.go index 34d3f7e6f..3a7c56b36 100644 --- a/txs/tx.go +++ b/txs/tx.go @@ -237,5 +237,11 @@ func EnvelopeFromAny(chainID string, p *payload.Any) *Envelope { if p.BatchTx != nil { return Enclose(chainID, p.BatchTx) } + if p.BondTx != nil { + return Enclose(chainID, p.BondTx) + } + if p.UnbondTx != nil { + return Enclose(chainID, p.UnbondTx) + } return nil } diff --git a/txs/tx_test.go b/txs/tx_test.go index 6ff867bf0..15c0d69dd 100644 --- a/txs/tx_test.go +++ b/txs/tx_test.go @@ -24,6 +24,7 @@ import ( "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/event/query" + "github.com/hyperledger/burrow/genesis/spec" "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/assert" @@ -112,28 +113,15 @@ func TestNameTxSignable(t *testing.T) { } func TestBondTxSignable(t *testing.T) { + val := makePrivateAccount("output1").GetPublicKey() bondTx := &payload.BondTx{ - Inputs: []*payload.TxInput{ - { - Address: makePrivateAccount("input1").GetAddress(), - Amount: 12345, - Sequence: 67890, - }, - { - Address: makePrivateAccount("input2").GetAddress(), - Amount: 111, - Sequence: 222, - }, + Input: &payload.TxInput{ + Address: makePrivateAccount("input1").GetAddress(), + Amount: 12345, + Sequence: 67890, }, - UnbondTo: []*payload.TxOutput{ - { - Address: makePrivateAccount("output1").GetAddress(), - Amount: 333, - }, 
- { - Address: makePrivateAccount("output2").GetAddress(), - Amount: 444, - }, + Validator: &spec.TemplateAccount{ + PublicKey: &val, }, } testTxMarshalJSON(t, bondTx) @@ -143,10 +131,11 @@ func TestBondTxSignable(t *testing.T) { func TestUnbondTxSignable(t *testing.T) { unbondTx := &payload.UnbondTx{ Input: &payload.TxInput{ - Address: makePrivateAccount("fooo1").GetAddress(), + Address: makePrivateAccount("output1").GetAddress(), + }, + Output: &payload.TxOutput{ + Address: makePrivateAccount("input1").GetAddress(), }, - Address: makePrivateAccount("address1").GetAddress(), - Height: 111, } testTxMarshalJSON(t, unbondTx) testTxSignVerify(t, unbondTx) diff --git a/txs/txs.pb.go b/txs/txs.pb.go index bebf3056a..e95880013 100644 --- a/txs/txs.pb.go +++ b/txs/txs.pb.go @@ -5,10 +5,6 @@ package txs import ( fmt "fmt" - io "io" - math "math" - math_bits "math/bits" - _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -16,6 +12,8 @@ import ( crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_txs_payload "github.com/hyperledger/burrow/txs/payload" + io "io" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -283,9 +281,9 @@ func (m *Envelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Tx.Size())) - n1, err1 := m.Tx.MarshalTo(dAtA[i:]) - if err1 != nil { - return 0, err1 + n1, err := m.Tx.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n1 } @@ -314,9 +312,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintTxs(dAtA, i, uint64(m.Address.Size())) - n2, err2 := m.Address.MarshalTo(dAtA[i:]) - if err2 != nil { - return 0, err2 + n2, err := m.Address.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n2 } @@ -324,9 +322,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.PublicKey.Size())) - n3, err3 := m.PublicKey.MarshalTo(dAtA[i:]) - if err3 != nil { - return 0, err3 + n3, err := m.PublicKey.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n3 } @@ -334,9 +332,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Signature.Size())) - n4, err4 := m.Signature.MarshalTo(dAtA[i:]) - if err4 != nil { - return 0, err4 + n4, err := m.Signature.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n4 } @@ -369,9 +367,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.TxHash.Size())) - n5, err5 := m.TxHash.MarshalTo(dAtA[i:]) - if err5 != nil { - return 0, err5 + n5, err := m.TxHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n5 if m.CreatesContract { @@ -387,9 +385,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.ContractAddress.Size())) - n6, err6 := m.ContractAddress.MarshalTo(dAtA[i:]) - if err6 != nil { - return 0, err6 + n6, err := m.ContractAddress.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err } i += n6 if m.XXX_unrecognized != nil { @@ -476,7 +474,14 @@ func (m *Receipt) Size() (n int) { } func sovTxs(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n } func sozTxs(x uint64) (n int) { 
return sovTxs(uint64((x << 1) ^ uint64((int64(x) >> 63)))) From b05ce21b01f62a03d41c9e6dfdb0a3edaa99219a Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Thu, 20 Jun 2019 16:52:40 +0100 Subject: [PATCH 29/70] integration tests for bond / unbond Signed-off-by: Gregory Hill --- execution/contexts/bond_context.go | 11 ++ integration/governance/bonding_test.go | 93 +++++++++++ integration/governance/governance_test.go | 85 +--------- integration/governance/helpers.go | 182 ++++++++++++++++++++++ integration/integration.go | 4 +- integration/rpctest/helpers.go | 4 +- 6 files changed, 293 insertions(+), 86 deletions(-) create mode 100644 integration/governance/bonding_test.go create mode 100644 integration/governance/helpers.go diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index 15fb79dc2..f63413ac9 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -136,6 +136,17 @@ func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) erro return err } + var signed bool + // ensure tx is signed by validator + for _, sig := range txe.Envelope.GetSignatories() { + if sender.GetPublicKey().String() == sig.GetPublicKey().String() { + signed = true + } + } + if !signed { + return fmt.Errorf("can't unbond, not signed by validator") + } + recipient, err := ctx.StateWriter.GetAccount(ctx.tx.Output.Address) if err != nil { return err diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go new file mode 100644 index 000000000..8ee78ec29 --- /dev/null +++ b/integration/governance/bonding_test.go @@ -0,0 +1,93 @@ +// +build integration + +package governance + +import ( + "testing" + "time" + + "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/core" + "github.com/hyperledger/burrow/integration" + "github.com/hyperledger/burrow/integration/rpctest" + "github.com/stretchr/testify/require" +) + +func TestBonding(t *testing.T) { + genesisAccounts := integration.MakePrivateAccounts("accounts", 2) + genesisKernels := make([]core.Kernel, len(genesisAccounts)) + genesisDoc := integration.TestGenesisDoc(genesisAccounts) + + // we need at least one validator to start + // in this case genesisKernels[0] + for i, acc := range genesisAccounts { + err := startNode(&genesisKernels[i], genesisDoc, acc, genesisAccounts...) 
+ require.NoError(t, err) + defer integration.Shutdown(&genesisKernels[i]) + } + + connectKernels(&genesisKernels[0], &genesisKernels[1]) + + // lets do the bond tx from the non-validator + grpcGenVal := genesisKernels[1].GRPCListenAddress().String() + tcli := rpctest.NewTransactClient(t, grpcGenVal) + qcli := rpctest.NewQueryClient(t, grpcGenVal) + + var power uint64 = 1000 + inputAddress := genesisAccounts[1].GetAddress() + + // make a new validator to grant power to + val := acm.GeneratePrivateAccountFromSecret("validator") + + accBefore := getAccount(t, qcli, inputAddress) + + bondTx := createBondTx(inputAddress, power, val.GetPublicKey()) + _, err := sendPayload(tcli, bondTx) + require.NoError(t, err) + accAfter := getAccount(t, qcli, inputAddress) + // ensure power is subtracted from original account balance + require.Equal(t, accBefore.GetBalance()-power, accAfter.GetBalance()) + + valAfter := getAccount(t, qcli, val.GetAddress()) + // validator must have associated account + // typically without balance if just created + require.NotEmpty(t, valAfter.GetAddress()) + require.Equal(t, uint64(0), valAfter.GetBalance()) + + // make sure our new validator exists in the set + vsOut := getValidators(t, qcli) + require.Contains(t, vsOut, val.GetAddress()) + require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) + + // start the new validator + valKernel := &core.Kernel{} + err = startNode(valKernel, genesisDoc, val, append(genesisAccounts, val)...) + require.NoError(t, err) + connectKernels(&genesisKernels[0], valKernel) + + // wait for new validator to see themself in set + time.Sleep(2 * time.Second) + grpcBondedVal := valKernel.GRPCListenAddress().String() + qcli = rpctest.NewQueryClient(t, grpcBondedVal) + vsOut = getValidators(t, qcli) + require.Contains(t, vsOut, val.GetAddress()) + require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) + + unbondTx := createUnbondTx(val.GetAddress(), inputAddress) + tcli = rpctest.NewTransactClient(t, grpcBondedVal) + _, err = sendPayload(tcli, unbondTx) + require.NoError(t, err) + + tcli = rpctest.NewTransactClient(t, grpcGenVal) + qcli = rpctest.NewQueryClient(t, grpcGenVal) + vsOut = getValidators(t, qcli) + require.NotContains(t, vsOut, val.GetAddress()) + accAfter = getAccount(t, qcli, inputAddress) + require.Equal(t, accBefore.GetBalance(), accAfter.GetBalance()) + + // TODO: + // - ensure bonded validator can vote + // - add / remove too quickly + // - only validator can unbond themselves + // - cannot bond more than one validator? / delegated bonding? 
+} diff --git a/integration/governance/governance_test.go b/integration/governance/governance_test.go index a8bedff94..dc456443c 100644 --- a/integration/governance/governance_test.go +++ b/integration/governance/governance_test.go @@ -4,7 +4,6 @@ package governance import ( "context" - "fmt" "math/big" "net" "testing" @@ -16,7 +15,6 @@ import ( "github.com/hyperledger/burrow/core" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/execution/errors" - "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/genesis/spec" "github.com/hyperledger/burrow/governance" "github.com/hyperledger/burrow/integration" @@ -24,18 +22,14 @@ import ( "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/rpc/rpcquery" - "github.com/hyperledger/burrow/rpc/rpctransact" - "github.com/hyperledger/burrow/txs" - "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/p2p" tmcore "github.com/tendermint/tendermint/rpc/core" rpctypes "github.com/tendermint/tendermint/rpc/lib/types" ) func TestGovernance(t *testing.T) { - privateAccounts := integration.MakePrivateAccounts(10) // make keys + privateAccounts := integration.MakePrivateAccounts("accounts", 10) // make keys genesisDoc := integration.TestGenesisDoc(privateAccounts) kernels := make([]*core.Kernel, len(privateAccounts)) genesisDoc.Accounts[4].Permissions = permission.NewAccountPermissions(permission.Send | permission.Call) @@ -233,7 +227,7 @@ func TestGovernance(t *testing.T) { Amounts: balance.New().Power(power), })) require.Error(t, err, "Should not be able to set power without providing public key") - assert.Contains(t, err.Error(), "GovTx must be provided with public key when updating validator power") + assert.Contains(t, err.Error(), "GovTx: must be provided with public key when updating validator power") }) t.Run("InvalidSequenceNumber", func(t *testing.T) { @@ -281,78 +275,3 @@ func TestGovernance(t *testing.T) { time.Sleep(4 * time.Second) } - -// Helpers - -func getMaxFlow(t testing.TB, qcli rpcquery.QueryClient) uint64 { - vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) - require.NoError(t, err) - set := validator.UnpersistSet(vs.Set) - totalPower := set.TotalPower() - maxFlow := new(big.Int) - return maxFlow.Sub(maxFlow.Div(totalPower, big.NewInt(3)), big.NewInt(1)).Uint64() -} - -func getValidatorSet(t testing.TB, qcli rpcquery.QueryClient) *validator.Set { - vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) - require.NoError(t, err) - // Include the genesis validator and compare the sets - return validator.UnpersistSet(vs.Set) -} - -func account(i int) *acm.PrivateAccount { - return rpctest.PrivateAccounts[i] -} - -func govSync(cli rpctransact.TransactClient, tx *payload.GovTx) (*exec.TxExecution, error) { - return cli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{ - Payload: tx.Any(), - }) -} - -func assertValidatorsEqual(t testing.TB, expected, actual *validator.Set) { - require.NoError(t, expected.Equal(actual), "validator sets should be equal\nExpected: %v\n\nActual: %v\n", - expected, actual) -} - -func changePower(vs *validator.Set, i int, power uint64) { - vs.ChangePower(account(i).GetPublicKey(), new(big.Int).SetUint64(power)) -} - -func setSequence(t testing.TB, qcli rpcquery.QueryClient, tx payload.Payload) { - for _, input := 
range tx.GetInputs() { - ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: input.Address}) - require.NoError(t, err) - input.Sequence = ca.Sequence + 1 - } -} - -func localSignAndBroadcastSync(t testing.TB, tcli rpctransact.TransactClient, chainID string, - signer acm.AddressableSigner, tx payload.Payload) (*exec.TxExecution, error) { - txEnv := txs.Enclose(chainID, tx) - err := txEnv.Sign(signer) - require.NoError(t, err) - - return tcli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{Envelope: txEnv}) -} - -func connectKernels(k1, k2 *core.Kernel) { - k1Address, err := k1.Node.NodeInfo().NetAddress() - if err != nil { - panic(fmt.Errorf("could not get kernel address: %v", err)) - } - k2Address, err := k2.Node.NodeInfo().NetAddress() - if err != nil { - panic(fmt.Errorf("could not get kernel address: %v", err)) - } - fmt.Printf("Connecting %v -> %v\n", k1Address, k2Address) - err = k1.Node.Switch().DialPeerWithAddress(k2Address, false) - if err != nil { - switch e := err.(type) { - case p2p.ErrRejected: - panic(fmt.Errorf("connection between test kernels was rejected: %v", e)) - default: - panic(fmt.Errorf("could not connect test kernels: %v", err)) - } - } -} diff --git a/integration/governance/helpers.go b/integration/governance/helpers.go new file mode 100644 index 000000000..e1fcec5bb --- /dev/null +++ b/integration/governance/helpers.go @@ -0,0 +1,182 @@ +// +build integration + +package governance + +import ( + "context" + "fmt" + "math/big" + "net" + "testing" + + "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/acm/validator" + "github.com/hyperledger/burrow/core" + "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/genesis" + "github.com/hyperledger/burrow/genesis/spec" + "github.com/hyperledger/burrow/integration" + "github.com/hyperledger/burrow/integration/rpctest" + "github.com/hyperledger/burrow/logging/logconfig" + "github.com/hyperledger/burrow/rpc/rpcquery" + "github.com/hyperledger/burrow/rpc/rpctransact" + "github.com/hyperledger/burrow/txs" + "github.com/hyperledger/burrow/txs/payload" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/p2p" +) + +func startNode(kernel *core.Kernel, genesisDoc *genesis.GenesisDoc, + account *acm.PrivateAccount, keysAccounts ...*acm.PrivateAccount) error { + + testConfig, _ := integration.NewTestConfig(genesisDoc) + logconf := logconfig.New().Root(func(sink *logconfig.SinkConfig) *logconfig.SinkConfig { + return sink.SetTransform(logconfig.FilterTransform(logconfig.IncludeWhenAllMatch, + "total_validator")).SetOutput(logconfig.StdoutOutput()) + }) + + l, err := net.Listen("tcp", "localhost:0") + if err != nil { + return err + } + host, port, err := net.SplitHostPort(l.Addr().String()) + if err != nil { + return err + } + testConfig.Tendermint.ListenHost = host + testConfig.Tendermint.ListenPort = port + + kern, err := integration.TestKernel(account, keysAccounts, testConfig, logconf) + if err != nil { + return err + } + *kernel = *kern + + err = l.Close() + if err != nil { + return err + } + + return kernel.Boot() +} + +func createBondTx(address crypto.Address, amount uint64, pubKey crypto.PublicKey) *payload.BondTx { + return &payload.BondTx{ + Input: &payload.TxInput{ + Address: address, + Amount: amount, + }, + Validator: &spec.TemplateAccount{ + PublicKey: &pubKey, + }, + } +} + +func createUnbondTx(validator, account crypto.Address) *payload.UnbondTx { + return 
&payload.UnbondTx{ + Input: &payload.TxInput{ + Address: validator, + }, + Output: &payload.TxOutput{ + Address: account, + }, + } +} + +func getValidators(t testing.TB, qcli rpcquery.QueryClient) map[crypto.Address]*validator.Validator { + vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) + require.NoError(t, err) + vals := make(map[crypto.Address]*validator.Validator, len(vs.Set)) + for _, v := range vs.Set { + vals[v.PublicKey.GetAddress()] = v + } + return vals +} + +func getValidatorSet(t testing.TB, qcli rpcquery.QueryClient) *validator.Set { + vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) + require.NoError(t, err) + // Include the genesis validator and compare the sets + return validator.UnpersistSet(vs.Set) +} + +func getAccount(t testing.TB, qcli rpcquery.QueryClient, address crypto.Address) *acm.Account { + acc, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{ + Address: address, + }) + require.NoError(t, err) + return acc +} + +func account(i int) *acm.PrivateAccount { + return rpctest.PrivateAccounts[i] +} + +func sendPayload(cli rpctransact.TransactClient, tx payload.Payload) (*exec.TxExecution, error) { + return cli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{ + Payload: tx.Any(), + }) +} + +func assertValidatorsEqual(t testing.TB, expected, actual *validator.Set) { + require.NoError(t, expected.Equal(actual), "validator sets should be equal\nExpected: %v\n\nActual: %v\n", + expected, actual) +} + +func changePower(vs *validator.Set, i int, power uint64) { + vs.ChangePower(account(i).GetPublicKey(), new(big.Int).SetUint64(power)) +} + +func connectKernels(k1, k2 *core.Kernel) { + k1Address, err := k1.Node.NodeInfo().NetAddress() + if err != nil { + panic(fmt.Errorf("could not get kernel address: %v", err)) + } + k2Address, err := k2.Node.NodeInfo().NetAddress() + if err != nil { + panic(fmt.Errorf("could not get kernel address: %v", err)) + } + fmt.Printf("Connecting %v -> %v\n", k1Address, k2Address) + err = k1.Node.Switch().DialPeerWithAddress(k2Address, false) + if err != nil { + switch e := err.(type) { + case p2p.ErrRejected: + panic(fmt.Errorf("connection between test kernels was rejected: %v", e)) + default: + panic(fmt.Errorf("could not connect test kernels: %v", err)) + } + } +} + +func getMaxFlow(t testing.TB, qcli rpcquery.QueryClient) uint64 { + vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) + require.NoError(t, err) + set := validator.UnpersistSet(vs.Set) + totalPower := set.TotalPower() + maxFlow := new(big.Int) + return maxFlow.Sub(maxFlow.Div(totalPower, big.NewInt(3)), big.NewInt(1)).Uint64() +} + +func govSync(cli rpctransact.TransactClient, tx *payload.GovTx) (*exec.TxExecution, error) { + return cli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{ + Payload: tx.Any(), + }) +} + +func setSequence(t testing.TB, qcli rpcquery.QueryClient, tx payload.Payload) { + for _, input := range tx.GetInputs() { + ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: input.Address}) + require.NoError(t, err) + input.Sequence = ca.Sequence + 1 + } +} + +func localSignAndBroadcastSync(t testing.TB, tcli rpctransact.TransactClient, chainID string, + signer acm.AddressableSigner, tx payload.Payload) (*exec.TxExecution, error) { + txEnv := txs.Enclose(chainID, tx) + err := txEnv.Sign(signer) + require.NoError(t, err) + + return 
tcli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{Envelope: txEnv}) +} diff --git a/integration/integration.go b/integration/integration.go index c16665e77..5838cadb6 100644 --- a/integration/integration.go +++ b/integration/integration.go @@ -189,10 +189,10 @@ func TestGenesisDoc(addressables []*acm.PrivateAccount) *genesis.GenesisDoc { } // Deterministic account generation helper. Pass number of accounts to make -func MakePrivateAccounts(n int) []*acm.PrivateAccount { +func MakePrivateAccounts(sec string, n int) []*acm.PrivateAccount { accounts := make([]*acm.PrivateAccount, n) for i := 0; i < n; i++ { - accounts[i] = acm.GeneratePrivateAccountFromSecret("mysecret" + strconv.Itoa(i)) + accounts[i] = acm.GeneratePrivateAccountFromSecret(sec + strconv.Itoa(i)) } return accounts } diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index cdfa99def..71ef3c86c 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -25,7 +25,9 @@ import ( // so... (I didn't say it had to make sense): const UpsieDownsieCallCount = 1 + (34 - 17) + 1 + (34 - 23) -var PrivateAccounts = integration.MakePrivateAccounts(10) // make keys +var i = UpsieDownsieCallCount + +var PrivateAccounts = integration.MakePrivateAccounts("mysecret", 10) // make keys var GenesisDoc = integration.TestGenesisDoc(PrivateAccounts) // Helpers From 4e5b0facd7bb533511e9f4db7e2a613b25233b0f Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Thu, 20 Jun 2019 17:17:51 +0100 Subject: [PATCH 30/70] no net or node address' in bond Signed-off-by: Gregory Hill --- cmd/burrow/commands/configure.go | 2 + cmd/burrow/commands/tx.go | 12 +-- deploy/def/client.go | 12 +-- deploy/def/jobs.go | 4 - deploy/jobs/jobs_transact.go | 8 +- protobuf/payload.proto | 2 - txs/payload/payload.pb.go | 180 ++++++++++++------------------- 7 files changed, 82 insertions(+), 138 deletions(-) diff --git a/cmd/burrow/commands/configure.go b/cmd/burrow/commands/configure.go index 295646a07..506c475dd 100644 --- a/cmd/burrow/commands/configure.go +++ b/cmd/burrow/commands/configure.go @@ -40,6 +40,8 @@ func Configure(output Output) func(cmd *cli.Cmd) { keysDir := cmd.StringOpt("keys-dir", "", "Directory where keys are stored") + separateGenesisDoc := cmd.StringOpt("w separate-genesis-doc", "", "Emit a separate genesis doc as JSON or TOML") + configTemplateIn := cmd.StringsOpt("config-template-in", nil, "Go text/template input filename to generate config file specified with --config-out") diff --git a/cmd/burrow/commands/tx.go b/cmd/burrow/commands/tx.go index aeb37b10a..f2eaffe40 100644 --- a/cmd/burrow/commands/tx.go +++ b/cmd/burrow/commands/tx.go @@ -74,17 +74,13 @@ func Tx(output Output) func(cmd *cli.Cmd) { sourceOpt := cmd.StringOpt("source", "", "Account with bonding perm, if not set config is used") targetOpt := cmd.StringOpt("target", "", "Validator account to bond, created if doesn't exist") powerOpt := cmd.StringOpt("power", "", "Amount of value to bond, required") - nodeOpt := cmd.StringOpt("node", "", "Optional Tendermint node address") - urlOpt := cmd.StringOpt("url", "", "Optional network address for validator") - cmd.Spec += "[--source=
] [--target=] [--power=] [--node=
] [--url=]" + cmd.Spec += "[--source=
] [--target=] [--power=]" cmd.Action = func() { bond := &def.Bond{ - Source: jobs.FirstOf(*sourceOpt, address), - Target: jobs.FirstOf(*targetOpt, address), - Power: *powerOpt, - Node: *nodeOpt, - Network: *urlOpt, + Source: jobs.FirstOf(*sourceOpt, address), + Target: jobs.FirstOf(*targetOpt, address), + Power: *powerOpt, } if err := bond.Validate(); err != nil { diff --git a/deploy/def/client.go b/deploy/def/client.go index 4fdaab3b1..d56c695c0 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -505,13 +505,11 @@ func (c *Client) Send(arg *SendArg, logger *logging.Logger) (*payload.SendTx, er } type BondArg struct { - Input string - Amount string - Sequence string - Address string - PublicKey string - NodeAddress string - NetAddress string + Input string + Amount string + Sequence string + Address string + PublicKey string } func (c *Client) Bond(arg *BondArg, logger *logging.Logger) (*payload.BondTx, error) { diff --git a/deploy/def/jobs.go b/deploy/def/jobs.go index c3068fa75..fa2d058b9 100644 --- a/deploy/def/jobs.go +++ b/deploy/def/jobs.go @@ -183,10 +183,6 @@ type Bond struct { Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` // (Required) the Tendermint validator power to claim Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` - // (Optional) Tendermint node address of the validator - Node string `mapstructure:"node" json:"node" yaml:"node" toml:"node"` - // (Optional) network ip address of the validator - Network string `mapstructure:"network" json:"network" yaml:"network" toml:"network"` // (Optional, advanced only) sequence to use when burrow keys signs the transaction // (do not use unless you know what you're doing) Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` diff --git a/deploy/jobs/jobs_transact.go b/deploy/jobs/jobs_transact.go index ada2da1c9..25041acad 100644 --- a/deploy/jobs/jobs_transact.go +++ b/deploy/jobs/jobs_transact.go @@ -59,11 +59,9 @@ func FormulateBondJob(bond *def.Bond, account string, client *def.Client, logger "power", bond.Power) arg := &def.BondArg{ - Input: bond.Source, - Amount: bond.Power, - Sequence: bond.Sequence, - NodeAddress: bond.Node, - NetAddress: bond.Network, + Input: bond.Source, + Amount: bond.Power, + Sequence: bond.Sequence, } if len(bond.Source) == crypto.AddressHexLength { diff --git a/protobuf/payload.proto b/protobuf/payload.proto index 1667e1778..76d97203a 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -112,8 +112,6 @@ message BondTx { TxInput Input = 1; // The validator to bond, public key must be known spec.TemplateAccount Validator = 2 [(gogoproto.nullable) = true]; - // Optional network address to identify - string NetAddress = 3; } message UnbondTx { diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 6f132d49c..73a1e4d8b 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -557,12 +557,10 @@ type BondTx struct { // Account with bonding permission Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` // The validator to bond, public key must be known - Validator *spec.TemplateAccount `protobuf:"bytes,2,opt,name=Validator,proto3" json:"Validator,omitempty"` - // Optional network address to identify - NetAddress string `protobuf:"bytes,3,opt,name=NetAddress,proto3" json:"NetAddress,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Validator 
*spec.TemplateAccount `protobuf:"bytes,2,opt,name=Validator,proto3" json:"Validator,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *BondTx) Reset() { *m = BondTx{} } @@ -976,70 +974,70 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 1006 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcb, 0x6f, 0xe3, 0x44, - 0x18, 0xaf, 0x6b, 0xe7, 0xd1, 0x6f, 0xd3, 0x92, 0x1d, 0x1e, 0x8a, 0x2a, 0x91, 0xac, 0x02, 0x82, - 0x5d, 0xd8, 0x26, 0xb0, 0xcb, 0x43, 0xf4, 0x82, 0xe2, 0x26, 0xed, 0x16, 0x2d, 0x6d, 0x35, 0x71, - 0xbb, 0x08, 0xc4, 0x61, 0x92, 0x0c, 0x8e, 0xa5, 0xc4, 0x63, 0xec, 0xc9, 0xe2, 0x70, 0xe2, 0xc0, - 0x81, 0x2b, 0x42, 0x48, 0x1c, 0xfb, 0x2f, 0xf0, 0x1f, 0x70, 0xec, 0x91, 0x23, 0xe2, 0x50, 0xa1, - 0xee, 0x05, 0xf1, 0x57, 0xa0, 0x19, 0xcf, 0x38, 0x4e, 0x76, 0xd5, 0x4d, 0x2b, 0xc4, 0xcd, 0xf3, - 0x7d, 0xbf, 0xf9, 0x1e, 0xbf, 0xef, 0x31, 0x86, 0xf5, 0x80, 0x4c, 0x47, 0x8c, 0x0c, 0x1a, 0x41, - 0xc8, 0x38, 0x43, 0x05, 0x75, 0xdc, 0xdc, 0x72, 0x3d, 0x3e, 0x9c, 0xf4, 0x1a, 0x7d, 0x36, 0x6e, - 0xba, 0xcc, 0x65, 0x4d, 0xa9, 0xef, 0x4d, 0xbe, 0x92, 0x27, 0x79, 0x90, 0x5f, 0xc9, 0xbd, 0xcd, - 0x72, 0x40, 0xc3, 0xb1, 0x17, 0x45, 0x1e, 0xf3, 0x95, 0x04, 0xa2, 0x80, 0xf6, 0x93, 0xef, 0xfa, - 0x8f, 0x26, 0x98, 0x2d, 0x7f, 0x8a, 0xde, 0x84, 0xfc, 0x0e, 0x19, 0x8d, 0x9c, 0xb8, 0x62, 0xdc, - 0x32, 0x6e, 0xdf, 0xb8, 0xf7, 0x42, 0x43, 0x7b, 0x4f, 0xc4, 0x58, 0xa9, 0x05, 0xb0, 0x4b, 0xfd, - 0x81, 0x13, 0x57, 0x56, 0x17, 0x80, 0x89, 0x18, 0x2b, 0xb5, 0x00, 0x1e, 0x90, 0x31, 0x75, 0xe2, - 0x8a, 0xb9, 0x00, 0x4c, 0xc4, 0x58, 0xa9, 0xd1, 0x5b, 0x50, 0x38, 0xa2, 0xe1, 0x38, 0x72, 0xe2, - 0x8a, 0x25, 0x91, 0xe5, 0x14, 0xa9, 0xe4, 0x58, 0x03, 0xd0, 0xeb, 0x90, 0xdb, 0x63, 0x8f, 0x9d, - 0xb8, 0x92, 0x93, 0xc8, 0x8d, 0x14, 0x29, 0xa5, 0x38, 0x51, 0x0a, 0xd7, 0x36, 0x93, 0x31, 0xe6, - 0x17, 0x5c, 0x27, 0x62, 0xac, 0xd4, 0x68, 0x0b, 0x8a, 0xc7, 0x7e, 0x2f, 0x81, 0x16, 0x24, 0xf4, - 0x66, 0x0a, 0xd5, 0x0a, 0x9c, 0x42, 0x44, 0xa4, 0x36, 0xe1, 0xfd, 0xa1, 0x13, 0x57, 0x8a, 0x0b, - 0x91, 0x2a, 0x39, 0xd6, 0x00, 0x74, 0x1f, 0xe0, 0x28, 0x64, 0x01, 0x8b, 0x88, 0x20, 0x75, 0x4d, - 0xc2, 0x5f, 0x9c, 0x25, 0x96, 0xaa, 0x70, 0x06, 0xb6, 0x6d, 0x9d, 0x9d, 0xd6, 0x8c, 0xfa, 0x4f, - 0x06, 0x14, 0x9c, 0x78, 0xdf, 0x0f, 0x26, 0x1c, 0x1d, 0x40, 0xa1, 0x35, 0x18, 0x84, 0x34, 0x8a, - 0x64, 0x61, 0x4a, 0xf6, 0x7b, 0x67, 0xe7, 0xb5, 0x95, 0x3f, 0xcf, 0x6b, 0x77, 0x33, 0x5d, 0x30, - 0x9c, 0x06, 0x34, 0x1c, 0xd1, 0x81, 0x4b, 0xc3, 0x66, 0x6f, 0x12, 0x86, 0xec, 0x9b, 0x66, 0x3f, - 0x9c, 0x06, 0x9c, 0x35, 0xd4, 0x5d, 0xac, 0x8d, 0xa0, 0x57, 0x20, 0xdf, 0x1a, 0xb3, 0x89, 0xcf, - 0x65, 0xf9, 0x2c, 0xac, 0x4e, 0x68, 0x13, 0x8a, 0x5d, 0xfa, 0xf5, 0x84, 0xfa, 0x7d, 0x2a, 0xeb, - 0x65, 0xe1, 0xf4, 0xbc, 0x6d, 0xfd, 0x72, 0x5a, 0x5b, 0xa9, 0xc7, 0x50, 0x74, 0xe2, 0xc3, 0x09, - 0xff, 0x1f, 0xa3, 0x52, 0x9e, 0xff, 0x58, 0xd5, 0xcd, 0x89, 0xde, 0x80, 0x9c, 0xe4, 0x45, 0x75, - 0xe9, 0x8c, 0x7f, 0xc5, 0x17, 0x4e, 0xd4, 0xe8, 0x93, 0x59, 0x80, 0xab, 0x32, 0xc0, 0x77, 0xae, - 0x1f, 0xdc, 0x26, 0x14, 0xf7, 0x48, 0xf4, 0xd0, 0x1b, 0x7b, 0x5c, 0x53, 0xa3, 0xcf, 0xa8, 0x0c, - 0xe6, 0x2e, 0xa5, 0xb2, 0x6f, 0x2d, 0x2c, 0x3e, 0xd1, 0x3e, 0x58, 0x6d, 0xc2, 0x89, 0x6c, 0xd0, - 0x92, 0xfd, 0xbe, 0xe2, 0x65, 0xeb, 0x72, 0xd7, 0x3d, 0xcf, 0x27, 
0xe1, 0xb4, 0xf1, 0x80, 0xc6, - 0xf6, 0x94, 0xd3, 0x08, 0x4b, 0x13, 0xe8, 0x0b, 0xb0, 0x1e, 0xb5, 0xba, 0x9f, 0xca, 0x26, 0x2e, - 0xd9, 0x7b, 0xd7, 0x32, 0xf5, 0xcf, 0x79, 0x6d, 0x83, 0x13, 0x37, 0xba, 0xcb, 0xc6, 0x1e, 0xa7, - 0xe3, 0x80, 0x4f, 0xb1, 0x34, 0xaa, 0xa8, 0xf5, 0xf4, 0x34, 0xa3, 0xdb, 0x90, 0x97, 0xd4, 0x89, - 0x8a, 0x9a, 0xcf, 0xa4, 0x56, 0xe9, 0xd1, 0xdb, 0x50, 0x48, 0xda, 0x40, 0x70, 0x6b, 0xce, 0xcd, - 0x8c, 0x6e, 0x10, 0xac, 0x11, 0xdb, 0xc5, 0x1f, 0x4e, 0x6b, 0x2b, 0xd2, 0x15, 0x4b, 0xc7, 0x7c, - 0xe9, 0x2a, 0x7e, 0x00, 0x45, 0x71, 0xa5, 0x15, 0xba, 0x91, 0xda, 0x36, 0x2f, 0x35, 0x32, 0xdb, - 0x4c, 0xeb, 0x6c, 0x4b, 0x50, 0x83, 0x53, 0xac, 0xca, 0x2d, 0xd0, 0x0b, 0x68, 0x69, 0x7f, 0x08, - 0x2c, 0x71, 0x43, 0xfa, 0x5a, 0xc3, 0xf2, 0x5b, 0xc8, 0x64, 0x3d, 0xcd, 0x44, 0x26, 0x0b, 0xf3, - 0x54, 0xd5, 0x95, 0xc7, 0x9f, 0x0d, 0xbd, 0x78, 0x96, 0x76, 0xf9, 0x11, 0xac, 0x9d, 0x90, 0x91, - 0x37, 0x20, 0x9c, 0x85, 0x2a, 0xc7, 0x97, 0x1b, 0x72, 0x3f, 0x3b, 0x74, 0x1c, 0x8c, 0x08, 0xa7, - 0xad, 0x7e, 0x5f, 0xcc, 0x82, 0x4c, 0xd2, 0xc0, 0x33, 0x34, 0xaa, 0x02, 0x1c, 0x50, 0xae, 0xdb, - 0x3c, 0x89, 0x2f, 0x23, 0xc9, 0x50, 0xef, 0xce, 0xd6, 0xdc, 0xd2, 0x81, 0xdd, 0x81, 0x7c, 0x52, - 0x43, 0x15, 0xd5, 0x33, 0x8a, 0xac, 0x00, 0x19, 0x47, 0xdf, 0x19, 0x6a, 0x3f, 0x5f, 0xa1, 0x9d, - 0x76, 0x60, 0x43, 0xa5, 0x78, 0x1c, 0x0c, 0x08, 0xa7, 0xba, 0xab, 0x2e, 0xa5, 0x61, 0xe1, 0x4a, - 0x26, 0x84, 0xbf, 0x8d, 0xec, 0xe2, 0x5d, 0x3a, 0xdd, 0x3a, 0x94, 0x4e, 0x18, 0xf7, 0x7c, 0xf7, - 0x11, 0xf5, 0xdc, 0x61, 0x92, 0xb4, 0x89, 0xe7, 0x64, 0xe8, 0x18, 0x4a, 0xda, 0xf2, 0x03, 0x12, - 0x0d, 0x25, 0xe5, 0x25, 0xfb, 0xdd, 0xab, 0x8f, 0xf7, 0x9c, 0x19, 0xf1, 0x08, 0xe9, 0xb3, 0x7a, - 0x00, 0x6f, 0x3e, 0xf5, 0x4e, 0xe0, 0x14, 0x92, 0x49, 0xf5, 0xcb, 0xf4, 0x39, 0xba, 0x02, 0xdd, - 0x55, 0x30, 0x9d, 0x58, 0x73, 0x5c, 0x4a, 0x61, 0x2d, 0x7f, 0x8a, 0x85, 0x22, 0x63, 0xfe, 0x7b, - 0x03, 0xac, 0x13, 0xc6, 0xe9, 0x7f, 0xbe, 0xed, 0x97, 0xe0, 0x3a, 0x13, 0xc6, 0xe3, 0x19, 0x3d, - 0xe9, 0x80, 0x1a, 0x99, 0x01, 0xbd, 0x05, 0x37, 0xda, 0x34, 0xea, 0x87, 0x5e, 0xc0, 0x3d, 0xe6, - 0xab, 0xd9, 0xcd, 0x8a, 0xb2, 0xcf, 0xb6, 0xf9, 0x9c, 0x67, 0x3b, 0xe3, 0xf7, 0xd7, 0x55, 0xc8, - 0xdb, 0x64, 0x34, 0x62, 0x7c, 0xae, 0x42, 0xc6, 0x73, 0x2b, 0x24, 0xfa, 0x64, 0xd7, 0xf3, 0xc9, - 0xc8, 0xfb, 0xd6, 0xf3, 0x5d, 0xf5, 0xa3, 0x74, 0xbd, 0x3e, 0xc9, 0x9a, 0x41, 0x3b, 0xb0, 0x1e, - 0x28, 0x17, 0x5d, 0x4e, 0x78, 0xb2, 0x7f, 0x36, 0xee, 0xbd, 0x9a, 0x49, 0x46, 0x44, 0x9b, 0x46, - 0x24, 0x41, 0x78, 0xfe, 0x0e, 0x7a, 0x0d, 0x72, 0xa2, 0xa6, 0x51, 0x25, 0x27, 0x1b, 0x60, 0x3d, - 0xbd, 0x2c, 0xa4, 0x38, 0xd1, 0xd5, 0x3f, 0x84, 0xf5, 0x39, 0x23, 0xa8, 0x04, 0xc5, 0x23, 0x7c, - 0x78, 0x74, 0xd8, 0xed, 0xb4, 0xcb, 0x2b, 0xe2, 0xd4, 0xf9, 0xac, 0xb3, 0x73, 0xec, 0x74, 0xda, - 0x65, 0x03, 0x01, 0xe4, 0x77, 0x5b, 0xfb, 0x0f, 0x3b, 0xed, 0xf2, 0xaa, 0xfd, 0xf1, 0xd9, 0x45, - 0xd5, 0xf8, 0xfd, 0xa2, 0x6a, 0xfc, 0x75, 0x51, 0x35, 0x7e, 0x7b, 0x52, 0x35, 0xce, 0x9e, 0x54, - 0x8d, 0xcf, 0xef, 0x5c, 0x9e, 0x35, 0x8f, 0xa3, 0xa6, 0x8a, 0xa2, 0x97, 0x97, 0x7f, 0xa5, 0xf7, - 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x06, 0x89, 0xb8, 0xbf, 0xfc, 0x0a, 0x00, 0x00, + // 995 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4d, 0x6f, 0x1b, 0xc5, + 0x1b, 0xcf, 0x7a, 0xd7, 0x2f, 0x79, 0xea, 0xe4, 0xef, 0xce, 0x1f, 0x90, 0x15, 0x09, 0xbb, 0x32, + 0x08, 0x5a, 0x68, 0x6c, 0x68, 0x79, 0x11, 0xb9, 0x20, 0x6f, 0xec, 0xa4, 0x41, 0xa5, 0x89, 0xc6, + 0x9b, 0x14, 0x81, 0x38, 0x8c, 0xed, 0x61, 0xbd, 0x92, 
0xbd, 0xb3, 0xec, 0x8e, 0xcb, 0x9a, 0x13, + 0x07, 0x0e, 0x5c, 0x11, 0x17, 0x8e, 0xf9, 0x0a, 0x7c, 0x03, 0x8e, 0x39, 0x72, 0x44, 0x1c, 0x22, + 0x94, 0x5e, 0x10, 0x9f, 0x02, 0xcd, 0xec, 0xcc, 0x7a, 0xed, 0x56, 0xa9, 0x13, 0x21, 0x6e, 0x3b, + 0xcf, 0xf3, 0x9b, 0xe7, 0xe5, 0xf7, 0xbc, 0xcc, 0xc2, 0x46, 0x40, 0x66, 0x63, 0x46, 0x86, 0xcd, + 0x20, 0x64, 0x9c, 0xa1, 0xa2, 0x3a, 0x6e, 0x6d, 0xbb, 0x1e, 0x1f, 0x4d, 0xfb, 0xcd, 0x01, 0x9b, + 0xb4, 0x5c, 0xe6, 0xb2, 0x96, 0xd4, 0xf7, 0xa7, 0x5f, 0xc9, 0x93, 0x3c, 0xc8, 0xaf, 0xe4, 0xde, + 0x56, 0x25, 0xa0, 0xe1, 0xc4, 0x8b, 0x22, 0x8f, 0xf9, 0x4a, 0x02, 0x51, 0x40, 0x07, 0xc9, 0x77, + 0xe3, 0x47, 0x13, 0xcc, 0xb6, 0x3f, 0x43, 0x6f, 0x42, 0x61, 0x97, 0x8c, 0xc7, 0x4e, 0x5c, 0x35, + 0x6e, 0x19, 0xb7, 0x6f, 0xdc, 0xfb, 0x5f, 0x53, 0x7b, 0x4f, 0xc4, 0x58, 0xa9, 0x05, 0xb0, 0x47, + 0xfd, 0xa1, 0x13, 0x57, 0x73, 0x4b, 0xc0, 0x44, 0x8c, 0x95, 0x5a, 0x00, 0x1f, 0x91, 0x09, 0x75, + 0xe2, 0xaa, 0xb9, 0x04, 0x4c, 0xc4, 0x58, 0xa9, 0xd1, 0x5b, 0x50, 0x3c, 0xa2, 0xe1, 0x24, 0x72, + 0xe2, 0xaa, 0x25, 0x91, 0x95, 0x14, 0xa9, 0xe4, 0x58, 0x03, 0xd0, 0xeb, 0x90, 0xdf, 0x67, 0x4f, + 0x9c, 0xb8, 0x9a, 0x97, 0xc8, 0xcd, 0x14, 0x29, 0xa5, 0x38, 0x51, 0x0a, 0xd7, 0x36, 0x93, 0x31, + 0x16, 0x96, 0x5c, 0x27, 0x62, 0xac, 0xd4, 0x68, 0x1b, 0x4a, 0xc7, 0x7e, 0x3f, 0x81, 0x16, 0x25, + 0xf4, 0x66, 0x0a, 0xd5, 0x0a, 0x9c, 0x42, 0x44, 0xa4, 0x36, 0xe1, 0x83, 0x91, 0x13, 0x57, 0x4b, + 0x4b, 0x91, 0x2a, 0x39, 0xd6, 0x00, 0x74, 0x1f, 0xe0, 0x28, 0x64, 0x01, 0x8b, 0x88, 0x20, 0x75, + 0x5d, 0xc2, 0xff, 0x3f, 0x4f, 0x2c, 0x55, 0xe1, 0x0c, 0x6c, 0xc7, 0x3a, 0x3b, 0xad, 0x1b, 0x8d, + 0x9f, 0x0c, 0x28, 0x3a, 0xf1, 0x81, 0x1f, 0x4c, 0x39, 0x7a, 0x04, 0xc5, 0xf6, 0x70, 0x18, 0xd2, + 0x28, 0x92, 0x85, 0x29, 0xdb, 0xef, 0x9d, 0x9d, 0xd7, 0xd7, 0xfe, 0x38, 0xaf, 0xdf, 0xcd, 0x74, + 0xc1, 0x68, 0x16, 0xd0, 0x70, 0x4c, 0x87, 0x2e, 0x0d, 0x5b, 0xfd, 0x69, 0x18, 0xb2, 0x6f, 0x5a, + 0x83, 0x70, 0x16, 0x70, 0xd6, 0x54, 0x77, 0xb1, 0x36, 0x82, 0x5e, 0x81, 0x42, 0x7b, 0xc2, 0xa6, + 0x3e, 0x97, 0xe5, 0xb3, 0xb0, 0x3a, 0xa1, 0x2d, 0x28, 0xf5, 0xe8, 0xd7, 0x53, 0xea, 0x0f, 0xa8, + 0xac, 0x97, 0x85, 0xd3, 0xf3, 0x8e, 0xf5, 0xf3, 0x69, 0x7d, 0xad, 0x11, 0x43, 0xc9, 0x89, 0x0f, + 0xa7, 0xfc, 0x3f, 0x8c, 0x4a, 0x79, 0xfe, 0x3d, 0xa7, 0x9b, 0x13, 0xbd, 0x01, 0x79, 0xc9, 0x8b, + 0xea, 0xd2, 0x39, 0xff, 0x8a, 0x2f, 0x9c, 0xa8, 0xd1, 0x27, 0xf3, 0x00, 0x73, 0x32, 0xc0, 0x77, + 0xae, 0x1f, 0xdc, 0x16, 0x94, 0xf6, 0x49, 0xf4, 0xd0, 0x9b, 0x78, 0x5c, 0x53, 0xa3, 0xcf, 0xa8, + 0x02, 0xe6, 0x1e, 0xa5, 0xb2, 0x6f, 0x2d, 0x2c, 0x3e, 0xd1, 0x01, 0x58, 0x1d, 0xc2, 0x89, 0x6c, + 0xd0, 0xb2, 0xfd, 0xbe, 0xe2, 0x65, 0xfb, 0x72, 0xd7, 0x7d, 0xcf, 0x27, 0xe1, 0xac, 0xf9, 0x80, + 0xc6, 0xf6, 0x8c, 0xd3, 0x08, 0x4b, 0x13, 0xe8, 0x0b, 0xb0, 0x1e, 0xb7, 0x7b, 0x9f, 0xca, 0x26, + 0x2e, 0xdb, 0xfb, 0xd7, 0x32, 0xf5, 0xf7, 0x79, 0x7d, 0x93, 0x13, 0x37, 0xba, 0xcb, 0x26, 0x1e, + 0xa7, 0x93, 0x80, 0xcf, 0xb0, 0x34, 0xaa, 0xa8, 0xf5, 0xf4, 0x34, 0xa3, 0xdb, 0x50, 0x90, 0xd4, + 0x89, 0x8a, 0x9a, 0xcf, 0xa5, 0x56, 0xe9, 0xd1, 0xdb, 0x50, 0x4c, 0xda, 0x40, 0x70, 0x6b, 0x2e, + 0xcc, 0x8c, 0x6e, 0x10, 0xac, 0x11, 0x3b, 0xa5, 0x1f, 0x4e, 0xeb, 0x6b, 0xd2, 0x15, 0x4b, 0xc7, + 0x7c, 0xe5, 0x2a, 0x7e, 0x00, 0x25, 0x71, 0xa5, 0x1d, 0xba, 0x91, 0xda, 0x36, 0x2f, 0x35, 0x33, + 0xdb, 0x4c, 0xeb, 0x6c, 0x4b, 0x50, 0x83, 0x53, 0xac, 0xca, 0x2d, 0xd0, 0x0b, 0x68, 0x65, 0x7f, + 0x08, 0x2c, 0x71, 0x43, 0xfa, 0x5a, 0xc7, 0xf2, 0x5b, 0xc8, 0x64, 0x3d, 0xcd, 0x44, 0x26, 0x0b, + 0xf3, 0x4c, 0xd5, 0x95, 0xc7, 0xa9, 0xde, 0x3b, 0x2b, 0x7b, 0xfc, 0x08, 0xd6, 
0x4f, 0xc8, 0xd8, + 0x1b, 0x12, 0xce, 0x42, 0x95, 0xe2, 0xcb, 0x4d, 0xb9, 0x9e, 0x1d, 0x3a, 0x09, 0xc6, 0x84, 0xd3, + 0xf6, 0x60, 0x20, 0x46, 0x41, 0xe6, 0x68, 0xe0, 0x39, 0x3a, 0xc3, 0xac, 0x3b, 0xdf, 0x62, 0x2b, + 0x3b, 0xbe, 0x03, 0x85, 0xa4, 0x44, 0xca, 0xeb, 0x73, 0x6a, 0xa8, 0x00, 0x19, 0x47, 0xdf, 0x19, + 0x6a, 0xfd, 0x5e, 0xa1, 0x5b, 0x76, 0x61, 0x53, 0xa5, 0x70, 0x1c, 0x0c, 0x09, 0xa7, 0xba, 0x69, + 0x2e, 0x4d, 0x73, 0xe9, 0x4a, 0x26, 0x84, 0xbf, 0x8c, 0xec, 0x5e, 0x5d, 0x39, 0xdd, 0x06, 0x94, + 0x4f, 0x18, 0xf7, 0x7c, 0xf7, 0x31, 0xf5, 0xdc, 0x51, 0x92, 0xb4, 0x89, 0x17, 0x64, 0xe8, 0x18, + 0xca, 0xda, 0xf2, 0x03, 0x12, 0x8d, 0x64, 0xc5, 0xcb, 0xf6, 0xbb, 0x57, 0x9f, 0xde, 0x05, 0x33, + 0xe2, 0x8d, 0xd1, 0x67, 0xf5, 0xbe, 0xdd, 0x7c, 0xe6, 0x19, 0xc0, 0x29, 0x24, 0x93, 0xea, 0x97, + 0xe9, 0x6b, 0x73, 0x05, 0xba, 0x6b, 0x60, 0x3a, 0xb1, 0xe6, 0xb8, 0x9c, 0xc2, 0xda, 0xfe, 0x0c, + 0x0b, 0x45, 0xc6, 0xfc, 0xf7, 0x06, 0x58, 0x27, 0x8c, 0xd3, 0x7f, 0x7d, 0x99, 0xaf, 0xc0, 0x75, + 0x26, 0x8c, 0x27, 0x73, 0x7a, 0xd2, 0xf9, 0x33, 0x32, 0xf3, 0x77, 0x0b, 0x6e, 0x74, 0x68, 0x34, + 0x08, 0xbd, 0x80, 0x7b, 0xcc, 0x57, 0xa3, 0x99, 0x15, 0x65, 0x5f, 0x65, 0xf3, 0x05, 0xaf, 0x72, + 0xc6, 0xef, 0x2f, 0x39, 0x28, 0xd8, 0x64, 0x3c, 0x66, 0x7c, 0xa1, 0x42, 0xc6, 0x0b, 0x2b, 0x24, + 0xfa, 0x64, 0xcf, 0xf3, 0xc9, 0xd8, 0xfb, 0xd6, 0xf3, 0x5d, 0xf5, 0x1f, 0x74, 0xbd, 0x3e, 0xc9, + 0x9a, 0x41, 0xbb, 0xb0, 0x11, 0x28, 0x17, 0x3d, 0x4e, 0x78, 0xb2, 0x5e, 0x36, 0xef, 0xbd, 0x9a, + 0x49, 0x46, 0x44, 0x9b, 0x46, 0x24, 0x41, 0x78, 0xf1, 0x0e, 0x7a, 0x0d, 0xf2, 0xa2, 0xa6, 0x51, + 0x35, 0x2f, 0x1b, 0x60, 0x23, 0xbd, 0x2c, 0xa4, 0x38, 0xd1, 0x35, 0x3e, 0x84, 0x8d, 0x05, 0x23, + 0xa8, 0x0c, 0xa5, 0x23, 0x7c, 0x78, 0x74, 0xd8, 0xeb, 0x76, 0x2a, 0x6b, 0xe2, 0xd4, 0xfd, 0xac, + 0xbb, 0x7b, 0xec, 0x74, 0x3b, 0x15, 0x03, 0x01, 0x14, 0xf6, 0xda, 0x07, 0x0f, 0xbb, 0x9d, 0x4a, + 0xce, 0xfe, 0xf8, 0xec, 0xa2, 0x66, 0xfc, 0x76, 0x51, 0x33, 0xfe, 0xbc, 0xa8, 0x19, 0xbf, 0x3e, + 0xad, 0x19, 0x67, 0x4f, 0x6b, 0xc6, 0xe7, 0x77, 0x2e, 0xcf, 0x9a, 0xc7, 0x51, 0x4b, 0x45, 0xd1, + 0x2f, 0xc8, 0x9f, 0xce, 0xfb, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x72, 0xd8, 0x03, 0x0f, 0xdb, + 0x0a, 0x00, 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1460,12 +1458,6 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { } i += n20 } - if len(m.NetAddress) > 0 { - dAtA[i] = 0x1a - i++ - i = encodeVarintPayload(dAtA, i, uint64(len(m.NetAddress))) - i += copy(dAtA[i:], m.NetAddress) - } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -2001,10 +1993,6 @@ func (m *BondTx) Size() (n int) { l = m.Validator.Size() n += 1 + l + sovPayload(uint64(l)) } - l = len(m.NetAddress) - if l > 0 { - n += 1 + l + sovPayload(uint64(l)) - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3602,38 +3590,6 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NetAddress", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPayload - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPayload - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPayload - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - 
m.NetAddress = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipPayload(dAtA[iNdEx:]) From 18433dec950707aac0697092cd1fcfca6278cbf5 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Tue, 2 Jul 2019 10:24:18 +0100 Subject: [PATCH 31/70] tidy up imports & tests Signed-off-by: Gregory Hill --- acm/account.go | 1 - cmd/burrow/commands/config_options.go | 2 +- cmd/burrow/commands/configure.go | 4 ++-- integration/core/kernel_test.go | 2 ++ integration/rpctest/helpers.go | 2 -- 5 files changed, 5 insertions(+), 6 deletions(-) diff --git a/acm/account.go b/acm/account.go index d1aafda90..f63e4e645 100644 --- a/acm/account.go +++ b/acm/account.go @@ -24,7 +24,6 @@ import ( "github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/execution/errors" "github.com/hyperledger/burrow/permission" - amino "github.com/tendermint/go-amino" ) var GlobalPermissionsAddress = crypto.Address(binary.Zero160) diff --git a/cmd/burrow/commands/config_options.go b/cmd/burrow/commands/config_options.go index e7c695f4b..31a30d713 100644 --- a/cmd/burrow/commands/config_options.go +++ b/cmd/burrow/commands/config_options.go @@ -58,7 +58,7 @@ func addConfigOptions(cmd *cli.Cmd) *configOptions { Name: "v validator", Desc: "Validator index (in validators list - GenesisSpec or GenesisDoc) from which to set Address", Value: -1, - EnvVar: "BURROW_NODE_INDEX", + EnvVar: "BURROW_VALIDATOR_INDEX", }), initAddressOpt: cmd.String(cli.StringOpt{ diff --git a/cmd/burrow/commands/configure.go b/cmd/burrow/commands/configure.go index 506c475dd..e8a57b8f2 100644 --- a/cmd/burrow/commands/configure.go +++ b/cmd/burrow/commands/configure.go @@ -40,14 +40,14 @@ func Configure(output Output) func(cmd *cli.Cmd) { keysDir := cmd.StringOpt("keys-dir", "", "Directory where keys are stored") - separateGenesisDoc := cmd.StringOpt("w separate-genesis-doc", "", "Emit a separate genesis doc as JSON or TOML") - configTemplateIn := cmd.StringsOpt("config-template-in", nil, "Go text/template input filename to generate config file specified with --config-out") configTemplateOut := cmd.StringsOpt("config-out", nil, "Go text/template output filename. Template filename specified with --config-template-in") + separateGenesisDoc := cmd.StringOpt("w separate-genesis-doc", "", "Emit a separate genesis doc as JSON or TOML") + loggingOpt := cmd.StringOpt("l logging", "", "Comma separated list of logging instructions which form a 'program' which is a depth-first "+ "pre-order of instructions that will build the root logging sink. See 'burrow help' for more information.") diff --git a/integration/core/kernel_test.go b/integration/core/kernel_test.go index f495cff4b..962e0c7f5 100644 --- a/integration/core/kernel_test.go +++ b/integration/core/kernel_test.go @@ -49,6 +49,8 @@ func testKernel(t *testing.T, opts ...func(*config.BurrowConfig)) { t.Run(fmt.Sprintf("Group"), func(t *testing.T) { t.Parallel() genesisDoc, privateAccounts, privateValidators := genesis.NewDeterministicGenesis(123).GenesisDoc(1, 1) + require.NotNil(t, privateAccounts) + require.NotNil(t, privateValidators) t.Run("BootThenShutdown", func(t *testing.T) { conf, cleanup := integration.NewTestConfig(genesisDoc, opts...) defer cleanup() diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index 71ef3c86c..5bdacb7ba 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -25,8 +25,6 @@ import ( // so... 
(I didn't say it had to make sense): const UpsieDownsieCallCount = 1 + (34 - 17) + 1 + (34 - 23) -var i = UpsieDownsieCallCount - var PrivateAccounts = integration.MakePrivateAccounts("mysecret", 10) // make keys var GenesisDoc = integration.TestGenesisDoc(PrivateAccounts) From a9f2b8454ceeadcb24579ec71d1e9a12c4d02d4b Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 3 Jul 2019 15:12:40 +0100 Subject: [PATCH 32/70] check bonded validator can propose blocks Signed-off-by: Gregory Hill --- execution/contexts/bond_context.go | 56 +++++------ integration/governance/bonding_test.go | 81 ++++++++++------ integration/governance/governance_test.go | 113 ++++++++-------------- integration/governance/helpers.go | 44 ++++++--- integration/integration.go | 16 ++- integration/rpctest/helpers.go | 2 +- 6 files changed, 160 insertions(+), 152 deletions(-) diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index f63413ac9..abc42aac0 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -28,22 +28,28 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error return fmt.Errorf("payload must be BondTx, but is: %v", txe.Envelope.Tx.Payload) } - // the account initiating the bond (may be validator) + // the account initiating the bond account, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) if err != nil { return err } - // check if account is validator - power, err := ctx.ValidatorSet.Power(account.Address) + // ensure pubKey of validator is set + val := ctx.tx.Validator + if err := GetIdentity(ctx.StateWriter, val); err != nil { + return fmt.Errorf("couldn't retrieve identity: %v", err) + } + + // check if validator already exists + power, err := ctx.ValidatorSet.Power(*val.Address) if err != nil { return err - } else if power != nil && power.Cmp(big.NewInt(0)) == 1 { - // TODO: something with nodekey - // ctx.tx.NetAddress + } else if power != nil && power.Cmp(big.NewInt(0)) == 1 && account.Address != *val.Address { + // we currently do not support delegated bonding + return fmt.Errorf("%s is already bonded", val.Address) } - // account is not validator, can it bond someone? + // can the account bond? if !hasBondPermission(ctx.StateWriter, account, ctx.Logger) { return fmt.Errorf("account '%s' lacks bond permission", account.Address) } @@ -57,13 +63,6 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error "we are deducting %v", account.Address, account.Balance, amount) } - // ensure pubKey of validator is set - val := ctx.tx.Validator - err = GetIdentity(ctx.StateWriter, val) - if err != nil { - return fmt.Errorf("BondTx: %v", err) - } - // can power be added? 
power = new(big.Int).SetUint64(amount) if !power.IsInt64() { @@ -80,24 +79,20 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error "so is not supported by Tendermint", *val.Address) } - // create the account if not bonder - if *val.Address != account.Address { - valAcc, err := ctx.StateWriter.GetAccount(*val.Address) - if err != nil { - return err - } else if valAcc == nil { - // validator account doesn't exist - valAcc = &acm.Account{ - Address: *val.Address, - Sequence: 0, - Balance: 0, - Permissions: permission.ZeroAccountPermissions, - } + // create the account if it doesn't exist + valAcc, err := ctx.StateWriter.GetAccount(*val.Address) + if err != nil { + return err + } else if valAcc == nil { + valAcc = &acm.Account{ + Address: *val.Address, + PublicKey: *val.PublicKey, + Sequence: 0, + Balance: 0, + Permissions: permission.NewAccountPermissions(permission.Bond), } // pk must be known later to unbond - valAcc.PublicKey = *val.PublicKey - err = ctx.StateWriter.UpdateAccount(valAcc) - if err != nil { + if err = ctx.StateWriter.UpdateAccount(valAcc); err != nil { return err } } @@ -157,6 +152,7 @@ func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) erro if err != nil { return err } else if power == nil || power.Cmp(big.NewInt(0)) == 0 { + // TODO: remove custom amount? return fmt.Errorf("nothing bonded for validator '%s'", sender.Address) } diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go index 8ee78ec29..14243dd56 100644 --- a/integration/governance/bonding_test.go +++ b/integration/governance/bonding_test.go @@ -3,10 +3,11 @@ package governance import ( + "bytes" "testing" - "time" "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/core" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" @@ -14,37 +15,35 @@ import ( ) func TestBonding(t *testing.T) { - genesisAccounts := integration.MakePrivateAccounts("accounts", 2) - genesisKernels := make([]core.Kernel, len(genesisAccounts)) - genesisDoc := integration.TestGenesisDoc(genesisAccounts) + genesisAccounts := integration.MakePrivateAccounts("accounts", 3) + genesisKernels := make([]*core.Kernel, len(genesisAccounts)) + genesisDoc := integration.TestGenesisDoc(genesisAccounts, 0, 1) + var err error // we need at least one validator to start - // in this case genesisKernels[0] for i, acc := range genesisAccounts { - err := startNode(&genesisKernels[i], genesisDoc, acc, genesisAccounts...) + genesisKernels[i], err = createKernel(genesisDoc, acc, genesisAccounts...) 
require.NoError(t, err) - defer integration.Shutdown(&genesisKernels[i]) + defer integration.Shutdown(genesisKernels[i]) } - connectKernels(&genesisKernels[0], &genesisKernels[1]) + connectAllKernels(genesisKernels) // lets do the bond tx from the non-validator - grpcGenVal := genesisKernels[1].GRPCListenAddress().String() - tcli := rpctest.NewTransactClient(t, grpcGenVal) - qcli := rpctest.NewQueryClient(t, grpcGenVal) - - var power uint64 = 1000 - inputAddress := genesisAccounts[1].GetAddress() + nonValNetAdr := genesisKernels[2].GRPCListenAddress().String() + nonValInAcc := genesisAccounts[2].GetAddress() + tcli := rpctest.NewTransactClient(t, nonValNetAdr) + qcli := rpctest.NewQueryClient(t, nonValNetAdr) // make a new validator to grant power to val := acm.GeneratePrivateAccountFromSecret("validator") + accBefore := getAccount(t, qcli, nonValInAcc) - accBefore := getAccount(t, qcli, inputAddress) - - bondTx := createBondTx(inputAddress, power, val.GetPublicKey()) - _, err := sendPayload(tcli, bondTx) + var power uint64 = 1 << 16 + bondTx := createBondTx(nonValInAcc, power, val.GetPublicKey()) + _, err = payloadSync(tcli, bondTx) require.NoError(t, err) - accAfter := getAccount(t, qcli, inputAddress) + accAfter := getAccount(t, qcli, nonValInAcc) // ensure power is subtracted from original account balance require.Equal(t, accBefore.GetBalance()-power, accAfter.GetBalance()) @@ -60,34 +59,56 @@ func TestBonding(t *testing.T) { require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) // start the new validator - valKernel := &core.Kernel{} - err = startNode(valKernel, genesisDoc, val, append(genesisAccounts, val)...) + valKernel, err := createKernel(genesisDoc, val, append(genesisAccounts, val)...) require.NoError(t, err) - connectKernels(&genesisKernels[0], valKernel) + connectKernels(genesisKernels[0], valKernel) // wait for new validator to see themself in set - time.Sleep(2 * time.Second) + waitFor(3, valKernel.Blockchain) grpcBondedVal := valKernel.GRPCListenAddress().String() qcli = rpctest.NewQueryClient(t, grpcBondedVal) vsOut = getValidators(t, qcli) require.Contains(t, vsOut, val.GetAddress()) require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) - unbondTx := createUnbondTx(val.GetAddress(), inputAddress) + // wait for validator to propose a block + waitFor(5, valKernel.Blockchain) + checkProposed(t, genesisKernels[0], val.GetPublicKey().GetAddress().Bytes()) + + unbondTx := createUnbondTx(val.GetAddress(), nonValInAcc) tcli = rpctest.NewTransactClient(t, grpcBondedVal) - _, err = sendPayload(tcli, unbondTx) + _, err = payloadSync(tcli, unbondTx) require.NoError(t, err) - tcli = rpctest.NewTransactClient(t, grpcGenVal) - qcli = rpctest.NewQueryClient(t, grpcGenVal) + waitFor(2, genesisKernels[0].Blockchain) + tcli = rpctest.NewTransactClient(t, nonValNetAdr) + qcli = rpctest.NewQueryClient(t, nonValNetAdr) vsOut = getValidators(t, qcli) require.NotContains(t, vsOut, val.GetAddress()) - accAfter = getAccount(t, qcli, inputAddress) + accAfter = getAccount(t, qcli, nonValInAcc) require.Equal(t, accBefore.GetBalance(), accAfter.GetBalance()) // TODO: - // - ensure bonded validator can vote // - add / remove too quickly // - only validator can unbond themselves - // - cannot bond more than one validator? / delegated bonding? 
+} + +func checkProposed(t *testing.T, kern *core.Kernel, exp []byte) { + height := kern.Node.BlockStore().Height() + t.Logf("current height is %d", height) + for i := int64(1); i < height; i++ { + bm := kern.Node.BlockStore().LoadBlockMeta(i) + if bytes.Equal(bm.Header.ProposerAddress, exp) { + t.Logf("%X proposed block %d", exp, i) + return + } + } + require.Fail(t, "bonded validator did not propose any blocks") +} + +func waitFor(height uint64, blockchain *bcm.Blockchain) { + until := blockchain.LastBlockHeight() + height + for h := uint64(0); h < until; h = blockchain.LastBlockHeight() { + continue + } } diff --git a/integration/governance/governance_test.go b/integration/governance/governance_test.go index dc456443c..50d2a8dc9 100644 --- a/integration/governance/governance_test.go +++ b/integration/governance/governance_test.go @@ -5,7 +5,6 @@ package governance import ( "context" "math/big" - "net" "testing" "time" @@ -19,7 +18,6 @@ import ( "github.com/hyperledger/burrow/governance" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" - "github.com/hyperledger/burrow/logging/logconfig" "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/rpc/rpcquery" "github.com/stretchr/testify/assert" @@ -29,55 +27,29 @@ import ( ) func TestGovernance(t *testing.T) { - privateAccounts := integration.MakePrivateAccounts("accounts", 10) // make keys - genesisDoc := integration.TestGenesisDoc(privateAccounts) - kernels := make([]*core.Kernel, len(privateAccounts)) + genesisAccounts := integration.MakePrivateAccounts("mysecret", 10) // make keys + genesisKernels := make([]*core.Kernel, len(genesisAccounts)) + genesisDoc := integration.TestGenesisDoc(genesisAccounts, 0) genesisDoc.Accounts[4].Permissions = permission.NewAccountPermissions(permission.Send | permission.Call) + var err error - for i, acc := range privateAccounts { - // FIXME: some combination of cleanup and shutdown seems to make tests fail on CI - //testConfig, cleanup := integration.NewTestConfig(genesisDoc) - testConfig, _ := integration.NewTestConfig(genesisDoc) - //defer cleanup() - - logconf := logconfig.New().Root(func(sink *logconfig.SinkConfig) *logconfig.SinkConfig { - return sink.SetTransform(logconfig.FilterTransform(logconfig.IncludeWhenAllMatch, - "total_validator")).SetOutput(logconfig.StdoutOutput()) - }) - - // Try and grab a free port - this is not foolproof since there is race between other concurrent tests after we close - // the listener and start the node - l, err := net.Listen("tcp", "localhost:0") - require.NoError(t, err) - host, port, err := net.SplitHostPort(l.Addr().String()) - require.NoError(t, err) - - testConfig.Tendermint.ListenHost = host - testConfig.Tendermint.ListenPort = port - - kernels[i], err = integration.TestKernel(acc, privateAccounts, testConfig, logconf) - require.NoError(t, err) - - err = l.Close() + for i, acc := range genesisAccounts { + genesisKernels[i], err = createKernel(genesisDoc, acc, genesisAccounts...) 
require.NoError(t, err) - - err = kernels[i].Boot() - require.NoError(t, err) - - defer integration.Shutdown(kernels[i]) + defer integration.Shutdown(genesisKernels[i]) } time.Sleep(1 * time.Second) - for i := 0; i < len(kernels); i++ { - for j := i + 1; j < len(kernels); j++ { - connectKernels(kernels[i], kernels[j]) + for i := 0; i < len(genesisKernels); i++ { + for j := i + 1; j < len(genesisKernels); j++ { + connectKernels(genesisKernels[i], genesisKernels[j]) } } t.Run("Group", func(t *testing.T) { t.Run("AlterValidators", func(t *testing.T) { - inputAddress := privateAccounts[0].GetAddress() - grpcAddress := kernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) qcli := rpctest.NewQueryClient(t, grpcAddress) ecli := rpctest.NewExecutionEventsClient(t, grpcAddress) @@ -90,7 +62,7 @@ func TestGovernance(t *testing.T) { changePower(vs, 8, 9931) err := vs.IterateValidators(func(id crypto.Addressable, power *big.Int) error { - _, err := govSync(tcli, governance.AlterPowerTx(inputAddress, id, power.Uint64())) + _, err := payloadSync(tcli, governance.AlterPowerTx(inputAddress, id, power.Uint64())) return err }) require.NoError(t, err) @@ -101,7 +73,7 @@ func TestGovernance(t *testing.T) { assertValidatorsEqual(t, vs, vsOut) // Remove validator from chain - _, err = govSync(tcli, governance.AlterPowerTx(inputAddress, account(3), 0)) + _, err = payloadSync(tcli, governance.AlterPowerTx(inputAddress, account(3), 0)) require.NoError(t, err) // Mirror in our check set @@ -112,8 +84,8 @@ func TestGovernance(t *testing.T) { // Now check Tendermint err = rpctest.WaitNBlocks(ecli, 6) require.NoError(t, err) - height := int64(kernels[0].Blockchain.LastBlockHeight()) - kernels[0].Node.ConfigureRPC() + height := int64(genesisKernels[0].Blockchain.LastBlockHeight()) + genesisKernels[0].Node.ConfigureRPC() tmVals, err := tmcore.Validators(&rpctypes.Context{}, &height) require.NoError(t, err) vsOut = validator.NewTrimSet() @@ -127,15 +99,15 @@ func TestGovernance(t *testing.T) { }) t.Run("WaitBlocks", func(t *testing.T) { - grpcAddress := kernels[0].GRPCListenAddress().String() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() ecli := rpctest.NewExecutionEventsClient(t, grpcAddress) err := rpctest.WaitNBlocks(ecli, 2) require.NoError(t, err) }) t.Run("AlterValidatorsTooQuickly", func(t *testing.T) { - grpcAddress := kernels[0].GRPCListenAddress().String() - inputAddress := privateAccounts[0].GetAddress() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() tcli := rpctest.NewTransactClient(t, grpcAddress) qcli := rpctest.NewQueryClient(t, grpcAddress) @@ -143,7 +115,7 @@ func TestGovernance(t *testing.T) { acc1 := acm.GeneratePrivateAccountFromSecret("Foo1") t.Logf("Changing power of new account %v to MaxFlow = %d that should succeed", acc1.GetAddress(), maxFlow) - _, err := govSync(tcli, governance.AlterPowerTx(inputAddress, acc1, maxFlow)) + _, err := payloadSync(tcli, governance.AlterPowerTx(inputAddress, acc1, maxFlow)) require.NoError(t, err) maxFlow = getMaxFlow(t, qcli) @@ -151,28 +123,28 @@ func TestGovernance(t *testing.T) { acc2 := acm.GeneratePrivateAccountFromSecret("Foo2") t.Logf("Changing power of new account %v to MaxFlow + 1 = %d that should fail", acc2.GetAddress(), power) - _, err = govSync(tcli, governance.AlterPowerTx(inputAddress, acc2, power)) + _, err = 
payloadSync(tcli, governance.AlterPowerTx(inputAddress, acc2, power)) require.Error(t, err) }) t.Run("NoRootPermission", func(t *testing.T) { - grpcAddress := kernels[0].GRPCListenAddress().String() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) // Account does not have Root permission - inputAddress := privateAccounts[4].GetAddress() - _, err := govSync(tcli, governance.AlterPowerTx(inputAddress, account(5), 3433)) + inputAddress := genesisAccounts[4].GetAddress() + _, err := payloadSync(tcli, governance.AlterPowerTx(inputAddress, account(5), 3433)) require.Error(t, err) assert.Contains(t, err.Error(), errors.PermissionDenied{Address: inputAddress, Perm: permission.Root}.Error()) }) t.Run("AlterAmount", func(t *testing.T) { - inputAddress := privateAccounts[0].GetAddress() - grpcAddress := kernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) qcli := rpctest.NewQueryClient(t, grpcAddress) var amount uint64 = 18889 acc := account(5) - _, err := govSync(tcli, governance.AlterBalanceTx(inputAddress, acc, balance.New().Native(amount))) + _, err := payloadSync(tcli, governance.AlterBalanceTx(inputAddress, acc, balance.New().Native(amount))) require.NoError(t, err) ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: acc.GetAddress()}) require.NoError(t, err) @@ -182,12 +154,12 @@ func TestGovernance(t *testing.T) { }) t.Run("AlterPermissions", func(t *testing.T) { - inputAddress := privateAccounts[0].GetAddress() - grpcAddress := kernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) qcli := rpctest.NewQueryClient(t, grpcAddress) acc := account(5) - _, err := govSync(tcli, governance.AlterPermissionsTx(inputAddress, acc, permission.Send)) + _, err := payloadSync(tcli, governance.AlterPermissionsTx(inputAddress, acc, permission.Send)) require.NoError(t, err) ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: acc.GetAddress()}) require.NoError(t, err) @@ -200,13 +172,14 @@ func TestGovernance(t *testing.T) { }) t.Run("CreateAccount", func(t *testing.T) { - inputAddress := privateAccounts[0].GetAddress() - grpcAddress := kernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() + grpcAddress := genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) qcli := rpctest.NewQueryClient(t, grpcAddress) var amount uint64 = 18889 acc := acm.GeneratePrivateAccountFromSecret("we almost certainly don't exist") - _, err := govSync(tcli, governance.AlterBalanceTx(inputAddress, acc, balance.New().Native(amount))) + govTx := governance.AlterBalanceTx(inputAddress, acc, balance.New().Native(amount)) + _, err := payloadSync(tcli, govTx) require.NoError(t, err) ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: acc.GetAddress()}) require.NoError(t, err) @@ -215,14 +188,14 @@ func TestGovernance(t *testing.T) { t.Run("ChangePowerByAddress", func(t *testing.T) { // Should use the key client to look up public key - inputAddress := privateAccounts[0].GetAddress() - grpcAddress := kernels[0].GRPCListenAddress().String() + inputAddress := genesisAccounts[0].GetAddress() + grpcAddress := 
genesisKernels[0].GRPCListenAddress().String() tcli := rpctest.NewTransactClient(t, grpcAddress) acc := account(2) address := acc.GetAddress() power := uint64(2445) - _, err := govSync(tcli, governance.UpdateAccountTx(inputAddress, &spec.TemplateAccount{ + _, err := payloadSync(tcli, governance.UpdateAccountTx(inputAddress, &spec.TemplateAccount{ Address: &address, Amounts: balance.New().Power(power), })) @@ -231,10 +204,10 @@ func TestGovernance(t *testing.T) { }) t.Run("InvalidSequenceNumber", func(t *testing.T) { - inputAddress := privateAccounts[0].GetAddress() - tcli1 := rpctest.NewTransactClient(t, kernels[0].GRPCListenAddress().String()) - tcli2 := rpctest.NewTransactClient(t, kernels[4].GRPCListenAddress().String()) - qcli := rpctest.NewQueryClient(t, kernels[0].GRPCListenAddress().String()) + inputAddress := genesisAccounts[0].GetAddress() + tcli1 := rpctest.NewTransactClient(t, genesisKernels[0].GRPCListenAddress().String()) + tcli2 := rpctest.NewTransactClient(t, genesisKernels[4].GRPCListenAddress().String()) + qcli := rpctest.NewQueryClient(t, genesisKernels[0].GRPCListenAddress().String()) acc := account(2) address := acc.GetAddress() @@ -247,12 +220,12 @@ func TestGovernance(t *testing.T) { }) setSequence(t, qcli, tx) - _, err := localSignAndBroadcastSync(t, tcli1, genesisDoc.ChainID(), privateAccounts[0], tx) + _, err := localSignAndBroadcastSync(t, tcli1, genesisDoc.ChainID(), genesisAccounts[0], tx) require.NoError(t, err) // Make it a different Tx hash so it can enter cache but keep sequence number tx.AccountUpdates[0].Amounts = balance.New().Power(power).Native(1) - _, err = localSignAndBroadcastSync(t, tcli2, genesisDoc.ChainID(), privateAccounts[0], tx) + _, err = localSignAndBroadcastSync(t, tcli2, genesisDoc.ChainID(), genesisAccounts[0], tx) require.Error(t, err) assert.Contains(t, err.Error(), "invalid sequence") }) diff --git a/integration/governance/helpers.go b/integration/governance/helpers.go index e1fcec5bb..b17f4b86e 100644 --- a/integration/governance/helpers.go +++ b/integration/governance/helpers.go @@ -27,38 +27,43 @@ import ( "github.com/tendermint/tendermint/p2p" ) -func startNode(kernel *core.Kernel, genesisDoc *genesis.GenesisDoc, - account *acm.PrivateAccount, keysAccounts ...*acm.PrivateAccount) error { +func createKernel(genesisDoc *genesis.GenesisDoc, account *acm.PrivateAccount, + keysAccounts ...*acm.PrivateAccount) (kernel *core.Kernel, err error) { + // FIXME: some combination of cleanup and shutdown seems to make tests fail on CI + //testConfig, cleanup := integration.NewTestConfig(genesisDoc) testConfig, _ := integration.NewTestConfig(genesisDoc) + //defer cleanup() + logconf := logconfig.New().Root(func(sink *logconfig.SinkConfig) *logconfig.SinkConfig { return sink.SetTransform(logconfig.FilterTransform(logconfig.IncludeWhenAllMatch, "total_validator")).SetOutput(logconfig.StdoutOutput()) }) + // Try and grab a free port - this is not foolproof since there is race between other concurrent tests after we close + // the listener and start the node l, err := net.Listen("tcp", "localhost:0") if err != nil { - return err + return nil, err } host, port, err := net.SplitHostPort(l.Addr().String()) if err != nil { - return err + return nil, err } testConfig.Tendermint.ListenHost = host testConfig.Tendermint.ListenPort = port - kern, err := integration.TestKernel(account, keysAccounts, testConfig, logconf) + kernel, err = integration.TestKernel(account, keysAccounts, testConfig, logconf) if err != nil { - return err + return nil, err } - *kernel 
= *kern err = l.Close() if err != nil { - return err + return nil, err } - return kernel.Boot() + return kernel, kernel.Boot() } func createBondTx(address crypto.Address, amount uint64, pubKey crypto.PublicKey) *payload.BondTx { @@ -84,6 +89,12 @@ func createUnbondTx(validator, account crypto.Address) *payload.UnbondTx { } } +func signTx(t *testing.T, tx payload.Payload, chainID string, from acm.AddressableSigner) (txEnv *txs.Envelope) { + txEnv = txs.Enclose(chainID, tx) + require.NoError(t, txEnv.Sign(from)) + return +} + func getValidators(t testing.TB, qcli rpcquery.QueryClient) map[crypto.Address]*validator.Validator { vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) require.NoError(t, err) @@ -113,7 +124,7 @@ func account(i int) *acm.PrivateAccount { return rpctest.PrivateAccounts[i] } -func sendPayload(cli rpctransact.TransactClient, tx payload.Payload) (*exec.TxExecution, error) { +func payloadSync(cli rpctransact.TransactClient, tx payload.Payload) (*exec.TxExecution, error) { return cli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{ Payload: tx.Any(), }) @@ -149,6 +160,13 @@ func connectKernels(k1, k2 *core.Kernel) { } } +func connectAllKernels(ks []*core.Kernel) { + source := ks[0] + for _, dest := range ks[1:] { + connectKernels(source, dest) + } +} + func getMaxFlow(t testing.TB, qcli rpcquery.QueryClient) uint64 { vs, err := qcli.GetValidatorSet(context.Background(), &rpcquery.GetValidatorSetParam{}) require.NoError(t, err) @@ -158,12 +176,6 @@ func getMaxFlow(t testing.TB, qcli rpcquery.QueryClient) uint64 { return maxFlow.Sub(maxFlow.Div(totalPower, big.NewInt(3)), big.NewInt(1)).Uint64() } -func govSync(cli rpctransact.TransactClient, tx *payload.GovTx) (*exec.TxExecution, error) { - return cli.BroadcastTxSync(context.Background(), &rpctransact.TxEnvelopeParam{ - Payload: tx.Any(), - }) -} - func setSequence(t testing.TB, qcli rpcquery.QueryClient, tx payload.Payload) { for _, input := range tx.GetInputs() { ca, err := qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: input.Address}) diff --git a/integration/integration.go b/integration/integration.go index 5838cadb6..cc9620fe3 100644 --- a/integration/integration.go +++ b/integration/integration.go @@ -170,7 +170,9 @@ func EnterTestDirectory() (testDir string, cleanup func()) { return testDir, func() { os.RemoveAll(testDir) } } -func TestGenesisDoc(addressables []*acm.PrivateAccount) *genesis.GenesisDoc { +// TestGenesisDoc creates genesis from a set of accounts +// and validators from indices within that slice +func TestGenesisDoc(addressables []*acm.PrivateAccount, vals ...int) *genesis.GenesisDoc { accounts := make(map[string]*acm.Account, len(addressables)) for i, pa := range addressables { account := acm.FromAddressable(pa) @@ -182,10 +184,14 @@ func TestGenesisDoc(addressables []*acm.PrivateAccount) *genesis.GenesisDoc { if err != nil { panic("could not parse test genesis time") } - return genesis.MakeGenesisDocFromAccounts(ChainName, nil, genesisTime, accounts, - map[string]*validator.Validator{ - "genesis_validator": validator.FromAccount(accounts["user_0"], 1<<16), - }) + + validators := make(map[string]*validator.Validator) + for _, i := range vals { + name := fmt.Sprintf("user_%d", i) + validators[name] = validator.FromAccount(accounts[name], 1<<16) + } + + return genesis.MakeGenesisDocFromAccounts(ChainName, nil, genesisTime, accounts, validators) } // Deterministic account generation helper. 
Pass number of accounts to make diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index 5bdacb7ba..6280d91ce 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -26,7 +26,7 @@ import ( const UpsieDownsieCallCount = 1 + (34 - 17) + 1 + (34 - 23) var PrivateAccounts = integration.MakePrivateAccounts("mysecret", 10) // make keys -var GenesisDoc = integration.TestGenesisDoc(PrivateAccounts) +var GenesisDoc = integration.TestGenesisDoc(PrivateAccounts, 0) // Helpers func NewTransactClient(t testing.TB, listenAddress string) rpctransact.TransactClient { From a85b0fa37d443bcf9d54b1e43002d21d9fe34b49 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 3 Jul 2019 16:08:29 +0100 Subject: [PATCH 33/70] test no bond perm Signed-off-by: Gregory Hill --- integration/governance/bonding_test.go | 136 ++++++++++++++----------- integration/governance/helpers.go | 2 - 2 files changed, 75 insertions(+), 63 deletions(-) diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go index 14243dd56..f5b9928cb 100644 --- a/integration/governance/bonding_test.go +++ b/integration/governance/bonding_test.go @@ -1,11 +1,11 @@ -// +build integration - package governance import ( "bytes" "testing" + "github.com/hyperledger/burrow/permission" + "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/core" @@ -15,9 +15,11 @@ import ( ) func TestBonding(t *testing.T) { - genesisAccounts := integration.MakePrivateAccounts("accounts", 3) + genesisAccounts := integration.MakePrivateAccounts("accounts", 4) genesisKernels := make([]*core.Kernel, len(genesisAccounts)) genesisDoc := integration.TestGenesisDoc(genesisAccounts, 0, 1) + genesisDoc.GlobalPermissions = permission.NewAccountPermissions(permission.Input) + genesisDoc.Accounts[3].Permissions = permission.ZeroAccountPermissions.Clone() var err error // we need at least one validator to start @@ -29,64 +31,76 @@ func TestBonding(t *testing.T) { connectAllKernels(genesisKernels) - // lets do the bond tx from the non-validator - nonValNetAdr := genesisKernels[2].GRPCListenAddress().String() - nonValInAcc := genesisAccounts[2].GetAddress() - tcli := rpctest.NewTransactClient(t, nonValNetAdr) - qcli := rpctest.NewQueryClient(t, nonValNetAdr) - - // make a new validator to grant power to - val := acm.GeneratePrivateAccountFromSecret("validator") - accBefore := getAccount(t, qcli, nonValInAcc) - - var power uint64 = 1 << 16 - bondTx := createBondTx(nonValInAcc, power, val.GetPublicKey()) - _, err = payloadSync(tcli, bondTx) - require.NoError(t, err) - accAfter := getAccount(t, qcli, nonValInAcc) - // ensure power is subtracted from original account balance - require.Equal(t, accBefore.GetBalance()-power, accAfter.GetBalance()) - - valAfter := getAccount(t, qcli, val.GetAddress()) - // validator must have associated account - // typically without balance if just created - require.NotEmpty(t, valAfter.GetAddress()) - require.Equal(t, uint64(0), valAfter.GetBalance()) - - // make sure our new validator exists in the set - vsOut := getValidators(t, qcli) - require.Contains(t, vsOut, val.GetAddress()) - require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) - - // start the new validator - valKernel, err := createKernel(genesisDoc, val, append(genesisAccounts, val)...) 
- require.NoError(t, err) - connectKernels(genesisKernels[0], valKernel) - - // wait for new validator to see themself in set - waitFor(3, valKernel.Blockchain) - grpcBondedVal := valKernel.GRPCListenAddress().String() - qcli = rpctest.NewQueryClient(t, grpcBondedVal) - vsOut = getValidators(t, qcli) - require.Contains(t, vsOut, val.GetAddress()) - require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) - - // wait for validator to propose a block - waitFor(5, valKernel.Blockchain) - checkProposed(t, genesisKernels[0], val.GetPublicKey().GetAddress().Bytes()) - - unbondTx := createUnbondTx(val.GetAddress(), nonValInAcc) - tcli = rpctest.NewTransactClient(t, grpcBondedVal) - _, err = payloadSync(tcli, unbondTx) - require.NoError(t, err) - - waitFor(2, genesisKernels[0].Blockchain) - tcli = rpctest.NewTransactClient(t, nonValNetAdr) - qcli = rpctest.NewQueryClient(t, nonValNetAdr) - vsOut = getValidators(t, qcli) - require.NotContains(t, vsOut, val.GetAddress()) - accAfter = getAccount(t, qcli, nonValInAcc) - require.Equal(t, accBefore.GetBalance(), accAfter.GetBalance()) + t.Run("NoPermission", func(t *testing.T) { + val := acm.GeneratePrivateAccountFromSecret("validator_1") + localAddress := genesisKernels[3].GRPCListenAddress().String() + inputAccount := genesisAccounts[3].GetAddress() + tcli := rpctest.NewTransactClient(t, localAddress) + bondTx := createBondTx(inputAccount, uint64(1<<2), val.GetPublicKey()) + _, err = payloadSync(tcli, bondTx) + require.Error(t, err) + }) + + t.Run("BondFromNonVal", func(t *testing.T) { + // lets do the bond tx from a non-validator node + localAddress := genesisKernels[2].GRPCListenAddress().String() + inputAccount := genesisAccounts[2].GetAddress() + tcli := rpctest.NewTransactClient(t, localAddress) + qcli := rpctest.NewQueryClient(t, localAddress) + + // make a new validator to grant power to + val := acm.GeneratePrivateAccountFromSecret("validator_2") + accBefore := getAccount(t, qcli, inputAccount) + var power uint64 = 1 << 16 + + bondTx := createBondTx(inputAccount, power, val.GetPublicKey()) + _, err = payloadSync(tcli, bondTx) + require.NoError(t, err) + accAfter := getAccount(t, qcli, inputAccount) + // ensure power is subtracted from original account balance + require.Equal(t, accBefore.GetBalance()-power, accAfter.GetBalance()) + + valAfter := getAccount(t, qcli, val.GetAddress()) + // validator must have associated account + // typically without balance if just created + require.NotEmpty(t, valAfter.GetAddress()) + require.Equal(t, uint64(0), valAfter.GetBalance()) + + // make sure our new validator exists in the set + vsOut := getValidators(t, qcli) + require.Contains(t, vsOut, val.GetAddress()) + require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) + + // start the new validator + valKernel, err := createKernel(genesisDoc, val, append(genesisAccounts, val)...) 
+ require.NoError(t, err) + connectKernels(genesisKernels[0], valKernel) + + // wait for new validator to see themself in set + waitFor(3, valKernel.Blockchain) + grpcBondedVal := valKernel.GRPCListenAddress().String() + qcli = rpctest.NewQueryClient(t, grpcBondedVal) + vsOut = getValidators(t, qcli) + require.Contains(t, vsOut, val.GetAddress()) + require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) + + // wait for validator to propose a block + waitFor(7, valKernel.Blockchain) + checkProposed(t, genesisKernels[0], val.GetPublicKey().GetAddress().Bytes()) + + unbondTx := createUnbondTx(val.GetAddress(), inputAccount) + tcli = rpctest.NewTransactClient(t, grpcBondedVal) + _, err = payloadSync(tcli, unbondTx) + require.NoError(t, err) + + waitFor(2, genesisKernels[0].Blockchain) + tcli = rpctest.NewTransactClient(t, localAddress) + qcli = rpctest.NewQueryClient(t, localAddress) + vsOut = getValidators(t, qcli) + require.NotContains(t, vsOut, val.GetAddress()) + accAfter = getAccount(t, qcli, inputAccount) + require.Equal(t, accBefore.GetBalance(), accAfter.GetBalance()) + }) // TODO: // - add / remove too quickly diff --git a/integration/governance/helpers.go b/integration/governance/helpers.go index b17f4b86e..c6de0ef01 100644 --- a/integration/governance/helpers.go +++ b/integration/governance/helpers.go @@ -1,5 +1,3 @@ -// +build integration - package governance import ( From 267fd10e6cf1753c154df07db535d5bde597b583 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Sat, 6 Jul 2019 09:52:50 +0100 Subject: [PATCH 34/70] only bond / unbond input account Signed-off-by: Gregory Hill --- acm/validator/bucket.go | 6 +- acm/validator/set.go | 2 +- cmd/burrow/commands/tx.go | 10 +- deploy/def/client.go | 83 +++++----- deploy/def/jobs.go | 9 +- deploy/jobs/jobs_governance.go | 73 +++++++++ deploy/jobs/jobs_transact.go | 74 --------- execution/contexts/bond_context.go | 133 +--------------- execution/contexts/governance_context.go | 4 +- execution/contexts/shared.go | 23 +++ execution/contexts/unbond_context.go | 46 ++++++ integration/governance/bonding_test.go | 57 +++---- integration/governance/helpers.go | 18 +-- protobuf/payload.proto | 9 +- txs/payload/bond_tx.go | 5 +- txs/payload/payload.go | 12 +- txs/payload/payload.pb.go | 184 +++++++++++------------ txs/payload/unbond_tx.go | 6 +- txs/tx_test.go | 14 +- 19 files changed, 347 insertions(+), 421 deletions(-) create mode 100644 execution/contexts/unbond_context.go diff --git a/acm/validator/bucket.go b/acm/validator/bucket.go index e74d17c56..b675e9738 100644 --- a/acm/validator/bucket.go +++ b/acm/validator/bucket.go @@ -121,9 +121,9 @@ func (vc *Bucket) Equal(vwOther *Bucket) error { } func checkPower(power *big.Int) error { - if power.Sign() == -1 { - return fmt.Errorf("cannot set negative validator power: %v", power) - } + // if power.Sign() == -1 { + // return fmt.Errorf("cannot set negative validator power: %v", power) + // } if !power.IsInt64() { return fmt.Errorf("for tendermint compatibility validator power must fit within an int but %v "+ "does not", power) diff --git a/acm/validator/set.go b/acm/validator/set.go index bc2729daf..495b66518 100644 --- a/acm/validator/set.go +++ b/acm/validator/set.go @@ -77,7 +77,7 @@ func (vs *Set) MaxFlow() *big.Int { // Returns the flow that would be induced by a validator power change func (vs *Set) Flow(id crypto.PublicKey, power *big.Int) *big.Int { - return new(big.Int).Sub(power, vs.GetPower(id.GetAddress())) + return new(big.Int).Sub(new(big.Int).Abs(power), 
vs.GetPower(id.GetAddress())) } // Returns the power of id but only if it is set diff --git a/cmd/burrow/commands/tx.go b/cmd/burrow/commands/tx.go index f2eaffe40..dc6d81784 100644 --- a/cmd/burrow/commands/tx.go +++ b/cmd/burrow/commands/tx.go @@ -72,14 +72,12 @@ func Tx(output Output) func(cmd *cli.Cmd) { cmd.Command("bond", "bond a new validator", func(cmd *cli.Cmd) { sourceOpt := cmd.StringOpt("source", "", "Account with bonding perm, if not set config is used") - targetOpt := cmd.StringOpt("target", "", "Validator account to bond, created if doesn't exist") powerOpt := cmd.StringOpt("power", "", "Amount of value to bond, required") - cmd.Spec += "[--source=
] [--target=] [--power=]" + cmd.Spec += "[--source=
] [--power=]" cmd.Action = func() { bond := &def.Bond{ Source: jobs.FirstOf(*sourceOpt, address), - Target: jobs.FirstOf(*targetOpt, address), Power: *powerOpt, } @@ -100,13 +98,13 @@ func Tx(output Output) func(cmd *cli.Cmd) { cmd.Command("unbond", "unbond an existing validator", func(cmd *cli.Cmd) { sourceOpt := cmd.StringOpt("source", "", "Validator to unbond, if not set config is used") - targetOpt := cmd.StringOpt("target", "", "Account to receive tokens, created if doesn't exist") - cmd.Spec += "[--source=
] [--target=
]" + powerOpt := cmd.StringOpt("power", "", "Amount of value to unbond, required") + cmd.Spec += "[--source=
] [--power=]" cmd.Action = func() { unbond := &def.Unbond{ Source: jobs.FirstOf(*sourceOpt, address), - Target: jobs.FirstOf(*targetOpt, address), + Power: *powerOpt, } if err := unbond.Validate(); err != nil { diff --git a/deploy/def/client.go b/deploy/def/client.go index d56c695c0..485037934 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -338,9 +338,33 @@ func (c *Client) UpdateAccount(arg *GovArg, logger *logging.Logger) (*payload.Go Permissions: arg.Permissions, Roles: arg.Permissions, } - err = c.getIdentity(update, arg.Address, arg.PublicKey, logger) - if err != nil { - return nil, err + if arg.Address != "" { + addr, err := c.GetKeyAddress(arg.Address, logger) + if err != nil { + return nil, fmt.Errorf("could not parse address: %v", err) + } + update.Address = &addr + } + if arg.PublicKey != "" { + pubKey, err := publicKeyFromString(arg.PublicKey) + if err != nil { + return nil, fmt.Errorf("could not parse publicKey: %v", err) + } + update.PublicKey = &pubKey + } else { + // Attempt to get public key from connected key client + if arg.Address != "" { + // Try key client + pubKey, err := c.PublicKeyFromAddress(update.Address) + if err != nil { + logger.InfoMsg("did not get public key", "address", *update.Address) + } else { + update.PublicKey = pubKey + } + // We can still proceed with just address set + } else { + return nil, fmt.Errorf("neither target address or public key were provided") + } } _, err = permission.PermFlagFromStringList(arg.Permissions) @@ -369,38 +393,15 @@ func (c *Client) UpdateAccount(arg *GovArg, logger *logging.Logger) (*payload.Go return tx, nil } -func (c *Client) getIdentity(account *spec.TemplateAccount, address, publicKey string, logger *logging.Logger) error { - if address != "" { - addr, err := c.GetKeyAddress(address, logger) - if err != nil { - return fmt.Errorf("could not parse address: %v", err) - } - account.Address = &addr +func (c *Client) PublicKeyFromAddress(address *crypto.Address) (*crypto.PublicKey, error) { + if c.keyClient != nil { + return nil, fmt.Errorf("key client is not set") } - if publicKey != "" { - pubKey, err := publicKeyFromString(publicKey) - if err != nil { - return fmt.Errorf("could not parse publicKey: %v", err) - } - account.PublicKey = &pubKey - } else { - // Attempt to get public key from connected key client - if address != "" { - // Try key client - if c.keyClient != nil { - pubKey, err := c.keyClient.PublicKey(*account.Address) - if err != nil { - logger.InfoMsg("Could not retrieve public key from keys server", "address", *account.Address) - } else { - account.PublicKey = &pubKey - } - } - // We can still proceed with just address set - } else { - return fmt.Errorf("neither target address or public key were provided") - } + pubKey, err := c.keyClient.PublicKey(*address) + if err != nil { + return nil, fmt.Errorf("could not retrieve public key from keys server: %v", err) } - return nil + return &pubKey, nil } func publicKeyFromString(publicKey string) (crypto.PublicKey, error) { @@ -523,14 +524,13 @@ func (c *Client) Bond(arg *BondArg, logger *logging.Logger) (*payload.BondTx, er if err != nil { return nil, err } - val := &spec.TemplateAccount{} - err = c.getIdentity(val, arg.Address, arg.PublicKey, logger) + pubKey, err := c.PublicKeyFromAddress(&input.Address) if err != nil { return nil, err } return &payload.BondTx{ Input: input, - Validator: val, + PublicKey: pubKey, }, nil } @@ -549,16 +549,13 @@ func (c *Client) Unbond(arg *UnbondArg, logger *logging.Logger) (*payload.Unbond if err != nil { 
return nil, err } - addr, err := c.GetKeyAddress(arg.Output, logger) + pubKey, err := c.PublicKeyFromAddress(&input.Address) if err != nil { - return nil, fmt.Errorf("could not parse address: %v", err) - } - output := &payload.TxOutput{ - Address: addr, + return nil, err } return &payload.UnbondTx{ - Input: input, - Output: output, + Input: input, + PublicKey: pubKey, }, nil } diff --git a/deploy/def/jobs.go b/deploy/def/jobs.go index fa2d058b9..e01613f14 100644 --- a/deploy/def/jobs.go +++ b/deploy/def/jobs.go @@ -179,7 +179,7 @@ type Bond struct { // (Optional, if account job or global account set) address of the account from which to bond (the // public key for the account must be available to burrow keys) Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` - // (Required) the identity of the bonding validator + // (Optional, if key client enabled) the public key of the bonding validator Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` // (Required) the Tendermint validator power to claim Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` @@ -190,7 +190,6 @@ type Bond struct { func (job *Bond) Validate() error { return validation.ValidateStruct(job, - validation.Field(&job.Target, validation.Required), validation.Field(&job.Power, validation.Required), validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), ) @@ -200,8 +199,10 @@ type Unbond struct { // (Optional, if account job or global account set) address of the validator to unbond (the // public key for the validator must be available to burrow keys) Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` - // (Required) the identity of the unbonding validator + // (Optional, if key client enabled) the public key of the unbonding validator Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` + // (Required) the Tendermint validator power to unclaim + Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` // (Optional, advanced only) sequence to use when burrow keys signs the transaction (do not use unless you // know what you're doing) Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` @@ -209,7 +210,7 @@ type Unbond struct { func (job *Unbond) Validate() error { return validation.ValidateStruct(job, - validation.Field(&job.Target, validation.Required), + validation.Field(&job.Power, validation.Required), validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), ) } diff --git a/deploy/jobs/jobs_governance.go b/deploy/jobs/jobs_governance.go index f4124a314..1388351b1 100644 --- a/deploy/jobs/jobs_governance.go +++ b/deploy/jobs/jobs_governance.go @@ -63,3 +63,76 @@ func UpdateAccountJob(gov *def.UpdateAccount, account string, tx *payload.GovTx, return nil } + +func FormulateBondJob(bond *def.Bond, account string, client *def.Client, logger *logging.Logger) (*payload.BondTx, error) { + // Use Default + bond.Source = FirstOf(bond.Source, account) + + // Formulate tx + logger.InfoMsg("Bonding Transaction", + "source", bond.Source, + "target", bond.Target, + "power", bond.Power) + + arg := &def.BondArg{ + Input: bond.Source, + Amount: bond.Power, + Sequence: bond.Sequence, + } + + if len(bond.Source) == crypto.AddressHexLength { + arg.Address = bond.Target + } else { + arg.PublicKey = bond.Target + } + + return client.Bond(arg, logger) +} + +func BondJob(bond *def.Bond, tx *payload.BondTx, account string, client *def.Client, logger 
*logging.Logger) (string, error) { + // Sign, broadcast, display + txe, err := client.SignAndBroadcast(tx, logger) + if err != nil { + return "", util.ChainErrorHandler(account, err, logger) + } + + util.ReadTxSignAndBroadcast(txe, err, logger) + if err != nil { + return "", err + } + + return txe.Receipt.TxHash.String(), nil +} + +func FormulateUnbondJob(unbond *def.Unbond, account string, client *def.Client, logger *logging.Logger) (*payload.UnbondTx, error) { + // Use Default + unbond.Source = FirstOf(unbond.Source, account) + + // Formulate tx + logger.InfoMsg("Unbonding Transaction", + "source", unbond.Source, + "target", unbond.Target) + + arg := &def.UnbondArg{ + Input: unbond.Source, + Output: unbond.Target, + Sequence: unbond.Sequence, + } + + return client.Unbond(arg, logger) +} + +func UnbondJob(bond *def.Unbond, tx *payload.UnbondTx, account string, client *def.Client, logger *logging.Logger) (string, error) { + // Sign, broadcast, display + txe, err := client.SignAndBroadcast(tx, logger) + if err != nil { + return "", util.ChainErrorHandler(account, err, logger) + } + + util.ReadTxSignAndBroadcast(txe, err, logger) + if err != nil { + return "", err + } + + return txe.Receipt.TxHash.String(), nil +} diff --git a/deploy/jobs/jobs_transact.go b/deploy/jobs/jobs_transact.go index 25041acad..0b052407c 100644 --- a/deploy/jobs/jobs_transact.go +++ b/deploy/jobs/jobs_transact.go @@ -7,7 +7,6 @@ import ( "os" "path/filepath" - "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/txs/payload" @@ -48,79 +47,6 @@ func SendJob(send *def.Send, tx *payload.SendTx, account string, client *def.Cli return txe.Receipt.TxHash.String(), nil } -func FormulateBondJob(bond *def.Bond, account string, client *def.Client, logger *logging.Logger) (*payload.BondTx, error) { - // Use Default - bond.Source = FirstOf(bond.Source, account) - - // Formulate tx - logger.InfoMsg("Bonding Transaction", - "source", bond.Source, - "target", bond.Target, - "power", bond.Power) - - arg := &def.BondArg{ - Input: bond.Source, - Amount: bond.Power, - Sequence: bond.Sequence, - } - - if len(bond.Source) == crypto.AddressHexLength { - arg.Address = bond.Target - } else { - arg.PublicKey = bond.Target - } - - return client.Bond(arg, logger) -} - -func BondJob(bond *def.Bond, tx *payload.BondTx, account string, client *def.Client, logger *logging.Logger) (string, error) { - // Sign, broadcast, display - txe, err := client.SignAndBroadcast(tx, logger) - if err != nil { - return "", util.ChainErrorHandler(account, err, logger) - } - - util.ReadTxSignAndBroadcast(txe, err, logger) - if err != nil { - return "", err - } - - return txe.Receipt.TxHash.String(), nil -} - -func FormulateUnbondJob(unbond *def.Unbond, account string, client *def.Client, logger *logging.Logger) (*payload.UnbondTx, error) { - // Use Default - unbond.Source = FirstOf(unbond.Source, account) - - // Formulate tx - logger.InfoMsg("Unbonding Transaction", - "source", unbond.Source, - "target", unbond.Target) - - arg := &def.UnbondArg{ - Input: unbond.Source, - Output: unbond.Target, - Sequence: unbond.Sequence, - } - - return client.Unbond(arg, logger) -} - -func UnbondJob(bond *def.Unbond, tx *payload.UnbondTx, account string, client *def.Client, logger *logging.Logger) (string, error) { - // Sign, broadcast, display - txe, err := client.SignAndBroadcast(tx, logger) - if err != nil { - return "", util.ChainErrorHandler(account, err, logger) - } - - util.ReadTxSignAndBroadcast(txe, err, logger) - if 
err != nil { - return "", err - } - - return txe.Receipt.TxHash.String(), nil -} - func FormulateRegisterNameJob(name *def.RegisterName, do *def.DeployArgs, playbook *def.Playbook, client *def.Client, logger *logging.Logger) ([]*payload.NameTx, error) { txs := make([]*payload.NameTx, 0) diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index abc42aac0..8d71faf85 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -4,23 +4,21 @@ import ( "fmt" "math/big" - "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/acm/validator" "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/logging" - "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/txs/payload" ) type BondContext struct { StateWriter acmstate.ReaderWriter - ValidatorSet validator.ReaderWriter + ValidatorSet validator.Alterer Logger *logging.Logger tx *payload.BondTx } -// Execute a BondTx to add a new validator +// Execute a BondTx to add or remove a new validator func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error { var ok bool ctx.tx, ok = p.(*payload.BondTx) @@ -29,31 +27,12 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error } // the account initiating the bond - account, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) + power := new(big.Int).SetUint64(ctx.tx.Input.GetAmount()) + account, err := validateBonding(ctx.StateWriter, ctx.tx.Input, ctx.tx.PublicKey, ctx.Logger) if err != nil { return err } - // ensure pubKey of validator is set - val := ctx.tx.Validator - if err := GetIdentity(ctx.StateWriter, val); err != nil { - return fmt.Errorf("couldn't retrieve identity: %v", err) - } - - // check if validator already exists - power, err := ctx.ValidatorSet.Power(*val.Address) - if err != nil { - return err - } else if power != nil && power.Cmp(big.NewInt(0)) == 1 && account.Address != *val.Address { - // we currently do not support delegated bonding - return fmt.Errorf("%s is already bonded", val.Address) - } - - // can the account bond? - if !hasBondPermission(ctx.StateWriter, account, ctx.Logger) { - return fmt.Errorf("account '%s' lacks bond permission", account.Address) - } - // check account has enough to bond amount := ctx.tx.Input.GetAmount() if amount == 0 { @@ -63,116 +42,16 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error "we are deducting %v", account.Address, account.Balance, amount) } - // can power be added? 
- power = new(big.Int).SetUint64(amount) - if !power.IsInt64() { - return fmt.Errorf("power supplied by %v does not fit into int64 and "+ - "so is not supported by Tendermint", account.Address) - } - priorPow, err := ctx.ValidatorSet.Power(*val.Address) - if err != nil { - return err - } - postPow := big.NewInt(0).Add(priorPow, power) - if !postPow.IsInt64() { - return fmt.Errorf("power supplied in update to validator power for %v does not fit into int64 and "+ - "so is not supported by Tendermint", *val.Address) - } - - // create the account if it doesn't exist - valAcc, err := ctx.StateWriter.GetAccount(*val.Address) - if err != nil { - return err - } else if valAcc == nil { - valAcc = &acm.Account{ - Address: *val.Address, - PublicKey: *val.PublicKey, - Sequence: 0, - Balance: 0, - Permissions: permission.NewAccountPermissions(permission.Bond), - } - // pk must be known later to unbond - if err = ctx.StateWriter.UpdateAccount(valAcc); err != nil { - return err - } - } - // we're good to go err = account.SubtractFromBalance(amount) if err != nil { return err } - err = validator.AddPower(ctx.ValidatorSet, *val.PublicKey, power) - if err != nil { - return err - } - - return ctx.StateWriter.UpdateAccount(account) -} - -type UnbondContext struct { - StateWriter acmstate.ReaderWriter - ValidatorSet validator.ReaderWriter - Logger *logging.Logger - tx *payload.UnbondTx -} - -// Execute an UnbondTx to remove a validator -func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) error { - var ok bool - ctx.tx, ok = p.(*payload.UnbondTx) - if !ok { - return fmt.Errorf("payload must be UnbondTx, but is: %v", txe.Envelope.Tx.Payload) - } - - // the unbonding validator - sender, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) - if err != nil { - return err - } - - var signed bool - // ensure tx is signed by validator - for _, sig := range txe.Envelope.GetSignatories() { - if sender.GetPublicKey().String() == sig.GetPublicKey().String() { - signed = true - } - } - if !signed { - return fmt.Errorf("can't unbond, not signed by validator") - } - - recipient, err := ctx.StateWriter.GetAccount(ctx.tx.Output.Address) - if err != nil { - return err - } - - // make sure that the validator has power to remove - power, err := ctx.ValidatorSet.Power(sender.Address) - if err != nil { - return err - } else if power == nil || power.Cmp(big.NewInt(0)) == 0 { - // TODO: remove custom amount? 
- return fmt.Errorf("nothing bonded for validator '%s'", sender.Address) - } - - publicKey, err := MaybeGetPublicKey(ctx.StateWriter, sender.Address) - if err != nil { - return err - } else if publicKey == nil { - return fmt.Errorf("need public key to unbond '%s'", sender.Address) - } - // remove power and transfer to output - err = validator.SubtractPower(ctx.ValidatorSet, *publicKey, power) + _, err = ctx.ValidatorSet.AlterPower(account.PublicKey, power) if err != nil { return err } - err = recipient.AddToBalance(power.Uint64()) - if err != nil { - return err - } - - return ctx.StateWriter.UpdateAccount(recipient) + return ctx.StateWriter.UpdateAccount(account) } diff --git a/execution/contexts/governance_context.go b/execution/contexts/governance_context.go index 544c4e3ea..555fe29ce 100644 --- a/execution/contexts/governance_context.go +++ b/execution/contexts/governance_context.go @@ -50,7 +50,7 @@ func (ctx *GovernanceContext) Execute(txe *exec.TxExecution, p payload.Payload) } for _, update := range ctx.tx.AccountUpdates { - err := GetIdentity(ctx.StateWriter, update) + err := VerifyIdentity(ctx.StateWriter, update) if err != nil { return fmt.Errorf("GovTx: %v", err) } @@ -115,7 +115,7 @@ func (ctx *GovernanceContext) UpdateAccount(account *acm.Account, update *spec.T return } -func GetIdentity(sw acmstate.ReaderWriter, account *spec.TemplateAccount) (err error) { +func VerifyIdentity(sw acmstate.ReaderWriter, account *spec.TemplateAccount) (err error) { if account.Address == nil && account.PublicKey == nil { // We do not want to generate a key return fmt.Errorf("could not execute Tx since account template %v contains neither "+ diff --git a/execution/contexts/shared.go b/execution/contexts/shared.go index 84959a3f8..d322032ab 100644 --- a/execution/contexts/shared.go +++ b/execution/contexts/shared.go @@ -230,3 +230,26 @@ func hasBondOrSendPermission(accountGetter acmstate.AccountGetter, accs map[cryp } return true } + +func validateBonding(accGet acmstate.AccountGetter, in *payload.TxInput, + pubKey *crypto.PublicKey, log *logging.Logger) (*acm.Account, error) { + + account, err := accGet.GetAccount(in.Address) + if err != nil { + return nil, err + } + + // ensure pubKey of validator is set correctly + if pubKey.GetAddress() != account.GetAddress() { + return nil, fmt.Errorf("input address and public key address do not much") + } + + account.PublicKey = *pubKey + + // can the account bond? 
+ if !hasBondPermission(accGet, account, log) { + return nil, fmt.Errorf("account '%s' lacks bond permission", account.Address) + } + + return account, nil +} diff --git a/execution/contexts/unbond_context.go b/execution/contexts/unbond_context.go new file mode 100644 index 000000000..172ad47b8 --- /dev/null +++ b/execution/contexts/unbond_context.go @@ -0,0 +1,46 @@ +package contexts + +import ( + "fmt" + "math/big" + + "github.com/hyperledger/burrow/acm/acmstate" + "github.com/hyperledger/burrow/acm/validator" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/txs/payload" +) + +type UnbondContext struct { + StateWriter acmstate.ReaderWriter + ValidatorSet validator.Alterer + Logger *logging.Logger + tx *payload.UnbondTx +} + +// Execute an UnbondTx to remove a validator +func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) error { + var ok bool + ctx.tx, ok = p.(*payload.UnbondTx) + if !ok { + return fmt.Errorf("payload must be UnbondTx, but is: %v", txe.Envelope.Tx.Payload) + } + + power := new(big.Int).Neg(new(big.Int).SetUint64(ctx.tx.Input.GetAmount())) + account, err := validateBonding(ctx.StateWriter, ctx.tx.Input, ctx.tx.PublicKey, ctx.Logger) + if err != nil { + return err + } + + err = account.AddToBalance(power.Uint64()) + if err != nil { + return err + } + + _, err = ctx.ValidatorSet.AlterPower(account.PublicKey, power) + if err != nil { + return err + } + + return ctx.StateWriter.UpdateAccount(account) +} diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go index f5b9928cb..9ee415f76 100644 --- a/integration/governance/bonding_test.go +++ b/integration/governance/bonding_test.go @@ -1,3 +1,5 @@ +// +build integration + package governance import ( @@ -15,11 +17,11 @@ import ( ) func TestBonding(t *testing.T) { - genesisAccounts := integration.MakePrivateAccounts("accounts", 4) + genesisAccounts := integration.MakePrivateAccounts("accounts", 6) genesisKernels := make([]*core.Kernel, len(genesisAccounts)) - genesisDoc := integration.TestGenesisDoc(genesisAccounts, 0, 1) + genesisDoc := integration.TestGenesisDoc(genesisAccounts, 0, 1, 2, 3) genesisDoc.GlobalPermissions = permission.NewAccountPermissions(permission.Input) - genesisDoc.Accounts[3].Permissions = permission.ZeroAccountPermissions.Clone() + genesisDoc.Accounts[4].Permissions = permission.ZeroAccountPermissions.Clone() var err error // we need at least one validator to start @@ -33,71 +35,56 @@ func TestBonding(t *testing.T) { t.Run("NoPermission", func(t *testing.T) { val := acm.GeneratePrivateAccountFromSecret("validator_1") - localAddress := genesisKernels[3].GRPCListenAddress().String() - inputAccount := genesisAccounts[3].GetAddress() + localAddress := genesisKernels[4].GRPCListenAddress().String() + inputAccount := genesisAccounts[4].GetAddress() tcli := rpctest.NewTransactClient(t, localAddress) - bondTx := createBondTx(inputAccount, uint64(1<<2), val.GetPublicKey()) + bondTx := createBondTx(inputAccount, val.GetPublicKey(), uint64(1<<2)) _, err = payloadSync(tcli, bondTx) require.Error(t, err) }) t.Run("BondFromNonVal", func(t *testing.T) { // lets do the bond tx from a non-validator node - localAddress := genesisKernels[2].GRPCListenAddress().String() - inputAccount := genesisAccounts[2].GetAddress() + valAccount := genesisAccounts[5] + valKernel := genesisKernels[5] + + localAddress := valKernel.GRPCListenAddress().String() + inputAccount := valAccount.GetAddress() tcli := 
rpctest.NewTransactClient(t, localAddress) qcli := rpctest.NewQueryClient(t, localAddress) - // make a new validator to grant power to - val := acm.GeneratePrivateAccountFromSecret("validator_2") accBefore := getAccount(t, qcli, inputAccount) var power uint64 = 1 << 16 - bondTx := createBondTx(inputAccount, power, val.GetPublicKey()) + bondTx := createBondTx(inputAccount, valAccount.GetPublicKey(), power) _, err = payloadSync(tcli, bondTx) require.NoError(t, err) accAfter := getAccount(t, qcli, inputAccount) // ensure power is subtracted from original account balance require.Equal(t, accBefore.GetBalance()-power, accAfter.GetBalance()) - valAfter := getAccount(t, qcli, val.GetAddress()) - // validator must have associated account - // typically without balance if just created - require.NotEmpty(t, valAfter.GetAddress()) - require.Equal(t, uint64(0), valAfter.GetBalance()) - // make sure our new validator exists in the set vsOut := getValidators(t, qcli) - require.Contains(t, vsOut, val.GetAddress()) - require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) - - // start the new validator - valKernel, err := createKernel(genesisDoc, val, append(genesisAccounts, val)...) - require.NoError(t, err) - connectKernels(genesisKernels[0], valKernel) + require.Contains(t, vsOut, valAccount.GetAddress()) + require.Equal(t, vsOut[valAccount.GetAddress()].GetPower(), power) // wait for new validator to see themself in set waitFor(3, valKernel.Blockchain) - grpcBondedVal := valKernel.GRPCListenAddress().String() - qcli = rpctest.NewQueryClient(t, grpcBondedVal) vsOut = getValidators(t, qcli) - require.Contains(t, vsOut, val.GetAddress()) - require.Equal(t, vsOut[val.GetAddress()].GetPower(), power) + require.Contains(t, vsOut, valAccount.GetAddress()) + require.Equal(t, vsOut[valAccount.GetAddress()].GetPower(), power) // wait for validator to propose a block waitFor(7, valKernel.Blockchain) - checkProposed(t, genesisKernels[0], val.GetPublicKey().GetAddress().Bytes()) + checkProposed(t, genesisKernels[0], valAccount.GetPublicKey().GetAddress().Bytes()) - unbondTx := createUnbondTx(val.GetAddress(), inputAccount) - tcli = rpctest.NewTransactClient(t, grpcBondedVal) + unbondTx := createUnbondTx(inputAccount, valAccount.GetPublicKey(), power) _, err = payloadSync(tcli, unbondTx) require.NoError(t, err) - waitFor(2, genesisKernels[0].Blockchain) - tcli = rpctest.NewTransactClient(t, localAddress) - qcli = rpctest.NewQueryClient(t, localAddress) + waitFor(2, valKernel.Blockchain) vsOut = getValidators(t, qcli) - require.NotContains(t, vsOut, val.GetAddress()) + require.NotContains(t, vsOut, valAccount.GetAddress()) accAfter = getAccount(t, qcli, inputAccount) require.Equal(t, accBefore.GetBalance(), accAfter.GetBalance()) }) diff --git a/integration/governance/helpers.go b/integration/governance/helpers.go index c6de0ef01..ed327d21b 100644 --- a/integration/governance/helpers.go +++ b/integration/governance/helpers.go @@ -1,3 +1,5 @@ +// +build integration + package governance import ( @@ -13,7 +15,6 @@ import ( "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/genesis" - "github.com/hyperledger/burrow/genesis/spec" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" "github.com/hyperledger/burrow/logging/logconfig" @@ -64,26 +65,23 @@ func createKernel(genesisDoc *genesis.GenesisDoc, account *acm.PrivateAccount, return kernel, kernel.Boot() } -func createBondTx(address crypto.Address, amount 
uint64, pubKey crypto.PublicKey) *payload.BondTx { +func createBondTx(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.BondTx { return &payload.BondTx{ Input: &payload.TxInput{ Address: address, Amount: amount, }, - Validator: &spec.TemplateAccount{ - PublicKey: &pubKey, - }, + PublicKey: &pubKey, } } -func createUnbondTx(validator, account crypto.Address) *payload.UnbondTx { +func createUnbondTx(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.UnbondTx { return &payload.UnbondTx{ Input: &payload.TxInput{ - Address: validator, - }, - Output: &payload.TxOutput{ - Address: account, + Address: address, + Amount: amount, }, + PublicKey: &pubKey, } } diff --git a/protobuf/payload.proto b/protobuf/payload.proto index 76d97203a..54869bb65 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -5,6 +5,7 @@ option go_package = "github.com/hyperledger/burrow/txs/payload"; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "permission.proto"; +import "crypto.proto"; import "spec.proto"; package payload; @@ -111,17 +112,17 @@ message BondTx { // Account with bonding permission TxInput Input = 1; // The validator to bond, public key must be known - spec.TemplateAccount Validator = 2 [(gogoproto.nullable) = true]; + crypto.PublicKey PublicKey = 2; } message UnbondTx { option (gogoproto.goproto_stringer) = false; option (gogoproto.goproto_getters) = false; - // The validator to unbond + // Account with bonding permission TxInput Input = 1; - // The account to unbond power to - TxOutput Output = 2; + // The validator to unbond, public key must be known + crypto.PublicKey PublicKey = 2; } message GovTx { diff --git a/txs/payload/bond_tx.go b/txs/payload/bond_tx.go index 1fd30b817..a82c017f9 100644 --- a/txs/payload/bond_tx.go +++ b/txs/payload/bond_tx.go @@ -5,13 +5,12 @@ import ( "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/genesis/spec" ) func NewBondTx(pubkey crypto.PublicKey) (*BondTx, error) { return &BondTx{ Input: &TxInput{}, - Validator: &spec.TemplateAccount{}, + PublicKey: &crypto.PublicKey{}, }, nil } @@ -24,7 +23,7 @@ func (tx *BondTx) GetInputs() []*TxInput { } func (tx *BondTx) String() string { - return fmt.Sprintf("BondTx{%v -> %v}", tx.Input, tx.Validator) + return fmt.Sprintf("BondTx{%v}", tx.Input) } func (tx *BondTx) AddInput(st acmstate.AccountGetter, pubkey crypto.PublicKey, amt uint64) error { diff --git a/txs/payload/payload.go b/txs/payload/payload.go index cd2170580..95bdbe777 100644 --- a/txs/payload/payload.go +++ b/txs/payload/payload.go @@ -57,11 +57,11 @@ var nameFromType = map[Type]string{ TypeCall: "CallTx", TypeName: "NameTx", TypeBatch: "BatchTx", - TypeBond: "BondTx", - TypeUnbond: "UnbondTx", TypePermissions: "PermsTx", TypeGovernance: "GovTx", TypeProposal: "ProposalTx", + TypeBond: "BondTx", + TypeUnbond: "UnbondTx", } var typeFromName = make(map[string]Type) @@ -120,14 +120,14 @@ func New(txType Type) (Payload, error) { return &NameTx{}, nil case TypeBatch: return &BatchTx{}, nil - case TypeBond: - return &BondTx{}, nil - case TypeUnbond: - return &UnbondTx{}, nil case TypePermissions: return &PermsTx{}, nil case TypeGovernance: return &GovTx{}, nil + case TypeBond: + return &BondTx{}, nil + case TypeUnbond: + return &UnbondTx{}, nil case TypeProposal: return &ProposalTx{}, nil } diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 73a1e4d8b..0647de24b 100644 --- a/txs/payload/payload.pb.go +++ 
b/txs/payload/payload.pb.go @@ -9,6 +9,7 @@ import ( proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" + crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" spec "github.com/hyperledger/burrow/genesis/spec" permission "github.com/hyperledger/burrow/permission" @@ -557,10 +558,10 @@ type BondTx struct { // Account with bonding permission Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` // The validator to bond, public key must be known - Validator *spec.TemplateAccount `protobuf:"bytes,2,opt,name=Validator,proto3" json:"Validator,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + PublicKey *crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *BondTx) Reset() { *m = BondTx{} } @@ -600,13 +601,13 @@ func (*BondTx) XXX_MessageName() string { } type UnbondTx struct { - // The validator to unbond + // Account with bonding permission Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` - // The account to unbond power to - Output *TxOutput `protobuf:"bytes,2,opt,name=Output,proto3" json:"Output,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + // The validator to unbond, public key must be known + PublicKey *crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *UnbondTx) Reset() { *m = UnbondTx{} } @@ -974,70 +975,69 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 995 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4d, 0x6f, 0x1b, 0xc5, - 0x1b, 0xcf, 0x7a, 0xd7, 0x2f, 0x79, 0xea, 0xe4, 0xef, 0xce, 0x1f, 0x90, 0x15, 0x09, 0xbb, 0x32, - 0x08, 0x5a, 0x68, 0x6c, 0x68, 0x79, 0x11, 0xb9, 0x20, 0x6f, 0xec, 0xa4, 0x41, 0xa5, 0x89, 0xc6, - 0x9b, 0x14, 0x81, 0x38, 0x8c, 0xed, 0x61, 0xbd, 0x92, 0xbd, 0xb3, 0xec, 0x8e, 0xcb, 0x9a, 0x13, - 0x07, 0x0e, 0x5c, 0x11, 0x17, 0x8e, 0xf9, 0x0a, 0x7c, 0x03, 0x8e, 0x39, 0x72, 0x44, 0x1c, 0x22, - 0x94, 0x5e, 0x10, 0x9f, 0x02, 0xcd, 0xec, 0xcc, 0x7a, 0xed, 0x56, 0xa9, 0x13, 0x21, 0x6e, 0x3b, - 0xcf, 0xf3, 0x9b, 0xe7, 0xe5, 0xf7, 0xbc, 0xcc, 0xc2, 0x46, 0x40, 0x66, 0x63, 0x46, 0x86, 0xcd, - 0x20, 0x64, 0x9c, 0xa1, 0xa2, 0x3a, 0x6e, 0x6d, 0xbb, 0x1e, 0x1f, 0x4d, 0xfb, 0xcd, 0x01, 0x9b, - 0xb4, 0x5c, 0xe6, 0xb2, 0x96, 0xd4, 0xf7, 0xa7, 0x5f, 0xc9, 0x93, 0x3c, 0xc8, 0xaf, 0xe4, 0xde, - 0x56, 0x25, 0xa0, 0xe1, 0xc4, 0x8b, 0x22, 0x8f, 0xf9, 0x4a, 0x02, 0x51, 0x40, 0x07, 0xc9, 0x77, - 0xe3, 0x47, 0x13, 0xcc, 0xb6, 0x3f, 0x43, 0x6f, 0x42, 0x61, 0x97, 0x8c, 0xc7, 0x4e, 0x5c, 0x35, - 0x6e, 0x19, 0xb7, 0x6f, 0xdc, 0xfb, 0x5f, 0x53, 0x7b, 0x4f, 0xc4, 0x58, 0xa9, 0x05, 0xb0, 0x47, - 0xfd, 0xa1, 0x13, 0x57, 0x73, 0x4b, 0xc0, 0x44, 0x8c, 0x95, 0x5a, 0x00, 0x1f, 0x91, 0x09, 0x75, - 0xe2, 0xaa, 0xb9, 0x04, 0x4c, 0xc4, 0x58, 0xa9, 0xd1, 0x5b, 
0x50, 0x3c, 0xa2, 0xe1, 0x24, 0x72, - 0xe2, 0xaa, 0x25, 0x91, 0x95, 0x14, 0xa9, 0xe4, 0x58, 0x03, 0xd0, 0xeb, 0x90, 0xdf, 0x67, 0x4f, - 0x9c, 0xb8, 0x9a, 0x97, 0xc8, 0xcd, 0x14, 0x29, 0xa5, 0x38, 0x51, 0x0a, 0xd7, 0x36, 0x93, 0x31, - 0x16, 0x96, 0x5c, 0x27, 0x62, 0xac, 0xd4, 0x68, 0x1b, 0x4a, 0xc7, 0x7e, 0x3f, 0x81, 0x16, 0x25, - 0xf4, 0x66, 0x0a, 0xd5, 0x0a, 0x9c, 0x42, 0x44, 0xa4, 0x36, 0xe1, 0x83, 0x91, 0x13, 0x57, 0x4b, - 0x4b, 0x91, 0x2a, 0x39, 0xd6, 0x00, 0x74, 0x1f, 0xe0, 0x28, 0x64, 0x01, 0x8b, 0x88, 0x20, 0x75, - 0x5d, 0xc2, 0xff, 0x3f, 0x4f, 0x2c, 0x55, 0xe1, 0x0c, 0x6c, 0xc7, 0x3a, 0x3b, 0xad, 0x1b, 0x8d, - 0x9f, 0x0c, 0x28, 0x3a, 0xf1, 0x81, 0x1f, 0x4c, 0x39, 0x7a, 0x04, 0xc5, 0xf6, 0x70, 0x18, 0xd2, - 0x28, 0x92, 0x85, 0x29, 0xdb, 0xef, 0x9d, 0x9d, 0xd7, 0xd7, 0xfe, 0x38, 0xaf, 0xdf, 0xcd, 0x74, - 0xc1, 0x68, 0x16, 0xd0, 0x70, 0x4c, 0x87, 0x2e, 0x0d, 0x5b, 0xfd, 0x69, 0x18, 0xb2, 0x6f, 0x5a, - 0x83, 0x70, 0x16, 0x70, 0xd6, 0x54, 0x77, 0xb1, 0x36, 0x82, 0x5e, 0x81, 0x42, 0x7b, 0xc2, 0xa6, - 0x3e, 0x97, 0xe5, 0xb3, 0xb0, 0x3a, 0xa1, 0x2d, 0x28, 0xf5, 0xe8, 0xd7, 0x53, 0xea, 0x0f, 0xa8, - 0xac, 0x97, 0x85, 0xd3, 0xf3, 0x8e, 0xf5, 0xf3, 0x69, 0x7d, 0xad, 0x11, 0x43, 0xc9, 0x89, 0x0f, - 0xa7, 0xfc, 0x3f, 0x8c, 0x4a, 0x79, 0xfe, 0x3d, 0xa7, 0x9b, 0x13, 0xbd, 0x01, 0x79, 0xc9, 0x8b, - 0xea, 0xd2, 0x39, 0xff, 0x8a, 0x2f, 0x9c, 0xa8, 0xd1, 0x27, 0xf3, 0x00, 0x73, 0x32, 0xc0, 0x77, - 0xae, 0x1f, 0xdc, 0x16, 0x94, 0xf6, 0x49, 0xf4, 0xd0, 0x9b, 0x78, 0x5c, 0x53, 0xa3, 0xcf, 0xa8, - 0x02, 0xe6, 0x1e, 0xa5, 0xb2, 0x6f, 0x2d, 0x2c, 0x3e, 0xd1, 0x01, 0x58, 0x1d, 0xc2, 0x89, 0x6c, - 0xd0, 0xb2, 0xfd, 0xbe, 0xe2, 0x65, 0xfb, 0x72, 0xd7, 0x7d, 0xcf, 0x27, 0xe1, 0xac, 0xf9, 0x80, - 0xc6, 0xf6, 0x8c, 0xd3, 0x08, 0x4b, 0x13, 0xe8, 0x0b, 0xb0, 0x1e, 0xb7, 0x7b, 0x9f, 0xca, 0x26, - 0x2e, 0xdb, 0xfb, 0xd7, 0x32, 0xf5, 0xf7, 0x79, 0x7d, 0x93, 0x13, 0x37, 0xba, 0xcb, 0x26, 0x1e, - 0xa7, 0x93, 0x80, 0xcf, 0xb0, 0x34, 0xaa, 0xa8, 0xf5, 0xf4, 0x34, 0xa3, 0xdb, 0x50, 0x90, 0xd4, - 0x89, 0x8a, 0x9a, 0xcf, 0xa5, 0x56, 0xe9, 0xd1, 0xdb, 0x50, 0x4c, 0xda, 0x40, 0x70, 0x6b, 0x2e, - 0xcc, 0x8c, 0x6e, 0x10, 0xac, 0x11, 0x3b, 0xa5, 0x1f, 0x4e, 0xeb, 0x6b, 0xd2, 0x15, 0x4b, 0xc7, - 0x7c, 0xe5, 0x2a, 0x7e, 0x00, 0x25, 0x71, 0xa5, 0x1d, 0xba, 0x91, 0xda, 0x36, 0x2f, 0x35, 0x33, - 0xdb, 0x4c, 0xeb, 0x6c, 0x4b, 0x50, 0x83, 0x53, 0xac, 0xca, 0x2d, 0xd0, 0x0b, 0x68, 0x65, 0x7f, - 0x08, 0x2c, 0x71, 0x43, 0xfa, 0x5a, 0xc7, 0xf2, 0x5b, 0xc8, 0x64, 0x3d, 0xcd, 0x44, 0x26, 0x0b, - 0xf3, 0x4c, 0xd5, 0x95, 0xc7, 0xa9, 0xde, 0x3b, 0x2b, 0x7b, 0xfc, 0x08, 0xd6, 0x4f, 0xc8, 0xd8, - 0x1b, 0x12, 0xce, 0x42, 0x95, 0xe2, 0xcb, 0x4d, 0xb9, 0x9e, 0x1d, 0x3a, 0x09, 0xc6, 0x84, 0xd3, - 0xf6, 0x60, 0x20, 0x46, 0x41, 0xe6, 0x68, 0xe0, 0x39, 0x3a, 0xc3, 0xac, 0x3b, 0xdf, 0x62, 0x2b, - 0x3b, 0xbe, 0x03, 0x85, 0xa4, 0x44, 0xca, 0xeb, 0x73, 0x6a, 0xa8, 0x00, 0x19, 0x47, 0xdf, 0x19, - 0x6a, 0xfd, 0x5e, 0xa1, 0x5b, 0x76, 0x61, 0x53, 0xa5, 0x70, 0x1c, 0x0c, 0x09, 0xa7, 0xba, 0x69, - 0x2e, 0x4d, 0x73, 0xe9, 0x4a, 0x26, 0x84, 0xbf, 0x8c, 0xec, 0x5e, 0x5d, 0x39, 0xdd, 0x06, 0x94, - 0x4f, 0x18, 0xf7, 0x7c, 0xf7, 0x31, 0xf5, 0xdc, 0x51, 0x92, 0xb4, 0x89, 0x17, 0x64, 0xe8, 0x18, - 0xca, 0xda, 0xf2, 0x03, 0x12, 0x8d, 0x64, 0xc5, 0xcb, 0xf6, 0xbb, 0x57, 0x9f, 0xde, 0x05, 0x33, - 0xe2, 0x8d, 0xd1, 0x67, 0xf5, 0xbe, 0xdd, 0x7c, 0xe6, 0x19, 0xc0, 0x29, 0x24, 0x93, 0xea, 0x97, - 0xe9, 0x6b, 0x73, 0x05, 0xba, 0x6b, 0x60, 0x3a, 0xb1, 0xe6, 0xb8, 0x9c, 0xc2, 0xda, 0xfe, 0x0c, - 0x0b, 0x45, 0xc6, 0xfc, 0xf7, 0x06, 0x58, 0x27, 0x8c, 0xd3, 0x7f, 0x7d, 0x99, 0xaf, 
0xc0, 0x75, - 0x26, 0x8c, 0x27, 0x73, 0x7a, 0xd2, 0xf9, 0x33, 0x32, 0xf3, 0x77, 0x0b, 0x6e, 0x74, 0x68, 0x34, - 0x08, 0xbd, 0x80, 0x7b, 0xcc, 0x57, 0xa3, 0x99, 0x15, 0x65, 0x5f, 0x65, 0xf3, 0x05, 0xaf, 0x72, - 0xc6, 0xef, 0x2f, 0x39, 0x28, 0xd8, 0x64, 0x3c, 0x66, 0x7c, 0xa1, 0x42, 0xc6, 0x0b, 0x2b, 0x24, - 0xfa, 0x64, 0xcf, 0xf3, 0xc9, 0xd8, 0xfb, 0xd6, 0xf3, 0x5d, 0xf5, 0x1f, 0x74, 0xbd, 0x3e, 0xc9, - 0x9a, 0x41, 0xbb, 0xb0, 0x11, 0x28, 0x17, 0x3d, 0x4e, 0x78, 0xb2, 0x5e, 0x36, 0xef, 0xbd, 0x9a, - 0x49, 0x46, 0x44, 0x9b, 0x46, 0x24, 0x41, 0x78, 0xf1, 0x0e, 0x7a, 0x0d, 0xf2, 0xa2, 0xa6, 0x51, - 0x35, 0x2f, 0x1b, 0x60, 0x23, 0xbd, 0x2c, 0xa4, 0x38, 0xd1, 0x35, 0x3e, 0x84, 0x8d, 0x05, 0x23, - 0xa8, 0x0c, 0xa5, 0x23, 0x7c, 0x78, 0x74, 0xd8, 0xeb, 0x76, 0x2a, 0x6b, 0xe2, 0xd4, 0xfd, 0xac, - 0xbb, 0x7b, 0xec, 0x74, 0x3b, 0x15, 0x03, 0x01, 0x14, 0xf6, 0xda, 0x07, 0x0f, 0xbb, 0x9d, 0x4a, - 0xce, 0xfe, 0xf8, 0xec, 0xa2, 0x66, 0xfc, 0x76, 0x51, 0x33, 0xfe, 0xbc, 0xa8, 0x19, 0xbf, 0x3e, - 0xad, 0x19, 0x67, 0x4f, 0x6b, 0xc6, 0xe7, 0x77, 0x2e, 0xcf, 0x9a, 0xc7, 0x51, 0x4b, 0x45, 0xd1, - 0x2f, 0xc8, 0x9f, 0xce, 0xfb, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x72, 0xd8, 0x03, 0x0f, 0xdb, - 0x0a, 0x00, 0x00, + // 990 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcd, 0x6f, 0xe3, 0x44, + 0x14, 0xaf, 0x6b, 0xe7, 0xa3, 0x6f, 0xd3, 0x92, 0x0e, 0x1f, 0x8a, 0x2a, 0x91, 0xac, 0x02, 0x82, + 0x05, 0xb6, 0x09, 0xec, 0xf2, 0x21, 0xf5, 0x82, 0xe2, 0x26, 0xed, 0x16, 0x96, 0x6d, 0x34, 0x71, + 0x77, 0x11, 0x88, 0x83, 0x93, 0x0c, 0x89, 0x85, 0xed, 0x31, 0xf6, 0x64, 0xb1, 0x39, 0x71, 0xe0, + 0xc0, 0x15, 0x71, 0xe1, 0xd8, 0x7f, 0x81, 0xff, 0x80, 0x63, 0x8f, 0x1c, 0x11, 0x87, 0x0a, 0x75, + 0x2f, 0x88, 0xbf, 0x02, 0xcd, 0x78, 0xc6, 0x71, 0xb2, 0x68, 0x37, 0xad, 0x56, 0x7b, 0xf3, 0x7b, + 0xef, 0x37, 0xef, 0xbd, 0xf9, 0xbd, 0x8f, 0x31, 0x6c, 0x06, 0x76, 0xe2, 0x52, 0x7b, 0xdc, 0x0a, + 0x42, 0xca, 0x28, 0x2a, 0x49, 0x71, 0x67, 0x77, 0xe2, 0xb0, 0xe9, 0x6c, 0xd8, 0x1a, 0x51, 0xaf, + 0x3d, 0xa1, 0x13, 0xda, 0x16, 0xf6, 0xe1, 0xec, 0x6b, 0x21, 0x09, 0x41, 0x7c, 0xa5, 0xe7, 0x76, + 0xaa, 0x01, 0x09, 0x3d, 0x27, 0x8a, 0x1c, 0xea, 0x4b, 0x4d, 0x65, 0x14, 0x26, 0x01, 0x53, 0x76, + 0x88, 0x02, 0x32, 0x4a, 0xbf, 0x9b, 0x3f, 0xeb, 0xa0, 0x77, 0xfc, 0x04, 0xbd, 0x09, 0xc5, 0x7d, + 0xdb, 0x75, 0xad, 0xb8, 0xa6, 0x5d, 0xd7, 0x6e, 0x5c, 0xbb, 0xf5, 0x42, 0x4b, 0xe5, 0x92, 0xaa, + 0xb1, 0x34, 0x73, 0xe0, 0x80, 0xf8, 0x63, 0x2b, 0xae, 0xad, 0x2f, 0x01, 0x53, 0x35, 0x96, 0x66, + 0x0e, 0xbc, 0x67, 0x7b, 0xc4, 0x8a, 0x6b, 0xfa, 0x12, 0x30, 0x55, 0x63, 0x69, 0x46, 0x6f, 0x43, + 0xa9, 0x4f, 0x42, 0x2f, 0xb2, 0xe2, 0x9a, 0x21, 0x90, 0xd5, 0x0c, 0x29, 0xf5, 0x58, 0x01, 0xd0, + 0xeb, 0x50, 0x38, 0xa4, 0x0f, 0xad, 0xb8, 0x56, 0x10, 0xc8, 0xad, 0x0c, 0x29, 0xb4, 0x38, 0x35, + 0xf2, 0xd0, 0x26, 0x15, 0x39, 0x16, 0x97, 0x42, 0xa7, 0x6a, 0x2c, 0xcd, 0x68, 0x17, 0xca, 0x27, + 0xfe, 0x30, 0x85, 0x96, 0x04, 0x74, 0x3b, 0x83, 0x2a, 0x03, 0xce, 0x20, 0x3c, 0x53, 0xd3, 0x66, + 0xa3, 0xa9, 0x15, 0xd7, 0xca, 0x4b, 0x99, 0x4a, 0x3d, 0x56, 0x00, 0x74, 0x1b, 0xa0, 0x1f, 0xd2, + 0x80, 0x46, 0x36, 0x27, 0x75, 0x43, 0xc0, 0x5f, 0x9c, 0x5f, 0x2c, 0x33, 0xe1, 0x1c, 0x6c, 0xcf, + 0x38, 0x3b, 0x6d, 0x68, 0xcd, 0x5f, 0x34, 0x28, 0x59, 0xf1, 0x91, 0x1f, 0xcc, 0x18, 0xba, 0x07, + 0xa5, 0xce, 0x78, 0x1c, 0x92, 0x28, 0x12, 0x85, 0xa9, 0x98, 0xef, 0x9f, 0x9d, 0x37, 0xd6, 0xfe, + 0x3a, 0x6f, 0xdc, 0xcc, 0xf5, 0xc4, 0x34, 0x09, 0x48, 0xe8, 0x92, 0xf1, 0x84, 0x84, 0xed, 0xe1, + 0x2c, 0x0c, 0xe9, 0x77, 0x6d, 0x59, 0x70, 
0x79, 0x16, 0x2b, 0x27, 0xe8, 0x15, 0x28, 0x76, 0x3c, + 0x3a, 0xf3, 0x99, 0x28, 0x9f, 0x81, 0xa5, 0x84, 0x76, 0xa0, 0x3c, 0x20, 0xdf, 0xce, 0x88, 0x3f, + 0x22, 0xa2, 0x5e, 0x06, 0xce, 0xe4, 0x3d, 0xe3, 0xd7, 0xd3, 0xc6, 0x5a, 0x33, 0x86, 0xb2, 0x15, + 0x1f, 0xcf, 0xd8, 0x73, 0xcc, 0x4a, 0x46, 0xfe, 0x73, 0x5d, 0x35, 0x27, 0x7a, 0x03, 0x0a, 0x82, + 0x17, 0xd9, 0xa5, 0x73, 0xfe, 0x25, 0x5f, 0x38, 0x35, 0xa3, 0x4f, 0xe6, 0x09, 0xae, 0x8b, 0x04, + 0xdf, 0xbd, 0x7a, 0x72, 0x3b, 0x50, 0x3e, 0xb4, 0xa3, 0xbb, 0x8e, 0xe7, 0x30, 0x45, 0x8d, 0x92, + 0x51, 0x15, 0xf4, 0x03, 0x42, 0x44, 0xdf, 0x1a, 0x98, 0x7f, 0xa2, 0x23, 0x30, 0xba, 0x36, 0xb3, + 0x45, 0x83, 0x56, 0xcc, 0x0f, 0x24, 0x2f, 0xbb, 0x4f, 0x0e, 0x3d, 0x74, 0x7c, 0x3b, 0x4c, 0x5a, + 0x77, 0x48, 0x6c, 0x26, 0x8c, 0x44, 0x58, 0xb8, 0x40, 0x5f, 0x82, 0xf1, 0xa0, 0x33, 0xf8, 0x4c, + 0x34, 0x71, 0xc5, 0x3c, 0xbc, 0x92, 0xab, 0x7f, 0xcf, 0x1b, 0x5b, 0xcc, 0x9e, 0x44, 0x37, 0xa9, + 0xe7, 0x30, 0xe2, 0x05, 0x2c, 0xc1, 0xc2, 0xa9, 0xa4, 0xd6, 0x51, 0xd3, 0x8c, 0x6e, 0x40, 0x51, + 0x50, 0xc7, 0x2b, 0xaa, 0xff, 0x2f, 0xb5, 0xd2, 0x8e, 0xde, 0x81, 0x52, 0xda, 0x06, 0x9c, 0x5b, + 0x7d, 0x61, 0x66, 0x54, 0x83, 0x60, 0x85, 0xd8, 0x2b, 0xff, 0x74, 0xda, 0x58, 0x13, 0xa1, 0x68, + 0x36, 0xe6, 0x2b, 0x57, 0xf1, 0x43, 0x28, 0xf3, 0x23, 0x9d, 0x70, 0x12, 0xc9, 0x6d, 0xf3, 0x52, + 0x2b, 0xb7, 0xdb, 0x94, 0xcd, 0x34, 0x38, 0x35, 0x38, 0xc3, 0xca, 0xbb, 0x05, 0x6a, 0x01, 0xad, + 0x1c, 0x0f, 0x81, 0xc1, 0x4f, 0x88, 0x58, 0x1b, 0x58, 0x7c, 0x73, 0x9d, 0xa8, 0xa7, 0x9e, 0xea, + 0x44, 0x61, 0x1e, 0xab, 0xba, 0x8c, 0xf8, 0x8d, 0xda, 0x3b, 0x2b, 0x47, 0x6c, 0xc3, 0x46, 0x7f, + 0x36, 0x74, 0x9d, 0xd1, 0xa7, 0x24, 0x91, 0x57, 0xdc, 0x6e, 0xc9, 0x46, 0xcc, 0x0c, 0x78, 0x8e, + 0xc9, 0xf1, 0xe9, 0xcd, 0x77, 0xd7, 0xf3, 0x08, 0xf7, 0x83, 0x26, 0x57, 0xef, 0x25, 0x3a, 0x65, + 0x1f, 0xb6, 0x3a, 0xa3, 0x11, 0x9f, 0xe4, 0x93, 0x60, 0x6c, 0x33, 0xa2, 0x1a, 0xe6, 0xe5, 0x96, + 0x78, 0x81, 0x2c, 0xe2, 0x05, 0xae, 0xcd, 0x88, 0xc4, 0x88, 0x32, 0x6a, 0x78, 0xe9, 0x48, 0x2e, + 0x85, 0x7f, 0xb4, 0xfc, 0x4e, 0x5d, 0xf9, 0xd2, 0x4d, 0xa8, 0xdc, 0xa7, 0xcc, 0xf1, 0x27, 0x0f, + 0x88, 0x33, 0x99, 0xa6, 0x2b, 0x46, 0xc7, 0x0b, 0x3a, 0x74, 0x02, 0x15, 0xe5, 0xf9, 0x8e, 0x1d, + 0x4d, 0x45, 0xb5, 0x2b, 0xe6, 0x7b, 0x97, 0x9f, 0xdc, 0x05, 0x37, 0xfc, 0x7d, 0x51, 0xb2, 0x7c, + 0xdb, 0xb6, 0x1f, 0x7b, 0x02, 0x70, 0x06, 0xc9, 0x5d, 0xf5, 0xab, 0xec, 0xa5, 0xb9, 0x04, 0xdd, + 0x75, 0xd0, 0xad, 0x58, 0x71, 0x5c, 0xc9, 0x60, 0x1d, 0x3f, 0xc1, 0xdc, 0x90, 0x73, 0xff, 0xa3, + 0x06, 0xc6, 0x7d, 0xca, 0xc8, 0x33, 0x5f, 0xe4, 0x2b, 0x70, 0x9d, 0x4b, 0xe3, 0xe1, 0x9c, 0x9e, + 0x6c, 0xf6, 0xb4, 0xdc, 0xec, 0x5d, 0x87, 0x6b, 0x5d, 0x12, 0x8d, 0x42, 0x27, 0x60, 0x0e, 0xf5, + 0xe5, 0x58, 0xe6, 0x55, 0xf9, 0x17, 0x59, 0x7f, 0xca, 0x8b, 0x9c, 0x8b, 0xfb, 0xdb, 0x3a, 0x14, + 0x4d, 0xdb, 0x75, 0x29, 0x5b, 0xa8, 0x90, 0xf6, 0xd4, 0x0a, 0xf1, 0x3e, 0x39, 0x70, 0x7c, 0xdb, + 0x75, 0xbe, 0x77, 0xfc, 0x89, 0xfc, 0x07, 0xba, 0x5a, 0x9f, 0xe4, 0xdd, 0xa0, 0x7d, 0xd8, 0x0c, + 0x64, 0x88, 0x01, 0xb3, 0x59, 0xba, 0x5a, 0xb6, 0x6e, 0xbd, 0x9a, 0xbb, 0x0c, 0xcf, 0x36, 0xcb, + 0x48, 0x80, 0xf0, 0xe2, 0x19, 0xf4, 0x1a, 0x14, 0x78, 0x4d, 0xa3, 0x5a, 0x41, 0x34, 0xc0, 0x66, + 0x76, 0x98, 0x6b, 0x71, 0x6a, 0x6b, 0x7e, 0x04, 0x9b, 0x0b, 0x4e, 0x50, 0x05, 0xca, 0x7d, 0x7c, + 0xdc, 0x3f, 0x1e, 0xf4, 0xba, 0xd5, 0x35, 0x2e, 0xf5, 0x3e, 0xef, 0xed, 0x9f, 0x58, 0xbd, 0x6e, + 0x55, 0x43, 0x00, 0xc5, 0x83, 0xce, 0xd1, 0xdd, 0x5e, 0xb7, 0xba, 0x6e, 0x7e, 0x7c, 0x76, 0x51, + 0xd7, 0xfe, 0xb8, 0xa8, 0x6b, 0x7f, 0x5f, 0xd4, 0xb5, 0xdf, 0x1f, 
0xd5, 0xb5, 0xb3, 0x47, 0x75, + 0xed, 0x8b, 0xb7, 0x9e, 0x7c, 0x6b, 0x16, 0x47, 0x6d, 0x99, 0xc5, 0xb0, 0x28, 0x7e, 0x38, 0x6f, + 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0xda, 0x2a, 0x1e, 0x80, 0xe5, 0x0a, 0x00, 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1448,11 +1448,11 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { } i += n19 } - if m.Validator != nil { + if m.PublicKey != nil { dAtA[i] = 0x12 i++ - i = encodeVarintPayload(dAtA, i, uint64(m.Validator.Size())) - n20, err := m.Validator.MarshalTo(dAtA[i:]) + i = encodeVarintPayload(dAtA, i, uint64(m.PublicKey.Size())) + n20, err := m.PublicKey.MarshalTo(dAtA[i:]) if err != nil { return 0, err } @@ -1489,11 +1489,11 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { } i += n21 } - if m.Output != nil { + if m.PublicKey != nil { dAtA[i] = 0x12 i++ - i = encodeVarintPayload(dAtA, i, uint64(m.Output.Size())) - n22, err := m.Output.MarshalTo(dAtA[i:]) + i = encodeVarintPayload(dAtA, i, uint64(m.PublicKey.Size())) + n22, err := m.PublicKey.MarshalTo(dAtA[i:]) if err != nil { return 0, err } @@ -1989,8 +1989,8 @@ func (m *BondTx) Size() (n int) { l = m.Input.Size() n += 1 + l + sovPayload(uint64(l)) } - if m.Validator != nil { - l = m.Validator.Size() + if m.PublicKey != nil { + l = m.PublicKey.Size() n += 1 + l + sovPayload(uint64(l)) } if m.XXX_unrecognized != nil { @@ -2009,8 +2009,8 @@ func (m *UnbondTx) Size() (n int) { l = m.Input.Size() n += 1 + l + sovPayload(uint64(l)) } - if m.Output != nil { - l = m.Output.Size() + if m.PublicKey != nil { + l = m.PublicKey.Size() n += 1 + l + sovPayload(uint64(l)) } if m.XXX_unrecognized != nil { @@ -3556,7 +3556,7 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Validator", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3583,10 +3583,10 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Validator == nil { - m.Validator = &spec.TemplateAccount{} + if m.PublicKey == nil { + m.PublicKey = &crypto.PublicKey{} } - if err := m.Validator.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.PublicKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -3682,7 +3682,7 @@ func (m *UnbondTx) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Output", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3709,10 +3709,10 @@ func (m *UnbondTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Output == nil { - m.Output = &TxOutput{} + if m.PublicKey == nil { + m.PublicKey = &crypto.PublicKey{} } - if err := m.Output.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.PublicKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex diff --git a/txs/payload/unbond_tx.go b/txs/payload/unbond_tx.go index 10a058ae9..fbcdcbeb6 100644 --- a/txs/payload/unbond_tx.go +++ b/txs/payload/unbond_tx.go @@ -8,8 +8,8 @@ import ( func NewUnbondTx(address crypto.Address, height uint64) *UnbondTx { return &UnbondTx{ - Input: &TxInput{}, - Output: &TxOutput{}, + Input: &TxInput{}, + PublicKey: &crypto.PublicKey{}, } } @@ -22,7 +22,7 @@ func (tx *UnbondTx) GetInputs() 
[]*TxInput { } func (tx *UnbondTx) String() string { - return fmt.Sprintf("UnbondTx{%v -> %v}", tx.Input.Address, tx.Output.Address) + return fmt.Sprintf("UnbondTx{%v}", tx.Input.Address) } func (tx *UnbondTx) Any() *Any { diff --git a/txs/tx_test.go b/txs/tx_test.go index 15c0d69dd..a4dda8d14 100644 --- a/txs/tx_test.go +++ b/txs/tx_test.go @@ -24,7 +24,6 @@ import ( "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/event/query" - "github.com/hyperledger/burrow/genesis/spec" "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/assert" @@ -120,22 +119,21 @@ func TestBondTxSignable(t *testing.T) { Amount: 12345, Sequence: 67890, }, - Validator: &spec.TemplateAccount{ - PublicKey: &val, - }, + PublicKey: &val, } testTxMarshalJSON(t, bondTx) testTxSignVerify(t, bondTx) } func TestUnbondTxSignable(t *testing.T) { + val := makePrivateAccount("output1").GetPublicKey() unbondTx := &payload.UnbondTx{ Input: &payload.TxInput{ - Address: makePrivateAccount("output1").GetAddress(), - }, - Output: &payload.TxOutput{ - Address: makePrivateAccount("input1").GetAddress(), + Address: makePrivateAccount("input1").GetAddress(), + Amount: 12345, + Sequence: 67890, }, + PublicKey: &val, } testTxMarshalJSON(t, unbondTx) testTxSignVerify(t, unbondTx) From 3ff236797f270f0fb9012d53181ae76b56a56d10 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 8 Jul 2019 10:49:02 +0100 Subject: [PATCH 35/70] init management contract ADR Signed-off-by: Gregory Hill --- docs/ADRs/draft/adr-3_bonding-natives.md | 40 ++++++++++++++++++++++++ docs/{ => quickstart}/bonding.md | 0 integration/governance/bonding_test.go | 2 +- 3 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 docs/ADRs/draft/adr-3_bonding-natives.md rename docs/{ => quickstart}/bonding.md (100%) diff --git a/docs/ADRs/draft/adr-3_bonding-natives.md b/docs/ADRs/draft/adr-3_bonding-natives.md new file mode 100644 index 000000000..ef34bf659 --- /dev/null +++ b/docs/ADRs/draft/adr-3_bonding-natives.md @@ -0,0 +1,40 @@ +--- +adr: 3 +title: Smart Contract Bonding Natives +author: Silas Davis (@silasdavis), Gregory Hill (@gregdhill) +discussions-to: https://chat.hyperledger.org/channel/burrow-contributors +status: Draft +type: Standards Track +category: State, Consensus, Governance +created: 2019-07-08 +--- + +## Abstract + +In vanilla proof-of-stake, an account with some amount of token pledges to vest a portion of this to actively participate in consensus with the knowledge that +misbehavior could result in punishment - bonded power is slashed to some extent. [PR 1100](https://github.com/hyperledger/burrow/pull/1100) contains the base +implementation to support this model, but we foresee techniques such as delegation being important to network users in the future. Therefore, we propose a +smart contract orientated approach which leverages SNatives to expose 'admin' functionality for controlling individual validator investments. + +## Motivation + +There are countless ways to model token economics, even in Proof-of-Stake (PoS) there are a number of schemes such as delegation, nomination or even hybrid +approaches. It is conceivable that we may want to incorporate alternate methods in the future without forking which (depending on the technique) may not be +easily done. Outsourcing this task to individual validators makes sense in the same way we do not control how native accounts move and use their tokens. 
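+
+For context, the base implementation referenced above ties bonded power directly to the input account: a `BondTx` or `UnbondTx` carries a `TxInput` (address and amount) together with the validator `PublicKey`, and the public key's address must match the input address before any power is moved. The sketch below is illustrative only — the helper names are ours and simply mirror the integration helpers elsewhere in this series; it is not part of the proposed native interface.
+
+```go
+package example // illustrative sketch, not part of the proposed interface
+
+import (
+	"github.com/hyperledger/burrow/crypto"
+	"github.com/hyperledger/burrow/txs/payload"
+)
+
+// selfBond moves `amount` from the input account's balance into validator power.
+// The public key's address must match the input address (see validateBonding).
+func selfBond(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.BondTx {
+	return &payload.BondTx{
+		Input:     &payload.TxInput{Address: address, Amount: amount},
+		PublicKey: &pubKey,
+	}
+}
+
+// selfUnbond releases `amount` of validator power back to the input account's balance.
+func selfUnbond(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.UnbondTx {
+	return &payload.UnbondTx{
+		Input:     &payload.TxInput{Address: address, Amount: amount},
+		PublicKey: &pubKey,
+	}
+}
+```
+
+The specification below keeps this transaction as the entry point, but moves ongoing power management behind a contract deployed at the validator's address.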
+ +## Specification + +A management contract should sit at the address of each validator bonded onto the network which contains the logic for how that validator may operate. For instance, +delegation would be trivial to implement if we could simply transfer funds to this account and have the smart contract automatically bond them. This special account +then handles the validators portfolio at its discretion, simplifying our consensus overhead with a tight account to validator binding. + +1. Account w/ bond permission signs and sends BondTx + - (Optionally) add EVM / WASM bytecode +2. Bond given amount for account into validator set +3. Call against validator address checks for existence of code and executes +4. Exposed natives verify validator address / power and run directly against state, updating or removing power + +We would still like to maintain the notion of validator 'flow' to ensure that the set does not change too quickly. Additionally, if power is ever depleted then we +may want to consider whether we should retain the contract. This problem also lends itself to upgradeability as a poorly written contract could severely impact the +lifetime and reputation of the identity in question. One possible solution is to hard-code the special validator contract and make it a concern of the network - +upgradeable through governance / proposals. This has the benefit of equalizing all validators on a per-chain basis and makes patching vulnerabilities easier. \ No newline at end of file diff --git a/docs/bonding.md b/docs/quickstart/bonding.md similarity index 100% rename from docs/bonding.md rename to docs/quickstart/bonding.md diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go index 9ee415f76..527f34810 100644 --- a/integration/governance/bonding_test.go +++ b/integration/governance/bonding_test.go @@ -75,7 +75,7 @@ func TestBonding(t *testing.T) { require.Equal(t, vsOut[valAccount.GetAddress()].GetPower(), power) // wait for validator to propose a block - waitFor(7, valKernel.Blockchain) + waitFor(10, valKernel.Blockchain) checkProposed(t, genesisKernels[0], valAccount.GetPublicKey().GetAddress().Bytes()) unbondTx := createUnbondTx(inputAccount, valAccount.GetPublicKey(), power) From 8ff8240a10d1cd3c0fec395a9ff6e94d02dadd72 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 8 Jul 2019 16:12:04 +0100 Subject: [PATCH 36/70] alterpower -> setpower Signed-off-by: Gregory Hill --- acm/validator/bucket.go | 31 ++++++++++++++--------- acm/validator/bucket_test.go | 23 +++++++---------- acm/validator/cache.go | 3 ++- acm/validator/ring.go | 16 +++++------- acm/validator/ring_test.go | 29 ++++++++------------- acm/validator/set.go | 13 ++++++---- acm/validator/validators.go | 22 +++++----------- execution/contexts/bond_context.go | 4 +-- execution/contexts/governance_context.go | 8 ++---- execution/contexts/permissions_context.go | 2 +- execution/contexts/proposal_context.go | 5 +++- execution/contexts/unbond_context.go | 4 +-- execution/evm/state.go | 9 ------- execution/state/validators.go | 18 +++---------- execution/state/validators_test.go | 12 +++++++-- 15 files changed, 86 insertions(+), 113 deletions(-) diff --git a/acm/validator/bucket.go b/acm/validator/bucket.go index b675e9738..875cf6a1c 100644 --- a/acm/validator/bucket.go +++ b/acm/validator/bucket.go @@ -45,9 +45,9 @@ func (vc *Bucket) Power(id crypto.Address) (*big.Int, error) { return vc.Previous.Power(id) } -// Updates the current head bucket (accumulator) whilst -func (vc *Bucket) 
AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { - const errHeader = "Bucket.AlterPower():" +// SetPower ensures that validator power would not change too quickly in a single block +func (vc *Bucket) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { + const errHeader = "Bucket.SetPower():" err := checkPower(power) if err != nil { return nil, fmt.Errorf("%s %v", errHeader, err) @@ -66,15 +66,17 @@ func (vc *Bucket) AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, err "in a total power greater than that allowed by tendermint (%v): would make next total power: %v", errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, maxTotalVotingPower, nextTotalPower) } + // If we call vc.flow.ChangePower(id, absFlow) (below) will we induce a change in flow greater than the allowable // flow we have left to spend? - if vc.Flow.Flow(id, absFlow).Cmp(allowableFlow) == 1 { + if vc.Flow.Flow(id, absFlow).Cmp(allowableFlow) == 1 && allowableFlow.Cmp(big.NewInt(0)) > 0 { return nil, fmt.Errorf("%s cannot change validator power of %v from %v to %v because that would result "+ "in a flow greater than or equal to 1/3 of total power for the next commit: flow induced by change: %v, "+ "current total flow: %v/%v (cumulative/max), remaining allowable flow: %v", errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, absFlow, vc.Flow.totalPower, maxFlow, allowableFlow) } + // Set flow for this id to update flow.totalPower (total flow) for comparison below, keep track of flow for each id // so that we only count flow once for each id vc.Flow.ChangePower(id, absFlow) @@ -84,15 +86,20 @@ func (vc *Bucket) AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, err return absFlow, nil } -func (vc *Bucket) SetPower(id crypto.PublicKey, power *big.Int) error { - err := checkPower(power) +func (vc *Bucket) Initialize(id crypto.PublicKey, power *big.Int) error { + exists, err := vc.Power(id.GetAddress()) + if err != nil { + return err + } + if exists.Cmp(new(big.Int)) > 0 { + return fmt.Errorf("cannot initialize %v, validator already has power %v", + id.GetAddress(), exists) + } + err = checkPower(power) if err != nil { return err } - // The new absolute flow caused by this AlterPower absFlow := new(big.Int).Abs(vc.Previous.Flow(id, power)) - // Set flow for this id to update flow.totalPower (total flow) for comparison below, keep track of flow for each id - // so that we only count flow once for each id vc.Flow.ChangePower(id, absFlow) // Add to total power vc.Delta.ChangePower(id, power) @@ -121,9 +128,9 @@ func (vc *Bucket) Equal(vwOther *Bucket) error { } func checkPower(power *big.Int) error { - // if power.Sign() == -1 { - // return fmt.Errorf("cannot set negative validator power: %v", power) - // } + if power.Sign() == -1 { + return fmt.Errorf("cannot set negative validator power: %v", power) + } if !power.IsInt64() { return fmt.Errorf("for tendermint compatibility validator power must fit within an int but %v "+ "does not", power) diff --git a/acm/validator/bucket_test.go b/acm/validator/bucket_test.go index e1ab8b79a..9ed0aa744 100644 --- a/acm/validator/bucket_test.go +++ b/acm/validator/bucket_test.go @@ -10,43 +10,38 @@ import ( var pubA = pubKey(1) var pubB = pubKey(2) var pubC = pubKey(3) -var big2 = big.NewInt(2) -func TestBucket_AlterPower(t *testing.T) { +func TestBucket_SetPower(t *testing.T) { base := NewBucket() - err := base.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big3)) + _, err := base.SetPower(pubA, 
new(big.Int).Sub(maxTotalVotingPower, big3)) require.NoError(t, err) bucket := NewBucket(base.Next) - flow, err := bucket.AlterPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big2)) + flow, err := bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big2)) require.NoError(t, err) require.Equal(t, big1.Int64(), flow.Int64()) - flow, err = bucket.AlterPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) + flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) require.NoError(t, err) require.Equal(t, big2.Int64(), flow.Int64()) - flow, err = bucket.AlterPower(pubA, maxTotalVotingPower) + flow, err = bucket.SetPower(pubA, maxTotalVotingPower) require.NoError(t, err) require.Equal(t, big3.Int64(), flow.Int64()) - _, err = bucket.AlterPower(pubA, new(big.Int).Add(maxTotalVotingPower, big1)) + _, err = bucket.SetPower(pubA, new(big.Int).Add(maxTotalVotingPower, big1)) require.Error(t, err, "should fail as we would breach total power") - _, err = bucket.AlterPower(pubB, big1) + _, err = bucket.SetPower(pubB, big1) require.Error(t, err, "should fail as we would breach total power") // Drop A and raise B - should now succeed - flow, err = bucket.AlterPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) + flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) require.NoError(t, err) require.Equal(t, big2.Int64(), flow.Int64()) - flow, err = bucket.AlterPower(pubB, big1) + flow, err = bucket.SetPower(pubB, big1) require.NoError(t, err) require.Equal(t, big1.Int64(), flow.Int64()) } - -//func setPower(t *testing.T, id crypto.PublicKey, bucket *Bucket, power int64) { -// err := bucket.SetPower(id, power) -//} diff --git a/acm/validator/cache.go b/acm/validator/cache.go index 14898bea0..c81189950 100644 --- a/acm/validator/cache.go +++ b/acm/validator/cache.go @@ -23,7 +23,8 @@ func (vc *Cache) Reset(backend Iterable) { func (vc *Cache) Flush(output Writer, backend Iterable) error { err := vc.Delta.IterateValidators(func(id crypto.Addressable, power *big.Int) error { - return output.SetPower(id.GetPublicKey(), power) + _, err := output.SetPower(id.GetPublicKey(), power) + return err }) if err != nil { return err diff --git a/acm/validator/ring.go b/acm/validator/ring.go index 5cd634ab6..2f4af838b 100644 --- a/acm/validator/ring.go +++ b/acm/validator/ring.go @@ -31,9 +31,6 @@ type Ring struct { populated int } -var big1 = big.NewInt(1) -var big3 = big.NewInt(3) - var _ History = &Ring{} // Provides a sliding window over the last size buckets of validator power changes @@ -66,21 +63,20 @@ func (vc *Ring) GetPower(id crypto.Address) *big.Int { return vc.Head().Previous.GetPower(id) } -// Updates the current head bucket (accumulator) whilst -func (vc *Ring) AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { - return vc.Head().AlterPower(id, power) +func (vc *Ring) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { + return vc.Head().SetPower(id, power) } -func (vc *Ring) SetPower(id crypto.PublicKey, power *big.Int) error { - return vc.Head().SetPower(id, power) +func (vc *Ring) Initialize(id crypto.PublicKey, power *big.Int) error { + return vc.Head().Initialize(id, power) } -// Get the sum of all powers added in any bucket +// CumulativePower gets the sum of all powers added in any bucket func (vc *Ring) CumulativePower() *Set { return vc.power } -// Advance the current head bucket to the next bucket and returns the change in total power between the previous bucket +// Rotate the current head bucket to the 
next bucket and returns the change in total power between the previous bucket // and the current head, and the total flow which is the sum of absolute values of all changes each validator's power // after rotation the next head is a copy of the current head func (vc *Ring) Rotate() (totalPowerChange *big.Int, totalFlow *big.Int, err error) { diff --git a/acm/validator/ring_test.go b/acm/validator/ring_test.go index 12567d9f8..e3eb476a1 100644 --- a/acm/validator/ring_test.go +++ b/acm/validator/ring_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestValidatorsRing_AlterPower(t *testing.T) { +func TestValidatorsRing_SetPower(t *testing.T) { vsBase := NewSet() powAInitial := int64(10000) vsBase.ChangePower(pubA, big.NewInt(powAInitial)) @@ -44,18 +44,18 @@ func TestValidatorsRing_AlterPower(t *testing.T) { assertZero(t, powerChange) assertZero(t, totalFlow) - _, err = vw.AlterPower(pubA, big.NewInt(8000)) + _, err = vw.SetPower(pubA, big.NewInt(8000)) assert.NoError(t, err) // Should fail - not enough flow left - _, err = vw.AlterPower(pubB, big.NewInt(2000)) + _, err = vw.SetPower(pubB, big.NewInt(2000)) assert.Error(t, err) // Take a bit off should work - _, err = vw.AlterPower(pubA, big.NewInt(7000)) + _, err = vw.SetPower(pubA, big.NewInt(7000)) assert.NoError(t, err) - _, err = vw.AlterPower(pubB, big.NewInt(2000)) + _, err = vw.SetPower(pubB, big.NewInt(2000)) assert.NoError(t, err) _, _, err = vw.Rotate() require.NoError(t, err) @@ -78,27 +78,20 @@ func TestValidatorsRing_AlterPower(t *testing.T) { func TestRing_Rotate(t *testing.T) { ring := NewRing(nil, 3) - err := ring.SetPower(pubA, big.NewInt(234)) + _, err := ring.SetPower(pubA, big.NewInt(234)) require.NoError(t, err) fmt.Println(printBuckets(ring)) _, _, err = ring.Rotate() require.NoError(t, err) - err = ring.SetPower(pubA, big.NewInt(111)) + _, err = ring.SetPower(pubB, big.NewInt(40)) require.NoError(t, err) fmt.Println(printBuckets(ring)) _, _, err = ring.Rotate() require.NoError(t, err) fmt.Println(printBuckets(ring)) - err = ring.SetPower(pubB, big.NewInt(40)) - require.NoError(t, err) - fmt.Println(printBuckets(ring)) - _, _, err = ring.Rotate() - require.NoError(t, err) - fmt.Println(printBuckets(ring)) - - err = ring.SetPower(pubC, big.NewInt(99990)) + _, err = ring.SetPower(pubC, big.NewInt(90)) require.NoError(t, err) fmt.Println(printBuckets(ring)) _, _, err = ring.Rotate() @@ -119,15 +112,15 @@ func printBuckets(ring *Ring) string { } func alterPowers(t testing.TB, vw *Ring, powA, powB, powC int64) (powerChange, totalFlow *big.Int, err error) { - _, err = vw.AlterPower(pubA, big.NewInt(powA)) + _, err = vw.SetPower(pubA, big.NewInt(powA)) if err != nil { return nil, nil, err } - _, err = vw.AlterPower(pubB, big.NewInt(powB)) + _, err = vw.SetPower(pubB, big.NewInt(powB)) if err != nil { return nil, nil, err } - _, err = vw.AlterPower(pubC, big.NewInt(powC)) + _, err = vw.SetPower(pubC, big.NewInt(powC)) if err != nil { return nil, nil, err } diff --git a/acm/validator/set.go b/acm/validator/set.go index 495b66518..058151999 100644 --- a/acm/validator/set.go +++ b/acm/validator/set.go @@ -10,6 +10,9 @@ import ( ) var big0 = big.NewInt(0) +var big1 = big.NewInt(1) +var big2 = big.NewInt(2) +var big3 = big.NewInt(3) // A Validator multiset - can be used to capture the global state of validators or as an accumulator each block type Set struct { @@ -41,9 +44,8 @@ func NewTrimSet() *Set { } // Implements Writer, but will never error -func (vs *Set) SetPower(id crypto.PublicKey, power 
*big.Int) error { - vs.ChangePower(id, power) - return nil +func (vs *Set) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { + return vs.ChangePower(id, power), nil } // Add the power of a validator and returns the flow into that validator @@ -77,7 +79,7 @@ func (vs *Set) MaxFlow() *big.Int { // Returns the flow that would be induced by a validator power change func (vs *Set) Flow(id crypto.PublicKey, power *big.Int) *big.Int { - return new(big.Int).Sub(new(big.Int).Abs(power), vs.GetPower(id.GetAddress())) + return new(big.Int).Sub(power, vs.GetPower(id.GetAddress())) } // Returns the power of id but only if it is set @@ -137,7 +139,8 @@ func (vs *Set) IterateValidators(iter func(id crypto.Addressable, power *big.Int func (vs *Set) Flush(output Writer, backend Reader) error { return vs.IterateValidators(func(id crypto.Addressable, power *big.Int) error { - return output.SetPower(id.GetPublicKey(), power) + _, err := output.SetPower(id.GetPublicKey(), power) + return err }) } diff --git a/acm/validator/validators.go b/acm/validator/validators.go index e88f3d196..ff614d910 100644 --- a/acm/validator/validators.go +++ b/acm/validator/validators.go @@ -7,13 +7,7 @@ import ( ) type Writer interface { - SetPower(id crypto.PublicKey, power *big.Int) error -} - -type Alterer interface { - // AlterPower ensures that validator power would not change too quickly in a single block (unlike SetPower) which - // merely checks values are sane. It returns the flow induced by the change in power. - AlterPower(id crypto.PublicKey, power *big.Int) (flow *big.Int, err error) + SetPower(id crypto.PublicKey, power *big.Int) (flow *big.Int, err error) } type Reader interface { @@ -50,7 +44,8 @@ func AddPower(vs ReaderWriter, id crypto.PublicKey, power *big.Int) error { if err != nil { return err } - return vs.SetPower(id, new(big.Int).Add(currentPower, power)) + _, err = vs.SetPower(id, new(big.Int).Add(currentPower, power)) + return err } func SubtractPower(vs ReaderWriter, id crypto.PublicKey, power *big.Int) error { @@ -58,7 +53,8 @@ func SubtractPower(vs ReaderWriter, id crypto.PublicKey, power *big.Int) error { if err != nil { return err } - return vs.SetPower(id, new(big.Int).Sub(currentPower, power)) + _, err = vs.SetPower(id, new(big.Int).Sub(currentPower, power)) + return err } // Returns the asymmetric difference, diff, between two Sets such that applying diff to before results in after @@ -98,13 +94,7 @@ func Diff(before, after IterableReader) (*Set, error) { func Write(vs Writer, vsOther Iterable) error { return vsOther.IterateValidators(func(id crypto.Addressable, power *big.Int) error { - return vs.SetPower(id.GetPublicKey(), power) - }) -} - -func Alter(vs Alterer, vsOther Iterable) error { - return vsOther.IterateValidators(func(id crypto.Addressable, power *big.Int) error { - _, err := vs.AlterPower(id.GetPublicKey(), power) + _, err := vs.SetPower(id.GetPublicKey(), power) return err }) } diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index 8d71faf85..e49707af6 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -13,7 +13,7 @@ import ( type BondContext struct { StateWriter acmstate.ReaderWriter - ValidatorSet validator.Alterer + ValidatorSet validator.ReaderWriter Logger *logging.Logger tx *payload.BondTx } @@ -48,7 +48,7 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error return err } - _, err = ctx.ValidatorSet.AlterPower(account.PublicKey, power) + _, err = 
ctx.ValidatorSet.SetPower(account.PublicKey, power) if err != nil { return err } diff --git a/execution/contexts/governance_context.go b/execution/contexts/governance_context.go index 555fe29ce..fde105578 100644 --- a/execution/contexts/governance_context.go +++ b/execution/contexts/governance_context.go @@ -18,7 +18,7 @@ import ( type GovernanceContext struct { StateWriter acmstate.ReaderWriter - ValidatorSet validator.Alterer + ValidatorSet validator.ReaderWriter Logger *logging.Logger tx *payload.GovTx txe *exec.TxExecution @@ -82,11 +82,7 @@ func (ctx *GovernanceContext) UpdateAccount(account *acm.Account, update *spec.T return } power := new(big.Int).SetUint64(update.Balances().GetPower(0)) - if !power.IsInt64() { - err = fmt.Errorf("power supplied in update to validator power for %v does not fit into int64 and "+ - "so is not supported by Tendermint", update.Address) - } - _, err := ctx.ValidatorSet.AlterPower(*update.PublicKey, power) + _, err := ctx.ValidatorSet.SetPower(*update.PublicKey, power) if err != nil { return ev, err } diff --git a/execution/contexts/permissions_context.go b/execution/contexts/permissions_context.go index bcc3a977c..a11752e78 100644 --- a/execution/contexts/permissions_context.go +++ b/execution/contexts/permissions_context.go @@ -97,7 +97,7 @@ func (ctx *PermissionsContext) Execute(txe *exec.TxExecution, p payload.Payload) return fmt.Errorf("invalid permission function: %v", permFlag) } - // TODO: maybe we want to take funds on error and allow txs in that don't do anythingi? + // TODO: maybe we want to take funds on error and allow txs in that don't do anything? if err != nil { return err } diff --git a/execution/contexts/proposal_context.go b/execution/contexts/proposal_context.go index 50e30bfd8..9698c9913 100644 --- a/execution/contexts/proposal_context.go +++ b/execution/contexts/proposal_context.go @@ -178,7 +178,10 @@ func (ctx *ProposalContext) Execute(txe *exec.TxExecution, p payload.Payload) er return fmt.Errorf("proposal expired, sequence number %d for account %s wrong at step %d", input.Sequence, input.Address, i+1) } - stateCache.UpdateAccount(acc) + err = stateCache.UpdateAccount(acc) + if err != nil { + return err + } } } diff --git a/execution/contexts/unbond_context.go b/execution/contexts/unbond_context.go index 172ad47b8..403f344ff 100644 --- a/execution/contexts/unbond_context.go +++ b/execution/contexts/unbond_context.go @@ -13,7 +13,7 @@ import ( type UnbondContext struct { StateWriter acmstate.ReaderWriter - ValidatorSet validator.Alterer + ValidatorSet validator.ReaderWriter Logger *logging.Logger tx *payload.UnbondTx } @@ -37,7 +37,7 @@ func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) erro return err } - _, err = ctx.ValidatorSet.AlterPower(account.PublicKey, power) + _, err = ctx.ValidatorSet.SetPower(account.PublicKey, power) if err != nil { return err } diff --git a/execution/evm/state.go b/execution/evm/state.go index 7fafeed60..e23de381d 100644 --- a/execution/evm/state.go +++ b/execution/evm/state.go @@ -29,7 +29,6 @@ type Reader interface { GetPermissions(address crypto.Address) permission.AccountPermissions GetEVMCode(address crypto.Address) acm.Bytecode GetWASMCode(address crypto.Address) acm.Bytecode - GetSequence(address crypto.Address) uint64 Exists(address crypto.Address) bool // GetBlockHash returns hash of the specific block GetBlockHash(blockNumber uint64) (binary.Word256, error) @@ -163,14 +162,6 @@ func (st *State) Exists(address crypto.Address) bool { return true } -func (st *State) 
GetSequence(address crypto.Address) uint64 { - acc := st.account(address) - if acc == nil { - return 0 - } - return acc.Sequence -} - // Writer func (st *State) CreateAccount(address crypto.Address) { diff --git a/execution/state/validators.go b/execution/state/validators.go index 362897af9..56945de9c 100644 --- a/execution/state/validators.go +++ b/execution/state/validators.go @@ -82,7 +82,7 @@ func LoadValidatorRing(version int64, ringSize int, func (ws *writeState) MakeGenesisValidators(genesisDoc *genesis.GenesisDoc) error { for _, gv := range genesisDoc.Validators { - err := ws.SetPower(gv.PublicKey, new(big.Int).SetUint64(gv.Amount)) + _, err := ws.SetPower(gv.PublicKey, new(big.Int).SetUint64(gv.Amount)) if err != nil { return err } @@ -122,9 +122,9 @@ func (s *ReadState) IterateValidators(fn func(id crypto.Addressable, power *big. }) } -func (ws *writeState) AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { - // AlterPower in ring - flow, err := ws.ring.AlterPower(id, power) +func (ws *writeState) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) { + // SetPower in ring + flow, err := ws.ring.SetPower(id, power) if err != nil { return nil, err } @@ -132,16 +132,6 @@ func (ws *writeState) AlterPower(id crypto.PublicKey, power *big.Int) (*big.Int, return flow, ws.setPower(id, power) } -func (ws *writeState) SetPower(id crypto.PublicKey, power *big.Int) error { - // SetPower in ring - err := ws.ring.SetPower(id, power) - if err != nil { - return err - } - // Set power in versioned state - return ws.setPower(id, power) -} - func (ws *writeState) setPower(id crypto.PublicKey, power *big.Int) error { tree, err := ws.forest.Writer(keys.Validator.Prefix()) if err != nil { diff --git a/execution/state/validators_test.go b/execution/state/validators_test.go index 771540ff2..b9915b099 100644 --- a/execution/state/validators_test.go +++ b/execution/state/validators_test.go @@ -20,7 +20,8 @@ func TestValidatorsReadWrite(t *testing.T) { v := validator.FromAccount(acm.NewAccountFromSecret("foobar"), power) _, _, err := s.Update(func(up Updatable) error { - return up.SetPower(v.GetPublicKey(), v.BigPower()) + _, err := up.SetPower(v.GetPublicKey(), v.BigPower()) + return err }) require.NoError(t, err) @@ -55,8 +56,15 @@ func testLoadValidatorRing(t *testing.T, commits int) { var version int64 var err error + // we need to add a larger staked entity first + // to prevent unbalancing the validator set + _, err = s.writeState.SetPower(pub(0), pow(1000)) + require.NoError(t, err) + _, version, err = s.commit() + require.NoError(t, err) + for i := 1; i <= commits; i++ { - err = s.writeState.SetPower(pub(i), pow(i)) + _, err = s.writeState.SetPower(pub(i), pow(i)) require.NoError(t, err) _, version, err = s.commit() require.NoError(t, err) From 3c872c0f2be131f23953f96ce6cdd8efe207cab7 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 8 Jul 2019 18:01:45 +0100 Subject: [PATCH 37/70] use signatories for pubkey, simplify bond/unbond and use tx constructors Signed-off-by: Gregory Hill --- acm/validator/bucket.go | 21 -- acm/validator/ring.go | 10 +- cmd/burrow/commands/tx.go | 12 +- deploy/def/client.go | 33 +-- deploy/def/jobs.go | 12 +- deploy/jobs/jobs_governance.go | 17 +- docs/README.md | 6 +- .../{bonding.md => bonding-validators.md} | 6 +- execution/contexts/bond_context.go | 9 +- execution/contexts/shared.go | 23 -- execution/contexts/unbond_context.go | 10 +- execution/execution.go | 59 +++-- integration/governance/bonding_test.go | 12 +- 
integration/governance/helpers.go | 20 -- protobuf/payload.proto | 9 +- txs/payload/bond_tx.go | 10 +- txs/payload/payload.pb.go | 250 +++++++----------- txs/payload/unbond_tx.go | 11 +- txs/tx_test.go | 4 - 19 files changed, 209 insertions(+), 325 deletions(-) rename docs/quickstart/{bonding.md => bonding-validators.md} (79%) diff --git a/acm/validator/bucket.go b/acm/validator/bucket.go index 875cf6a1c..fe72e2f50 100644 --- a/acm/validator/bucket.go +++ b/acm/validator/bucket.go @@ -86,27 +86,6 @@ func (vc *Bucket) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error return absFlow, nil } -func (vc *Bucket) Initialize(id crypto.PublicKey, power *big.Int) error { - exists, err := vc.Power(id.GetAddress()) - if err != nil { - return err - } - if exists.Cmp(new(big.Int)) > 0 { - return fmt.Errorf("cannot initialize %v, validator already has power %v", - id.GetAddress(), exists) - } - err = checkPower(power) - if err != nil { - return err - } - absFlow := new(big.Int).Abs(vc.Previous.Flow(id, power)) - vc.Flow.ChangePower(id, absFlow) - // Add to total power - vc.Delta.ChangePower(id, power) - vc.Next.ChangePower(id, power) - return nil -} - func (vc *Bucket) CurrentSet() *Set { return vc.Previous } diff --git a/acm/validator/ring.go b/acm/validator/ring.go index 2f4af838b..33740fa01 100644 --- a/acm/validator/ring.go +++ b/acm/validator/ring.go @@ -18,7 +18,6 @@ import ( // // delta [d5 | d6| d7 | d8 ] // cum [v0+d1+d2+d3+d4|...| | ] - type Ring struct { buckets []*Bucket // Power tracks the sliding sum of all powers for each validator added by each delta bucket - power is added for the newest delta and subtracted from the oldest delta each rotation @@ -33,7 +32,7 @@ type Ring struct { var _ History = &Ring{} -// Provides a sliding window over the last size buckets of validator power changes +// NewRing provides a sliding window over the last size buckets of validator power changes func NewRing(initialSet Iterable, windowSize int) *Ring { if windowSize < 1 { windowSize = 1 @@ -54,7 +53,8 @@ func NewRing(initialSet Iterable, windowSize int) *Ring { } // Implement Reader -// Get power at index from the delta bucket then falling through to the cumulative + +// Power gets the balance at index from the delta bucket then falling through to the cumulative func (vc *Ring) Power(id crypto.Address) (*big.Int, error) { return vc.GetPower(id), nil } @@ -67,10 +67,6 @@ func (vc *Ring) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error) return vc.Head().SetPower(id, power) } -func (vc *Ring) Initialize(id crypto.PublicKey, power *big.Int) error { - return vc.Head().Initialize(id, power) -} - // CumulativePower gets the sum of all powers added in any bucket func (vc *Ring) CumulativePower() *Set { return vc.power diff --git a/cmd/burrow/commands/tx.go b/cmd/burrow/commands/tx.go index dc6d81784..11d947879 100644 --- a/cmd/burrow/commands/tx.go +++ b/cmd/burrow/commands/tx.go @@ -72,13 +72,13 @@ func Tx(output Output) func(cmd *cli.Cmd) { cmd.Command("bond", "bond a new validator", func(cmd *cli.Cmd) { sourceOpt := cmd.StringOpt("source", "", "Account with bonding perm, if not set config is used") - powerOpt := cmd.StringOpt("power", "", "Amount of value to bond, required") - cmd.Spec += "[--source=
] [--power=]" + amountOpt := cmd.StringOpt("amount", "", "Amount of value to bond, required") + cmd.Spec += "[--source=
] [--amount=]" cmd.Action = func() { bond := &def.Bond{ Source: jobs.FirstOf(*sourceOpt, address), - Power: *powerOpt, + Amount: *amountOpt, } if err := bond.Validate(); err != nil { @@ -98,13 +98,13 @@ func Tx(output Output) func(cmd *cli.Cmd) { cmd.Command("unbond", "unbond an existing validator", func(cmd *cli.Cmd) { sourceOpt := cmd.StringOpt("source", "", "Validator to unbond, if not set config is used") - powerOpt := cmd.StringOpt("power", "", "Amount of value to unbond, required") - cmd.Spec += "[--source=
] [--power=]" + amountOpt := cmd.StringOpt("amount", "", "Amount of value to unbond, required") + cmd.Spec += "[--source=
] [--amount=]" cmd.Action = func() { unbond := &def.Unbond{ Source: jobs.FirstOf(*sourceOpt, address), - Power: *powerOpt, + Amount: *amountOpt, } if err := unbond.Validate(); err != nil { diff --git a/deploy/def/client.go b/deploy/def/client.go index 485037934..ee6963fa8 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -506,11 +506,9 @@ func (c *Client) Send(arg *SendArg, logger *logging.Logger) (*payload.SendTx, er } type BondArg struct { - Input string - Amount string - Sequence string - Address string - PublicKey string + Input string + Amount string + Sequence string } func (c *Client) Bond(arg *BondArg, logger *logging.Logger) (*payload.BondTx, error) { @@ -519,24 +517,18 @@ func (c *Client) Bond(arg *BondArg, logger *logging.Logger) (*payload.BondTx, er if err != nil { return nil, err } - // TODO: disable mempool signing input, err := c.TxInput(arg.Input, arg.Amount, arg.Sequence, true, logger) if err != nil { return nil, err } - pubKey, err := c.PublicKeyFromAddress(&input.Address) - if err != nil { - return nil, err - } return &payload.BondTx{ - Input: input, - PublicKey: pubKey, + Input: input, }, nil } type UnbondArg struct { - Input string Output string + Amount string Sequence string } @@ -545,18 +537,15 @@ func (c *Client) Unbond(arg *UnbondArg, logger *logging.Logger) (*payload.Unbond if err := c.dial(logger); err != nil { return nil, err } - input, err := c.TxInput(arg.Input, "", arg.Sequence, true, logger) - if err != nil { - return nil, err - } - pubKey, err := c.PublicKeyFromAddress(&input.Address) + input, err := c.TxInput(arg.Output, arg.Amount, arg.Sequence, true, logger) if err != nil { return nil, err } - return &payload.UnbondTx{ - Input: input, - PublicKey: pubKey, - }, nil + + tx := payload.NewUnbondTx(input.Address, input.Amount) + tx.Input = input + + return tx, nil } type NameArg struct { diff --git a/deploy/def/jobs.go b/deploy/def/jobs.go index e01613f14..d1652296e 100644 --- a/deploy/def/jobs.go +++ b/deploy/def/jobs.go @@ -179,10 +179,8 @@ type Bond struct { // (Optional, if account job or global account set) address of the account from which to bond (the // public key for the account must be available to burrow keys) Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` - // (Optional, if key client enabled) the public key of the bonding validator - Target string `mapstructure:"target" json:"target" yaml:"target" toml:"target"` // (Required) the Tendermint validator power to claim - Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` + Amount string `mapstructure:"amount" json:"amount" yaml:"amount" toml:"amount"` // (Optional, advanced only) sequence to use when burrow keys signs the transaction // (do not use unless you know what you're doing) Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` @@ -190,7 +188,7 @@ type Bond struct { func (job *Bond) Validate() error { return validation.ValidateStruct(job, - validation.Field(&job.Power, validation.Required), + validation.Field(&job.Amount, validation.Required), validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), ) } @@ -199,10 +197,8 @@ type Unbond struct { // (Optional, if account job or global account set) address of the validator to unbond (the // public key for the validator must be available to burrow keys) Source string `mapstructure:"source" json:"source" yaml:"source" toml:"source"` - // (Optional, if key client enabled) the public key of the unbonding validator - Target string 
`mapstructure:"target" json:"target" yaml:"target" toml:"target"` // (Required) the Tendermint validator power to unclaim - Power string `mapstructure:"power" json:"power" yaml:"power" toml:"power"` + Amount string `mapstructure:"amount" json:"amount" yaml:"amount" toml:"amount"` // (Optional, advanced only) sequence to use when burrow keys signs the transaction (do not use unless you // know what you're doing) Sequence string `mapstructure:"sequence" json:"sequence" yaml:"sequence" toml:"sequence"` @@ -210,7 +206,7 @@ type Unbond struct { func (job *Unbond) Validate() error { return validation.ValidateStruct(job, - validation.Field(&job.Power, validation.Required), + validation.Field(&job.Amount, validation.Required), validation.Field(&job.Sequence, rule.Uint64OrPlaceholder), ) } diff --git a/deploy/jobs/jobs_governance.go b/deploy/jobs/jobs_governance.go index 1388351b1..75955677b 100644 --- a/deploy/jobs/jobs_governance.go +++ b/deploy/jobs/jobs_governance.go @@ -71,21 +71,14 @@ func FormulateBondJob(bond *def.Bond, account string, client *def.Client, logger // Formulate tx logger.InfoMsg("Bonding Transaction", "source", bond.Source, - "target", bond.Target, - "power", bond.Power) + "amount", bond.Amount) arg := &def.BondArg{ Input: bond.Source, - Amount: bond.Power, + Amount: bond.Amount, Sequence: bond.Sequence, } - if len(bond.Source) == crypto.AddressHexLength { - arg.Address = bond.Target - } else { - arg.PublicKey = bond.Target - } - return client.Bond(arg, logger) } @@ -110,12 +103,10 @@ func FormulateUnbondJob(unbond *def.Unbond, account string, client *def.Client, // Formulate tx logger.InfoMsg("Unbonding Transaction", - "source", unbond.Source, - "target", unbond.Target) + "source", unbond.Source) arg := &def.UnbondArg{ - Input: unbond.Source, - Output: unbond.Target, + Output: unbond.Source, Sequence: unbond.Sequence, } diff --git a/docs/README.md b/docs/README.md index ce2f74fc0..432507d6b 100644 --- a/docs/README.md +++ b/docs/README.md @@ -11,11 +11,11 @@ Hyperledger Burrow is a permissioned Ethereum smart-contract blockchain node. It * [Send transactions](quickstart/send-transactions.md) - how to communicate with your Burrow chain * [Deploy contracts](quickstart/deploy-contracts.md) - interact with the Ethereum Virtual Machine * [Multiple validators](quickstart/multiple-validators.md) - advanced consensus setup - * [Adding validators](quickstart/add-validators.md) - bonding a new party - * [Seed nodes](quickstart/seed-nodes.md) - add new node dynamically + * [Bonding validators](quickstart/bonding-validators.md) - bonding yourself on + * [Seed nodes](quickstart/seed-nodes.md) - add new nodes dynamically * [Dump / restore](design/dump-restore.md) - create a new chain with previous state 4. [Genesis](design/genesis.md) 5. [Permissions](design/permissions.md) -6. [Architecture](arch) +6. [Architecture](architecture) * [State](arch/state.md) 7. [Kubernetes](https://github.com/helm/charts/tree/master/stable/burrow) - bootstraps a burrow network on a Kubernetes cluster diff --git a/docs/quickstart/bonding.md b/docs/quickstart/bonding-validators.md similarity index 79% rename from docs/quickstart/bonding.md rename to docs/quickstart/bonding-validators.md index 8a907f93a..4a3c33130 100644 --- a/docs/quickstart/bonding.md +++ b/docs/quickstart/bonding-validators.md @@ -29,14 +29,14 @@ This will return the pre-bonded validator, defined in our pool. 
To have the second node bond on and produce blocks: ```bash -burrow tx --config burrow001.toml formulate bond --power 10000 | burrow tx commit +burrow tx --config burrow001.toml formulate bond --amount 10000 | burrow tx commit ``` Note that this will bond the current account, to bond an alternate account (which is created if it doesn't exist) -simply specific the `--target=
` flag in formulation:
+simply specify the `--source=
` flag in formulation: ```bash -burrow tx --config burrow001.toml formulate bond --target 8A468CC3A28A6E84ED52E433DA21D6E9ED7C1577 --power 10000 +burrow tx --config burrow001.toml formulate bond --source 8A468CC3A28A6E84ED52E433DA21D6E9ED7C1577 --amount 10000 ``` It should now be in the validator set: diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index e49707af6..b6169c952 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -28,11 +28,16 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error // the account initiating the bond power := new(big.Int).SetUint64(ctx.tx.Input.GetAmount()) - account, err := validateBonding(ctx.StateWriter, ctx.tx.Input, ctx.tx.PublicKey, ctx.Logger) + account, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) if err != nil { return err } + // can the account bond? + if !hasBondPermission(ctx.StateWriter, account, ctx.Logger) { + return fmt.Errorf("account '%s' lacks bond permission", account.Address) + } + // check account has enough to bond amount := ctx.tx.Input.GetAmount() if amount == 0 { @@ -48,7 +53,7 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error return err } - _, err = ctx.ValidatorSet.SetPower(account.PublicKey, power) + err = validator.AddPower(ctx.ValidatorSet, account.PublicKey, power) if err != nil { return err } diff --git a/execution/contexts/shared.go b/execution/contexts/shared.go index d322032ab..84959a3f8 100644 --- a/execution/contexts/shared.go +++ b/execution/contexts/shared.go @@ -230,26 +230,3 @@ func hasBondOrSendPermission(accountGetter acmstate.AccountGetter, accs map[cryp } return true } - -func validateBonding(accGet acmstate.AccountGetter, in *payload.TxInput, - pubKey *crypto.PublicKey, log *logging.Logger) (*acm.Account, error) { - - account, err := accGet.GetAccount(in.Address) - if err != nil { - return nil, err - } - - // ensure pubKey of validator is set correctly - if pubKey.GetAddress() != account.GetAddress() { - return nil, fmt.Errorf("input address and public key address do not much") - } - - account.PublicKey = *pubKey - - // can the account bond? 
- if !hasBondPermission(accGet, account, log) { - return nil, fmt.Errorf("account '%s' lacks bond permission", account.Address) - } - - return account, nil -} diff --git a/execution/contexts/unbond_context.go b/execution/contexts/unbond_context.go index 403f344ff..04f91223f 100644 --- a/execution/contexts/unbond_context.go +++ b/execution/contexts/unbond_context.go @@ -26,8 +26,12 @@ func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) erro return fmt.Errorf("payload must be UnbondTx, but is: %v", txe.Envelope.Tx.Payload) } - power := new(big.Int).Neg(new(big.Int).SetUint64(ctx.tx.Input.GetAmount())) - account, err := validateBonding(ctx.StateWriter, ctx.tx.Input, ctx.tx.PublicKey, ctx.Logger) + if ctx.tx.Input.Address != ctx.tx.Output.Address { + return fmt.Errorf("input and output address must match") + } + + power := new(big.Int).SetUint64(ctx.tx.Output.GetAmount()) + account, err := ctx.StateWriter.GetAccount(ctx.tx.Input.Address) if err != nil { return err } @@ -37,7 +41,7 @@ func (ctx *UnbondContext) Execute(txe *exec.TxExecution, p payload.Payload) erro return err } - _, err = ctx.ValidatorSet.SetPower(account.PublicKey, power) + err = validator.SubtractPower(ctx.ValidatorSet, account.PublicKey, power) if err != nil { return err } diff --git a/execution/execution.go b/execution/execution.go index ce94b1812..3140ea1a5 100644 --- a/execution/execution.go +++ b/execution/execution.go @@ -244,13 +244,20 @@ func (exe *executor) Execute(txEnv *txs.Envelope) (txe *exec.TxExecution, err er }() // Validate inputs and check sequence numbers - err = validateInputs(txEnv.Tx, exe.stateCache) + err = exe.validateInputs(txEnv.Tx) if err != nil { logger.InfoMsg("Transaction validate failed", structure.ErrorKey, err) txe.PushError(err) return nil, err } + err = exe.updateSignatories(txEnv.Signatories) + if err != nil { + logger.InfoMsg("Updating signatories failed", structure.ErrorKey, err) + txe.PushError(err) + return nil, err + } + err = txExecutor.Execute(txe, txe.Envelope.Tx.Payload) if err != nil { logger.InfoMsg("Transaction execution failed", structure.ErrorKey, err) @@ -258,10 +265,10 @@ func (exe *executor) Execute(txEnv *txs.Envelope) (txe *exec.TxExecution, err er return nil, err } - // Initialise public keys and increment sequence numbers for Tx inputs - err = exe.updateSignatories(txEnv) + // Increment sequence numbers for Tx inputs + err = exe.updateSequenceNumbers(txEnv) if err != nil { - logger.InfoMsg("Updating signatories failed", structure.ErrorKey, err) + logger.InfoMsg("Updating sequences failed", structure.ErrorKey, err) txe.PushError(err) return nil, err } @@ -271,9 +278,9 @@ func (exe *executor) Execute(txEnv *txs.Envelope) (txe *exec.TxExecution, err er return nil, fmt.Errorf("unknown transaction type: %v", txEnv.Tx.Type()) } -func validateInputs(tx *txs.Tx, getter acmstate.AccountGetter) error { +func (exe *executor) validateInputs(tx *txs.Tx) error { for _, in := range tx.GetInputs() { - acc, err := getter.GetAccount(in.Address) + acc, err := exe.stateCache.GetAccount(in.Address) if err != nil { return err } @@ -295,7 +302,7 @@ func validateInputs(tx *txs.Tx, getter acmstate.AccountGetter) error { return errors.ErrorCodeInsufficientFunds } // Check for Input permission - v, err := acc.Permissions.Base.Compose(acmstate.GlobalAccountPermissions(getter).Base).Get(permission.Input) + v, err := acc.Permissions.Base.Compose(acmstate.GlobalAccountPermissions(exe.stateCache).Base).Get(permission.Input) if err != nil { return err } @@ -306,6 +313,29 @@ func 
validateInputs(tx *txs.Tx, getter acmstate.AccountGetter) error { return nil } +// Capture public keys +func (exe *executor) updateSignatories(sigs []txs.Signatory) error { + for _, sig := range sigs { + // pointer dereferences are safe since txEnv.Validate() is run by + // txEnv.Verify() above which checks they are non-nil + acc, err := exe.stateCache.GetAccount(*sig.Address) + if err != nil { + return fmt.Errorf("error getting account on which to set public key: %v", *sig.Address) + } + // Important that verify has been run against signatories at this point + if sig.PublicKey.GetAddress() != acc.Address { + return fmt.Errorf("unexpected mismatch between address %v and supplied public key %v", + acc.Address, sig.PublicKey) + } + acc.PublicKey = *sig.PublicKey + err = exe.stateCache.UpdateAccount(acc) + if err != nil { + return fmt.Errorf("error updating account after setting public key: %v", err) + } + } + return nil +} + // Commit the current state - optionally pass in the tendermint ABCI header for that to be included with the BeginBlock // StreamEvent func (exe *executor) Commit(header *abciTypes.Header) (stateHash []byte, err error) { @@ -411,21 +441,13 @@ func (exe *executor) finaliseBlockExecution(header *abciTypes.Header) (*exec.Blo return be, nil } -// Capture public keys and update sequence numbers -func (exe *executor) updateSignatories(txEnv *txs.Envelope) error { +// update sequence numbers +func (exe *executor) updateSequenceNumbers(txEnv *txs.Envelope) error { for _, sig := range txEnv.Signatories { - // pointer dereferences are safe since txEnv.Validate() is run by txEnv.Verify() above which checks they are - // non-nil acc, err := exe.stateCache.GetAccount(*sig.Address) if err != nil { return fmt.Errorf("error getting account on which to set public key: %v", *sig.Address) } - // Important that verify has been run against signatories at this point - if sig.PublicKey.GetAddress() != acc.Address { - return fmt.Errorf("unexpected mismatch between address %v and supplied public key %v", - acc.Address, sig.PublicKey) - } - acc.PublicKey = *sig.PublicKey exe.logger.TraceMsg("Incrementing sequence number Tx signatory/input", "height", exe.block.Height, @@ -433,10 +455,11 @@ func (exe *executor) updateSignatories(txEnv *txs.Envelope) error { "account", acc.Address, "old_sequence", acc.Sequence, "new_sequence", acc.Sequence+1) + acc.Sequence++ err = exe.stateCache.UpdateAccount(acc) if err != nil { - return fmt.Errorf("error updating account after setting public key: %v", err) + return fmt.Errorf("error updating account after incrementing sequence: %v", err) } } return nil diff --git a/integration/governance/bonding_test.go b/integration/governance/bonding_test.go index 527f34810..bf16db284 100644 --- a/integration/governance/bonding_test.go +++ b/integration/governance/bonding_test.go @@ -6,13 +6,12 @@ import ( "bytes" "testing" - "github.com/hyperledger/burrow/permission" - - "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/core" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" + "github.com/hyperledger/burrow/permission" + "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/require" ) @@ -34,11 +33,10 @@ func TestBonding(t *testing.T) { connectAllKernels(genesisKernels) t.Run("NoPermission", func(t *testing.T) { - val := acm.GeneratePrivateAccountFromSecret("validator_1") localAddress := genesisKernels[4].GRPCListenAddress().String() inputAccount := 
genesisAccounts[4].GetAddress() tcli := rpctest.NewTransactClient(t, localAddress) - bondTx := createBondTx(inputAccount, val.GetPublicKey(), uint64(1<<2)) + bondTx := payload.NewBondTx(inputAccount, uint64(1<<2)) _, err = payloadSync(tcli, bondTx) require.Error(t, err) }) @@ -56,7 +54,7 @@ func TestBonding(t *testing.T) { accBefore := getAccount(t, qcli, inputAccount) var power uint64 = 1 << 16 - bondTx := createBondTx(inputAccount, valAccount.GetPublicKey(), power) + bondTx := payload.NewBondTx(inputAccount, power) _, err = payloadSync(tcli, bondTx) require.NoError(t, err) accAfter := getAccount(t, qcli, inputAccount) @@ -78,7 +76,7 @@ func TestBonding(t *testing.T) { waitFor(10, valKernel.Blockchain) checkProposed(t, genesisKernels[0], valAccount.GetPublicKey().GetAddress().Bytes()) - unbondTx := createUnbondTx(inputAccount, valAccount.GetPublicKey(), power) + unbondTx := payload.NewUnbondTx(inputAccount, power) _, err = payloadSync(tcli, unbondTx) require.NoError(t, err) diff --git a/integration/governance/helpers.go b/integration/governance/helpers.go index ed327d21b..5136c2c1e 100644 --- a/integration/governance/helpers.go +++ b/integration/governance/helpers.go @@ -65,26 +65,6 @@ func createKernel(genesisDoc *genesis.GenesisDoc, account *acm.PrivateAccount, return kernel, kernel.Boot() } -func createBondTx(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.BondTx { - return &payload.BondTx{ - Input: &payload.TxInput{ - Address: address, - Amount: amount, - }, - PublicKey: &pubKey, - } -} - -func createUnbondTx(address crypto.Address, pubKey crypto.PublicKey, amount uint64) *payload.UnbondTx { - return &payload.UnbondTx{ - Input: &payload.TxInput{ - Address: address, - Amount: amount, - }, - PublicKey: &pubKey, - } -} - func signTx(t *testing.T, tx payload.Payload, chainID string, from acm.AddressableSigner) (txEnv *txs.Envelope) { txEnv = txs.Enclose(chainID, tx) require.NoError(t, txEnv.Sign(from)) diff --git a/protobuf/payload.proto b/protobuf/payload.proto index 54869bb65..21f76f888 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -5,7 +5,6 @@ option go_package = "github.com/hyperledger/burrow/txs/payload"; import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "permission.proto"; -import "crypto.proto"; import "spec.proto"; package payload; @@ -111,18 +110,16 @@ message BondTx { // Account with bonding permission TxInput Input = 1; - // The validator to bond, public key must be known - crypto.PublicKey PublicKey = 2; + // PublicKey is retrieved from signature } message UnbondTx { option (gogoproto.goproto_stringer) = false; option (gogoproto.goproto_getters) = false; - // Account with bonding permission TxInput Input = 1; - // The validator to unbond, public key must be known - crypto.PublicKey PublicKey = 2; + // Account to unbond + TxOutput Output = 2; } message GovTx { diff --git a/txs/payload/bond_tx.go b/txs/payload/bond_tx.go index a82c017f9..d42c0722d 100644 --- a/txs/payload/bond_tx.go +++ b/txs/payload/bond_tx.go @@ -7,11 +7,13 @@ import ( "github.com/hyperledger/burrow/crypto" ) -func NewBondTx(pubkey crypto.PublicKey) (*BondTx, error) { +func NewBondTx(address crypto.Address, amount uint64) *BondTx { return &BondTx{ - Input: &TxInput{}, - PublicKey: &crypto.PublicKey{}, - }, nil + Input: &TxInput{ + Address: address, + Amount: amount, + }, + } } func (tx *BondTx) Type() Type { diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 0647de24b..72762323f 100644 --- a/txs/payload/payload.pb.go +++ 
b/txs/payload/payload.pb.go @@ -9,7 +9,6 @@ import ( proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" - crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" spec "github.com/hyperledger/burrow/genesis/spec" permission "github.com/hyperledger/burrow/permission" @@ -556,12 +555,10 @@ func (*NameTx) XXX_MessageName() string { type BondTx struct { // Account with bonding permission - Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` - // The validator to bond, public key must be known - PublicKey *crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *BondTx) Reset() { *m = BondTx{} } @@ -601,13 +598,12 @@ func (*BondTx) XXX_MessageName() string { } type UnbondTx struct { - // Account with bonding permission Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` - // The validator to unbond, public key must be known - PublicKey *crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + // Account to unbond + Output *TxOutput `protobuf:"bytes,2,opt,name=Output,proto3" json:"Output,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *UnbondTx) Reset() { *m = UnbondTx{} } @@ -975,69 +971,69 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 990 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcd, 0x6f, 0xe3, 0x44, - 0x14, 0xaf, 0x6b, 0xe7, 0xa3, 0x6f, 0xd3, 0x92, 0x0e, 0x1f, 0x8a, 0x2a, 0x91, 0xac, 0x02, 0x82, - 0x05, 0xb6, 0x09, 0xec, 0xf2, 0x21, 0xf5, 0x82, 0xe2, 0x26, 0xed, 0x16, 0x96, 0x6d, 0x34, 0x71, - 0x77, 0x11, 0x88, 0x83, 0x93, 0x0c, 0x89, 0x85, 0xed, 0x31, 0xf6, 0x64, 0xb1, 0x39, 0x71, 0xe0, - 0xc0, 0x15, 0x71, 0xe1, 0xd8, 0x7f, 0x81, 0xff, 0x80, 0x63, 0x8f, 0x1c, 0x11, 0x87, 0x0a, 0x75, - 0x2f, 0x88, 0xbf, 0x02, 0xcd, 0x78, 0xc6, 0x71, 0xb2, 0x68, 0x37, 0xad, 0x56, 0x7b, 0xf3, 0x7b, - 0xef, 0x37, 0xef, 0xbd, 0xf9, 0xbd, 0x8f, 0x31, 0x6c, 0x06, 0x76, 0xe2, 0x52, 0x7b, 0xdc, 0x0a, - 0x42, 0xca, 0x28, 0x2a, 0x49, 0x71, 0x67, 0x77, 0xe2, 0xb0, 0xe9, 0x6c, 0xd8, 0x1a, 0x51, 0xaf, - 0x3d, 0xa1, 0x13, 0xda, 0x16, 0xf6, 0xe1, 0xec, 0x6b, 0x21, 0x09, 0x41, 0x7c, 0xa5, 0xe7, 0x76, - 0xaa, 0x01, 0x09, 0x3d, 0x27, 0x8a, 0x1c, 0xea, 0x4b, 0x4d, 0x65, 0x14, 0x26, 0x01, 0x53, 0x76, - 0x88, 0x02, 0x32, 0x4a, 0xbf, 0x9b, 0x3f, 0xeb, 0xa0, 0x77, 0xfc, 0x04, 0xbd, 0x09, 0xc5, 0x7d, - 0xdb, 0x75, 0xad, 0xb8, 0xa6, 0x5d, 0xd7, 0x6e, 0x5c, 0xbb, 0xf5, 0x42, 0x4b, 0xe5, 0x92, 0xaa, - 0xb1, 0x34, 0x73, 0xe0, 0x80, 0xf8, 0x63, 0x2b, 0xae, 0xad, 0x2f, 0x01, 0x53, 0x35, 0x96, 0x66, - 0x0e, 0xbc, 0x67, 0x7b, 0xc4, 0x8a, 0x6b, 0xfa, 0x12, 0x30, 0x55, 0x63, 0x69, 
0x46, 0x6f, 0x43, - 0xa9, 0x4f, 0x42, 0x2f, 0xb2, 0xe2, 0x9a, 0x21, 0x90, 0xd5, 0x0c, 0x29, 0xf5, 0x58, 0x01, 0xd0, - 0xeb, 0x50, 0x38, 0xa4, 0x0f, 0xad, 0xb8, 0x56, 0x10, 0xc8, 0xad, 0x0c, 0x29, 0xb4, 0x38, 0x35, - 0xf2, 0xd0, 0x26, 0x15, 0x39, 0x16, 0x97, 0x42, 0xa7, 0x6a, 0x2c, 0xcd, 0x68, 0x17, 0xca, 0x27, - 0xfe, 0x30, 0x85, 0x96, 0x04, 0x74, 0x3b, 0x83, 0x2a, 0x03, 0xce, 0x20, 0x3c, 0x53, 0xd3, 0x66, - 0xa3, 0xa9, 0x15, 0xd7, 0xca, 0x4b, 0x99, 0x4a, 0x3d, 0x56, 0x00, 0x74, 0x1b, 0xa0, 0x1f, 0xd2, - 0x80, 0x46, 0x36, 0x27, 0x75, 0x43, 0xc0, 0x5f, 0x9c, 0x5f, 0x2c, 0x33, 0xe1, 0x1c, 0x6c, 0xcf, - 0x38, 0x3b, 0x6d, 0x68, 0xcd, 0x5f, 0x34, 0x28, 0x59, 0xf1, 0x91, 0x1f, 0xcc, 0x18, 0xba, 0x07, - 0xa5, 0xce, 0x78, 0x1c, 0x92, 0x28, 0x12, 0x85, 0xa9, 0x98, 0xef, 0x9f, 0x9d, 0x37, 0xd6, 0xfe, - 0x3a, 0x6f, 0xdc, 0xcc, 0xf5, 0xc4, 0x34, 0x09, 0x48, 0xe8, 0x92, 0xf1, 0x84, 0x84, 0xed, 0xe1, - 0x2c, 0x0c, 0xe9, 0x77, 0x6d, 0x59, 0x70, 0x79, 0x16, 0x2b, 0x27, 0xe8, 0x15, 0x28, 0x76, 0x3c, - 0x3a, 0xf3, 0x99, 0x28, 0x9f, 0x81, 0xa5, 0x84, 0x76, 0xa0, 0x3c, 0x20, 0xdf, 0xce, 0x88, 0x3f, - 0x22, 0xa2, 0x5e, 0x06, 0xce, 0xe4, 0x3d, 0xe3, 0xd7, 0xd3, 0xc6, 0x5a, 0x33, 0x86, 0xb2, 0x15, - 0x1f, 0xcf, 0xd8, 0x73, 0xcc, 0x4a, 0x46, 0xfe, 0x73, 0x5d, 0x35, 0x27, 0x7a, 0x03, 0x0a, 0x82, - 0x17, 0xd9, 0xa5, 0x73, 0xfe, 0x25, 0x5f, 0x38, 0x35, 0xa3, 0x4f, 0xe6, 0x09, 0xae, 0x8b, 0x04, - 0xdf, 0xbd, 0x7a, 0x72, 0x3b, 0x50, 0x3e, 0xb4, 0xa3, 0xbb, 0x8e, 0xe7, 0x30, 0x45, 0x8d, 0x92, - 0x51, 0x15, 0xf4, 0x03, 0x42, 0x44, 0xdf, 0x1a, 0x98, 0x7f, 0xa2, 0x23, 0x30, 0xba, 0x36, 0xb3, - 0x45, 0x83, 0x56, 0xcc, 0x0f, 0x24, 0x2f, 0xbb, 0x4f, 0x0e, 0x3d, 0x74, 0x7c, 0x3b, 0x4c, 0x5a, - 0x77, 0x48, 0x6c, 0x26, 0x8c, 0x44, 0x58, 0xb8, 0x40, 0x5f, 0x82, 0xf1, 0xa0, 0x33, 0xf8, 0x4c, - 0x34, 0x71, 0xc5, 0x3c, 0xbc, 0x92, 0xab, 0x7f, 0xcf, 0x1b, 0x5b, 0xcc, 0x9e, 0x44, 0x37, 0xa9, - 0xe7, 0x30, 0xe2, 0x05, 0x2c, 0xc1, 0xc2, 0xa9, 0xa4, 0xd6, 0x51, 0xd3, 0x8c, 0x6e, 0x40, 0x51, - 0x50, 0xc7, 0x2b, 0xaa, 0xff, 0x2f, 0xb5, 0xd2, 0x8e, 0xde, 0x81, 0x52, 0xda, 0x06, 0x9c, 0x5b, - 0x7d, 0x61, 0x66, 0x54, 0x83, 0x60, 0x85, 0xd8, 0x2b, 0xff, 0x74, 0xda, 0x58, 0x13, 0xa1, 0x68, - 0x36, 0xe6, 0x2b, 0x57, 0xf1, 0x43, 0x28, 0xf3, 0x23, 0x9d, 0x70, 0x12, 0xc9, 0x6d, 0xf3, 0x52, - 0x2b, 0xb7, 0xdb, 0x94, 0xcd, 0x34, 0x38, 0x35, 0x38, 0xc3, 0xca, 0xbb, 0x05, 0x6a, 0x01, 0xad, - 0x1c, 0x0f, 0x81, 0xc1, 0x4f, 0x88, 0x58, 0x1b, 0x58, 0x7c, 0x73, 0x9d, 0xa8, 0xa7, 0x9e, 0xea, - 0x44, 0x61, 0x1e, 0xab, 0xba, 0x8c, 0xf8, 0x8d, 0xda, 0x3b, 0x2b, 0x47, 0x6c, 0xc3, 0x46, 0x7f, - 0x36, 0x74, 0x9d, 0xd1, 0xa7, 0x24, 0x91, 0x57, 0xdc, 0x6e, 0xc9, 0x46, 0xcc, 0x0c, 0x78, 0x8e, - 0xc9, 0xf1, 0xe9, 0xcd, 0x77, 0xd7, 0xf3, 0x08, 0xf7, 0x83, 0x26, 0x57, 0xef, 0x25, 0x3a, 0x65, - 0x1f, 0xb6, 0x3a, 0xa3, 0x11, 0x9f, 0xe4, 0x93, 0x60, 0x6c, 0x33, 0xa2, 0x1a, 0xe6, 0xe5, 0x96, - 0x78, 0x81, 0x2c, 0xe2, 0x05, 0xae, 0xcd, 0x88, 0xc4, 0x88, 0x32, 0x6a, 0x78, 0xe9, 0x48, 0x2e, - 0x85, 0x7f, 0xb4, 0xfc, 0x4e, 0x5d, 0xf9, 0xd2, 0x4d, 0xa8, 0xdc, 0xa7, 0xcc, 0xf1, 0x27, 0x0f, - 0x88, 0x33, 0x99, 0xa6, 0x2b, 0x46, 0xc7, 0x0b, 0x3a, 0x74, 0x02, 0x15, 0xe5, 0xf9, 0x8e, 0x1d, - 0x4d, 0x45, 0xb5, 0x2b, 0xe6, 0x7b, 0x97, 0x9f, 0xdc, 0x05, 0x37, 0xfc, 0x7d, 0x51, 0xb2, 0x7c, - 0xdb, 0xb6, 0x1f, 0x7b, 0x02, 0x70, 0x06, 0xc9, 0x5d, 0xf5, 0xab, 0xec, 0xa5, 0xb9, 0x04, 0xdd, - 0x75, 0xd0, 0xad, 0x58, 0x71, 0x5c, 0xc9, 0x60, 0x1d, 0x3f, 0xc1, 0xdc, 0x90, 0x73, 0xff, 0xa3, - 0x06, 0xc6, 0x7d, 0xca, 0xc8, 0x33, 0x5f, 0xe4, 0x2b, 0x70, 0x9d, 0x4b, 0xe3, 0xe1, 0x9c, 0x9e, - 0x6c, 
0xf6, 0xb4, 0xdc, 0xec, 0x5d, 0x87, 0x6b, 0x5d, 0x12, 0x8d, 0x42, 0x27, 0x60, 0x0e, 0xf5, - 0xe5, 0x58, 0xe6, 0x55, 0xf9, 0x17, 0x59, 0x7f, 0xca, 0x8b, 0x9c, 0x8b, 0xfb, 0xdb, 0x3a, 0x14, - 0x4d, 0xdb, 0x75, 0x29, 0x5b, 0xa8, 0x90, 0xf6, 0xd4, 0x0a, 0xf1, 0x3e, 0x39, 0x70, 0x7c, 0xdb, - 0x75, 0xbe, 0x77, 0xfc, 0x89, 0xfc, 0x07, 0xba, 0x5a, 0x9f, 0xe4, 0xdd, 0xa0, 0x7d, 0xd8, 0x0c, - 0x64, 0x88, 0x01, 0xb3, 0x59, 0xba, 0x5a, 0xb6, 0x6e, 0xbd, 0x9a, 0xbb, 0x0c, 0xcf, 0x36, 0xcb, - 0x48, 0x80, 0xf0, 0xe2, 0x19, 0xf4, 0x1a, 0x14, 0x78, 0x4d, 0xa3, 0x5a, 0x41, 0x34, 0xc0, 0x66, - 0x76, 0x98, 0x6b, 0x71, 0x6a, 0x6b, 0x7e, 0x04, 0x9b, 0x0b, 0x4e, 0x50, 0x05, 0xca, 0x7d, 0x7c, - 0xdc, 0x3f, 0x1e, 0xf4, 0xba, 0xd5, 0x35, 0x2e, 0xf5, 0x3e, 0xef, 0xed, 0x9f, 0x58, 0xbd, 0x6e, - 0x55, 0x43, 0x00, 0xc5, 0x83, 0xce, 0xd1, 0xdd, 0x5e, 0xb7, 0xba, 0x6e, 0x7e, 0x7c, 0x76, 0x51, - 0xd7, 0xfe, 0xb8, 0xa8, 0x6b, 0x7f, 0x5f, 0xd4, 0xb5, 0xdf, 0x1f, 0xd5, 0xb5, 0xb3, 0x47, 0x75, - 0xed, 0x8b, 0xb7, 0x9e, 0x7c, 0x6b, 0x16, 0x47, 0x6d, 0x99, 0xc5, 0xb0, 0x28, 0x7e, 0x38, 0x6f, - 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0xda, 0x2a, 0x1e, 0x80, 0xe5, 0x0a, 0x00, 0x00, + // 980 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0xcf, 0x7a, 0xd7, 0x7f, 0xfa, 0xea, 0x04, 0x77, 0xf8, 0x23, 0x2b, 0x12, 0x76, 0x65, 0x10, + 0xb4, 0xd0, 0xda, 0xd0, 0xf2, 0x47, 0xca, 0x05, 0x79, 0x63, 0x27, 0x0d, 0x2a, 0x4d, 0x34, 0xde, + 0xb4, 0x08, 0xc4, 0x61, 0x6c, 0x0f, 0xeb, 0x95, 0xbc, 0x3b, 0xcb, 0xee, 0xb8, 0xac, 0x39, 0x71, + 0xe0, 0xc0, 0x15, 0x71, 0xe1, 0x98, 0xaf, 0xc0, 0x37, 0xe0, 0x98, 0x23, 0x47, 0xc4, 0x21, 0x42, + 0xe9, 0x05, 0xf1, 0x29, 0xd0, 0xcc, 0xce, 0xac, 0xd7, 0x6e, 0xd5, 0x3a, 0x11, 0xe2, 0xb6, 0xf3, + 0xde, 0x6f, 0xde, 0x7b, 0xf3, 0xfb, 0xbd, 0x79, 0xb3, 0xb0, 0x19, 0x92, 0xf9, 0x94, 0x91, 0x71, + 0x3b, 0x8c, 0x18, 0x67, 0xa8, 0xac, 0x96, 0xdb, 0xb7, 0x5d, 0x8f, 0x4f, 0x66, 0xc3, 0xf6, 0x88, + 0xf9, 0x1d, 0x97, 0xb9, 0xac, 0x23, 0xfd, 0xc3, 0xd9, 0xd7, 0x72, 0x25, 0x17, 0xf2, 0x2b, 0xdd, + 0xb7, 0x5d, 0x0b, 0x69, 0xe4, 0x7b, 0x71, 0xec, 0xb1, 0x40, 0x59, 0x20, 0x0e, 0xe9, 0x28, 0xfd, + 0x6e, 0xfd, 0x64, 0x82, 0xd9, 0x0d, 0xe6, 0xe8, 0x6d, 0x28, 0xed, 0x92, 0xe9, 0xd4, 0x49, 0xea, + 0xc6, 0x75, 0xe3, 0xc6, 0xd5, 0x3b, 0x2f, 0xb5, 0x75, 0xf6, 0xd4, 0x8c, 0x95, 0x5b, 0x00, 0x07, + 0x34, 0x18, 0x3b, 0x49, 0xbd, 0xb0, 0x02, 0x4c, 0xcd, 0x58, 0xb9, 0x05, 0xf0, 0x01, 0xf1, 0xa9, + 0x93, 0xd4, 0xcd, 0x15, 0x60, 0x6a, 0xc6, 0xca, 0x8d, 0xde, 0x81, 0xf2, 0x11, 0x8d, 0xfc, 0xd8, + 0x49, 0xea, 0x96, 0x44, 0xd6, 0x32, 0xa4, 0xb2, 0x63, 0x0d, 0x40, 0x6f, 0x42, 0x71, 0x9f, 0x3d, + 0x76, 0x92, 0x7a, 0x51, 0x22, 0xb7, 0x32, 0xa4, 0xb4, 0xe2, 0xd4, 0x29, 0x52, 0xdb, 0x4c, 0xd6, + 0x58, 0x5a, 0x49, 0x9d, 0x9a, 0xb1, 0x72, 0xa3, 0xdb, 0x50, 0x39, 0x0e, 0x86, 0x29, 0xb4, 0x2c, + 0xa1, 0xd7, 0x32, 0xa8, 0x76, 0xe0, 0x0c, 0x22, 0x2a, 0xb5, 0x09, 0x1f, 0x4d, 0x9c, 0xa4, 0x5e, + 0x59, 0xa9, 0x54, 0xd9, 0xb1, 0x06, 0xa0, 0xbb, 0x00, 0x47, 0x11, 0x0b, 0x59, 0x4c, 0x04, 0xa9, + 0x57, 0x24, 0xfc, 0xe5, 0xc5, 0xc1, 0x32, 0x17, 0xce, 0xc1, 0x76, 0xac, 0xd3, 0x93, 0xa6, 0xd1, + 0xfa, 0xd9, 0x80, 0xb2, 0x93, 0x1c, 0x04, 0xe1, 0x8c, 0xa3, 0x07, 0x50, 0xee, 0x8e, 0xc7, 0x11, + 0x8d, 0x63, 0x29, 0x4c, 0xd5, 0xfe, 0xe0, 0xf4, 0xac, 0xb9, 0xf1, 0xe7, 0x59, 0xf3, 0x56, 0xae, + 0x0b, 0x26, 0xf3, 0x90, 0x46, 0x53, 0x3a, 0x76, 0x69, 0xd4, 0x19, 0xce, 0xa2, 0x88, 0x7d, 0xdb, + 0x19, 0x45, 0xf3, 0x90, 0xb3, 0xb6, 0xda, 0x8b, 0x75, 0x10, 0xf4, 0x1a, 0x94, 0xba, 0x3e, 
0x9b, + 0x05, 0x5c, 0xca, 0x67, 0x61, 0xb5, 0x42, 0xdb, 0x50, 0x19, 0xd0, 0x6f, 0x66, 0x34, 0x18, 0x51, + 0xa9, 0x97, 0x85, 0xb3, 0xf5, 0x8e, 0xf5, 0xcb, 0x49, 0x73, 0xa3, 0x95, 0x40, 0xc5, 0x49, 0x0e, + 0x67, 0xfc, 0x7f, 0xac, 0x4a, 0x65, 0xfe, 0xa3, 0xa0, 0x9b, 0x13, 0xbd, 0x05, 0x45, 0xc9, 0x8b, + 0xea, 0xd2, 0x05, 0xff, 0x8a, 0x2f, 0x9c, 0xba, 0xd1, 0xa7, 0x8b, 0x02, 0x0b, 0xb2, 0xc0, 0xf7, + 0x2e, 0x5f, 0xdc, 0x36, 0x54, 0xf6, 0x49, 0x7c, 0xdf, 0xf3, 0x3d, 0xae, 0xa9, 0xd1, 0x6b, 0x54, + 0x03, 0x73, 0x8f, 0x52, 0xd9, 0xb7, 0x16, 0x16, 0x9f, 0xe8, 0x00, 0xac, 0x1e, 0xe1, 0x44, 0x36, + 0x68, 0xd5, 0xfe, 0x50, 0xf1, 0x72, 0xfb, 0xf9, 0xa9, 0x87, 0x5e, 0x40, 0xa2, 0x79, 0xfb, 0x1e, + 0x4d, 0xec, 0x39, 0xa7, 0x31, 0x96, 0x21, 0xd0, 0x97, 0x60, 0x3d, 0xea, 0x0e, 0x3e, 0x93, 0x4d, + 0x5c, 0xb5, 0xf7, 0x2f, 0x15, 0xea, 0x9f, 0xb3, 0xe6, 0x16, 0x27, 0x6e, 0x7c, 0x8b, 0xf9, 0x1e, + 0xa7, 0x7e, 0xc8, 0xe7, 0x58, 0x06, 0x55, 0xd4, 0x7a, 0xfa, 0x36, 0xa3, 0x1b, 0x50, 0x92, 0xd4, + 0x09, 0x45, 0xcd, 0x67, 0x52, 0xab, 0xfc, 0xe8, 0x5d, 0x28, 0xa7, 0x6d, 0x20, 0xb8, 0x35, 0x97, + 0xee, 0x8c, 0x6e, 0x10, 0xac, 0x11, 0x3b, 0x95, 0x1f, 0x4f, 0x9a, 0x1b, 0x32, 0x15, 0xcb, 0xae, + 0xf9, 0xda, 0x2a, 0x7e, 0x04, 0x15, 0xb1, 0xa5, 0x1b, 0xb9, 0xb1, 0x9a, 0x36, 0xaf, 0xb4, 0x73, + 0xd3, 0x4c, 0xfb, 0x6c, 0x4b, 0x50, 0x83, 0x33, 0xac, 0x3a, 0x5b, 0xa8, 0x07, 0xd0, 0xda, 0xf9, + 0x10, 0x58, 0x62, 0x87, 0xcc, 0x75, 0x05, 0xcb, 0x6f, 0x61, 0x93, 0x7a, 0x9a, 0xa9, 0x4d, 0x0a, + 0xf3, 0x94, 0xea, 0x2a, 0xe3, 0x8e, 0x9e, 0x3b, 0xeb, 0x66, 0xcc, 0xd1, 0xe3, 0x2e, 0x46, 0xd1, + 0xda, 0xf5, 0xde, 0x84, 0x52, 0xca, 0xb3, 0x62, 0xe7, 0x19, 0x42, 0x28, 0x40, 0x2e, 0xd1, 0xf7, + 0x86, 0x9a, 0xa1, 0x17, 0x90, 0x7c, 0x17, 0xb6, 0xba, 0xa3, 0x91, 0xb8, 0x92, 0xc7, 0xe1, 0x98, + 0x70, 0xaa, 0x95, 0x7f, 0xb5, 0x2d, 0x9f, 0x12, 0x87, 0xfa, 0xe1, 0x94, 0x70, 0xaa, 0x30, 0x52, + 0x0f, 0x03, 0xaf, 0x6c, 0xc9, 0x95, 0xf0, 0xb7, 0x91, 0x1f, 0x8e, 0x6b, 0x1f, 0xb7, 0x05, 0xd5, + 0x87, 0x8c, 0x7b, 0x81, 0xfb, 0x88, 0x7a, 0xee, 0x24, 0x3d, 0xb4, 0x89, 0x97, 0x6c, 0xe8, 0x18, + 0xaa, 0x3a, 0xf2, 0x3d, 0x12, 0x4f, 0xa4, 0x6c, 0x55, 0xfb, 0xfd, 0x8b, 0x5f, 0xc1, 0xa5, 0x30, + 0xe2, 0xa1, 0xd0, 0x6b, 0xf5, 0x48, 0x5d, 0x7b, 0x6a, 0x96, 0xe3, 0x0c, 0x92, 0x3b, 0xea, 0x57, + 0xd9, 0x93, 0x71, 0x01, 0xba, 0x1b, 0x60, 0x3a, 0x89, 0xe6, 0xb8, 0x9a, 0xc1, 0xba, 0xc1, 0x1c, + 0x0b, 0x47, 0x2e, 0xfc, 0x0f, 0x06, 0x58, 0x0f, 0x19, 0xa7, 0xff, 0xf9, 0x44, 0x5e, 0x83, 0xeb, + 0x5c, 0x19, 0x8f, 0x17, 0xf4, 0x64, 0x97, 0xc8, 0xc8, 0x5d, 0xa2, 0xeb, 0x70, 0xb5, 0x47, 0xe3, + 0x51, 0xe4, 0x85, 0xdc, 0x63, 0x81, 0xba, 0x5f, 0x79, 0x53, 0xfe, 0x69, 0x35, 0x5f, 0xf0, 0xb4, + 0xe6, 0xf2, 0xfe, 0x5a, 0x80, 0x92, 0x4d, 0xa6, 0x53, 0xc6, 0x97, 0x14, 0x32, 0x5e, 0xa8, 0x90, + 0xe8, 0x93, 0x3d, 0x2f, 0x20, 0x53, 0xef, 0x3b, 0x2f, 0x70, 0xd5, 0xcf, 0xcc, 0xe5, 0xfa, 0x24, + 0x1f, 0x06, 0xed, 0xc2, 0x66, 0xa8, 0x52, 0x0c, 0x38, 0xe1, 0xe9, 0x8c, 0xd8, 0xba, 0xf3, 0x7a, + 0xee, 0x30, 0xa2, 0xda, 0xac, 0x22, 0x09, 0xc2, 0xcb, 0x7b, 0xd0, 0x1b, 0x50, 0x14, 0x9a, 0xc6, + 0xf5, 0xa2, 0x6c, 0x80, 0xcd, 0x6c, 0xb3, 0xb0, 0xe2, 0xd4, 0xd7, 0xfa, 0x18, 0x36, 0x97, 0x82, + 0xa0, 0x2a, 0x54, 0x8e, 0xf0, 0xe1, 0xd1, 0xe1, 0xa0, 0xdf, 0xab, 0x6d, 0x88, 0x55, 0xff, 0xf3, + 0xfe, 0xee, 0xb1, 0xd3, 0xef, 0xd5, 0x0c, 0x04, 0x50, 0xda, 0xeb, 0x1e, 0xdc, 0xef, 0xf7, 0x6a, + 0x05, 0xfb, 0x93, 0xd3, 0xf3, 0x86, 0xf1, 0xfb, 0x79, 0xc3, 0xf8, 0xeb, 0xbc, 0x61, 0xfc, 0xf6, + 0xa4, 0x61, 0x9c, 0x3e, 0x69, 0x18, 0x5f, 0xdc, 0x7c, 0xfe, 0xa9, 0x79, 0x12, 0x77, 0x54, 0x15, + 0xc3, 0x92, 0xfc, 
0x73, 0xbc, 0xfb, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbf, 0xdf, 0xc1, 0x1b, + 0xa0, 0x0a, 0x00, 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1448,16 +1444,6 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { } i += n19 } - if m.PublicKey != nil { - dAtA[i] = 0x12 - i++ - i = encodeVarintPayload(dAtA, i, uint64(m.PublicKey.Size())) - n20, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err - } - i += n20 - } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -1483,21 +1469,21 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n21, err := m.Input.MarshalTo(dAtA[i:]) + n20, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n21 + i += n20 } - if m.PublicKey != nil { + if m.Output != nil { dAtA[i] = 0x12 i++ - i = encodeVarintPayload(dAtA, i, uint64(m.PublicKey.Size())) - n22, err := m.PublicKey.MarshalTo(dAtA[i:]) + i = encodeVarintPayload(dAtA, i, uint64(m.Output.Size())) + n21, err := m.Output.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n22 + i += n21 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1569,11 +1555,11 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n23, err := m.Input.MarshalTo(dAtA[i:]) + n22, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n23 + i += n22 } if m.VotingWeight != 0 { dAtA[i] = 0x10 @@ -1584,21 +1570,21 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalHash.Size())) - n24, err := m.ProposalHash.MarshalTo(dAtA[i:]) + n23, err := m.ProposalHash.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n24 + i += n23 } if m.Proposal != nil { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n25, err := m.Proposal.MarshalTo(dAtA[i:]) + n24, err := m.Proposal.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n25 + i += n24 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1669,11 +1655,11 @@ func (m *Vote) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n26, err := m.Address.MarshalTo(dAtA[i:]) + n25, err := m.Address.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n26 + i += n25 if m.VotingWeight != 0 { dAtA[i] = 0x10 i++ @@ -1716,11 +1702,11 @@ func (m *Proposal) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n27, err := m.BatchTx.MarshalTo(dAtA[i:]) + n26, err := m.BatchTx.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n27 + i += n26 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1747,21 +1733,21 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n28, err := m.Proposal.MarshalTo(dAtA[i:]) + n27, err := m.Proposal.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n28 + i += n27 } if m.FinalizingTx != nil { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.FinalizingTx.Size())) - n29, err := m.FinalizingTx.MarshalTo(dAtA[i:]) + n28, err := m.FinalizingTx.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n29 + i += n28 } if m.ProposalState != 0 { dAtA[i] = 0x20 @@ -1989,10 +1975,6 
@@ func (m *BondTx) Size() (n int) { l = m.Input.Size() n += 1 + l + sovPayload(uint64(l)) } - if m.PublicKey != nil { - l = m.PublicKey.Size() - n += 1 + l + sovPayload(uint64(l)) - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2009,8 +1991,8 @@ func (m *UnbondTx) Size() (n int) { l = m.Input.Size() n += 1 + l + sovPayload(uint64(l)) } - if m.PublicKey != nil { - l = m.PublicKey.Size() + if m.Output != nil { + l = m.Output.Size() n += 1 + l + sovPayload(uint64(l)) } if m.XXX_unrecognized != nil { @@ -3554,42 +3536,6 @@ func (m *BondTx) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPayload - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPayload - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPayload - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.PublicKey == nil { - m.PublicKey = &crypto.PublicKey{} - } - if err := m.PublicKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipPayload(dAtA[iNdEx:]) @@ -3682,7 +3628,7 @@ func (m *UnbondTx) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PublicKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Output", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3709,10 +3655,10 @@ func (m *UnbondTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.PublicKey == nil { - m.PublicKey = &crypto.PublicKey{} + if m.Output == nil { + m.Output = &TxOutput{} } - if err := m.PublicKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.Output.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex diff --git a/txs/payload/unbond_tx.go b/txs/payload/unbond_tx.go index fbcdcbeb6..fe481a0d8 100644 --- a/txs/payload/unbond_tx.go +++ b/txs/payload/unbond_tx.go @@ -6,10 +6,15 @@ import ( "github.com/hyperledger/burrow/crypto" ) -func NewUnbondTx(address crypto.Address, height uint64) *UnbondTx { +func NewUnbondTx(address crypto.Address, amount uint64) *UnbondTx { return &UnbondTx{ - Input: &TxInput{}, - PublicKey: &crypto.PublicKey{}, + Input: &TxInput{ + Address: address, + }, + Output: &TxOutput{ + Address: address, + Amount: amount, + }, } } diff --git a/txs/tx_test.go b/txs/tx_test.go index a4dda8d14..6e19c1e3e 100644 --- a/txs/tx_test.go +++ b/txs/tx_test.go @@ -112,28 +112,24 @@ func TestNameTxSignable(t *testing.T) { } func TestBondTxSignable(t *testing.T) { - val := makePrivateAccount("output1").GetPublicKey() bondTx := &payload.BondTx{ Input: &payload.TxInput{ Address: makePrivateAccount("input1").GetAddress(), Amount: 12345, Sequence: 67890, }, - PublicKey: &val, } testTxMarshalJSON(t, bondTx) testTxSignVerify(t, bondTx) } func TestUnbondTxSignable(t *testing.T) { - val := makePrivateAccount("output1").GetPublicKey() unbondTx := &payload.UnbondTx{ Input: &payload.TxInput{ Address: makePrivateAccount("input1").GetAddress(), Amount: 12345, Sequence: 67890, }, - PublicKey: &val, } testTxMarshalJSON(t, unbondTx) testTxSignVerify(t, unbondTx) From 
aad2c729db67ed5de5591ef1584d5556f6edab43 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Fri, 12 Jul 2019 14:07:30 +0100 Subject: [PATCH 38/70] cleaner comments & combined input validation / pubkey capture Signed-off-by: Gregory Hill --- cmd/burrow/commands/tx.go | 14 ++++---- execution/contexts/bond_context.go | 1 + execution/execution.go | 52 ++++++++++++------------------ protobuf/payload.proto | 3 +- txs/payload/payload.pb.go | 2 +- 5 files changed, 31 insertions(+), 41 deletions(-) diff --git a/cmd/burrow/commands/tx.go b/cmd/burrow/commands/tx.go index 11d947879..144921a5b 100644 --- a/cmd/burrow/commands/tx.go +++ b/cmd/burrow/commands/tx.go @@ -43,9 +43,9 @@ func Tx(output Output) func(cmd *cli.Cmd) { address := conf.Address.String() cmd.Command("send", "send value to another account", func(cmd *cli.Cmd) { - sourceOpt := cmd.StringOpt("source", "", "Address to send from, if not set config is used") - targetOpt := cmd.StringOpt("target", "", "Address to receive transfer, required") - amountOpt := cmd.StringOpt("amount", "", "Amount of value to send, required") + sourceOpt := cmd.StringOpt("s source", "", "Address to send from, if not set config is used") + targetOpt := cmd.StringOpt("t target", "", "Address to receive transfer, required") + amountOpt := cmd.StringOpt("a amount", "", "Amount of value to send, required") cmd.Spec += "[--source=
] [--target=
] [--amount=]" cmd.Action = func() { @@ -71,8 +71,8 @@ func Tx(output Output) func(cmd *cli.Cmd) { }) cmd.Command("bond", "bond a new validator", func(cmd *cli.Cmd) { - sourceOpt := cmd.StringOpt("source", "", "Account with bonding perm, if not set config is used") - amountOpt := cmd.StringOpt("amount", "", "Amount of value to bond, required") + sourceOpt := cmd.StringOpt("s source", "", "Account with bonding perm, if not set config is used") + amountOpt := cmd.StringOpt("a amount", "", "Amount of value to bond, required") cmd.Spec += "[--source=
] [--amount=]" cmd.Action = func() { @@ -97,8 +97,8 @@ func Tx(output Output) func(cmd *cli.Cmd) { }) cmd.Command("unbond", "unbond an existing validator", func(cmd *cli.Cmd) { - sourceOpt := cmd.StringOpt("source", "", "Validator to unbond, if not set config is used") - amountOpt := cmd.StringOpt("amount", "", "Amount of value to unbond, required") + sourceOpt := cmd.StringOpt("s source", "", "Validator to unbond, if not set config is used") + amountOpt := cmd.StringOpt("a amount", "", "Amount of value to unbond, required") cmd.Spec += "[--source=
] [--amount=]" cmd.Action = func() { diff --git a/execution/contexts/bond_context.go b/execution/contexts/bond_context.go index b6169c952..b7d31da27 100644 --- a/execution/contexts/bond_context.go +++ b/execution/contexts/bond_context.go @@ -53,6 +53,7 @@ func (ctx *BondContext) Execute(txe *exec.TxExecution, p payload.Payload) error return err } + // assume public key is know as we update account from signatures err = validator.AddPower(ctx.ValidatorSet, account.PublicKey, power) if err != nil { return err diff --git a/execution/execution.go b/execution/execution.go index 3140ea1a5..0be5f783c 100644 --- a/execution/execution.go +++ b/execution/execution.go @@ -243,21 +243,13 @@ func (exe *executor) Execute(txEnv *txs.Envelope) (txe *exec.TxExecution, err er } }() - // Validate inputs and check sequence numbers - err = exe.validateInputs(txEnv.Tx) + err = exe.validateInputsAndStorePublicKeys(txEnv) if err != nil { logger.InfoMsg("Transaction validate failed", structure.ErrorKey, err) txe.PushError(err) return nil, err } - err = exe.updateSignatories(txEnv.Signatories) - if err != nil { - logger.InfoMsg("Updating signatories failed", structure.ErrorKey, err) - txe.PushError(err) - return nil, err - } - err = txExecutor.Execute(txe, txe.Envelope.Tx.Payload) if err != nil { logger.InfoMsg("Transaction execution failed", structure.ErrorKey, err) @@ -278,8 +270,13 @@ func (exe *executor) Execute(txEnv *txs.Envelope) (txe *exec.TxExecution, err er return nil, fmt.Errorf("unknown transaction type: %v", txEnv.Tx.Type()) } -func (exe *executor) validateInputs(tx *txs.Tx) error { - for _, in := range tx.GetInputs() { +// Validate inputs, check sequence numbers and capture public keys +func (exe *executor) validateInputsAndStorePublicKeys(txEnv *txs.Envelope) error { + for s, in := range txEnv.Tx.GetInputs() { + err := exe.updateSignatory(txEnv.Signatories[s]) + if err != nil { + return fmt.Errorf("failed to update public key for input %X: %v", in.Address, err) + } acc, err := exe.stateCache.GetAccount(in.Address) if err != nil { return err @@ -313,27 +310,20 @@ func (exe *executor) validateInputs(tx *txs.Tx) error { return nil } -// Capture public keys -func (exe *executor) updateSignatories(sigs []txs.Signatory) error { - for _, sig := range sigs { - // pointer dereferences are safe since txEnv.Validate() is run by - // txEnv.Verify() above which checks they are non-nil - acc, err := exe.stateCache.GetAccount(*sig.Address) - if err != nil { - return fmt.Errorf("error getting account on which to set public key: %v", *sig.Address) - } - // Important that verify has been run against signatories at this point - if sig.PublicKey.GetAddress() != acc.Address { - return fmt.Errorf("unexpected mismatch between address %v and supplied public key %v", - acc.Address, sig.PublicKey) - } - acc.PublicKey = *sig.PublicKey - err = exe.stateCache.UpdateAccount(acc) - if err != nil { - return fmt.Errorf("error updating account after setting public key: %v", err) - } +func (exe *executor) updateSignatory(sig txs.Signatory) error { + // pointer dereferences are safe since txEnv.Validate() is run by + // txEnv.Verify() above which checks they are non-nil + acc, err := exe.stateCache.GetAccount(*sig.Address) + if err != nil { + return fmt.Errorf("error getting account on which to set public key: %v", *sig.Address) } - return nil + // Important that verify has been run against signatories at this point + if sig.PublicKey.GetAddress() != acc.Address { + return fmt.Errorf("unexpected mismatch between address %v and supplied 
public key %v", + acc.Address, sig.PublicKey) + } + acc.PublicKey = *sig.PublicKey + return exe.stateCache.UpdateAccount(acc) } // Commit the current state - optionally pass in the tendermint ABCI header for that to be included with the BeginBlock diff --git a/protobuf/payload.proto b/protobuf/payload.proto index 21f76f888..a1bee3fd3 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -108,9 +108,8 @@ message BondTx { option (gogoproto.goproto_stringer) = false; option (gogoproto.goproto_getters) = false; - // Account with bonding permission + // Input must be the validator that desires to bond TxInput Input = 1; - // PublicKey is retrieved from signature } message UnbondTx { diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 72762323f..61f4cc272 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -554,7 +554,7 @@ func (*NameTx) XXX_MessageName() string { } type BondTx struct { - // Account with bonding permission + // Input must be the validator that desires to bond Input *TxInput `protobuf:"bytes,1,opt,name=Input,proto3" json:"Input,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` From c0d4e1aa663695d6952d36f23354bc46ec14c02e Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Fri, 12 Jul 2019 15:54:01 +0100 Subject: [PATCH 39/70] add test fixture for bond / unbond Signed-off-by: Gregory Hill --- .../app57-bond-unbond/deploy.yaml | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 tests/jobs_fixtures/app57-bond-unbond/deploy.yaml diff --git a/tests/jobs_fixtures/app57-bond-unbond/deploy.yaml b/tests/jobs_fixtures/app57-bond-unbond/deploy.yaml new file mode 100644 index 000000000..a33a1e7b7 --- /dev/null +++ b/tests/jobs_fixtures/app57-bond-unbond/deploy.yaml @@ -0,0 +1,45 @@ +jobs: + +- name: power + set: + val: 12345 + +- name: Validator + update-account: + target: new() + native: $power + permissions: ["all"] + +- name: InitialTotalPower + query-vals: + field: "Set.TotalPower" + +- name: BondValidator + bond: + source: $Validator.address + amount: $power + +- name: CheckAdded + query-vals: + field: "Set.${Validator.address}.Power" + +- name: PowerAssert + assert: + key: $CheckAdded + relation: eq + val: $power + +- name: UnbondValidator + unbond: + source: $Validator.address + amount: $power + +- name: PowerAfterRemoved + query-vals: + field: "Set.TotalPower" + +- name: AssertPowerNonZero + assert: + key: $PowerAfterRemoved + relation: eq + val: $InitialTotalPower \ No newline at end of file From 661262bd0ee7e571cc8006715b68bf3bd9390c73 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Fri, 12 Jul 2019 18:10:19 +0100 Subject: [PATCH 40/70] Enforce minimum total validator power Signed-off-by: Silas Davis --- acm/validator/bucket.go | 51 +++++++++++++++++++++++------------- acm/validator/bucket_test.go | 12 ++++----- 2 files changed, 39 insertions(+), 24 deletions(-) diff --git a/acm/validator/bucket.go b/acm/validator/bucket.go index fe72e2f50..ccc305e0a 100644 --- a/acm/validator/bucket.go +++ b/acm/validator/bucket.go @@ -9,7 +9,8 @@ import ( ) // Safety margin determined by Tendermint (see comment on source constant) -var maxTotalVotingPower = big.NewInt(types.MaxTotalVotingPower) +var maxTotalPower = big.NewInt(types.MaxTotalVotingPower) +var minTotalPower = big.NewInt(4) type Bucket struct { // Delta tracks the changes to validator power made since the previous rotation @@ -52,31 +53,45 @@ func (vc *Bucket) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, 
error if err != nil { return nil, fmt.Errorf("%s %v", errHeader, err) } - // The max flow we are permitted to allow across all validators - maxFlow := vc.Previous.MaxFlow() - // The remaining flow we have to play with - allowableFlow := new(big.Int).Sub(maxFlow, vc.Flow.totalPower) - // The new absolute flow caused by this AlterPower - flow := vc.Previous.Flow(id, power) - absFlow := new(big.Int).Abs(flow) nextTotalPower := vc.Next.TotalPower() - if nextTotalPower.Add(nextTotalPower, vc.Next.Flow(id, power)).Cmp(maxTotalVotingPower) == 1 { + nextTotalPower.Add(nextTotalPower, vc.Next.Flow(id, power)) + // We must not have lower validator power than 4 because this would prevent any flow from occurring + // min > nextTotalPower + if minTotalPower.Cmp(nextTotalPower) == 1 { return nil, fmt.Errorf("%s cannot change validator power of %v from %v to %v because that would result "+ - "in a total power greater than that allowed by tendermint (%v): would make next total power: %v", - errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, maxTotalVotingPower, nextTotalPower) + "in a total power less than the permitted minimum of 4: would make next total power: %v", + errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, nextTotalPower) } - // If we call vc.flow.ChangePower(id, absFlow) (below) will we induce a change in flow greater than the allowable - // flow we have left to spend? - if vc.Flow.Flow(id, absFlow).Cmp(allowableFlow) == 1 && allowableFlow.Cmp(big.NewInt(0)) > 0 { + // nextTotalPower > max + if nextTotalPower.Cmp(maxTotalPower) == 1 { return nil, fmt.Errorf("%s cannot change validator power of %v from %v to %v because that would result "+ - "in a flow greater than or equal to 1/3 of total power for the next commit: flow induced by change: %v, "+ - "current total flow: %v/%v (cumulative/max), remaining allowable flow: %v", - errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, absFlow, vc.Flow.totalPower, - maxFlow, allowableFlow) + "in a total power greater than that allowed by tendermint (%v): would make next total power: %v", + errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, maxTotalPower, nextTotalPower) } + // The new absolute flow caused by this AlterPower + flow := vc.Previous.Flow(id, power) + absFlow := new(big.Int).Abs(flow) + + // Check flow except in the special case when previous Set was empty + if vc.Previous.TotalPower().Sign() == 0 { + // The max flow we are permitted to allow across all validators + maxFlow := vc.Previous.MaxFlow() + // The remaining flow we have to play with + allowableFlow := new(big.Int).Sub(maxFlow, vc.Flow.totalPower) + + // If we call vc.flow.ChangePower(id, absFlow) (below) will we induce a change in flow greater than the allowable + // flow we have left to spend? 
+ if vc.Flow.Flow(id, absFlow).Cmp(allowableFlow) == 1 { + return nil, fmt.Errorf("%s cannot change validator power of %v from %v to %v because that would result "+ + "in a flow greater than or equal to 1/3 of total power for the next commit: flow induced by change: %v, "+ + "current total flow: %v/%v (cumulative/max), remaining allowable flow: %v", + errHeader, id.GetAddress(), vc.Previous.GetPower(id.GetAddress()), power, absFlow, vc.Flow.totalPower, + maxFlow, allowableFlow) + } + } // Set flow for this id to update flow.totalPower (total flow) for comparison below, keep track of flow for each id // so that we only count flow once for each id vc.Flow.ChangePower(id, absFlow) diff --git a/acm/validator/bucket_test.go b/acm/validator/bucket_test.go index 9ed0aa744..5e92ad417 100644 --- a/acm/validator/bucket_test.go +++ b/acm/validator/bucket_test.go @@ -13,31 +13,31 @@ var pubC = pubKey(3) func TestBucket_SetPower(t *testing.T) { base := NewBucket() - _, err := base.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big3)) + _, err := base.SetPower(pubA, new(big.Int).Sub(maxTotalPower, big3)) require.NoError(t, err) bucket := NewBucket(base.Next) - flow, err := bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big2)) + flow, err := bucket.SetPower(pubA, new(big.Int).Sub(maxTotalPower, big2)) require.NoError(t, err) require.Equal(t, big1.Int64(), flow.Int64()) - flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) + flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalPower, big1)) require.NoError(t, err) require.Equal(t, big2.Int64(), flow.Int64()) - flow, err = bucket.SetPower(pubA, maxTotalVotingPower) + flow, err = bucket.SetPower(pubA, maxTotalPower) require.NoError(t, err) require.Equal(t, big3.Int64(), flow.Int64()) - _, err = bucket.SetPower(pubA, new(big.Int).Add(maxTotalVotingPower, big1)) + _, err = bucket.SetPower(pubA, new(big.Int).Add(maxTotalPower, big1)) require.Error(t, err, "should fail as we would breach total power") _, err = bucket.SetPower(pubB, big1) require.Error(t, err, "should fail as we would breach total power") // Drop A and raise B - should now succeed - flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalVotingPower, big1)) + flow, err = bucket.SetPower(pubA, new(big.Int).Sub(maxTotalPower, big1)) require.NoError(t, err) require.Equal(t, big2.Int64(), flow.Int64()) From 6b93c1055bee5e6d505cbef868179c43a87b64f2 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Mon, 15 Jul 2019 09:32:26 +0100 Subject: [PATCH 41/70] only check flow if totalpower > 0 Signed-off-by: Gregory Hill --- acm/validator/bucket.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/acm/validator/bucket.go b/acm/validator/bucket.go index ccc305e0a..81ff76b7d 100644 --- a/acm/validator/bucket.go +++ b/acm/validator/bucket.go @@ -75,8 +75,9 @@ func (vc *Bucket) SetPower(id crypto.PublicKey, power *big.Int) (*big.Int, error flow := vc.Previous.Flow(id, power) absFlow := new(big.Int).Abs(flow) - // Check flow except in the special case when previous Set was empty - if vc.Previous.TotalPower().Sign() == 0 { + // Only check flow if power exists, this allows us to + // bootstrap the set from an empty state + if vc.Previous.TotalPower().Sign() > 0 { // The max flow we are permitted to allow across all validators maxFlow := vc.Previous.MaxFlow() // The remaining flow we have to play with From a8e7d57474eb127b2907caaeb899c874598bbe59 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 19 Jun 2019 16:12:52 +0100 Subject: [PATCH 
42/70] Use install rather than cp to prevent "Text file busy" if burrow is running Signed-off-by: Sean Young --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 795704eef..059992661 100644 --- a/Makefile +++ b/Makefile @@ -130,7 +130,7 @@ build_burrow_sqlite: commit_hash .PHONY: install install: build_burrow mkdir -p ${BIN_PATH} - cp ${REPO}/bin/burrow ${BIN_PATH}/burrow + install -T ${REPO}/bin/burrow ${BIN_PATH}/burrow # build burrow with checks for race conditions .PHONY: build_race_db From fcd57e93feb987f172949c72477f13d46f09361e Mon Sep 17 00:00:00 2001 From: Sean Young Date: Wed, 5 Jun 2019 14:12:19 +0100 Subject: [PATCH 43/70] Store ABIs burrow side Ensure burrow deploy sends the codehashes for possible contracts for each contract (including contracts it can create). Add rpc interface to retrieve abi for account. Signed-off-by: Sean Young --- acm/acm.pb.go | 498 +++++++++++++++-- acm/acmstate/memory_state.go | 11 + acm/acmstate/state.go | 74 +++ acm/acmstate/state_cache.go | 41 ++ cmd/burrow/commands/vent.go | 8 +- deploy/compile/compilers.go | 58 +- deploy/compile/compilers_test.go | 4 +- deploy/compile/solgo/main.go | 4 + deploy/def/client.go | 33 +- deploy/jobs/jobs_contracts.go | 56 +- dump/dump.go | 12 + dump/load.go | 10 + execution/contexts/call_context.go | 17 +- execution/errors/errors.go | 3 + execution/evm/abi/abi.go | 6 + execution/evm/fake_app_state.go | 10 + execution/evm/state.go | 94 +++- execution/evm/state_test.go | 2 +- execution/evm/vm.go | 19 +- execution/evm/vm_test.go | 8 +- execution/execution.go | 6 + execution/solidity/abi_tester.sol | 17 + execution/solidity/abi_tester.sol.go | 13 + execution/solidity/event_emitter.sol.go | 1 + execution/solidity/revert.sol.go | 1 + execution/solidity/strange_loop.sol.go | 1 + execution/solidity/zero_reset.sol.go | 1 + execution/state/abi.go | 14 + execution/state/state.go | 6 + .../rpcevents/execution_events_server_test.go | 2 +- integration/rpctest/helpers.go | 24 +- integration/rpctransact/call_test.go | 58 +- .../rpctransact/transact_server_test.go | 2 +- protobuf/acm.proto | 10 + protobuf/payload.proto | 7 + protobuf/rpcquery.proto | 9 + rpc/rpcquery/query_server.go | 25 + rpc/rpcquery/rpcquery.pb.go | 289 +++++++--- txs/payload/payload.pb.go | 500 ++++++++++++++---- vent/service/abis.go | 66 +++ vent/service/consumer.go | 14 +- vent/service/consumer_test.go | 6 +- vent/service/decoder.go | 14 +- vent/service/rowbuilder.go | 13 +- vent/service/server_test.go | 4 +- vent/test/EventsTest.sol.go | 1 + 46 files changed, 1779 insertions(+), 293 deletions(-) create mode 100644 execution/solidity/abi_tester.sol create mode 100644 execution/solidity/abi_tester.sol.go create mode 100644 execution/state/abi.go create mode 100644 vent/service/abis.go diff --git a/acm/acm.pb.go b/acm/acm.pb.go index 12ab2752b..fa3520945 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -8,6 +8,7 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" + github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" permission "github.com/hyperledger/burrow/permission" @@ -28,16 +29,19 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package type Account struct { - Address github_com_hyperledger_burrow_crypto.Address 
`protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` - PublicKey crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey"` - Sequence uint64 `protobuf:"varint,3,opt,name=Sequence,proto3" json:"Sequence,omitempty"` - Balance uint64 `protobuf:"varint,4,opt,name=Balance,proto3" json:"Balance,omitempty"` - EVMCode Bytecode `protobuf:"bytes,5,opt,name=EVMCode,proto3,customtype=Bytecode" json:"EVMCode"` - Permissions permission.AccountPermissions `protobuf:"bytes,6,opt,name=Permissions,proto3" json:"Permissions"` - WASMCode Bytecode `protobuf:"bytes,7,opt,name=WASMCode,proto3,customtype=Bytecode" json:",omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` + PublicKey crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey"` + Sequence uint64 `protobuf:"varint,3,opt,name=Sequence,proto3" json:"Sequence,omitempty"` + Balance uint64 `protobuf:"varint,4,opt,name=Balance,proto3" json:"Balance,omitempty"` + EVMCode Bytecode `protobuf:"bytes,5,opt,name=EVMCode,proto3,customtype=Bytecode" json:"EVMCode"` + Permissions permission.AccountPermissions `protobuf:"bytes,6,opt,name=Permissions,proto3" json:"Permissions"` + WASMCode Bytecode `protobuf:"bytes,7,opt,name=WASMCode,proto3,customtype=Bytecode" json:",omitempty"` + CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,8,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"-"` + MetaMap []*MetaMap `protobuf:"bytes,9,rep,name=MetaMap,proto3" json:"MetaMap,omitempty"` + Forebear *github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,10,opt,name=Forebear,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Forebear,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Account) Reset() { *m = Account{} } @@ -96,42 +100,109 @@ func (m *Account) GetPermissions() permission.AccountPermissions { return permission.AccountPermissions{} } +func (m *Account) GetMetaMap() []*MetaMap { + if m != nil { + return m.MetaMap + } + return nil +} + func (*Account) XXX_MessageName() string { return "acm.Account" } + +type MetaMap struct { + CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"CodeHash"` + AbiHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,2,opt,name=AbiHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"AbiHash"` + // In the dump format we would like the ABI rather than its hash + Abi string `protobuf:"bytes,3,opt,name=Abi,proto3" json:"Abi,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetaMap) Reset() { *m = MetaMap{} } +func (m *MetaMap) String() string { return proto.CompactTextString(m) } +func (*MetaMap) ProtoMessage() {} +func (*MetaMap) Descriptor() ([]byte, []int) { + return fileDescriptor_49ed775bc0a6adf6, []int{1} +} +func (m *MetaMap) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MetaMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := 
m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *MetaMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetaMap.Merge(m, src) +} +func (m *MetaMap) XXX_Size() int { + return m.Size() +} +func (m *MetaMap) XXX_DiscardUnknown() { + xxx_messageInfo_MetaMap.DiscardUnknown(m) +} + +var xxx_messageInfo_MetaMap proto.InternalMessageInfo + +func (m *MetaMap) GetAbi() string { + if m != nil { + return m.Abi + } + return "" +} + +func (*MetaMap) XXX_MessageName() string { + return "acm.MetaMap" +} func init() { proto.RegisterType((*Account)(nil), "acm.Account") golang_proto.RegisterType((*Account)(nil), "acm.Account") + proto.RegisterType((*MetaMap)(nil), "acm.MetaMap") + golang_proto.RegisterType((*MetaMap)(nil), "acm.MetaMap") } func init() { proto.RegisterFile("acm.proto", fileDescriptor_49ed775bc0a6adf6) } func init() { golang_proto.RegisterFile("acm.proto", fileDescriptor_49ed775bc0a6adf6) } var fileDescriptor_49ed775bc0a6adf6 = []byte{ - // 357 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x3f, 0x4f, 0xc2, 0x40, - 0x1c, 0xe5, 0xa0, 0x52, 0x38, 0x18, 0xf0, 0xa6, 0x86, 0xe1, 0x8a, 0x4e, 0xc4, 0x60, 0x9b, 0xf8, - 0x67, 0xc1, 0x89, 0x1a, 0x5d, 0x8c, 0x86, 0x94, 0x44, 0x13, 0xb7, 0xf6, 0x7a, 0x96, 0x26, 0x94, - 0xab, 0xd7, 0x36, 0xa6, 0xdf, 0xc4, 0xd1, 0x8f, 0xe2, 0xc8, 0xe8, 0x68, 0x1c, 0x88, 0x29, 0x9b, - 0x9f, 0xc1, 0xc1, 0x70, 0x1c, 0xb5, 0x71, 0x70, 0xeb, 0xeb, 0x7b, 0xef, 0xf7, 0x5e, 0xde, 0xc1, - 0xa6, 0x43, 0x42, 0x23, 0xe2, 0x2c, 0x61, 0xa8, 0xe6, 0x90, 0xb0, 0x7b, 0xe8, 0x07, 0xc9, 0x34, - 0x75, 0x0d, 0xc2, 0x42, 0xd3, 0x67, 0x3e, 0x33, 0x05, 0xe7, 0xa6, 0x0f, 0x02, 0x09, 0x20, 0xbe, - 0x36, 0x9e, 0x6e, 0x27, 0xa2, 0x3c, 0x0c, 0xe2, 0x38, 0x60, 0x73, 0xf9, 0xa7, 0x4d, 0x78, 0x16, - 0x25, 0x92, 0xdf, 0xff, 0xae, 0x42, 0x75, 0x44, 0x08, 0x4b, 0xe7, 0x09, 0xba, 0x81, 0xea, 0xc8, - 0xf3, 0x38, 0x8d, 0x63, 0x0d, 0xf4, 0x40, 0xbf, 0x6d, 0x9d, 0x2c, 0x96, 0x7a, 0xe5, 0x63, 0xa9, - 0x0f, 0x4a, 0x99, 0xd3, 0x2c, 0xa2, 0x7c, 0x46, 0x3d, 0x9f, 0x72, 0xd3, 0x4d, 0x39, 0x67, 0x4f, - 0xa6, 0x3c, 0x28, 0xbd, 0xf6, 0xf6, 0x08, 0x3a, 0x85, 0xcd, 0x71, 0xea, 0xce, 0x02, 0x72, 0x45, - 0x33, 0xad, 0xda, 0x03, 0xfd, 0xd6, 0xd1, 0xae, 0x21, 0xc5, 0x05, 0x61, 0x29, 0xeb, 0x10, 0xfb, - 0x57, 0x89, 0xba, 0xb0, 0x31, 0xa1, 0x8f, 0x29, 0x9d, 0x13, 0xaa, 0xd5, 0x7a, 0xa0, 0xaf, 0xd8, - 0x05, 0x46, 0x1a, 0x54, 0x2d, 0x67, 0xe6, 0xac, 0x29, 0x45, 0x50, 0x5b, 0x88, 0x0e, 0xa0, 0x7a, - 0x71, 0x7b, 0x7d, 0xce, 0x3c, 0xaa, 0xed, 0x88, 0xf2, 0x1d, 0x59, 0xbe, 0x61, 0x65, 0x09, 0x25, - 0xcc, 0xa3, 0xf6, 0x56, 0x80, 0x2e, 0x61, 0x6b, 0x5c, 0xcc, 0x12, 0x6b, 0x75, 0x51, 0x0d, 0x1b, - 0xa5, 0xa9, 0xe4, 0x24, 0x25, 0x95, 0xec, 0x59, 0x36, 0xa2, 0x21, 0x6c, 0xdc, 0x8d, 0x26, 0x9b, - 0x50, 0x55, 0x84, 0xe2, 0xbf, 0xa1, 0x5f, 0x4b, 0x1d, 0x0e, 0x58, 0x18, 0x24, 0x34, 0x8c, 0x92, - 0xcc, 0x2e, 0xf4, 0x43, 0xe5, 0xf9, 0x45, 0xaf, 0x58, 0x67, 0x8b, 0x1c, 0x83, 0xb7, 0x1c, 0x83, - 0xf7, 0x1c, 0x83, 0xcf, 0x1c, 0x83, 0xd7, 0x15, 0x06, 0x8b, 0x15, 0x06, 0xf7, 0x7b, 0xff, 0x6f, - 0xee, 0x90, 0xd0, 0xad, 0x8b, 0x27, 0x3c, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0xba, 0x95, 0x25, - 0xdd, 0x23, 0x02, 0x00, 0x00, + // 482 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0x31, 0x6f, 0xd3, 0x40, + 0x14, 0xee, 0x35, 0xa6, 0x76, 0x2e, 0x19, 0xc2, 0x4d, 0xa7, 0x0c, 0x76, 0xe8, 0x80, 0x22, 0xd4, + 0x38, 0xa8, 0xd0, 0x25, 0x4c, 0x36, 0xa2, 0xaa, 0x04, 0x81, 0xe2, 0x4a, 0x45, 0x62, 
0xbb, 0x3b, + 0x1f, 0x89, 0xa5, 0x38, 0x67, 0xce, 0xb6, 0xc0, 0xff, 0x84, 0x91, 0x7f, 0x02, 0x63, 0xc4, 0xc4, + 0x88, 0x18, 0x22, 0xe4, 0x6e, 0xfd, 0x15, 0xc8, 0x97, 0xb3, 0xb1, 0x18, 0x2a, 0x41, 0x27, 0xdf, + 0xbb, 0xef, 0x7d, 0xdf, 0xf7, 0xde, 0xf3, 0x3b, 0xd8, 0x25, 0x2c, 0x76, 0x13, 0x29, 0x32, 0x81, + 0x3a, 0x84, 0xc5, 0xc3, 0xc9, 0x22, 0xca, 0x96, 0x39, 0x75, 0x99, 0x88, 0xa7, 0x0b, 0xb1, 0x10, + 0x53, 0x85, 0xd1, 0xfc, 0x9d, 0x8a, 0x54, 0xa0, 0x4e, 0x3b, 0xce, 0x70, 0x90, 0x70, 0x19, 0x47, + 0x69, 0x1a, 0x89, 0xb5, 0xbe, 0xe9, 0x33, 0x59, 0x24, 0x99, 0xc6, 0x0f, 0xbf, 0x19, 0xd0, 0xf4, + 0x18, 0x13, 0xf9, 0x3a, 0x43, 0x2f, 0xa1, 0xe9, 0x85, 0xa1, 0xe4, 0x69, 0x8a, 0xc1, 0x08, 0x8c, + 0xfb, 0xfe, 0xe3, 0xcd, 0xd6, 0xd9, 0xfb, 0xb9, 0x75, 0x8e, 0x5a, 0x9e, 0xcb, 0x22, 0xe1, 0x72, + 0xc5, 0xc3, 0x05, 0x97, 0x53, 0x9a, 0x4b, 0x29, 0x3e, 0x4c, 0xb5, 0xa0, 0xe6, 0x06, 0xb5, 0x08, + 0x3a, 0x81, 0xdd, 0xf3, 0x9c, 0xae, 0x22, 0xf6, 0x9c, 0x17, 0x78, 0x7f, 0x04, 0xc6, 0xbd, 0xe3, + 0xbb, 0xae, 0x4e, 0x6e, 0x00, 0xdf, 0xa8, 0x4c, 0x82, 0x3f, 0x99, 0x68, 0x08, 0xad, 0x0b, 0xfe, + 0x3e, 0xe7, 0x6b, 0xc6, 0x71, 0x67, 0x04, 0xc6, 0x46, 0xd0, 0xc4, 0x08, 0x43, 0xd3, 0x27, 0x2b, + 0x52, 0x41, 0x86, 0x82, 0xea, 0x10, 0x3d, 0x80, 0xe6, 0xb3, 0xcb, 0xf9, 0x53, 0x11, 0x72, 0x7c, + 0x47, 0x15, 0x3f, 0xd0, 0xc5, 0x5b, 0x7e, 0x91, 0x71, 0x26, 0x42, 0x1e, 0xd4, 0x09, 0xe8, 0x14, + 0xf6, 0xce, 0x9b, 0xb1, 0xa4, 0xf8, 0x40, 0x95, 0x66, 0xbb, 0xad, 0x51, 0xe9, 0x91, 0xb4, 0xb2, + 0x74, 0x9d, 0x6d, 0x22, 0x9a, 0x41, 0xeb, 0x8d, 0x77, 0xb1, 0x33, 0x35, 0x95, 0xa9, 0xfd, 0xb7, + 0xe9, 0xf5, 0xd6, 0x81, 0x47, 0x22, 0x8e, 0x32, 0x1e, 0x27, 0x59, 0x11, 0x34, 0xf9, 0xe8, 0x12, + 0x5a, 0xd5, 0xf7, 0x8c, 0xa4, 0x4b, 0x6c, 0x29, 0xee, 0x4c, 0x73, 0x27, 0x37, 0x4f, 0x9b, 0x46, + 0x6b, 0x22, 0x0b, 0xf7, 0x8c, 0x7f, 0xac, 0x3c, 0xd2, 0xeb, 0xad, 0x03, 0x26, 0x41, 0xa3, 0x85, + 0xee, 0x43, 0x73, 0xce, 0x33, 0x32, 0x27, 0x09, 0xee, 0x8e, 0x3a, 0xe3, 0xde, 0x71, 0xdf, 0xad, + 0x36, 0x48, 0xdf, 0x05, 0x35, 0x88, 0x5e, 0x40, 0xeb, 0x54, 0x48, 0x4e, 0x39, 0x91, 0x18, 0x2a, + 0xff, 0x87, 0xff, 0xfc, 0xa7, 0x1b, 0x85, 0x99, 0xf1, 0xe9, 0xb3, 0xb3, 0x77, 0xf8, 0x05, 0x34, + 0xe6, 0xe8, 0x75, 0xab, 0xbf, 0xdd, 0x36, 0x9d, 0xfc, 0x57, 0x7f, 0xad, 0xd6, 0x5e, 0x41, 0xd3, + 0xa3, 0x91, 0x52, 0xdc, 0xbf, 0x8d, 0x62, 0xad, 0x82, 0x06, 0xb0, 0xe3, 0xd1, 0x48, 0x2d, 0x59, + 0x37, 0xa8, 0x8e, 0xfe, 0x93, 0x4d, 0x69, 0x83, 0xef, 0xa5, 0x0d, 0x7e, 0x94, 0x36, 0xf8, 0x55, + 0xda, 0xe0, 0xeb, 0x95, 0x0d, 0x36, 0x57, 0x36, 0x78, 0x7b, 0xef, 0x66, 0x0f, 0xc2, 0x62, 0x7a, + 0xa0, 0x9e, 0xd4, 0xa3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x97, 0xe1, 0x06, 0x7b, 0xb3, 0x03, + 0x00, 0x00, } func (m *Account) Marshal() (dAtA []byte, err error) { @@ -199,6 +270,79 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { return 0, err } i += n5 + dAtA[i] = 0x42 + i++ + i = encodeVarintAcm(dAtA, i, uint64(m.CodeHash.Size())) + n6, err := m.CodeHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n6 + if len(m.MetaMap) > 0 { + for _, msg := range m.MetaMap { + dAtA[i] = 0x4a + i++ + i = encodeVarintAcm(dAtA, i, uint64(msg.Size())) + n, err := msg.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n + } + } + if m.Forebear != nil { + dAtA[i] = 0x52 + i++ + i = encodeVarintAcm(dAtA, i, uint64(m.Forebear.Size())) + n7, err := m.Forebear.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n7 + } + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil +} + +func (m *MetaMap) 
Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MetaMap) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + dAtA[i] = 0xa + i++ + i = encodeVarintAcm(dAtA, i, uint64(m.CodeHash.Size())) + n8, err := m.CodeHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n8 + dAtA[i] = 0x12 + i++ + i = encodeVarintAcm(dAtA, i, uint64(m.AbiHash.Size())) + n9, err := m.AbiHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n9 + if len(m.Abi) > 0 { + dAtA[i] = 0x1a + i++ + i = encodeVarintAcm(dAtA, i, uint64(len(m.Abi))) + i += copy(dAtA[i:], m.Abi) + } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -236,6 +380,38 @@ func (m *Account) Size() (n int) { n += 1 + l + sovAcm(uint64(l)) l = m.WASMCode.Size() n += 1 + l + sovAcm(uint64(l)) + l = m.CodeHash.Size() + n += 1 + l + sovAcm(uint64(l)) + if len(m.MetaMap) > 0 { + for _, e := range m.MetaMap { + l = e.Size() + n += 1 + l + sovAcm(uint64(l)) + } + } + if m.Forebear != nil { + l = m.Forebear.Size() + n += 1 + l + sovAcm(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *MetaMap) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.CodeHash.Size() + n += 1 + l + sovAcm(uint64(l)) + l = m.AbiHash.Size() + n += 1 + l + sovAcm(uint64(l)) + l = len(m.Abi) + if l > 0 { + n += 1 + l + sovAcm(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -487,6 +663,260 @@ func (m *Account) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CodeHash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthAcm + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.CodeHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field MetaMap", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthAcm + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.MetaMap = append(m.MetaMap, &MetaMap{}) + if err := m.MetaMap[len(m.MetaMap)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Forebear", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthAcm + } + postIndex 
:= iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var v github_com_hyperledger_burrow_crypto.Address + m.Forebear = &v + if err := m.Forebear.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipAcm(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthAcm + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthAcm + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *MetaMap) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MetaMap: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MetaMap: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CodeHash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthAcm + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.CodeHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field AbiHash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthAcm + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.AbiHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Abi", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAcm + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthAcm + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthAcm + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Abi = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAcm(dAtA[iNdEx:]) diff --git a/acm/acmstate/memory_state.go b/acm/acmstate/memory_state.go index 
10bef6c0d..14b1cb718 100644 --- a/acm/acmstate/memory_state.go +++ b/acm/acmstate/memory_state.go @@ -11,6 +11,7 @@ import ( type MemoryState struct { Accounts map[crypto.Address]*acm.Account Storage map[crypto.Address]map[binary.Word256][]byte + Abis map[AbiHash]string } var _ IterableReaderWriter = &MemoryState{} @@ -20,6 +21,7 @@ func NewMemoryState() *MemoryState { return &MemoryState{ Accounts: make(map[crypto.Address]*acm.Account), Storage: make(map[crypto.Address]map[binary.Word256][]byte), + Abis: make(map[AbiHash]string), } } @@ -35,6 +37,15 @@ func (ms *MemoryState) UpdateAccount(updatedAccount *acm.Account) error { return nil } +func (ms *MemoryState) GetAbi(abihash AbiHash) (string, error) { + return ms.Abis[abihash], nil +} + +func (ms *MemoryState) SetAbi(abihash AbiHash, abi string) error { + ms.Abis[abihash] = abi + return nil +} + func (ms *MemoryState) RemoveAccount(address crypto.Address) error { delete(ms.Accounts, address) return nil diff --git a/acm/acmstate/state.go b/acm/acmstate/state.go index 1f72d15de..aa42b88e6 100644 --- a/acm/acmstate/state.go +++ b/acm/acmstate/state.go @@ -4,9 +4,71 @@ import ( "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/crypto/sha3" "github.com/hyperledger/burrow/permission" + "github.com/tmthrgd/go-hex" ) +// AbiHash is the keccak hash for the ABI. This is to make the ABI content-addressed +type AbiHash [32]byte + +func (h *AbiHash) Bytes() []byte { + b := make([]byte, 32) + copy(b, h[:]) + return b +} + +func (ch *AbiHash) UnmarshalText(hexBytes []byte) error { + bs, err := hex.DecodeString(string(hexBytes)) + if err != nil { + return err + } + copy(ch[:], bs) + return nil +} + +func (ch AbiHash) MarshalText() ([]byte, error) { + return []byte(ch.String()), nil +} + +func (ch AbiHash) String() string { + return hex.EncodeUpperToString(ch[:]) +} + +func GetAbiHash(abi string) (abihash AbiHash) { + hash := sha3.NewKeccak256() + hash.Write([]byte(abi)) + copy(abihash[:], hash.Sum(nil)) + return +} + +// CodeHash is the keccak hash for the code for an account. This is used for the EVM CODEHASH opcode, and to find the +// correct ABI for a contract +type CodeHash [32]byte + +func (h *CodeHash) Bytes() []byte { + b := make([]byte, 32) + copy(b, h[:]) + return b +} + +func (ch *CodeHash) UnmarshalText(hexBytes []byte) error { + bs, err := hex.DecodeString(string(hexBytes)) + if err != nil { + return err + } + copy(ch[:], bs) + return nil +} + +func (ch CodeHash) MarshalText() ([]byte, error) { + return []byte(ch.String()), nil +} + +func (ch CodeHash) String() string { + return hex.EncodeUpperToString(ch[:]) +} + type AccountGetter interface { // Get an account by its address return nil if it does not exist (which should not be an error) GetAccount(address crypto.Address) (*acm.Account, error) @@ -45,6 +107,16 @@ type StorageIterable interface { IterateStorage(address crypto.Address, consumer func(key binary.Word256, value []byte) error) (err error) } +type AbiGetter interface { + // Get an ABI by its hash. This is content-addressed + GetAbi(abihash AbiHash) (string, error) +} + +type AbiSetter interface { + // Set an ABI according to it keccak-256 hash. 
+ SetAbi(abihash AbiHash, abi string) error +} + type AccountStats struct { AccountsWithCode uint64 AccountsWithoutCode uint64 @@ -60,6 +132,7 @@ type AccountStatsGetter interface { type Reader interface { AccountGetter StorageGetter + AbiGetter } type Iterable interface { @@ -82,6 +155,7 @@ type IterableStatsReader interface { type Writer interface { AccountUpdater StorageSetter + AbiSetter } // Read and write account and storage state diff --git a/acm/acmstate/state_cache.go b/acm/acmstate/state_cache.go index ee18d83a1..df5ad3064 100644 --- a/acm/acmstate/state_cache.go +++ b/acm/acmstate/state_cache.go @@ -30,6 +30,7 @@ type Cache struct { name string backend Reader accounts map[crypto.Address]*accountInfo + abis map[AbiHash]*abiInfo readonly bool } @@ -41,6 +42,11 @@ type accountInfo struct { updated bool } +type abiInfo struct { + abi string + updated bool +} + type CacheOption func(*Cache) *Cache // Returns a Cache that wraps an underlying Reader to use on a cache miss, can write to an output Writer @@ -49,6 +55,7 @@ func NewCache(backend Reader, options ...CacheOption) *Cache { cache := &Cache{ backend: backend, accounts: make(map[crypto.Address]*accountInfo), + abis: make(map[AbiHash]*abiInfo), } for _, option := range options { option(cache) @@ -103,6 +110,31 @@ func (cache *Cache) UpdateAccount(account *acm.Account) error { return nil } +func (cache *Cache) GetAbi(abihash AbiHash) (string, error) { + cache.RLock() + defer cache.RUnlock() + + abiInfo, ok := cache.abis[abihash] + if ok { + return abiInfo.abi, nil + } + + return "", nil +} + +func (cache *Cache) SetAbi(abihash AbiHash, abi string) error { + if cache.readonly { + return errors.ErrorCodef(errors.ErrorCodeIllegalWrite, "UpdateAbi called in read-only context on abi hash: %v", abihash) + } + + cache.Lock() + defer cache.Unlock() + + cache.abis[abihash] = &abiInfo{updated: true, abi: abi} + + return nil +} + func (cache *Cache) RemoveAccount(address crypto.Address) error { if cache.readonly { return errors.ErrorCodef(errors.ErrorCodeIllegalWrite, "RemoveAccount called on read-only account %v", address) @@ -249,6 +281,15 @@ func (cache *Cache) Sync(st Writer) error { } accInfo.RUnlock() } + + for abihash, abiInfo := range cache.abis { + if abiInfo.updated { + err := st.SetAbi(abihash, abiInfo.abi) + if err != nil { + return err + } + } + } return nil } diff --git a/cmd/burrow/commands/vent.go b/cmd/burrow/commands/vent.go index 0bbf56613..2df40e90b 100644 --- a/cmd/burrow/commands/vent.go +++ b/cmd/burrow/commands/vent.go @@ -65,7 +65,7 @@ func Vent(output Output) func(cmd *cli.Cmd) { } } - cmd.Spec = "--spec= --abi= [--db-adapter] [--db-url] [--db-schema] " + + cmd.Spec = "--spec= [--abi=] [--db-adapter] [--db-url] [--db-schema] " + "[--blocks] [--txs] [--grpc-addr] [--http-addr] [--log-level] [--announce-every=]" cmd.Action = func() { @@ -82,10 +82,6 @@ func Vent(output Output) func(cmd *cli.Cmd) { if err != nil { output.Fatalf("Spec loader error: %v", err) } - abiSpec, err := abi.LoadPath(cfg.AbiFileOrDirs...) 
- if err != nil { - output.Fatalf("ABI loader error: %v", err) - } var wg sync.WaitGroup @@ -99,7 +95,7 @@ func Vent(output Output) func(cmd *cli.Cmd) { wg.Add(1) go func() { - if err := consumer.Run(projection, abiSpec, true); err != nil { + if err := consumer.Run(projection, true); err != nil { output.Fatalf("Consumer execution error: %v", err) } diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index b625c40c7..11eb945ee 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -2,6 +2,7 @@ package compile import ( "bytes" + "encoding/hex" "encoding/json" "fmt" "io/ioutil" @@ -10,10 +11,14 @@ import ( "path/filepath" "strings" + "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/crypto/sha3" "github.com/hyperledger/burrow/logging" ) +// SolidityInput is a structure for the solidity compiler input json form, see: +// https://solidity.readthedocs.io/en/v0.5.9/using-the-compiler.html#compiler-input-and-output-json-description type SolidityInput struct { Language string `json:"language"` Sources map[string]SolidityInputSource `json:"sources"` @@ -30,11 +35,13 @@ type SolidityInput struct { } `json:"settings"` } +// SolidityInputSource should be set for each solidity input source file in SolidityInput type SolidityInputSource struct { Content string `json:"content,omitempty"` Urls []string `json:"urls,omitempty"` } +// SolidityOutput is a structure for the output of the solidity json output form type SolidityOutput struct { Contracts map[string]map[string]SolidityContract Errors []struct { @@ -46,12 +53,16 @@ type SolidityOutput struct { } } +// SolidityContract is defined for each contract defined in the solidity source code type SolidityContract struct { Abi json.RawMessage Evm struct { Bytecode struct { Object string - Opcodes string + LinkReferences json.RawMessage + } + DeployedBytecode struct { + Object string LinkReferences json.RawMessage } } @@ -61,6 +72,9 @@ type SolidityContract struct { Devdoc json.RawMessage Userdoc json.RawMessage Metadata string + // This is not present in the solidity output, but we add it ourselves + // This is map from CodeHash to ABI + AbiMap map[acmstate.CodeHash]string } type Response struct { @@ -77,6 +91,8 @@ type ResponseItem struct { Contract SolidityContract `json:"binary"` } +// LoadSolidityContract is the opposite of the .Save() method. 
This expects the input file +// to be in the Solidity json output format func LoadSolidityContract(file string) (*SolidityContract, error) { codeB, err := ioutil.ReadFile(file) if err != nil { @@ -90,6 +106,7 @@ func LoadSolidityContract(file string) (*SolidityContract, error) { return &contract, nil } +// Save persists the contract in its json form to disk func (contract *SolidityContract) Save(dir, file string) error { str, err := json.Marshal(*contract) if err != nil { @@ -162,7 +179,7 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string input.Sources[file] = SolidityInputSource{Urls: []string{file}} input.Settings.Optimizer.Enabled = optimize - input.Settings.OutputSelection.File.OutputType = []string{"abi", "evm.bytecode.linkReferences", "metadata", "bin", "devdoc"} + input.Settings.OutputSelection.File.OutputType = []string{"abi", "evm.deployedBytecode.object", "evm.bytecode.linkReferences", "metadata", "bin", "devdoc"} input.Settings.Libraries = make(map[string]map[string]string) input.Settings.Libraries[""] = make(map[string]string) @@ -188,10 +205,16 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string return nil, err } + abis, err := output.getAbis(logger) + if err != nil { + return nil, err + } + respItemArray := make([]ResponseItem, 0) for f, s := range output.Contracts { for contract, item := range s { + item.AbiMap = abis respItem := ResponseItem{ Filename: f, Objectname: objectName(contract), @@ -318,3 +341,34 @@ func PrintResponse(resp Response, cli bool, logger *logging.Logger) { } } } + +// GetAbis get the CodeHashes + Abis for the generated Code. So, we have a map for all the possible contracts codes hashes to abis +func (sol *SolidityOutput) getAbis(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { + res := make(map[acmstate.CodeHash]string) + for filename, src := range sol.Contracts { + for name, contract := range src { + if contract.Evm.DeployedBytecode.Object == "" { + continue + } + + runtime, err := hex.DecodeString(contract.Evm.DeployedBytecode.Object) + if err != nil { + return nil, err + } + + hash := sha3.NewKeccak256() + hash.Write(runtime) + var codehash acmstate.CodeHash + copy(codehash[:], hash.Sum(nil)) + logger.TraceMsg("Found ABI", + "contract", name, + "file", filename, + "code", fmt.Sprintf("%X", runtime), + "code hash", fmt.Sprintf("%X", codehash), + "abi", string(contract.Abi)) + res[codehash] = string(contract.Abi) + } + } + + return res, nil +} diff --git a/deploy/compile/compilers_test.go b/deploy/compile/compilers_test.go index 33f25d3e2..07cf611db 100644 --- a/deploy/compile/compilers_test.go +++ b/deploy/compile/compilers_test.go @@ -117,7 +117,9 @@ func TestLocalSingle(t *testing.T) { for i := range resp.Objects { resp.Objects[i].Contract.Metadata = "" resp.Objects[i].Contract.Devdoc = nil - resp.Objects[i].Contract.Evm.Bytecode.Opcodes = "" + resp.Objects[i].Contract.AbiMap = nil + resp.Objects[i].Contract.Evm.DeployedBytecode.Object = "" + resp.Objects[i].Contract.Evm.DeployedBytecode.LinkReferences = nil } assert.Equal(t, expectedResponse, resp) } diff --git a/deploy/compile/solgo/main.go b/deploy/compile/solgo/main.go index ea747c646..d666763fa 100644 --- a/deploy/compile/solgo/main.go +++ b/deploy/compile/solgo/main.go @@ -58,6 +58,10 @@ func main() { } f.WriteString(fmt.Sprintf("var Bytecode_%s = hex.MustDecodeString(\"%s\")\n", c.Objectname, code)) + if c.Contract.Evm.DeployedBytecode.Object != "" { + f.WriteString(fmt.Sprintf("var DeployedBytecode_%s = 
hex.MustDecodeString(\"%s\")\n",
+			c.Objectname, c.Contract.Evm.DeployedBytecode.Object))
+	}
 	f.WriteString(fmt.Sprintf("var Abi_%s = []byte(`%s`)\n", c.Objectname, c.Contract.Abi))
 }
diff --git a/deploy/def/client.go b/deploy/def/client.go
index ee6963fa8..1078b4970 100644
--- a/deploy/def/client.go
+++ b/deploy/def/client.go
@@ -12,6 +12,7 @@ import (
 	hex "github.com/tmthrgd/go-hex"
 
 	"github.com/hyperledger/burrow/acm"
+	"github.com/hyperledger/burrow/acm/acmstate"
 	"github.com/hyperledger/burrow/binary"
 	"github.com/hyperledger/burrow/crypto"
 	"github.com/hyperledger/burrow/execution/evm/abi"
@@ -140,6 +141,22 @@ func (c *Client) GetAccount(address crypto.Address) (*acm.Account, error) {
 	return c.queryClient.GetAccount(ctx, &rpcquery.GetAccountParam{Address: address})
 }
 
+func (c *Client) GetAbiForAccount(address crypto.Address) (string, error) {
+	ctx, cancel := context.WithTimeout(context.Background(), c.timeout)
+	defer cancel()
+	abi, err := c.queryClient.GetAbi(ctx, &rpcquery.GetAbiParam{Address: address})
+	if err != nil {
+		return "", err
+	}
+
+	return abi.Abi, nil
+}
+
+func (c *Client) GetAbi(abihash acmstate.AbiHash) (string, error) {
+	panic("not implemented")
+	return "", nil
+}
+
 func (c *Client) GetStorage(address crypto.Address, key binary.Word256) ([]byte, error) {
 	val, err := c.queryClient.GetStorage(context.Background(), &rpcquery.GetStorageParam{Address: address, Key: key})
 	if err != nil {
@@ -429,6 +446,7 @@ type CallArg struct {
 	Gas      string
 	Data     string
 	WASM     string
+	Abis     map[acmstate.CodeHash]string
 }
 
 func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, error) {
@@ -445,12 +463,13 @@ func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, er
 	}
 	var contractAddress *crypto.Address
 	if arg.Address != "" {
-		address, err := c.GetKeyAddress(arg.Address, logger)
+		address, err := crypto.AddressFromHexString(arg.Address)
 		if err != nil {
 			return nil, err
 		}
 		contractAddress = &address
 	}
+
 	fee, err := c.ParseUint64(arg.Fee)
 	if err != nil {
 		return nil, err
@@ -463,10 +482,20 @@ func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, er
 	if err != nil {
 		return nil, err
 	}
+
 	wasm, err := hex.DecodeString(arg.WASM)
 	if err != nil {
 		return nil, err
 	}
+
+	abis := make([]*payload.Abis, 0)
+	for codehash, abi := range arg.Abis {
+		abis = append(abis, &payload.Abis{
+			CodeHash: codehash.Bytes(),
+			Abi:      abi,
+		})
+	}
+
 	tx := &payload.CallTx{
 		Input:    input,
 		Address:  contractAddress,
@@ -474,7 +503,9 @@ func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, er
 		WASM:     wasm,
 		Fee:      fee,
 		GasLimit: gas,
+		Abis:     abis,
 	}
+
 	return tx, nil
 }
diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go
index 861aa22a7..53a250d09 100644
--- a/deploy/jobs/jobs_contracts.go
+++ b/deploy/jobs/jobs_contracts.go
@@ -6,6 +6,7 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/hyperledger/burrow/acm/acmstate"
 	"github.com/hyperledger/burrow/execution/errors"
 	"github.com/hyperledger/burrow/execution/exec"
 	"github.com/hyperledger/burrow/logging"
@@ -163,7 +164,7 @@ func FormulateDeployJob(deploy *def.Deploy, do *def.DeployArgs, deployScript *de
 			contractCode = contractCode + callData
 		}
 
-		tx, err := deployTx(client, deploy, contractName, string(contractCode), "", logger)
+		tx, err := deployTx(client, deploy, contractName, string(contractCode), "", nil, logger)
 		if err != nil {
 			return nil, nil, fmt.Errorf("could not deploy binary contract: %v", err)
 		}
@@ -387,10 +388,10 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook
 		}
 	}
 
-	return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, logger)
+	return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, contract.AbiMap, logger)
 }
 
-func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm string, logger *logging.Logger) (*payload.CallTx, error) {
+func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm string, abis map[acmstate.CodeHash]string, logger *logging.Logger) (*payload.CallTx, error) {
 	// Deploy contract
 	logger.TraceMsg("Deploying Contract",
 		"contract", contractName,
@@ -407,6 +408,7 @@ func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm s
 		Data:     data,
 		WASM:     wasm,
 		Sequence: deploy.Sequence,
+		Abis:     abis,
 	}, logger)
 }
 
@@ -424,25 +426,41 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play
 	call.Fee = FirstOf(call.Fee, do.DefaultFee)
 	call.Gas = FirstOf(call.Gas, do.DefaultGas)
 
+	// Get address (possibly via key)
+	address, err := client.GetKeyAddress(call.Destination, logger)
+	if err != nil {
+		return nil, err
+	}
+
 	// formulate call
 	var packedBytes []byte
 	var funcSpec *abi.FunctionSpec
-	logger.TraceMsg("Looking for ABI in", "path", deployScript.BinPath, "bin", call.Bin, "dest", call.Destination)
-	if call.Bin != "" {
-		packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Bin, deployScript.BinPath, call.Function, logger, callDataArray...)
-		callData = hex.EncodeToString(packedBytes)
-	}
-	if call.Bin == "" || err != nil {
-		packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Destination, deployScript.BinPath, call.Function, logger, callDataArray...)
-		callData = hex.EncodeToString(packedBytes)
-	}
-	if err != nil {
-		if call.Function == "()" {
-			logger.InfoMsg("Calling the fallback function")
-		} else {
-			err = util.ABIErrorHandler(err, call, nil, logger)
+
+	abiJSON, err := client.GetAbiForAccount(address)
+	if abiJSON != "" && err == nil {
+		packedBytes, funcSpec, err = abi.EncodeFunctionCall(abiJSON, call.Function, logger, callDataArray...)
+		if err != nil {
+			return
 		}
+		callData = hex.EncodeToString(packedBytes)
+	} else {
+		logger.TraceMsg("Looking for ABI in", "path", deployScript.BinPath, "bin", call.Bin, "dest", call.Destination)
+		if call.Bin != "" {
+			packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Bin, deployScript.BinPath, call.Function, logger, callDataArray...)
+			callData = hex.EncodeToString(packedBytes)
+		}
+		if call.Bin == "" || err != nil {
+			packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Destination, deployScript.BinPath, call.Function, logger, callDataArray...)
+			callData = hex.EncodeToString(packedBytes)
+		}
+		if err != nil {
+			if call.Function == "()" {
+				logger.InfoMsg("Calling the fallback function")
+			} else {
+				err = util.ABIErrorHandler(err, call, nil, logger)
+				return
+			}
+		}
 	}
 
 	if funcSpec.Constant {
@@ -450,14 +468,14 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play
 	}
 
 	logger.TraceMsg("Calling",
-		"destination", call.Destination,
+		"destination", address.String(),
 		"function", call.Function,
 		"data", callData)
 
 	return client.Call(&def.CallArg{
 		Input:   call.Source,
 		Amount:  call.Amount,
-		Address: call.Destination,
+		Address: address.String(),
 		Fee:     call.Fee,
 		Gas:     call.Gas,
 		Data:    callData,
diff --git a/dump/dump.go b/dump/dump.go
index 61fc10a30..bcaa459e5 100644
--- a/dump/dump.go
+++ b/dump/dump.go
@@ -7,6 +7,7 @@ import (
 	"time"
 
 	"github.com/hyperledger/burrow/acm"
+	"github.com/hyperledger/burrow/acm/acmstate"
 	"github.com/hyperledger/burrow/binary"
 	"github.com/hyperledger/burrow/encoding"
 	"github.com/hyperledger/burrow/execution/exec"
@@ -89,6 +90,17 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt
 			},
 		}
 
+		for _, m := range acc.MetaMap {
+			var abihash acmstate.AbiHash
+			copy(abihash[:], m.AbiHash.Bytes())
+			abi, err := ds.state.GetAbi(abihash)
+			if err != nil {
+				return err
+			}
+			m.Abi = abi
+			m.AbiHash = []byte{}
+		}
+
 		var storageBytes int
 		err = st.IterateStorage(acc.Address, func(key binary.Word256, value []byte) error {
 			if storageBytes > thresholdAccountStorageBytesPerRow {
diff --git a/dump/load.go b/dump/load.go
index f42178f0a..5fd990e4e 100644
--- a/dump/load.go
+++ b/dump/load.go
@@ -6,6 +6,7 @@ import (
 	"io"
 
 	"github.com/hyperledger/burrow/acm"
+	"github.com/hyperledger/burrow/acm/acmstate"
 	"github.com/hyperledger/burrow/binary"
 	"github.com/hyperledger/burrow/execution/exec"
 	"github.com/hyperledger/burrow/execution/state"
@@ -30,6 +31,15 @@ func Load(source Source, st *state.State) error {
 
 		if row.Account != nil {
 			if row.Account.Address != acm.GlobalPermissionsAddress {
+				for _, m := range row.Account.MetaMap {
+					abihash := acmstate.GetAbiHash(m.Abi)
+					err = s.SetAbi(abihash, m.Abi)
+					if err != nil {
+						return err
+					}
+					m.AbiHash = abihash.Bytes()
+					m.Abi = ""
+				}
 				err := s.UpdateAccount(row.Account)
 				if err != nil {
 					return err
diff --git a/execution/contexts/call_context.go b/execution/contexts/call_context.go
index dfd9ff328..7fe1f8efa 100644
--- a/execution/contexts/call_context.go
+++ b/execution/contexts/call_context.go
@@ -151,6 +151,21 @@ func (ctx *CallContext) Deliver(inAcc, outAcc *acm.Account, value uint64) error
 			ctx.Logger.TraceMsg("Creating new contract",
 				"contract_address", callee,
 				"init_code", code)
+
+			// store abis
+			if len(ctx.tx.Abis) > 0 {
+				metamap := make([]*acm.MetaMap, len(ctx.tx.Abis))
+				for i, abi := range ctx.tx.Abis {
+					abihash := acmstate.GetAbiHash(abi.Abi)
+					metamap[i] = &acm.MetaMap{
+						AbiHash:  abihash[:],
+						CodeHash: abi.CodeHash,
+					}
+					txCache.SetAbi(abihash, abi.Abi)
+				}
+
+				txCache.UpdateMetaMap(callee, metamap)
+			}
 		} else {
 			if outAcc == nil || (len(outAcc.EVMCode) == 0 && len(outAcc.WASMCode) == 0) {
 				// if you call an account that doesn't exist
@@ -225,7 +240,7 @@ func (ctx *CallContext) Deliver(inAcc, outAcc *acm.Account, value uint64) error
 	} else {
 		ctx.Logger.TraceMsg("Successful execution")
 		if createContract {
-			txCache.InitCode(callee, ret)
+			txCache.InitCode(callee, nil, ret)
 		}
 		err := txCache.Sync()
 		if err != nil {
diff --git a/execution/errors/errors.go b/execution/errors/errors.go
index 4ee23f821..7c71f580e 100644
--- a/execution/errors/errors.go
+++ b/execution/errors/errors.go
@@ -61,6 +61,7 @@ const (
 	ErrorCodeBlockNumberOutOfRange
 	ErrorCodeAlreadyVoted
 	ErrorCodeUnresolvedSymbols
+	ErrorCodeInvalidContractCode
 )
 
 func (c Code) ErrorCode() Code {
@@ -155,6 +156,8 @@ func (c Code) String() string {
 		return "vote already registered for this address"
 	case ErrorCodeUnresolvedSymbols:
 		return "code has unresolved symbols"
+	case ErrorCodeInvalidContractCode:
+		return "contract being created with unexpected code"
 	default:
 		return "Unknown error"
 	}
diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go
index 5d1f770b0..f7c30b1a8 100644
--- a/execution/evm/abi/abi.go
+++ b/execution/evm/abi/abi.go
@@ -10,6 +10,8 @@ import (
 	"strconv"
 	"strings"
 
+	hex "github.com/tmthrgd/go-hex"
+
 	burrow_binary "github.com/hyperledger/burrow/binary"
 	"github.com/hyperledger/burrow/crypto"
 	"github.com/hyperledger/burrow/crypto/sha3"
@@ -239,6 +241,10 @@ const EventIDSize = 32
 
 type EventID [EventIDSize]byte
 
+func (e EventID) String() string {
+	return hex.EncodeUpperToString(e[:])
+}
+
 type FunctionSpec struct {
 	FunctionID FunctionID
 	Constant   bool
diff --git a/execution/evm/fake_app_state.go b/execution/evm/fake_app_state.go
index 2e3cbddc9..a84781270 100644
--- a/execution/evm/fake_app_state.go
+++ b/execution/evm/fake_app_state.go
@@ -28,6 +28,7 @@ import (
 type FakeAppState struct {
 	accounts map[crypto.Address]*acm.Account
 	storage  map[string][]byte
+	abis     map[acmstate.AbiHash]string
 }
 
 var _ acmstate.ReaderWriter = &FakeAppState{}
@@ -37,6 +38,15 @@ func (fas *FakeAppState) GetAccount(addr crypto.Address) (*acm.Account, error) {
 	return account, nil
 }
 
+func (fas *FakeAppState) GetAbi(abihash acmstate.AbiHash) (string, error) {
+	return fas.abis[abihash], nil
+}
+
+func (fas *FakeAppState) SetAbi(abihash acmstate.AbiHash, abi string) error {
+	fas.abis[abihash] = abi
+	return nil
+}
+
 func (fas *FakeAppState) UpdateAccount(account *acm.Account) error {
 	fas.accounts[account.GetAddress()] = account
 	return nil
diff --git a/execution/evm/state.go b/execution/evm/state.go
index e23de381d..1f50856d2 100644
--- a/execution/evm/state.go
+++ b/execution/evm/state.go
@@ -1,12 +1,14 @@
 package evm
 
 import (
+	"bytes"
 	"fmt"
 
 	"github.com/hyperledger/burrow/acm"
 	"github.com/hyperledger/burrow/acm/acmstate"
 	"github.com/hyperledger/burrow/binary"
 	"github.com/hyperledger/burrow/crypto"
+	"github.com/hyperledger/burrow/crypto/sha3"
 	"github.com/hyperledger/burrow/execution/errors"
 	"github.com/hyperledger/burrow/permission"
 )
@@ -29,6 +31,9 @@ type Reader interface {
 	GetPermissions(address crypto.Address) permission.AccountPermissions
 	GetEVMCode(address crypto.Address) acm.Bytecode
 	GetWASMCode(address crypto.Address) acm.Bytecode
+	GetCodeHash(address crypto.Address) []byte
+	GetForebear(address crypto.Address) crypto.Address
+	GetSequence(address crypto.Address) uint64
 	Exists(address crypto.Address) bool
 	// GetBlockHash returns hash of the specific block
 	GetBlockHash(blockNumber uint64) (binary.Word256, error)
@@ -36,8 +41,8 @@
 
 type Writer interface {
 	CreateAccount(address crypto.Address)
-	InitCode(address crypto.Address, code []byte)
 	InitWASMCode(address crypto.Address, code []byte)
+	InitCode(address crypto.Address, forebear *crypto.Address, code []byte)
 	RemoveAccount(address crypto.Address)
 	SetStorage(address crypto.Address, key binary.Word256, value []byte)
 	AddToBalance(address crypto.Address, amount uint64)
@@ -150,6 +155,14 @@ func (st *State) GetWASMCode(address crypto.Address) acm.Bytecode {
 	return acc.WASMCode
 }
 
+func (st *State) GetCodeHash(address crypto.Address) []byte {
+	acc := st.account(address)
+	if acc == nil || len(acc.CodeHash) == 0 {
+		return nil
+	}
+	return acc.CodeHash
+}
+
 func (st *State) Exists(address crypto.Address) bool {
 	acc, err := st.cache.GetAccount(address)
 	if err != nil {
@@ -162,6 +175,22 @@ func (st *State) Exists(address crypto.Address) bool {
 	return true
 }
 
+func (st *State) GetSequence(address crypto.Address) uint64 {
+	acc := st.account(address)
+	if acc == nil {
+		return 0
+	}
+	return acc.Sequence
+}
+
+func (st *State) GetForebear(address crypto.Address) crypto.Address {
+	acc := st.account(address)
+	if acc != nil && acc.Forebear != nil {
+		return *acc.Forebear
+	}
+	return address
+}
+
 // Writer
 
 func (st *State) CreateAccount(address crypto.Address) {
@@ -173,7 +202,7 @@ func (st *State) CreateAccount(address crypto.Address) {
 	st.updateAccount(&acm.Account{Address: address})
 }
 
-func (st *State) InitCode(address crypto.Address, code []byte) {
+func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code []byte) {
 	acc := st.mustAccount(address)
 	if acc == nil {
 		st.PushError(errors.ErrorCodef(errors.ErrorCodeInvalidAddress,
@@ -185,7 +214,48 @@
 			"tried to initialise code for a contract that already exists: %v", address))
 		return
 	}
+
 	acc.EVMCode = code
+
+	// keccak256 hash of a contract's code
+	hash := sha3.NewKeccak256()
+	hash.Write(code)
+	codehash := hash.Sum(nil)
+
+	forebear := &address
+	metamap := acc.MetaMap
+	if parent != nil {
+		// find our ancestor, i.e. the initial contract that was deployed, from which this contract descends
+		ancestor := st.mustAccount(*parent)
+		if ancestor.Forebear != nil {
+			ancestor = st.mustAccount(*ancestor.Forebear)
+			forebear = ancestor.Forebear
+		} else {
+			forebear = parent
+		}
+		metamap = ancestor.MetaMap
+	}
+
+	// If we have a list of ABIs for this contract, we also know what contract code it is allowed to create
+	// For compatibility with older contracts, allow any contract to be created if we have no mappings
+	if metamap != nil && len(metamap) > 0 {
+		found := false
+		for _, m := range metamap {
+			if bytes.Equal(codehash, m.CodeHash) {
+				found = true
+				break
+			}
+		}
+
+		if !found {
+			st.PushError(errors.ErrorCodeInvalidContractCode)
+			return
+		}
+	}
+
+	acc.CodeHash = codehash
+	acc.Forebear = forebear
+
 	st.updateAccount(acc)
 }
@@ -201,10 +271,30 @@ func (st *State) InitWASMCode(address crypto.Address, code []byte) {
 			"tried to initialise code for a contract that already exists: %v", address))
 		return
 	}
+
 	acc.WASMCode = code
+	// keccak256 hash of a contract's code
+	hash := sha3.NewKeccak256()
+	hash.Write(code)
+	acc.CodeHash = hash.Sum(nil)
 	st.updateAccount(acc)
 }
 
+func (st *State) UpdateMetaMap(address crypto.Address, mapping []*acm.MetaMap) {
+	acc := st.mustAccount(address)
+	if acc == nil {
+		st.PushError(errors.ErrorCodef(errors.ErrorCodeInvalidAddress,
+			"tried to initialise code for an account that does not exist: %v", address))
+		return
+	}
+	acc.MetaMap = mapping
+	st.updateAccount(acc)
+}
+
+func (st *State) SetAbi(abihash acmstate.AbiHash, abi string) error {
+	return st.cache.SetAbi(abihash, abi)
+}
+
 func (st *State) RemoveAccount(address crypto.Address) {
 	if !st.Exists(address) {
 		st.PushError(errors.ErrorCodef(errors.ErrorCodeDuplicateAddress,
diff --git a/execution/evm/state_test.go b/execution/evm/state_test.go
index dfe7b2780..dd2035c7c 100644
--- a/execution/evm/state_test.go
+++ b/execution/evm/state_test.go
@@ -35,7 +35,7 @@ func TestState_CreateAccount(t *testing.T) {
 	st = NewState(newAppState(), blockHashGetter)
 	st.CreateAccount(address)
 	require.Nil(t, st.Error())
-	st.InitCode(address, []byte{1, 2, 3})
+	st.InitCode(address, nil, []byte{1, 2, 3})
 	require.Nil(t, st.Error())
 }
diff --git a/execution/evm/vm.go b/execution/evm/vm.go
index 862b968d5..23ce1898e 100644
--- a/execution/evm/vm.go
+++ b/execution/evm/vm.go
@@ -621,17 +621,15 @@ func (vm *VM) execute(callState Interface, eventSink EventSink, caller, callee c
 				// In case the account does not exist 0 is pushed to the stack.
 				stack.PushU64(0)
 			} else {
-				code := callState.GetEVMCode(address)
-				if code == nil {
-					// In case the account does not have code the keccak256 hash of empty data
-					code = acm.Bytecode{}
-				}
-
 				// keccak256 hash of a contract's code
 				var extcodehash Word256
-				hash := sha3.NewKeccak256()
-				hash.Write(code)
-				copy(extcodehash[:], hash.Sum(nil))
+				codehash := callState.GetCodeHash(address)
+				if codehash != nil {
+					copy(extcodehash[:], codehash)
+				} else {
+					hash := sha3.NewKeccak256()
+					copy(extcodehash[:], hash.Sum(nil))
+				}
 				stack.Push(extcodehash)
 			}
 
@@ -815,7 +813,8 @@ func (vm *VM) execute(callState Interface, eventSink EventSink, caller, callee c
 				returnData = ret
 			} else {
 				// Update the account with its initialised contract code
-				childCallState.InitCode(newAccount, ret)
+				forebear := callState.GetForebear(callee)
+				childCallState.InitCode(newAccount, &forebear, ret)
 				callState.PushError(childCallState.Sync())
 				stack.PushAddress(newAccount)
 			}
diff --git a/execution/evm/vm_test.go b/execution/evm/vm_test.go
index 8a1af4a41..4cb1fdfb2 100644
--- a/execution/evm/vm_test.go
+++ b/execution/evm/vm_test.go
@@ -94,7 +94,7 @@ func newAccount(st Interface, name string) crypto.Address {
 func makeAccountWithCode(st Interface, name string, code []byte) crypto.Address {
 	address := newAddress(name)
 	st.CreateAccount(address)
-	st.InitCode(address, code)
+	st.InitCode(address, nil, code)
 	st.AddToBalance(address, 9999999)
 	return address
 }
@@ -999,7 +999,7 @@ func TestMsgSender(t *testing.T) {
 
 	// Not needed for this test (since contract code is passed as argument to vm), but this is what an execution
 	// framework must do
-	cache.InitCode(account2, contractCode)
+	cache.InitCode(account2, nil, contractCode)
 
 	// Input is the function hash of `get()`
 	input := hex.MustDecodeString("6d4ce63c")
@@ -1429,8 +1429,8 @@ func TestCallStackOverflow(t *testing.T) {
 
 	contractCode, err := ourVm.Call(cache, NewNoopEventSink(), account1, account2, code, code, 0, &gas)
 	require.NoError(t, err)
-	cache.InitCode(account1, contractCode)
-	cache.InitCode(account2, contractCode)
+	cache.InitCode(account1, nil, contractCode)
+	cache.InitCode(account2, nil, contractCode)
 
 	// keccak256 hash of 'callMeBack()'
 	input, err := hex.DecodeString("692c3b7c")
diff --git a/execution/execution.go b/execution/execution.go
index 0be5f783c..973190319 100644
--- a/execution/execution.go
+++ b/execution/execution.go
@@ -404,6 +404,12 @@ func (exe *executor) GetAccount(address crypto.Address) (*acm.Account, error) {
 	return exe.stateCache.GetAccount(address)
 }
 
+func (exe *executor) GetAbi(abihash acmstate.AbiHash) (string, error) {
+	exe.RLock()
+	defer exe.RUnlock()
+	return exe.stateCache.GetAbi(abihash)
+}
+
 // Storage
 func (exe *executor) GetStorage(address crypto.Address, key binary.Word256) ([]byte, error) {
 	exe.RLock()
diff --git a/execution/solidity/abi_tester.sol b/execution/solidity/abi_tester.sol
new file mode 100644
index 000000000..2e66e62bf
--- /dev/null
+++
b/execution/solidity/abi_tester.sol @@ -0,0 +1,17 @@ +pragma solidity ^0.5.4; + +contract A { + function createB() public returns (B) { + return new B(); + } +} + +contract B { + function createC() public returns (C) { + return new C(); + } +} + +contract C { + uint public this_is_c; +} \ No newline at end of file diff --git a/execution/solidity/abi_tester.sol.go b/execution/solidity/abi_tester.sol.go new file mode 100644 index 000000000..dd51c61ec --- /dev/null +++ b/execution/solidity/abi_tester.sol.go @@ -0,0 +1,13 @@ +package solidity + +import hex "github.com/tmthrgd/go-hex" + +var Bytecode_A = hex.MustDecodeString("608060405234801561001057600080fd5b506102d5806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063dbdc275d1461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b6101d5806100d58339019056fe608060405234801561001057600080fd5b506101b5806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063eadf91e31461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b60b6806100d48339019056fe6080604052348015600f57600080fd5b5060988061001e6000396000f3fe6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029a165627a7a7230582078d4ba52f8aab390e3df659d41312bbfda3a9db9681cbe604adc461c44b26c8c0029a165627a7a723058205069226ef56be4806b4221a0e82bb89b45e22e56d6e62696cf4bb606c77156390029") +var DeployedBytecode_A = 
hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063dbdc275d1461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b6101d5806100d58339019056fe608060405234801561001057600080fd5b506101b5806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063eadf91e31461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b60b6806100d48339019056fe6080604052348015600f57600080fd5b5060988061001e6000396000f3fe6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029a165627a7a7230582078d4ba52f8aab390e3df659d41312bbfda3a9db9681cbe604adc461c44b26c8c0029a165627a7a723058205069226ef56be4806b4221a0e82bb89b45e22e56d6e62696cf4bb606c77156390029") +var Abi_A = []byte(`[{"constant":false,"inputs":[],"name":"createB","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"nonpayable","type":"function"}]`) +var Bytecode_B = hex.MustDecodeString("608060405234801561001057600080fd5b506101b5806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063eadf91e31461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b60b6806100d48339019056fe6080604052348015600f57600080fd5b5060988061001e6000396000f3fe6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029a165627a7a7230582078d4ba52f8aab390e3df659d41312bbfda3a9db9681cbe604adc461c44b26c8c0029") +var DeployedBytecode_B = 
hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063eadf91e31461004d575b600080fd5b610055610097565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006040516100a5906100c7565b604051809103906000f0801580156100c1573d6000803e3d6000fd5b50905090565b60b6806100d48339019056fe6080604052348015600f57600080fd5b5060988061001e6000396000f3fe6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029a165627a7a7230582078d4ba52f8aab390e3df659d41312bbfda3a9db9681cbe604adc461c44b26c8c0029") +var Abi_B = []byte(`[{"constant":false,"inputs":[],"name":"createC","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"nonpayable","type":"function"}]`) +var Bytecode_C = hex.MustDecodeString("6080604052348015600f57600080fd5b5060988061001e6000396000f3fe6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029") +var DeployedBytecode_C = hex.MustDecodeString("6080604052348015600f57600080fd5b50600436106045576000357c010000000000000000000000000000000000000000000000000000000090048063ce15daec14604a575b600080fd5b60506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820a46c6c19158be05dcccde6ca95b6d92d08b9a1a7b3c5e8f84a26a49628c351600029") +var Abi_C = []byte(`[{"constant":true,"inputs":[],"name":"this_is_c","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"}]`) diff --git a/execution/solidity/event_emitter.sol.go b/execution/solidity/event_emitter.sol.go index 3fc42b7f9..d07a98c37 100644 --- a/execution/solidity/event_emitter.sol.go +++ b/execution/solidity/event_emitter.sol.go @@ -3,4 +3,5 @@ package solidity import hex "github.com/tmthrgd/go-hex" var Bytecode_EventEmitter = 
hex.MustDecodeString("608060405234801561001057600080fd5b50610264806100206000396000f3fe608060405234801561001057600080fd5b5060043610610053576000357c010000000000000000000000000000000000000000000000000000000090048063508ed79914610058578063e8e49a7114610062575b600080fd5b61006061006c565b005b61006a61012b565b005b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f2d989eca8871e173291c8e287f34adebef09917027f9e904c22ce459a2cff0ca6001602a6040518083151515158152602001806020018360000b8152602001828103825260518152602001806101e860519139606001935050505060405180910390a4565b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f20aec2a3bcd8050a3a9e852e9d424805bad75ba33b57077464c73ae98d0582696001602a604051808315151515815260200180602001838152602001828103825260518152602001806101e860519139606001935050505060405180910390a456fe446f6e617564616d7066736368696666666168727473656c656b7472697a6974c3a474656e686175707462657472696562737765726b626175756e7465726265616d74656e676573656c6c736368616674a165627a7a72305820b11ba06d97e4448c1d8bf72e259b4d14daf0355f8aae146b40a708406fe037dd0029") +var DeployedBytecode_EventEmitter = hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610053576000357c010000000000000000000000000000000000000000000000000000000090048063508ed79914610058578063e8e49a7114610062575b600080fd5b61006061006c565b005b61006a61012b565b005b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f2d989eca8871e173291c8e287f34adebef09917027f9e904c22ce459a2cff0ca6001602a6040518083151515158152602001806020018360000b8152602001828103825260518152602001806101e860519139606001935050505060405180910390a4565b60405180807f68617368000000000000000000000000000000000000000000000000000000008152506004019050604051809103902060667f446f776e736965210000000000000000000000000000000000000000000000007f20aec2a3bcd8050a3a9e852e9d424805bad75ba33b57077464c73ae98d0582696001602a604051808315151515815260200180602001838152602001828103825260518152602001806101e860519139606001935050505060405180910390a456fe446f6e617564616d7066736368696666666168727473656c656b7472697a6974c3a474656e686175707462657472696562737765726b626175756e7465726265616d74656e676573656c6c736368616674a165627a7a72305820b11ba06d97e4448c1d8bf72e259b4d14daf0355f8aae146b40a708406fe037dd0029") var Abi_EventEmitter = 
[]byte(`[{"constant":false,"inputs":[],"name":"EmitTwo","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"EmitOne","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":false,"name":"german","type":"string"},{"indexed":true,"name":"newDepth","type":"int64"},{"indexed":false,"name":"bignum","type":"int256"},{"indexed":true,"name":"hash","type":"string"}],"name":"ManyTypes","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":false,"name":"german","type":"string"},{"indexed":true,"name":"newDepth","type":"int128"},{"indexed":false,"name":"bignum","type":"int8"},{"indexed":true,"name":"hash","type":"string"}],"name":"ManyTypes2","type":"event"}]`) diff --git a/execution/solidity/revert.sol.go b/execution/solidity/revert.sol.go index 7801c12c7..bd6339c07 100644 --- a/execution/solidity/revert.sol.go +++ b/execution/solidity/revert.sol.go @@ -3,4 +3,5 @@ package solidity import hex "github.com/tmthrgd/go-hex" var Bytecode_Revert = hex.MustDecodeString("608060405234801561001057600080fd5b50610216806100206000396000f3fe608060405234801561001057600080fd5b5060043610610053576000357c0100000000000000000000000000000000000000000000000000000000900480635b202afb146100585780636037b04c1461008c575b600080fd5b61008a6004803603602081101561006e57600080fd5b81019080803563ffffffff169060200190929190505050610096565b005b6100946101e5565b005b60008163ffffffff161415610113576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252600f8152602001807f492068617665207265766572746564000000000000000000000000000000000081525060200191505060405180910390fd5b8080600190039150508063ffffffff167ff7f0feb5b4ac5276c55faa8936d962de931ebe8333a2efdc0506878de3979ba960405160405180910390a23073ffffffffffffffffffffffffffffffffffffffff16635b202afb826040518263ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401808263ffffffff1663ffffffff168152602001915050600060405180830381600087803b1580156101ca57600080fd5b505af11580156101de573d6000803e3d6000fd5b5050505050565b600080fdfea165627a7a7230582021dc462608e362c1ab12bfca9c326514ec64ff6bbe411b6a3b1c41e20a82a8ac0029") +var DeployedBytecode_Revert = 
hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610053576000357c0100000000000000000000000000000000000000000000000000000000900480635b202afb146100585780636037b04c1461008c575b600080fd5b61008a6004803603602081101561006e57600080fd5b81019080803563ffffffff169060200190929190505050610096565b005b6100946101e5565b005b60008163ffffffff161415610113576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252600f8152602001807f492068617665207265766572746564000000000000000000000000000000000081525060200191505060405180910390fd5b8080600190039150508063ffffffff167ff7f0feb5b4ac5276c55faa8936d962de931ebe8333a2efdc0506878de3979ba960405160405180910390a23073ffffffffffffffffffffffffffffffffffffffff16635b202afb826040518263ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401808263ffffffff1663ffffffff168152602001915050600060405180830381600087803b1580156101ca57600080fd5b505af11580156101de573d6000803e3d6000fd5b5050505050565b600080fdfea165627a7a7230582021dc462608e362c1ab12bfca9c326514ec64ff6bbe411b6a3b1c41e20a82a8ac0029") var Abi_Revert = []byte(`[{"constant":false,"inputs":[{"name":"i","type":"uint32"}],"name":"RevertAt","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"RevertNoReason","outputs":[],"payable":false,"stateMutability":"pure","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"i","type":"uint32"}],"name":"NotReverting","type":"event"}]`) diff --git a/execution/solidity/strange_loop.sol.go b/execution/solidity/strange_loop.sol.go index c1c56b8f9..e9a9f9ee9 100644 --- a/execution/solidity/strange_loop.sol.go +++ b/execution/solidity/strange_loop.sol.go @@ -3,4 +3,5 @@ package solidity import hex "github.com/tmthrgd/go-hex" var Bytecode_StrangeLoop = 
hex.MustDecodeString("60806040526017600055602260015560116002556001600360006101000a81548160ff02191690831515021790555034801561003a57600080fd5b506103d88061004a6000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063ebb384dd1461004d575b600080fd5b61005561006b565b6040518082815260200191505060405180910390f35b60006002549050600360009054906101000a900460ff161561026557600154600254121561019f576002600081548092919060010191905055506002547f55707369652100000000000000000000000000000000000000000000000000007f3ff0b1eac80ecf8e93d1a2d7982a9230f8ea7693439fd548687b08a5e292b09760405160405180910390a360025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561015e57600080fd5b505af1158015610172573d6000803e3d6000fd5b505050506040513d602081101561018857600080fd5b810190808051906020019092919050505050610260565b6000600360006101000a81548160ff02191690831515021790555060025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561022357600080fd5b505af1158015610237573d6000803e3d6000fd5b505050506040513d602081101561024d57600080fd5b8101908080519060200190929190505050505b6103a8565b600054600254131561037e57600260008154809291906001900391905055506002547f446f776e736965210000000000000000000000000000000000000000000000007f3ff0b1eac80ecf8e93d1a2d7982a9230f8ea7693439fd548687b08a5e292b09760405160405180910390a360025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561033d57600080fd5b505af1158015610351573d6000803e3d6000fd5b505050506040513d602081101561036757600080fd5b8101908080519060200190929190505050506103a7565b6001600360006101000a81548160ff0219169083151502179055506002549050600090506103a9565b5b5b9056fea165627a7a7230582077bccbcca142d5874668a8185b0909f13f771e47714a970862df714f970211aa0029") +var DeployedBytecode_StrangeLoop = 
hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063ebb384dd1461004d575b600080fd5b61005561006b565b6040518082815260200191505060405180910390f35b60006002549050600360009054906101000a900460ff161561026557600154600254121561019f576002600081548092919060010191905055506002547f55707369652100000000000000000000000000000000000000000000000000007f3ff0b1eac80ecf8e93d1a2d7982a9230f8ea7693439fd548687b08a5e292b09760405160405180910390a360025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561015e57600080fd5b505af1158015610172573d6000803e3d6000fd5b505050506040513d602081101561018857600080fd5b810190808051906020019092919050505050610260565b6000600360006101000a81548160ff02191690831515021790555060025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561022357600080fd5b505af1158015610237573d6000803e3d6000fd5b505050506040513d602081101561024d57600080fd5b8101908080519060200190929190505050505b6103a8565b600054600254131561037e57600260008154809291906001900391905055506002547f446f776e736965210000000000000000000000000000000000000000000000007f3ff0b1eac80ecf8e93d1a2d7982a9230f8ea7693439fd548687b08a5e292b09760405160405180910390a360025490503073ffffffffffffffffffffffffffffffffffffffff1663ebb384dd6040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15801561033d57600080fd5b505af1158015610351573d6000803e3d6000fd5b505050506040513d602081101561036757600080fd5b8101908080519060200190929190505050506103a7565b6001600360006101000a81548160ff0219169083151502179055506002549050600090506103a9565b5b5b9056fea165627a7a7230582077bccbcca142d5874668a8185b0909f13f771e47714a970862df714f970211aa0029") var Abi_StrangeLoop = []byte(`[{"constant":false,"inputs":[],"name":"UpsieDownsie","outputs":[{"name":"i","type":"int256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":true,"name":"newDepth","type":"int256"}],"name":"ChangeLevel","type":"event"}]`) diff --git a/execution/solidity/zero_reset.sol.go b/execution/solidity/zero_reset.sol.go index d2811508a..d54770e5e 100644 --- a/execution/solidity/zero_reset.sol.go +++ b/execution/solidity/zero_reset.sol.go @@ -3,4 +3,5 @@ package solidity import hex "github.com/tmthrgd/go-hex" var Bytecode_ZeroReset = 
hex.MustDecodeString("608060405234801561001057600080fd5b50610195806100206000396000f3fe608060405234801561001057600080fd5b506004361061007e576000357c0100000000000000000000000000000000000000000000000000000000900480620267a4146100835780634ef65c3b146100a157806362738998146100cf578063747586b8146100ed578063987dc8201461011b578063b15a0d5f14610125575b600080fd5b61008b61012f565b6040518082815260200191505060405180910390f35b6100cd600480360360208110156100b757600080fd5b8101908080359060200190929190505050610139565b005b6100d7610143565b6040518082815260200191505060405180910390f35b6101196004803603602081101561010357600080fd5b810190808035906020019092919050505061014c565b005b610123610156565b005b61012d61015f565b005b6000600154905090565b8060018190555050565b60008054905090565b8060008190555050565b60008081905550565b600060018190555056fea165627a7a72305820f425681ba5df6ad8326c87681bfe7f8a84f407dc25e79a4cb790063ac3a8ba1f0029") +var DeployedBytecode_ZeroReset = hex.MustDecodeString("608060405234801561001057600080fd5b506004361061007e576000357c0100000000000000000000000000000000000000000000000000000000900480620267a4146100835780634ef65c3b146100a157806362738998146100cf578063747586b8146100ed578063987dc8201461011b578063b15a0d5f14610125575b600080fd5b61008b61012f565b6040518082815260200191505060405180910390f35b6100cd600480360360208110156100b757600080fd5b8101908080359060200190929190505050610139565b005b6100d7610143565b6040518082815260200191505060405180910390f35b6101196004803603602081101561010357600080fd5b810190808035906020019092919050505061014c565b005b610123610156565b005b61012d61015f565b005b6000600154905090565b8060018190555050565b60008054905090565b8060008190555050565b60008081905550565b600060018190555056fea165627a7a72305820f425681ba5df6ad8326c87681bfe7f8a84f407dc25e79a4cb790063ac3a8ba1f0029") var Abi_ZeroReset = []byte(`[{"constant":true,"inputs":[],"name":"getUint","outputs":[{"name":"retUint","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"x","type":"uint256"}],"name":"setUint","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"getInt","outputs":[{"name":"retInt","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"x","type":"int256"}],"name":"setInt","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"setIntToZero","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"setUintToZero","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"}]`) diff --git a/execution/state/abi.go b/execution/state/abi.go new file mode 100644 index 000000000..2ba9c3e1c --- /dev/null +++ b/execution/state/abi.go @@ -0,0 +1,14 @@ +package state + +import ( + "github.com/hyperledger/burrow/acm/acmstate" +) + +func (s *ReadState) GetAbi(abihash acmstate.AbiHash) (string, error) { + return string(s.Plain.Get(keys.Abi.Key(abihash.Bytes()))), nil +} + +func (ws *writeState) SetAbi(abihash acmstate.AbiHash, abi string) error { + ws.plain.Set(keys.Abi.Key(abihash.Bytes()), []byte(abi)) + return nil +} diff --git a/execution/state/state.go b/execution/state/state.go index ae643eb2f..62470081f 100644 --- a/execution/state/state.go +++ b/execution/state/state.go @@ -58,9 +58,11 @@ type KeyFormatStore struct { Validator *storage.MustKeyFormat Event *storage.MustKeyFormat TxHash *storage.MustKeyFormat + Abi *storage.MustKeyFormat } var 
keys = KeyFormatStore{ + // Stored in the forest // AccountAddress -> Account Account: storage.NewMustKeyFormat("a", crypto.AddressLength), // AccountAddress, Key -> Value @@ -73,8 +75,12 @@ var keys = KeyFormatStore{ Validator: storage.NewMustKeyFormat("v", crypto.AddressLength), // Height -> StreamEvent Event: storage.NewMustKeyFormat("e", uint64Length), + + // Stored on the plain // TxHash -> TxHeight, TxIndex TxHash: storage.NewMustKeyFormat("th", txs.HashLength), + // CodeHash -> Abi + Abi: storage.NewMustKeyFormat("abi", sha256.Size), } var Prefixes [][]byte diff --git a/integration/rpcevents/execution_events_server_test.go b/integration/rpcevents/execution_events_server_test.go index 94be456c4..5bb9daea4 100644 --- a/integration/rpcevents/execution_events_server_test.go +++ b/integration/rpcevents/execution_events_server_test.go @@ -172,7 +172,7 @@ func TestExecutionEventsTest(t *testing.T) { }) t.Run("Revert", func(t *testing.T) { - txe, err := rpctest.CreateContract(tcli, inputAddress0, solidity.Bytecode_Revert) + txe, err := rpctest.CreateContract(tcli, inputAddress0, solidity.Bytecode_Revert, nil) require.NoError(t, err) spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index 6280d91ce..1902ad64b 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -1,3 +1,5 @@ +// +build integration + package rpctest import ( @@ -6,6 +8,7 @@ import ( "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/crypto/sha3" "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/execution/names" "github.com/hyperledger/burrow/integration" @@ -47,7 +50,25 @@ func NewQueryClient(t testing.TB, listenAddress string) rpcquery.QueryClient { return rpcquery.NewQueryClient(conn) } -func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, bytecode []byte) (*exec.TxExecution, error) { +type AbiMap struct { + DeployedCode []byte + Abi []byte +} + +func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, bytecode []byte, abimap []AbiMap) (*exec.TxExecution, error) { + var abis []*payload.Abis + if abimap != nil { + abis = make([]*payload.Abis, len(abimap)) + for i, m := range abimap { + hash := sha3.NewKeccak256() + hash.Write([]byte(m.DeployedCode)) + abis[i] = &payload.Abis{ + CodeHash: hash.Sum(nil), + Abi: string(m.Abi), + } + } + } + txe, err := cli.CallTxSync(context.Background(), &payload.CallTx{ Input: &payload.TxInput{ Address: inputAddress, @@ -57,6 +78,7 @@ func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, Data: bytecode, Fee: 2, GasLimit: 10000, + Abis: abis, }) if err != nil { return nil, err diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index 84d829219..28f192866 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -24,6 +24,7 @@ import ( "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/execution/solidity" "github.com/hyperledger/burrow/integration/rpctest" + "github.com/hyperledger/burrow/rpc/rpcquery" "github.com/hyperledger/burrow/rpc/rpctransact" "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/assert" @@ -139,7 +140,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) go func() { defer wg.Done() for j := 0; j < numRuns; j++ { - createTxe, err := 
rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop) + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop, nil) if err != nil { errCh <- err return @@ -279,7 +280,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Run("CallEvents", func(t *testing.T) { t.Parallel() - createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop) + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop, nil) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee spec, err := abi.ReadSpec(solidity.Abi_StrangeLoop) @@ -296,9 +297,52 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) return }) + t.Run("DeployAbis", func(t *testing.T) { + t.Parallel() + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_A, []rpctest.AbiMap{ + {DeployedCode: solidity.DeployedBytecode_A, Abi: solidity.Abi_A}, + {DeployedCode: solidity.DeployedBytecode_B, Abi: solidity.Abi_B}, + {DeployedCode: solidity.DeployedBytecode_C, Abi: solidity.Abi_C}, + }) + require.NoError(t, err) + addressA := lastCall(createTxe.Events).CallData.Callee + // Check ABI for new contract A + qcli := rpctest.NewQueryClient(t, kern.GRPCListenAddress().String()) + res, err := qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressA}) + require.NoError(t, err) + assert.Equal(t, res.Abi, string(solidity.Abi_A)) + // CreateB + spec, err := abi.ReadAbiSpec(solidity.Abi_A) + require.NoError(t, err) + data, _, err := spec.Pack("createB") + require.NoError(t, err) + callTxe, err := rpctest.CallContract(cli, inputAddress, addressA, data) + require.NoError(t, err) + var addressB crypto.Address + err = spec.Unpack(callTxe.Result.Return, "createB", &addressB) + // check ABI for contract B + res, err = qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressB}) + require.NoError(t, err) + assert.Equal(t, res.Abi, string(solidity.Abi_B)) + // CreateC + spec, err = abi.ReadAbiSpec(solidity.Abi_B) + require.NoError(t, err) + data, _, err = spec.Pack("createC") + require.NoError(t, err) + callTxe, err = rpctest.CallContract(cli, inputAddress, addressB, data) + require.NoError(t, err) + var addressC crypto.Address + err = spec.Unpack(callTxe.Result.Return, "createC", &addressC) + // check abi for contract C + res, err = qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressC}) + require.NoError(t, err) + assert.Equal(t, res.Abi, string(solidity.Abi_C)) + return + }) + t.Run("LogEvents", func(t *testing.T) { t.Parallel() - createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop) + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_StrangeLoop, nil) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee spec, err := abi.ReadSpec(solidity.Abi_StrangeLoop) @@ -323,7 +367,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Run("EventEmitter", func(t *testing.T) { t.Parallel() - createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter) + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter, nil) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee spec, err := abi.ReadSpec(solidity.Abi_EventEmitter) @@ -362,7 +406,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli 
rpctransact.TransactClient) * Any indexed string (or dynamic array) will be hashed, so we might want to store strings * in bytes32. This shows how we would automatically map this to string */ - createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter) + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_EventEmitter, nil) require.NoError(t, err) address := lastCall(createTxe.Events).CallData.Callee spec, err := abi.ReadSpec(solidity.Abi_EventEmitter) @@ -398,7 +442,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Run("Revert", func(t *testing.T) { t.Parallel() - txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert) + txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert, nil) require.NoError(t, err) spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) @@ -415,7 +459,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Run("RevertWithoutReason", func(t *testing.T) { t.Parallel() - txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert) + txe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_Revert, nil) require.NoError(t, err) spec, err := abi.ReadSpec(solidity.Abi_Revert) require.NoError(t, err) diff --git a/integration/rpctransact/transact_server_test.go b/integration/rpctransact/transact_server_test.go index fbe71333d..7c41e409e 100644 --- a/integration/rpctransact/transact_server_test.go +++ b/integration/rpctransact/transact_server_test.go @@ -56,7 +56,7 @@ func TestTransactServer(t *testing.T) { assert.False(t, acc.PublicKey.IsSet()) // Sign with this account - should set public key - _, err = rpctest.CreateContract(tcli, input.GetAddress(), solidity.Bytecode_StrangeLoop) + _, err = rpctest.CreateContract(tcli, input.GetAddress(), solidity.Bytecode_StrangeLoop, nil) require.NoError(t, err) acc, err = qcli.GetAccount(context.Background(), &rpcquery.GetAccountParam{Address: input.GetAddress()}) diff --git a/protobuf/acm.proto b/protobuf/acm.proto index cc886e111..67a6250ee 100644 --- a/protobuf/acm.proto +++ b/protobuf/acm.proto @@ -25,4 +25,14 @@ message Account { bytes EVMCode = 5 [(gogoproto.customtype) = "Bytecode", (gogoproto.nullable) = false]; permission.AccountPermissions Permissions = 6 [(gogoproto.nullable) = false]; bytes WASMCode = 7 [(gogoproto.customtype) = "Bytecode", (gogoproto.jsontag) = ",omitempty", (gogoproto.nullable) = false]; + bytes CodeHash = 8 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false, (gogoproto.jsontag) = "-"]; + repeated MetaMap MetaMap = 9; + bytes Forebear = 10 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address"]; +} + +message MetaMap { + bytes CodeHash = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; + bytes AbiHash = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; + // In the dump format we would like the ABI rather than its hash + string Abi = 3; } diff --git a/protobuf/payload.proto b/protobuf/payload.proto index a1bee3fd3..dcde0cb24 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -70,6 +70,13 @@ message CallTx { bytes Data = 5 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; // WASM bytecode bytes WASM = 6 [(gogoproto.customtype) = 
"github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false, (gogoproto.jsontag)="tags,omitempty"]; + // Set of contracts this code will deploy + repeated Abis Abis = 7; +} + +message Abis { + bytes CodeHash = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; + string Abi = 2; } // A payment between two sets of parties diff --git a/protobuf/rpcquery.proto b/protobuf/rpcquery.proto index 9ec2814c9..1f596b814 100644 --- a/protobuf/rpcquery.proto +++ b/protobuf/rpcquery.proto @@ -21,6 +21,7 @@ option (gogoproto.messagename_all) = true; service Query { rpc Status (StatusParam) returns (rpc.ResultStatus); rpc GetAccount (GetAccountParam) returns (acm.Account); + rpc GetAbi (GetAbiParam) returns (AbiValue); rpc GetStorage (GetStorageParam) returns (StorageValue); rpc ListAccounts (ListAccountsParam) returns (stream acm.Account); @@ -48,6 +49,14 @@ message GetAccountParam { bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; } +message GetAbiParam { + bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; +} + +message AbiValue { + string Abi = 1; +} + message GetStorageParam { bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; bytes Key = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.Word256", (gogoproto.nullable) = false]; diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index 9fab09099..13677bfe5 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -1,6 +1,7 @@ package rpcquery import ( + "bytes" "context" "fmt" @@ -59,6 +60,30 @@ func (qs *queryServer) GetAccount(ctx context.Context, param *GetAccountParam) ( return acc, err } +func (qs *queryServer) GetAbi(ctx context.Context, param *GetAbiParam) (*AbiValue, error) { + abi := AbiValue{} + acc, err := qs.accounts.GetAccount(param.Address) + if acc != nil && acc.CodeHash != nil { + codehash := acc.CodeHash + if acc.Forebear != nil { + acc, err = qs.accounts.GetAccount(*acc.Forebear) + if err != nil { + return &abi, err + } + } + + for _, m := range acc.MetaMap { + if bytes.Equal(m.CodeHash, codehash) { + var abihash acmstate.AbiHash + copy(abihash[:], m.AbiHash) + abi.Abi, err = qs.accounts.GetAbi(abihash) + break + } + } + } + return &abi, err +} + func (qs *queryServer) GetStorage(ctx context.Context, param *GetStorageParam) (*StorageValue, error) { val, err := qs.accounts.GetStorage(param.Address, param.Key) return &StorageValue{Value: val}, err diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index 5197b6315..d877f4e04 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -118,6 +118,83 @@ func (*GetAccountParam) XXX_MessageName() string { return "rpcquery.GetAccountParam" } +type GetAbiParam struct { + Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAbiParam) Reset() { *m = GetAbiParam{} } +func (m *GetAbiParam) String() string { return proto.CompactTextString(m) } +func (*GetAbiParam) ProtoMessage() {} +func (*GetAbiParam) Descriptor() ([]byte, []int) { + return fileDescriptor_88e25d9b99e39f02, 
[]int{2} +} +func (m *GetAbiParam) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAbiParam.Unmarshal(m, b) +} +func (m *GetAbiParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAbiParam.Marshal(b, m, deterministic) +} +func (m *GetAbiParam) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAbiParam.Merge(m, src) +} +func (m *GetAbiParam) XXX_Size() int { + return xxx_messageInfo_GetAbiParam.Size(m) +} +func (m *GetAbiParam) XXX_DiscardUnknown() { + xxx_messageInfo_GetAbiParam.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAbiParam proto.InternalMessageInfo + +func (*GetAbiParam) XXX_MessageName() string { + return "rpcquery.GetAbiParam" +} + +type AbiValue struct { + Abi string `protobuf:"bytes,1,opt,name=Abi,proto3" json:"Abi,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AbiValue) Reset() { *m = AbiValue{} } +func (m *AbiValue) String() string { return proto.CompactTextString(m) } +func (*AbiValue) ProtoMessage() {} +func (*AbiValue) Descriptor() ([]byte, []int) { + return fileDescriptor_88e25d9b99e39f02, []int{3} +} +func (m *AbiValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AbiValue.Unmarshal(m, b) +} +func (m *AbiValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AbiValue.Marshal(b, m, deterministic) +} +func (m *AbiValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_AbiValue.Merge(m, src) +} +func (m *AbiValue) XXX_Size() int { + return xxx_messageInfo_AbiValue.Size(m) +} +func (m *AbiValue) XXX_DiscardUnknown() { + xxx_messageInfo_AbiValue.DiscardUnknown(m) +} + +var xxx_messageInfo_AbiValue proto.InternalMessageInfo + +func (m *AbiValue) GetAbi() string { + if m != nil { + return m.Abi + } + return "" +} + +func (*AbiValue) XXX_MessageName() string { + return "rpcquery.AbiValue" +} + type GetStorageParam struct { Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` Key github_com_hyperledger_burrow_binary.Word256 `protobuf:"bytes,2,opt,name=Key,proto3,customtype=github.com/hyperledger/burrow/binary.Word256" json:"Key"` @@ -130,7 +207,7 @@ func (m *GetStorageParam) Reset() { *m = GetStorageParam{} } func (m *GetStorageParam) String() string { return proto.CompactTextString(m) } func (*GetStorageParam) ProtoMessage() {} func (*GetStorageParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{2} + return fileDescriptor_88e25d9b99e39f02, []int{4} } func (m *GetStorageParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetStorageParam.Unmarshal(m, b) @@ -165,7 +242,7 @@ func (m *StorageValue) Reset() { *m = StorageValue{} } func (m *StorageValue) String() string { return proto.CompactTextString(m) } func (*StorageValue) ProtoMessage() {} func (*StorageValue) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{3} + return fileDescriptor_88e25d9b99e39f02, []int{5} } func (m *StorageValue) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StorageValue.Unmarshal(m, b) @@ -200,7 +277,7 @@ func (m *ListAccountsParam) Reset() { *m = ListAccountsParam{} } func (m *ListAccountsParam) String() string { return proto.CompactTextString(m) } func (*ListAccountsParam) ProtoMessage() {} func (*ListAccountsParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{4} + return 
fileDescriptor_88e25d9b99e39f02, []int{6} } func (m *ListAccountsParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListAccountsParam.Unmarshal(m, b) @@ -242,7 +319,7 @@ func (m *GetNameParam) Reset() { *m = GetNameParam{} } func (m *GetNameParam) String() string { return proto.CompactTextString(m) } func (*GetNameParam) ProtoMessage() {} func (*GetNameParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{5} + return fileDescriptor_88e25d9b99e39f02, []int{7} } func (m *GetNameParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetNameParam.Unmarshal(m, b) @@ -284,7 +361,7 @@ func (m *ListNamesParam) Reset() { *m = ListNamesParam{} } func (m *ListNamesParam) String() string { return proto.CompactTextString(m) } func (*ListNamesParam) ProtoMessage() {} func (*ListNamesParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{6} + return fileDescriptor_88e25d9b99e39f02, []int{8} } func (m *ListNamesParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListNamesParam.Unmarshal(m, b) @@ -325,7 +402,7 @@ func (m *GetValidatorSetParam) Reset() { *m = GetValidatorSetParam{} } func (m *GetValidatorSetParam) String() string { return proto.CompactTextString(m) } func (*GetValidatorSetParam) ProtoMessage() {} func (*GetValidatorSetParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{7} + return fileDescriptor_88e25d9b99e39f02, []int{9} } func (m *GetValidatorSetParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetValidatorSetParam.Unmarshal(m, b) @@ -361,7 +438,7 @@ func (m *GetValidatorSetHistoryParam) Reset() { *m = GetValidatorSetHist func (m *GetValidatorSetHistoryParam) String() string { return proto.CompactTextString(m) } func (*GetValidatorSetHistoryParam) ProtoMessage() {} func (*GetValidatorSetHistoryParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{8} + return fileDescriptor_88e25d9b99e39f02, []int{10} } func (m *GetValidatorSetHistoryParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetValidatorSetHistoryParam.Unmarshal(m, b) @@ -403,7 +480,7 @@ func (m *ValidatorSetHistory) Reset() { *m = ValidatorSetHistory{} } func (m *ValidatorSetHistory) String() string { return proto.CompactTextString(m) } func (*ValidatorSetHistory) ProtoMessage() {} func (*ValidatorSetHistory) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{9} + return fileDescriptor_88e25d9b99e39f02, []int{11} } func (m *ValidatorSetHistory) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValidatorSetHistory.Unmarshal(m, b) @@ -446,7 +523,7 @@ func (m *ValidatorSet) Reset() { *m = ValidatorSet{} } func (m *ValidatorSet) String() string { return proto.CompactTextString(m) } func (*ValidatorSet) ProtoMessage() {} func (*ValidatorSet) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{10} + return fileDescriptor_88e25d9b99e39f02, []int{12} } func (m *ValidatorSet) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValidatorSet.Unmarshal(m, b) @@ -495,7 +572,7 @@ func (m *GetProposalParam) Reset() { *m = GetProposalParam{} } func (m *GetProposalParam) String() string { return proto.CompactTextString(m) } func (*GetProposalParam) ProtoMessage() {} func (*GetProposalParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{11} + return fileDescriptor_88e25d9b99e39f02, []int{13} } func (m *GetProposalParam) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_GetProposalParam.Unmarshal(m, b) @@ -537,7 +614,7 @@ func (m *ListProposalsParam) Reset() { *m = ListProposalsParam{} } func (m *ListProposalsParam) String() string { return proto.CompactTextString(m) } func (*ListProposalsParam) ProtoMessage() {} func (*ListProposalsParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{12} + return fileDescriptor_88e25d9b99e39f02, []int{14} } func (m *ListProposalsParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListProposalsParam.Unmarshal(m, b) @@ -580,7 +657,7 @@ func (m *ProposalResult) Reset() { *m = ProposalResult{} } func (m *ProposalResult) String() string { return proto.CompactTextString(m) } func (*ProposalResult) ProtoMessage() {} func (*ProposalResult) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{13} + return fileDescriptor_88e25d9b99e39f02, []int{15} } func (m *ProposalResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ProposalResult.Unmarshal(m, b) @@ -628,7 +705,7 @@ func (m *GetStatsParam) Reset() { *m = GetStatsParam{} } func (m *GetStatsParam) String() string { return proto.CompactTextString(m) } func (*GetStatsParam) ProtoMessage() {} func (*GetStatsParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{14} + return fileDescriptor_88e25d9b99e39f02, []int{16} } func (m *GetStatsParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetStatsParam.Unmarshal(m, b) @@ -664,7 +741,7 @@ func (m *Stats) Reset() { *m = Stats{} } func (m *Stats) String() string { return proto.CompactTextString(m) } func (*Stats) ProtoMessage() {} func (*Stats) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{15} + return fileDescriptor_88e25d9b99e39f02, []int{17} } func (m *Stats) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Stats.Unmarshal(m, b) @@ -713,7 +790,7 @@ func (m *GetBlockParam) Reset() { *m = GetBlockParam{} } func (m *GetBlockParam) String() string { return proto.CompactTextString(m) } func (*GetBlockParam) ProtoMessage() {} func (*GetBlockParam) Descriptor() ([]byte, []int) { - return fileDescriptor_88e25d9b99e39f02, []int{16} + return fileDescriptor_88e25d9b99e39f02, []int{18} } func (m *GetBlockParam) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetBlockParam.Unmarshal(m, b) @@ -748,6 +825,10 @@ func init() { golang_proto.RegisterType((*StatusParam)(nil), "rpcquery.StatusParam") proto.RegisterType((*GetAccountParam)(nil), "rpcquery.GetAccountParam") golang_proto.RegisterType((*GetAccountParam)(nil), "rpcquery.GetAccountParam") + proto.RegisterType((*GetAbiParam)(nil), "rpcquery.GetAbiParam") + golang_proto.RegisterType((*GetAbiParam)(nil), "rpcquery.GetAbiParam") + proto.RegisterType((*AbiValue)(nil), "rpcquery.AbiValue") + golang_proto.RegisterType((*AbiValue)(nil), "rpcquery.AbiValue") proto.RegisterType((*GetStorageParam)(nil), "rpcquery.GetStorageParam") golang_proto.RegisterType((*GetStorageParam)(nil), "rpcquery.GetStorageParam") proto.RegisterType((*StorageValue)(nil), "rpcquery.StorageValue") @@ -784,62 +865,65 @@ func init() { proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f func init() { golang_proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f02) } var fileDescriptor_88e25d9b99e39f02 = []byte{ - // 879 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x5f, 0x8f, 0xdb, 0x44, - 0x10, 0xc7, 0x77, 0xbd, 0xdc, 0xdd, 0x24, 0x97, 0xb4, 0xdb, 0x23, 0x04, 
0x17, 0xd2, 0x6a, 0x25, - 0xae, 0x47, 0x45, 0x9d, 0x28, 0x34, 0x80, 0x00, 0x09, 0x35, 0x08, 0x92, 0x53, 0xe1, 0x74, 0x38, - 0xa8, 0x95, 0x40, 0x42, 0xda, 0xd8, 0x4b, 0x62, 0xe1, 0x78, 0xcd, 0x7a, 0x5d, 0xf0, 0x47, 0xe2, - 0x5b, 0xf0, 0xd8, 0x8f, 0x80, 0x78, 0xa8, 0x50, 0xfb, 0x2d, 0x78, 0x42, 0xde, 0x3f, 0x8e, 0xed, - 0x4b, 0xab, 0x7b, 0xe1, 0x25, 0x9a, 0x99, 0xfd, 0xcd, 0x6f, 0x36, 0xb3, 0xf3, 0x1b, 0x43, 0x9b, - 0xc7, 0xde, 0xaf, 0x29, 0xe5, 0x99, 0x13, 0x73, 0x26, 0x18, 0x3a, 0x30, 0xbe, 0x7d, 0x7f, 0x19, - 0x88, 0x55, 0xba, 0x70, 0x3c, 0xb6, 0x1e, 0x2c, 0xd9, 0x92, 0x0d, 0x24, 0x60, 0x91, 0xfe, 0x2c, - 0x3d, 0xe9, 0x48, 0x4b, 0x25, 0xda, 0x1f, 0x97, 0xe0, 0x82, 0x46, 0x3e, 0xe5, 0xeb, 0x20, 0x12, - 0x65, 0x93, 0x2c, 0xbc, 0x60, 0x20, 0xb2, 0x98, 0x26, 0xea, 0x57, 0x27, 0x36, 0x23, 0xb2, 0x2e, - 0x9c, 0x43, 0xe2, 0xad, 0xb5, 0xd9, 0x79, 0x4a, 0xc2, 0xc0, 0x27, 0x82, 0x71, 0x73, 0xc6, 0x63, - 0x4f, 0x9b, 0x47, 0x31, 0xc9, 0x42, 0x46, 0x7c, 0xe5, 0xe2, 0x00, 0x9a, 0x73, 0x41, 0x44, 0x9a, - 0x5c, 0x10, 0x4e, 0xd6, 0xe8, 0x14, 0x3a, 0x93, 0x90, 0x79, 0xbf, 0x7c, 0x1f, 0xac, 0xe9, 0x93, - 0x40, 0xac, 0x82, 0xa8, 0x67, 0xdd, 0xb1, 0x4e, 0x0f, 0xdd, 0x7a, 0x18, 0x0d, 0xe1, 0xa6, 0x0c, - 0xcd, 0x29, 0x8d, 0x4a, 0xe8, 0x1d, 0x89, 0xde, 0x76, 0x84, 0x09, 0x74, 0xa6, 0x54, 0x3c, 0xf4, - 0x3c, 0x96, 0x46, 0x42, 0x95, 0x3b, 0x87, 0xfd, 0x87, 0xbe, 0xcf, 0x69, 0x92, 0xc8, 0x32, 0xad, - 0xc9, 0x83, 0x67, 0xcf, 0x6f, 0xbf, 0xf1, 0xf7, 0xf3, 0xdb, 0x1f, 0x94, 0x5a, 0xb2, 0xca, 0x62, - 0xca, 0x43, 0xea, 0x2f, 0x29, 0x1f, 0x2c, 0x52, 0xce, 0xd9, 0x6f, 0x03, 0x8f, 0x67, 0xb1, 0x60, - 0x8e, 0xce, 0x75, 0x0d, 0x09, 0xfe, 0xc3, 0x92, 0x35, 0xe6, 0x82, 0x71, 0xb2, 0xa4, 0xff, 0x4b, - 0x0d, 0xf4, 0x35, 0xec, 0x3e, 0xa2, 0x99, 0xfc, 0xa3, 0x57, 0xe6, 0x5a, 0x04, 0x11, 0xe1, 0x99, - 0xf3, 0x84, 0x71, 0x7f, 0x34, 0xfe, 0xc8, 0xcd, 0x09, 0xf0, 0x8f, 0xd0, 0xd2, 0xf7, 0x7c, 0x4c, - 0xc2, 0x94, 0xa2, 0x47, 0xb0, 0x27, 0x0d, 0x7d, 0xcb, 0xb1, 0x66, 0xbe, 0x7f, 0x25, 0xe6, 0x19, - 0xfd, 0x7d, 0x92, 0x09, 0x9a, 0xb8, 0x8a, 0x03, 0xbf, 0x0f, 0x37, 0xbe, 0x09, 0x12, 0xd3, 0x6c, - 0xfd, 0xb8, 0xc7, 0xb0, 0xf7, 0x5d, 0x3e, 0x9f, 0xfa, 0x49, 0x95, 0x83, 0x31, 0xb4, 0xa6, 0x54, - 0x9c, 0x93, 0xb5, 0xee, 0x17, 0x82, 0x6b, 0xb9, 0xa3, 0x41, 0xd2, 0xc6, 0x27, 0xd0, 0xce, 0xe9, - 0x72, 0xfb, 0xb5, 0x5c, 0x5d, 0x38, 0x9e, 0x52, 0xf1, 0xd8, 0x4c, 0xdf, 0x9c, 0xaa, 0x77, 0xc6, - 0x53, 0xb8, 0x55, 0x8b, 0xcf, 0x82, 0x44, 0x30, 0x9e, 0x15, 0x53, 0x77, 0x16, 0x79, 0x61, 0xea, - 0xd3, 0x0b, 0x4e, 0x9f, 0x06, 0x2c, 0x55, 0x4f, 0xb5, 0xeb, 0xd6, 0xc3, 0x78, 0x0a, 0x37, 0xb7, - 0xb0, 0xa0, 0x21, 0xec, 0x6b, 0xb3, 0x67, 0xdd, 0xd9, 0x3d, 0x6d, 0x8e, 0xba, 0x4e, 0x21, 0xce, - 0x32, 0xde, 0x35, 0x30, 0x7c, 0x0e, 0xad, 0xf2, 0x01, 0xea, 0x42, 0x63, 0x45, 0x83, 0xe5, 0x4a, - 0xc8, 0xca, 0xd7, 0x5c, 0xed, 0xa1, 0x13, 0xd8, 0x9d, 0x53, 0xd1, 0xdb, 0x91, 0xac, 0xc7, 0xce, - 0x46, 0x58, 0x45, 0xb6, 0x9b, 0x03, 0xf0, 0x09, 0x5c, 0x9f, 0x52, 0x71, 0xc1, 0x59, 0xcc, 0x12, - 0x12, 0x16, 0x9d, 0x9c, 0x91, 0x64, 0xa5, 0x1e, 0xd4, 0x95, 0x36, 0x1e, 0x02, 0xca, 0x3b, 0x69, - 0x80, 0xba, 0x9b, 0x36, 0x1c, 0xa8, 0x08, 0xf5, 0x25, 0xfa, 0xc0, 0x2d, 0x7c, 0xfc, 0x2d, 0xb4, - 0x0d, 0xda, 0xa5, 0x49, 0x1a, 0x8a, 0x6d, 0xbc, 0xe8, 0x2e, 0x34, 0x26, 0x24, 0x0c, 0x99, 0x90, - 0x83, 0xd9, 0x1c, 0x75, 0x1c, 0xa3, 0x73, 0x15, 0x76, 0xf5, 0x31, 0xee, 0xc0, 0x91, 0x54, 0x08, - 0xd1, 0x53, 0x81, 0x29, 0xec, 0x49, 0x0f, 0xdd, 0x83, 0xeb, 0x66, 0x5e, 0x72, 0xc5, 0x7e, 0xc9, - 0x7c, 0xaa, 0x9b, 0x71, 0x29, 0x9e, 0xab, 0xbf, 0x1c, 0x63, 0xa9, 0x90, 0xf0, 0x1d, 0x09, 0xdf, - 
0x76, 0x84, 0xef, 0xca, 0xba, 0x72, 0x2f, 0xa8, 0xff, 0xdc, 0x85, 0xc6, 0xac, 0xd2, 0x71, 0xe5, - 0x8d, 0xfe, 0xdd, 0xd3, 0xa3, 0x85, 0x46, 0xd0, 0x50, 0xbb, 0x09, 0xbd, 0xb9, 0x79, 0xce, 0xd2, - 0xb6, 0xb2, 0x6f, 0xe4, 0x61, 0x47, 0x75, 0x45, 0x23, 0xc7, 0x00, 0x9b, 0x25, 0x83, 0xde, 0xde, - 0xe4, 0xd5, 0x56, 0x8f, 0xdd, 0x72, 0xf2, 0x7d, 0x69, 0x80, 0x5f, 0xc8, 0x34, 0xad, 0xc7, 0x5a, - 0x5a, 0x79, 0x9b, 0xd8, 0xdd, 0xf2, 0x4d, 0x4a, 0xea, 0xfd, 0x0c, 0x5a, 0x65, 0xc1, 0xa1, 0x5b, - 0x1b, 0xdc, 0x25, 0x21, 0x56, 0x6b, 0x0f, 0x2d, 0x34, 0x80, 0x7d, 0x2d, 0x41, 0xd4, 0xad, 0x94, - 0x2e, 0x54, 0x69, 0xb7, 0x1c, 0xb5, 0xeb, 0xbf, 0x8a, 0x04, 0xcf, 0xd0, 0x18, 0x0e, 0x0b, 0x3d, - 0xa2, 0x5e, 0xb5, 0xd4, 0x46, 0xa4, 0xd5, 0xa4, 0xa1, 0x85, 0xce, 0xe4, 0x76, 0xac, 0xcc, 0x7d, - 0xbf, 0x52, 0xef, 0x92, 0x72, 0xed, 0x57, 0x08, 0x09, 0xfd, 0x04, 0xdd, 0xed, 0x8a, 0x46, 0xef, - 0xbd, 0x92, 0xb1, 0xac, 0x79, 0xfb, 0xdd, 0xed, 0xc4, 0x86, 0xe5, 0x53, 0x68, 0x96, 0xf4, 0x84, - 0xec, 0x0a, 0x69, 0x45, 0x66, 0x76, 0x7d, 0xd4, 0xd1, 0x19, 0x1c, 0x55, 0x34, 0x86, 0xde, 0xa9, - 0x76, 0xa8, 0x2a, 0x3e, 0xbb, 0xd4, 0xbf, 0xaa, 0xd0, 0x86, 0x16, 0x7a, 0x00, 0x07, 0x46, 0x2d, - 0xe8, 0xad, 0xda, 0x54, 0x18, 0x05, 0xd9, 0x9d, 0xea, 0x74, 0x26, 0xe8, 0x13, 0x68, 0x9b, 0x59, - 0x9f, 0x51, 0xe2, 0x53, 0x5e, 0xcb, 0xdd, 0xa8, 0xc0, 0x3e, 0x72, 0xd4, 0x07, 0x5d, 0xe1, 0x26, - 0x9f, 0xff, 0xf5, 0xa2, 0x6f, 0xfd, 0xf3, 0xa2, 0x6f, 0xfd, 0xf9, 0xb2, 0x6f, 0x3d, 0x7b, 0xd9, - 0xb7, 0x7e, 0xb8, 0xf7, 0xfa, 0xfd, 0xcf, 0x63, 0x6f, 0x60, 0xa8, 0x17, 0x0d, 0xf9, 0x4d, 0xff, - 0xf0, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbe, 0x6e, 0x9f, 0x8e, 0x9a, 0x08, 0x00, 0x00, + // 923 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0xc6, 0x4d, 0x37, 0x9b, 0x3d, 0xc9, 0x6e, 0xb6, 0xd3, 0x25, 0x2c, 0x6e, 0x49, 0xab, 0x91, + 0xd8, 0x2e, 0x15, 0x75, 0xa2, 0xb4, 0x0b, 0x08, 0x90, 0xd0, 0x06, 0x41, 0xb2, 0x2a, 0xac, 0x16, + 0x07, 0xb5, 0x12, 0x08, 0xa4, 0xb1, 0x3d, 0x24, 0x16, 0x8e, 0xc7, 0x8c, 0xc7, 0x05, 0x3f, 0x12, + 0x57, 0xbc, 0x02, 0x97, 0x7d, 0x04, 0xc4, 0x45, 0x85, 0xda, 0x17, 0x41, 0x9e, 0x1f, 0xc7, 0xce, + 0xa6, 0x55, 0x6f, 0x7a, 0x13, 0x9d, 0x73, 0xfc, 0xcd, 0x77, 0xec, 0xe3, 0xf3, 0x7d, 0x0e, 0xec, + 0xf1, 0xc4, 0xff, 0x2d, 0xa3, 0x3c, 0x77, 0x12, 0xce, 0x04, 0x43, 0x2d, 0x93, 0xdb, 0xf7, 0xe6, + 0xa1, 0x58, 0x64, 0x9e, 0xe3, 0xb3, 0xe5, 0x60, 0xce, 0xe6, 0x6c, 0x20, 0x01, 0x5e, 0xf6, 0x8b, + 0xcc, 0x64, 0x22, 0x23, 0x75, 0xd0, 0xfe, 0xb8, 0x02, 0x17, 0x34, 0x0e, 0x28, 0x5f, 0x86, 0xb1, + 0xa8, 0x86, 0xc4, 0xf3, 0xc3, 0x81, 0xc8, 0x13, 0x9a, 0xaa, 0x5f, 0x7d, 0xb0, 0x1d, 0x93, 0x65, + 0x99, 0xec, 0x10, 0x7f, 0xa9, 0xc3, 0xee, 0x13, 0x12, 0x85, 0x01, 0x11, 0x8c, 0x9b, 0x6b, 0x3c, + 0xf1, 0x75, 0xb8, 0x9b, 0x90, 0x3c, 0x62, 0x24, 0x50, 0x29, 0x0e, 0xa1, 0x3d, 0x13, 0x44, 0x64, + 0xe9, 0x05, 0xe1, 0x64, 0x89, 0x8e, 0xa1, 0x3b, 0x8e, 0x98, 0xff, 0xeb, 0xf7, 0xe1, 0x92, 0x3e, + 0x0e, 0xc5, 0x22, 0x8c, 0x0f, 0xad, 0xdb, 0xd6, 0xf1, 0x8e, 0xbb, 0x5e, 0x46, 0x43, 0xb8, 0x2e, + 0x4b, 0x33, 0x4a, 0xe3, 0x0a, 0xfa, 0x8a, 0x44, 0x6f, 0xba, 0x84, 0x09, 0x74, 0x27, 0x54, 0x9c, + 0xfa, 0x3e, 0xcb, 0x62, 0xa1, 0xda, 0x9d, 0xc3, 0xf6, 0x69, 0x10, 0x70, 0x9a, 0xa6, 0xb2, 0x4d, + 0x67, 0xfc, 0xe0, 0xe9, 0xb3, 0x5b, 0x6f, 0xfd, 0xfb, 0xec, 0xd6, 0x87, 0x95, 0x91, 0x2c, 0xf2, + 0x84, 0xf2, 0x88, 0x06, 0x73, 0xca, 0x07, 0x5e, 0xc6, 0x39, 0xfb, 0x7d, 0xe0, 0xf3, 0x3c, 0x11, + 0xcc, 0xd1, 0x67, 0x5d, 0x43, 0x82, 0x7f, 0x82, 0x76, 0xd1, 0xc2, 0x0b, 0xdf, 
0x0c, 0xfd, 0x4d, + 0x68, 0x9d, 0x7a, 0xe1, 0x23, 0x12, 0x65, 0x14, 0xed, 0x43, 0xe3, 0xd4, 0x0b, 0xf5, 0x74, 0x8a, + 0x10, 0xff, 0x69, 0xc9, 0x07, 0x9c, 0x09, 0xc6, 0xc9, 0x9c, 0xbe, 0x91, 0x3b, 0x40, 0x5f, 0x43, + 0xe3, 0x21, 0xcd, 0xe5, 0x94, 0x5f, 0x9b, 0xcb, 0x0b, 0x63, 0xc2, 0x73, 0xe7, 0x31, 0xe3, 0xc1, + 0xe8, 0xe4, 0x23, 0xb7, 0x20, 0xc0, 0x3f, 0x42, 0x47, 0xdf, 0xa7, 0x7a, 0x9a, 0x87, 0xb0, 0x25, + 0x03, 0x7d, 0x97, 0x27, 0x9a, 0xf9, 0xde, 0x6b, 0x31, 0x4f, 0xe9, 0x1f, 0xe3, 0x5c, 0xd0, 0xd4, + 0x55, 0x1c, 0xf8, 0x03, 0xb8, 0xf6, 0x4d, 0x98, 0x9a, 0x37, 0xad, 0x37, 0xeb, 0x00, 0xb6, 0xbe, + 0x2b, 0xc4, 0xa1, 0x27, 0xa6, 0x12, 0x8c, 0xa1, 0x33, 0xa1, 0xe2, 0x9c, 0x2c, 0xf5, 0xbc, 0x10, + 0x5c, 0x2d, 0x12, 0x0d, 0x92, 0x31, 0x3e, 0x82, 0xbd, 0x82, 0xae, 0x88, 0x5f, 0xc9, 0xd5, 0x83, + 0x83, 0x09, 0x15, 0x8f, 0xcc, 0xea, 0xcf, 0xa8, 0x5a, 0x32, 0x3c, 0x81, 0x1b, 0x6b, 0xf5, 0x69, + 0x98, 0x0a, 0xc6, 0xf3, 0x72, 0xe5, 0xcf, 0x62, 0x3f, 0xca, 0x02, 0x7a, 0xc1, 0xe9, 0x93, 0x90, + 0x65, 0xea, 0x55, 0x35, 0xdc, 0xf5, 0x32, 0x9e, 0xc0, 0xf5, 0x0d, 0x2c, 0x68, 0x08, 0xdb, 0x3a, + 0x3c, 0xb4, 0x6e, 0x37, 0x8e, 0xdb, 0xa3, 0x9e, 0x53, 0x3a, 0x43, 0x15, 0xef, 0x1a, 0x18, 0x3e, + 0x87, 0x4e, 0xf5, 0x02, 0xea, 0x41, 0x73, 0x41, 0xc3, 0xf9, 0x42, 0xc8, 0xce, 0x57, 0x5d, 0x9d, + 0xa1, 0x23, 0x68, 0xcc, 0xa8, 0x38, 0xbc, 0x22, 0x59, 0x0f, 0x9c, 0x95, 0xaa, 0xcb, 0xd3, 0x6e, + 0x01, 0xc0, 0x47, 0xb0, 0x3f, 0xa1, 0xe2, 0x82, 0xb3, 0x84, 0xa5, 0x24, 0x2a, 0x27, 0x39, 0x25, + 0xe9, 0x42, 0xbd, 0x50, 0x57, 0xc6, 0x78, 0x08, 0xa8, 0x98, 0xa4, 0x01, 0xea, 0x69, 0xda, 0xd0, + 0x52, 0x15, 0x1a, 0x48, 0x74, 0xcb, 0x2d, 0x73, 0xfc, 0x2d, 0xec, 0x19, 0xb4, 0x4b, 0xd3, 0x2c, + 0x12, 0x9b, 0x78, 0xd1, 0x1d, 0x68, 0x8e, 0x49, 0x14, 0x31, 0x21, 0x17, 0xb3, 0x3d, 0xea, 0x3a, + 0xc6, 0x64, 0x54, 0xd9, 0xd5, 0x97, 0x71, 0x17, 0x76, 0xa5, 0x42, 0x88, 0xde, 0x0a, 0x4c, 0x61, + 0x4b, 0x66, 0xe8, 0x2e, 0xec, 0x9b, 0x7d, 0x29, 0xec, 0xe2, 0x4b, 0x16, 0x50, 0x3d, 0x8c, 0x4b, + 0xf5, 0xc2, 0x7a, 0xaa, 0x35, 0x96, 0x09, 0x09, 0xbf, 0x22, 0xe1, 0x9b, 0x2e, 0xe1, 0x3b, 0xb2, + 0xaf, 0x34, 0x25, 0xf5, 0xcc, 0x3d, 0x68, 0x4e, 0x6b, 0x13, 0x57, 0xd9, 0xe8, 0xaf, 0xa6, 0x5e, + 0x2d, 0x34, 0x82, 0xa6, 0x32, 0x46, 0xf4, 0xf6, 0xea, 0x75, 0x56, 0xac, 0xd2, 0xbe, 0x56, 0x94, + 0x1d, 0x35, 0x15, 0x8d, 0x3c, 0x01, 0x58, 0x39, 0x1c, 0x7a, 0x77, 0x75, 0x6e, 0xcd, 0xf7, 0xec, + 0x8e, 0x53, 0x98, 0xb5, 0x01, 0xde, 0x87, 0xa6, 0x72, 0xad, 0x6a, 0xab, 0x8a, 0x8f, 0xd9, 0x68, + 0x55, 0x2e, 0xfd, 0xe7, 0x0b, 0xd9, 0x4b, 0x8b, 0x78, 0xad, 0x57, 0xd5, 0x82, 0xec, 0x5e, 0xf5, + 0xf6, 0x2b, 0x92, 0xff, 0x0c, 0x3a, 0x55, 0x95, 0xa2, 0x1b, 0x2b, 0xdc, 0x25, 0xf5, 0xd6, 0x6f, + 0x78, 0x68, 0xa1, 0x01, 0x6c, 0x6b, 0xdd, 0xa2, 0x5e, 0xad, 0x75, 0x29, 0x65, 0xbb, 0xe3, 0xa8, + 0xaf, 0xd3, 0x57, 0xb1, 0xe0, 0x39, 0x3a, 0x81, 0x9d, 0x52, 0xc4, 0xe8, 0xb0, 0xde, 0x6a, 0xa5, + 0xec, 0xfa, 0xa1, 0xa1, 0x85, 0xce, 0xa4, 0xa5, 0xd6, 0xc4, 0xd2, 0xaf, 0xf5, 0xbb, 0x24, 0x77, + 0xfb, 0x25, 0xea, 0x43, 0x3f, 0x43, 0x6f, 0xb3, 0x0d, 0xa0, 0xf7, 0x5f, 0xca, 0x58, 0x35, 0x0a, + 0xfb, 0xbd, 0xcd, 0xc4, 0x86, 0xe5, 0x53, 0xf9, 0xed, 0x31, 0x6a, 0x41, 0x76, 0x8d, 0xb4, 0xa6, + 0x4d, 0x7b, 0x5d, 0x1f, 0xe8, 0x0c, 0x76, 0x6b, 0xc2, 0x44, 0x37, 0xeb, 0x13, 0xaa, 0x2b, 0xd6, + 0xae, 0xcc, 0xaf, 0xae, 0xce, 0xa1, 0x85, 0x1e, 0x40, 0xcb, 0x48, 0x0c, 0xbd, 0xb3, 0xb6, 0x15, + 0x46, 0x76, 0x76, 0xb7, 0xbe, 0xd2, 0x29, 0xfa, 0x04, 0xf6, 0x8c, 0x40, 0xa6, 0x94, 0x04, 0x94, + 0xaf, 0x9d, 0x5d, 0x49, 0xc7, 0xde, 0x75, 0xd4, 0x5f, 0x10, 0x85, 0x1b, 0x7f, 0xfe, 0xcf, 0xf3, + 0xbe, 
0xf5, 0xdf, 0xf3, 0xbe, 0xf5, 0xf7, 0x8b, 0xbe, 0xf5, 0xf4, 0x45, 0xdf, 0xfa, 0xe1, 0xee, + 0xab, 0x3f, 0x1a, 0x3c, 0xf1, 0x07, 0x86, 0xda, 0x6b, 0xca, 0x7f, 0x21, 0xf7, 0xff, 0x0f, 0x00, + 0x00, 0xff, 0xff, 0x6f, 0x49, 0x2c, 0xaf, 0x4c, 0x09, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -856,6 +940,7 @@ const _ = grpc.SupportPackageIsVersion4 type QueryClient interface { Status(ctx context.Context, in *StatusParam, opts ...grpc.CallOption) (*rpc.ResultStatus, error) GetAccount(ctx context.Context, in *GetAccountParam, opts ...grpc.CallOption) (*acm.Account, error) + GetAbi(ctx context.Context, in *GetAbiParam, opts ...grpc.CallOption) (*AbiValue, error) GetStorage(ctx context.Context, in *GetStorageParam, opts ...grpc.CallOption) (*StorageValue, error) ListAccounts(ctx context.Context, in *ListAccountsParam, opts ...grpc.CallOption) (Query_ListAccountsClient, error) GetName(ctx context.Context, in *GetNameParam, opts ...grpc.CallOption) (*names.Entry, error) @@ -894,6 +979,15 @@ func (c *queryClient) GetAccount(ctx context.Context, in *GetAccountParam, opts return out, nil } +func (c *queryClient) GetAbi(ctx context.Context, in *GetAbiParam, opts ...grpc.CallOption) (*AbiValue, error) { + out := new(AbiValue) + err := c.cc.Invoke(ctx, "/rpcquery.Query/GetAbi", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *queryClient) GetStorage(ctx context.Context, in *GetStorageParam, opts ...grpc.CallOption) (*StorageValue, error) { out := new(StorageValue) err := c.cc.Invoke(ctx, "/rpcquery.Query/GetStorage", in, out, opts...) @@ -1057,6 +1151,7 @@ func (c *queryClient) GetBlockHeader(ctx context.Context, in *GetBlockParam, opt type QueryServer interface { Status(context.Context, *StatusParam) (*rpc.ResultStatus, error) GetAccount(context.Context, *GetAccountParam) (*acm.Account, error) + GetAbi(context.Context, *GetAbiParam) (*AbiValue, error) GetStorage(context.Context, *GetStorageParam) (*StorageValue, error) ListAccounts(*ListAccountsParam, Query_ListAccountsServer) error GetName(context.Context, *GetNameParam) (*names.Entry, error) @@ -1109,6 +1204,24 @@ func _Query_GetAccount_Handler(srv interface{}, ctx context.Context, dec func(in return interceptor(ctx, in, info, handler) } +func _Query_GetAbi_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAbiParam) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QueryServer).GetAbi(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpcquery.Query/GetAbi", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QueryServer).GetAbi(ctx, req.(*GetAbiParam)) + } + return interceptor(ctx, in, info, handler) +} + func _Query_GetStorage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetStorageParam) if err := dec(in); err != nil { @@ -1310,6 +1423,10 @@ var _Query_serviceDesc = grpc.ServiceDesc{ MethodName: "GetAccount", Handler: _Query_GetAccount_Handler, }, + { + MethodName: "GetAbi", + Handler: _Query_GetAbi_Handler, + }, { MethodName: "GetStorage", Handler: _Query_GetStorage_Handler, @@ -1393,6 +1510,36 @@ func (m *GetAccountParam) Size() (n int) { return n } +func (m *GetAbiParam) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = 
m.Address.Size() + n += 1 + l + sovRpcquery(uint64(l)) + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *AbiValue) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Abi) + if l > 0 { + n += 1 + l + sovRpcquery(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + func (m *GetStorageParam) Size() (n int) { if m == nil { return 0 diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 61f4cc272..9726af56e 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -54,7 +54,7 @@ func (x Ballot_ProposalState) String() string { } func (Ballot_ProposalState) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{14, 0} + return fileDescriptor_678c914f1bee6d56, []int{15, 0} } type Any struct { @@ -303,10 +303,12 @@ type CallTx struct { // EVM bytecode Data github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,5,opt,name=Data,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"Data"` // WASM bytecode - WASM github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,6,opt,name=WASM,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"tags,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + WASM github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,6,opt,name=WASM,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"tags,omitempty"` + // Set of contracts this code will deploy + Abis []*Abis `protobuf:"bytes,7,rep,name=Abis,proto3" json:"Abis,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CallTx) Reset() { *m = CallTx{} } @@ -362,10 +364,69 @@ func (m *CallTx) GetFee() uint64 { return 0 } +func (m *CallTx) GetAbis() []*Abis { + if m != nil { + return m.Abis + } + return nil +} + func (*CallTx) XXX_MessageName() string { return "payload.CallTx" } +type Abis struct { + CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"CodeHash"` + Abi string `protobuf:"bytes,2,opt,name=Abi,proto3" json:"Abi,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Abis) Reset() { *m = Abis{} } +func (m *Abis) String() string { return proto.CompactTextString(m) } +func (*Abis) ProtoMessage() {} +func (*Abis) Descriptor() ([]byte, []int) { + return fileDescriptor_678c914f1bee6d56, []int{4} +} +func (m *Abis) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Abis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Abis.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalTo(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Abis) XXX_Merge(src proto.Message) { + xxx_messageInfo_Abis.Merge(m, src) +} +func (m *Abis) XXX_Size() int { + return m.Size() +} +func (m *Abis) XXX_DiscardUnknown() { + xxx_messageInfo_Abis.DiscardUnknown(m) +} + +var xxx_messageInfo_Abis proto.InternalMessageInfo + +func (m *Abis) GetAbi() string { + if m != nil { + return m.Abi + } + return "" +} + +func (*Abis) XXX_MessageName() string { + return "payload.Abis" +} + // A payment between two sets of parties type SendTx 
struct { // The payers @@ -380,7 +441,7 @@ type SendTx struct { func (m *SendTx) Reset() { *m = SendTx{} } func (*SendTx) ProtoMessage() {} func (*SendTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{4} + return fileDescriptor_678c914f1bee6d56, []int{5} } func (m *SendTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -427,7 +488,7 @@ type PermsTx struct { func (m *PermsTx) Reset() { *m = PermsTx{} } func (*PermsTx) ProtoMessage() {} func (*PermsTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{5} + return fileDescriptor_678c914f1bee6d56, []int{6} } func (m *PermsTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -492,7 +553,7 @@ type NameTx struct { func (m *NameTx) Reset() { *m = NameTx{} } func (*NameTx) ProtoMessage() {} func (*NameTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{6} + return fileDescriptor_678c914f1bee6d56, []int{7} } func (m *NameTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -564,7 +625,7 @@ type BondTx struct { func (m *BondTx) Reset() { *m = BondTx{} } func (*BondTx) ProtoMessage() {} func (*BondTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{7} + return fileDescriptor_678c914f1bee6d56, []int{8} } func (m *BondTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -609,7 +670,7 @@ type UnbondTx struct { func (m *UnbondTx) Reset() { *m = UnbondTx{} } func (*UnbondTx) ProtoMessage() {} func (*UnbondTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{8} + return fileDescriptor_678c914f1bee6d56, []int{9} } func (m *UnbondTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -653,7 +714,7 @@ type GovTx struct { func (m *GovTx) Reset() { *m = GovTx{} } func (*GovTx) ProtoMessage() {} func (*GovTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{9} + return fileDescriptor_678c914f1bee6d56, []int{10} } func (m *GovTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -699,7 +760,7 @@ type ProposalTx struct { func (m *ProposalTx) Reset() { *m = ProposalTx{} } func (*ProposalTx) ProtoMessage() {} func (*ProposalTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{10} + return fileDescriptor_678c914f1bee6d56, []int{11} } func (m *ProposalTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -743,7 +804,7 @@ type BatchTx struct { func (m *BatchTx) Reset() { *m = BatchTx{} } func (*BatchTx) ProtoMessage() {} func (*BatchTx) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{11} + return fileDescriptor_678c914f1bee6d56, []int{12} } func (m *BatchTx) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -787,7 +848,7 @@ type Vote struct { func (m *Vote) Reset() { *m = Vote{} } func (*Vote) ProtoMessage() {} func (*Vote) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{12} + return fileDescriptor_678c914f1bee6d56, []int{13} } func (m *Vote) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -832,7 +893,7 @@ type Proposal struct { func (m *Proposal) Reset() { *m = Proposal{} } func (*Proposal) ProtoMessage() {} func (*Proposal) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{13} + return fileDescriptor_678c914f1bee6d56, []int{14} } func (m *Proposal) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -879,7 +940,7 @@ func (m *Ballot) Reset() { *m = Ballot{} } func (m *Ballot) String() string { 
return proto.CompactTextString(m) } func (*Ballot) ProtoMessage() {} func (*Ballot) Descriptor() ([]byte, []int) { - return fileDescriptor_678c914f1bee6d56, []int{14} + return fileDescriptor_678c914f1bee6d56, []int{15} } func (m *Ballot) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -943,6 +1004,8 @@ func init() { golang_proto.RegisterType((*TxOutput)(nil), "payload.TxOutput") proto.RegisterType((*CallTx)(nil), "payload.CallTx") golang_proto.RegisterType((*CallTx)(nil), "payload.CallTx") + proto.RegisterType((*Abis)(nil), "payload.Abis") + golang_proto.RegisterType((*Abis)(nil), "payload.Abis") proto.RegisterType((*SendTx)(nil), "payload.SendTx") golang_proto.RegisterType((*SendTx)(nil), "payload.SendTx") proto.RegisterType((*PermsTx)(nil), "payload.PermsTx") @@ -971,69 +1034,72 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 980 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0x7a, 0xd7, 0x7f, 0xfa, 0xea, 0x04, 0x77, 0xf8, 0x23, 0x2b, 0x12, 0x76, 0x65, 0x10, - 0xb4, 0xd0, 0xda, 0xd0, 0xf2, 0x47, 0xca, 0x05, 0x79, 0x63, 0x27, 0x0d, 0x2a, 0x4d, 0x34, 0xde, - 0xb4, 0x08, 0xc4, 0x61, 0x6c, 0x0f, 0xeb, 0x95, 0xbc, 0x3b, 0xcb, 0xee, 0xb8, 0xac, 0x39, 0x71, - 0xe0, 0xc0, 0x15, 0x71, 0xe1, 0x98, 0xaf, 0xc0, 0x37, 0xe0, 0x98, 0x23, 0x47, 0xc4, 0x21, 0x42, - 0xe9, 0x05, 0xf1, 0x29, 0xd0, 0xcc, 0xce, 0xac, 0xd7, 0x6e, 0xd5, 0x3a, 0x11, 0xe2, 0xb6, 0xf3, - 0xde, 0x6f, 0xde, 0x7b, 0xf3, 0xfb, 0xbd, 0x79, 0xb3, 0xb0, 0x19, 0x92, 0xf9, 0x94, 0x91, 0x71, - 0x3b, 0x8c, 0x18, 0x67, 0xa8, 0xac, 0x96, 0xdb, 0xb7, 0x5d, 0x8f, 0x4f, 0x66, 0xc3, 0xf6, 0x88, - 0xf9, 0x1d, 0x97, 0xb9, 0xac, 0x23, 0xfd, 0xc3, 0xd9, 0xd7, 0x72, 0x25, 0x17, 0xf2, 0x2b, 0xdd, - 0xb7, 0x5d, 0x0b, 0x69, 0xe4, 0x7b, 0x71, 0xec, 0xb1, 0x40, 0x59, 0x20, 0x0e, 0xe9, 0x28, 0xfd, - 0x6e, 0xfd, 0x64, 0x82, 0xd9, 0x0d, 0xe6, 0xe8, 0x6d, 0x28, 0xed, 0x92, 0xe9, 0xd4, 0x49, 0xea, - 0xc6, 0x75, 0xe3, 0xc6, 0xd5, 0x3b, 0x2f, 0xb5, 0x75, 0xf6, 0xd4, 0x8c, 0x95, 0x5b, 0x00, 0x07, - 0x34, 0x18, 0x3b, 0x49, 0xbd, 0xb0, 0x02, 0x4c, 0xcd, 0x58, 0xb9, 0x05, 0xf0, 0x01, 0xf1, 0xa9, - 0x93, 0xd4, 0xcd, 0x15, 0x60, 0x6a, 0xc6, 0xca, 0x8d, 0xde, 0x81, 0xf2, 0x11, 0x8d, 0xfc, 0xd8, - 0x49, 0xea, 0x96, 0x44, 0xd6, 0x32, 0xa4, 0xb2, 0x63, 0x0d, 0x40, 0x6f, 0x42, 0x71, 0x9f, 0x3d, - 0x76, 0x92, 0x7a, 0x51, 0x22, 0xb7, 0x32, 0xa4, 0xb4, 0xe2, 0xd4, 0x29, 0x52, 0xdb, 0x4c, 0xd6, - 0x58, 0x5a, 0x49, 0x9d, 0x9a, 0xb1, 0x72, 0xa3, 0xdb, 0x50, 0x39, 0x0e, 0x86, 0x29, 0xb4, 0x2c, - 0xa1, 0xd7, 0x32, 0xa8, 0x76, 0xe0, 0x0c, 0x22, 0x2a, 0xb5, 0x09, 0x1f, 0x4d, 0x9c, 0xa4, 0x5e, - 0x59, 0xa9, 0x54, 0xd9, 0xb1, 0x06, 0xa0, 0xbb, 0x00, 0x47, 0x11, 0x0b, 0x59, 0x4c, 0x04, 0xa9, - 0x57, 0x24, 0xfc, 0xe5, 0xc5, 0xc1, 0x32, 0x17, 0xce, 0xc1, 0x76, 0xac, 0xd3, 0x93, 0xa6, 0xd1, - 0xfa, 0xd9, 0x80, 0xb2, 0x93, 0x1c, 0x04, 0xe1, 0x8c, 0xa3, 0x07, 0x50, 0xee, 0x8e, 0xc7, 0x11, - 0x8d, 0x63, 0x29, 0x4c, 0xd5, 0xfe, 0xe0, 0xf4, 0xac, 0xb9, 0xf1, 0xe7, 0x59, 0xf3, 0x56, 0xae, - 0x0b, 0x26, 0xf3, 0x90, 0x46, 0x53, 0x3a, 0x76, 0x69, 0xd4, 0x19, 0xce, 0xa2, 0x88, 0x7d, 0xdb, - 0x19, 0x45, 0xf3, 0x90, 0xb3, 0xb6, 0xda, 0x8b, 0x75, 0x10, 0xf4, 0x1a, 0x94, 0xba, 0x3e, 0x9b, - 0x05, 0x5c, 0xca, 0x67, 0x61, 0xb5, 0x42, 0xdb, 0x50, 0x19, 0xd0, 0x6f, 0x66, 0x34, 0x18, 0x51, - 0xa9, 0x97, 0x85, 0xb3, 0xf5, 
0x8e, 0xf5, 0xcb, 0x49, 0x73, 0xa3, 0x95, 0x40, 0xc5, 0x49, 0x0e, - 0x67, 0xfc, 0x7f, 0xac, 0x4a, 0x65, 0xfe, 0xa3, 0xa0, 0x9b, 0x13, 0xbd, 0x05, 0x45, 0xc9, 0x8b, - 0xea, 0xd2, 0x05, 0xff, 0x8a, 0x2f, 0x9c, 0xba, 0xd1, 0xa7, 0x8b, 0x02, 0x0b, 0xb2, 0xc0, 0xf7, - 0x2e, 0x5f, 0xdc, 0x36, 0x54, 0xf6, 0x49, 0x7c, 0xdf, 0xf3, 0x3d, 0xae, 0xa9, 0xd1, 0x6b, 0x54, - 0x03, 0x73, 0x8f, 0x52, 0xd9, 0xb7, 0x16, 0x16, 0x9f, 0xe8, 0x00, 0xac, 0x1e, 0xe1, 0x44, 0x36, - 0x68, 0xd5, 0xfe, 0x50, 0xf1, 0x72, 0xfb, 0xf9, 0xa9, 0x87, 0x5e, 0x40, 0xa2, 0x79, 0xfb, 0x1e, - 0x4d, 0xec, 0x39, 0xa7, 0x31, 0x96, 0x21, 0xd0, 0x97, 0x60, 0x3d, 0xea, 0x0e, 0x3e, 0x93, 0x4d, - 0x5c, 0xb5, 0xf7, 0x2f, 0x15, 0xea, 0x9f, 0xb3, 0xe6, 0x16, 0x27, 0x6e, 0x7c, 0x8b, 0xf9, 0x1e, - 0xa7, 0x7e, 0xc8, 0xe7, 0x58, 0x06, 0x55, 0xd4, 0x7a, 0xfa, 0x36, 0xa3, 0x1b, 0x50, 0x92, 0xd4, - 0x09, 0x45, 0xcd, 0x67, 0x52, 0xab, 0xfc, 0xe8, 0x5d, 0x28, 0xa7, 0x6d, 0x20, 0xb8, 0x35, 0x97, - 0xee, 0x8c, 0x6e, 0x10, 0xac, 0x11, 0x3b, 0x95, 0x1f, 0x4f, 0x9a, 0x1b, 0x32, 0x15, 0xcb, 0xae, - 0xf9, 0xda, 0x2a, 0x7e, 0x04, 0x15, 0xb1, 0xa5, 0x1b, 0xb9, 0xb1, 0x9a, 0x36, 0xaf, 0xb4, 0x73, - 0xd3, 0x4c, 0xfb, 0x6c, 0x4b, 0x50, 0x83, 0x33, 0xac, 0x3a, 0x5b, 0xa8, 0x07, 0xd0, 0xda, 0xf9, - 0x10, 0x58, 0x62, 0x87, 0xcc, 0x75, 0x05, 0xcb, 0x6f, 0x61, 0x93, 0x7a, 0x9a, 0xa9, 0x4d, 0x0a, - 0xf3, 0x94, 0xea, 0x2a, 0xe3, 0x8e, 0x9e, 0x3b, 0xeb, 0x66, 0xcc, 0xd1, 0xe3, 0x2e, 0x46, 0xd1, - 0xda, 0xf5, 0xde, 0x84, 0x52, 0xca, 0xb3, 0x62, 0xe7, 0x19, 0x42, 0x28, 0x40, 0x2e, 0xd1, 0xf7, - 0x86, 0x9a, 0xa1, 0x17, 0x90, 0x7c, 0x17, 0xb6, 0xba, 0xa3, 0x91, 0xb8, 0x92, 0xc7, 0xe1, 0x98, - 0x70, 0xaa, 0x95, 0x7f, 0xb5, 0x2d, 0x9f, 0x12, 0x87, 0xfa, 0xe1, 0x94, 0x70, 0xaa, 0x30, 0x52, - 0x0f, 0x03, 0xaf, 0x6c, 0xc9, 0x95, 0xf0, 0xb7, 0x91, 0x1f, 0x8e, 0x6b, 0x1f, 0xb7, 0x05, 0xd5, - 0x87, 0x8c, 0x7b, 0x81, 0xfb, 0x88, 0x7a, 0xee, 0x24, 0x3d, 0xb4, 0x89, 0x97, 0x6c, 0xe8, 0x18, - 0xaa, 0x3a, 0xf2, 0x3d, 0x12, 0x4f, 0xa4, 0x6c, 0x55, 0xfb, 0xfd, 0x8b, 0x5f, 0xc1, 0xa5, 0x30, - 0xe2, 0xa1, 0xd0, 0x6b, 0xf5, 0x48, 0x5d, 0x7b, 0x6a, 0x96, 0xe3, 0x0c, 0x92, 0x3b, 0xea, 0x57, - 0xd9, 0x93, 0x71, 0x01, 0xba, 0x1b, 0x60, 0x3a, 0x89, 0xe6, 0xb8, 0x9a, 0xc1, 0xba, 0xc1, 0x1c, - 0x0b, 0x47, 0x2e, 0xfc, 0x0f, 0x06, 0x58, 0x0f, 0x19, 0xa7, 0xff, 0xf9, 0x44, 0x5e, 0x83, 0xeb, - 0x5c, 0x19, 0x8f, 0x17, 0xf4, 0x64, 0x97, 0xc8, 0xc8, 0x5d, 0xa2, 0xeb, 0x70, 0xb5, 0x47, 0xe3, - 0x51, 0xe4, 0x85, 0xdc, 0x63, 0x81, 0xba, 0x5f, 0x79, 0x53, 0xfe, 0x69, 0x35, 0x5f, 0xf0, 0xb4, - 0xe6, 0xf2, 0xfe, 0x5a, 0x80, 0x92, 0x4d, 0xa6, 0x53, 0xc6, 0x97, 0x14, 0x32, 0x5e, 0xa8, 0x90, - 0xe8, 0x93, 0x3d, 0x2f, 0x20, 0x53, 0xef, 0x3b, 0x2f, 0x70, 0xd5, 0xcf, 0xcc, 0xe5, 0xfa, 0x24, - 0x1f, 0x06, 0xed, 0xc2, 0x66, 0xa8, 0x52, 0x0c, 0x38, 0xe1, 0xe9, 0x8c, 0xd8, 0xba, 0xf3, 0x7a, - 0xee, 0x30, 0xa2, 0xda, 0xac, 0x22, 0x09, 0xc2, 0xcb, 0x7b, 0xd0, 0x1b, 0x50, 0x14, 0x9a, 0xc6, - 0xf5, 0xa2, 0x6c, 0x80, 0xcd, 0x6c, 0xb3, 0xb0, 0xe2, 0xd4, 0xd7, 0xfa, 0x18, 0x36, 0x97, 0x82, - 0xa0, 0x2a, 0x54, 0x8e, 0xf0, 0xe1, 0xd1, 0xe1, 0xa0, 0xdf, 0xab, 0x6d, 0x88, 0x55, 0xff, 0xf3, - 0xfe, 0xee, 0xb1, 0xd3, 0xef, 0xd5, 0x0c, 0x04, 0x50, 0xda, 0xeb, 0x1e, 0xdc, 0xef, 0xf7, 0x6a, - 0x05, 0xfb, 0x93, 0xd3, 0xf3, 0x86, 0xf1, 0xfb, 0x79, 0xc3, 0xf8, 0xeb, 0xbc, 0x61, 0xfc, 0xf6, - 0xa4, 0x61, 0x9c, 0x3e, 0x69, 0x18, 0x5f, 0xdc, 0x7c, 0xfe, 0xa9, 0x79, 0x12, 0x77, 0x54, 0x15, - 0xc3, 0x92, 0xfc, 0x73, 0xbc, 0xfb, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbf, 0xdf, 0xc1, 0x1b, - 0xa0, 0x0a, 0x00, 0x00, + // 1025 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcf, 0x6f, 0xe3, 0xc4, + 0x17, 0xaf, 0x63, 0xe7, 0xc7, 0xbe, 0xa6, 0xfd, 0x66, 0xe7, 0x0b, 0x28, 0xaa, 0x44, 0x52, 0x02, + 0x82, 0x2e, 0x6c, 0x13, 0xe8, 0xf2, 0x43, 0xea, 0x05, 0xc5, 0x49, 0xda, 0x2d, 0x5a, 0xb6, 0x65, + 0xe2, 0xee, 0x22, 0x10, 0x07, 0x27, 0x19, 0x1c, 0x8b, 0xd8, 0x63, 0xec, 0xc9, 0xe2, 0x70, 0xe2, + 0xc0, 0x81, 0x2b, 0xe2, 0xc2, 0xb1, 0xff, 0x02, 0xff, 0x01, 0xc7, 0x1e, 0x39, 0x73, 0xa8, 0x50, + 0xf7, 0x82, 0xf8, 0x17, 0xb8, 0xa0, 0x19, 0xcf, 0x38, 0x4e, 0x76, 0xb5, 0x9b, 0x56, 0x88, 0x9b, + 0xe7, 0xbd, 0xcf, 0xbc, 0x1f, 0x9f, 0xf7, 0xe6, 0x3d, 0xc3, 0x46, 0x60, 0xcf, 0x26, 0xd4, 0x1e, + 0x35, 0x83, 0x90, 0x32, 0x8a, 0x8a, 0xf2, 0xb8, 0xb5, 0xeb, 0xb8, 0x6c, 0x3c, 0x1d, 0x34, 0x87, + 0xd4, 0x6b, 0x39, 0xd4, 0xa1, 0x2d, 0xa1, 0x1f, 0x4c, 0xbf, 0x14, 0x27, 0x71, 0x10, 0x5f, 0xc9, + 0xbd, 0xad, 0x4a, 0x40, 0x42, 0xcf, 0x8d, 0x22, 0x97, 0xfa, 0x52, 0x02, 0x51, 0x40, 0x86, 0xc9, + 0x77, 0xe3, 0x47, 0x1d, 0xf4, 0xb6, 0x3f, 0x43, 0x6f, 0x40, 0xa1, 0x63, 0x4f, 0x26, 0x56, 0x5c, + 0xd5, 0xb6, 0xb5, 0x9d, 0xf5, 0xbd, 0xff, 0x35, 0x95, 0xf7, 0x44, 0x8c, 0xa5, 0x9a, 0x03, 0xfb, + 0xc4, 0x1f, 0x59, 0x71, 0x35, 0xb7, 0x04, 0x4c, 0xc4, 0x58, 0xaa, 0x39, 0xf0, 0xbe, 0xed, 0x11, + 0x2b, 0xae, 0xea, 0x4b, 0xc0, 0x44, 0x8c, 0xa5, 0x1a, 0xbd, 0x09, 0xc5, 0x13, 0x12, 0x7a, 0x91, + 0x15, 0x57, 0x0d, 0x81, 0xac, 0xa4, 0x48, 0x29, 0xc7, 0x0a, 0x80, 0x5e, 0x83, 0xfc, 0x21, 0x7d, + 0x64, 0xc5, 0xd5, 0xbc, 0x40, 0x6e, 0xa6, 0x48, 0x21, 0xc5, 0x89, 0x92, 0xbb, 0x36, 0xa9, 0x88, + 0xb1, 0xb0, 0xe4, 0x3a, 0x11, 0x63, 0xa9, 0x46, 0xbb, 0x50, 0x3a, 0xf5, 0x07, 0x09, 0xb4, 0x28, + 0xa0, 0x37, 0x53, 0xa8, 0x52, 0xe0, 0x14, 0xc2, 0x23, 0x35, 0x6d, 0x36, 0x1c, 0x5b, 0x71, 0xb5, + 0xb4, 0x14, 0xa9, 0x94, 0x63, 0x05, 0x40, 0x77, 0x00, 0x4e, 0x42, 0x1a, 0xd0, 0xc8, 0xe6, 0xa4, + 0xde, 0x10, 0xf0, 0xff, 0xcf, 0x13, 0x4b, 0x55, 0x38, 0x03, 0xdb, 0x37, 0xce, 0xcf, 0xea, 0x5a, + 0xe3, 0x27, 0x0d, 0x8a, 0x56, 0x7c, 0xe4, 0x07, 0x53, 0x86, 0xee, 0x43, 0xb1, 0x3d, 0x1a, 0x85, + 0x24, 0x8a, 0x44, 0x61, 0xca, 0xe6, 0xbb, 0xe7, 0x17, 0xf5, 0xb5, 0xdf, 0x2f, 0xea, 0xb7, 0x33, + 0x5d, 0x30, 0x9e, 0x05, 0x24, 0x9c, 0x90, 0x91, 0x43, 0xc2, 0xd6, 0x60, 0x1a, 0x86, 0xf4, 0x9b, + 0xd6, 0x30, 0x9c, 0x05, 0x8c, 0x36, 0xe5, 0x5d, 0xac, 0x8c, 0xa0, 0x97, 0xa0, 0xd0, 0xf6, 0xe8, + 0xd4, 0x67, 0xa2, 0x7c, 0x06, 0x96, 0x27, 0xb4, 0x05, 0xa5, 0x3e, 0xf9, 0x7a, 0x4a, 0xfc, 0x21, + 0x11, 0xf5, 0x32, 0x70, 0x7a, 0xde, 0x37, 0x7e, 0x3e, 0xab, 0xaf, 0x35, 0x62, 0x28, 0x59, 0xf1, + 0xf1, 0x94, 0xfd, 0x87, 0x51, 0x49, 0xcf, 0x7f, 0xe7, 0x54, 0x73, 0xa2, 0xd7, 0x21, 0x2f, 0x78, + 0x91, 0x5d, 0x3a, 0xe7, 0x5f, 0xf2, 0x85, 0x13, 0x35, 0xfa, 0x68, 0x1e, 0x60, 0x4e, 0x04, 0xf8, + 0xf6, 0xf5, 0x83, 0xdb, 0x82, 0xd2, 0xa1, 0x1d, 0xdd, 0x73, 0x3d, 0x97, 0x29, 0x6a, 0xd4, 0x19, + 0x55, 0x40, 0x3f, 0x20, 0x44, 0xf4, 0xad, 0x81, 0xf9, 0x27, 0x3a, 0x02, 0xa3, 0x6b, 0x33, 0x5b, + 0x34, 0x68, 0xd9, 0x7c, 0x4f, 0xf2, 0xb2, 0xfb, 0x6c, 0xd7, 0x03, 0xd7, 0xb7, 0xc3, 0x59, 0xf3, + 0x2e, 0x89, 0xcd, 0x19, 0x23, 0x11, 0x16, 0x26, 0xd0, 0xe7, 0x60, 0x3c, 0x6c, 0xf7, 0x3f, 0x16, + 0x4d, 0x5c, 0x36, 0x0f, 0xaf, 0x65, 0xea, 0xaf, 0x8b, 0xfa, 0x26, 0xb3, 0x9d, 0xe8, 0x36, 0xf5, + 0x5c, 0x46, 0xbc, 0x80, 0xcd, 0xb0, 0x30, 0x8a, 0x5e, 0x01, 0xa3, 0x3d, 0x70, 0xa3, 0x6a, 0x71, + 0x5b, 0xdf, 0x59, 0xdf, 0xdb, 0x48, 0x89, 0xe4, 0x42, 0x2c, 0x54, 0x92, 0xfd, 0xaf, 0x12, 0x20, + 0xfa, 0x04, 0x4a, 0x1d, 0x3a, 0x22, 0x77, 0xed, 0x68, 0x2c, 0x8b, 0x7e, 0xcd, 0xe4, 0x52, 0x33, + 
0x9c, 0xbd, 0xf6, 0xc0, 0x15, 0x15, 0xba, 0x81, 0xf9, 0x67, 0xc3, 0x55, 0xd3, 0x05, 0xed, 0x40, + 0x41, 0x94, 0x92, 0x77, 0x98, 0xfe, 0xd4, 0x52, 0x4b, 0x3d, 0x7a, 0x0b, 0x8a, 0x49, 0x5b, 0xf2, + 0x5a, 0xeb, 0x0b, 0x6f, 0x58, 0x35, 0x2c, 0x56, 0x88, 0xfd, 0xd2, 0x0f, 0x67, 0xf5, 0x35, 0x91, + 0x17, 0x4d, 0xc7, 0xce, 0xca, 0x5d, 0xf5, 0x3e, 0x94, 0xf8, 0x95, 0x76, 0xe8, 0x44, 0x72, 0xfa, + 0xbd, 0xd0, 0xcc, 0x4c, 0x57, 0xa5, 0x33, 0x0d, 0x4e, 0x0c, 0x4e, 0xb1, 0x92, 0xc8, 0x40, 0x0d, + 0xc4, 0x95, 0xfd, 0x21, 0x30, 0xf8, 0x0d, 0x49, 0x90, 0xf8, 0xe6, 0x32, 0xd1, 0x5f, 0x7a, 0x22, + 0x13, 0x8d, 0xf2, 0x44, 0x17, 0x4a, 0x8f, 0xfb, 0x6a, 0x0e, 0xae, 0xea, 0x31, 0x43, 0x8f, 0x33, + 0x1f, 0x8d, 0x2b, 0xc7, 0x7b, 0x0b, 0x0a, 0x09, 0xcf, 0x92, 0x9d, 0xa7, 0x14, 0x42, 0x02, 0x32, + 0x8e, 0xbe, 0xd3, 0xe4, 0x4c, 0xbf, 0x42, 0xc9, 0x3b, 0xb0, 0xd9, 0x1e, 0x0e, 0xf9, 0x88, 0x38, + 0x0d, 0x46, 0x36, 0x23, 0xaa, 0xf2, 0x2f, 0x36, 0xc5, 0x6a, 0xb3, 0x88, 0x17, 0x4c, 0x6c, 0x46, + 0x24, 0x46, 0xd4, 0x43, 0xc3, 0x4b, 0x57, 0x32, 0x21, 0xfc, 0xa9, 0x65, 0x87, 0xf5, 0xca, 0xe9, + 0x36, 0xa0, 0xfc, 0x80, 0x32, 0xd7, 0x77, 0x1e, 0x12, 0xd7, 0x19, 0x27, 0x49, 0xeb, 0x78, 0x41, + 0x86, 0x4e, 0xa1, 0xac, 0x2c, 0x8b, 0x97, 0xa3, 0x8b, 0x97, 0xf3, 0xce, 0xd5, 0x5f, 0xcd, 0x82, + 0x19, 0xbe, 0xb8, 0xd4, 0x59, 0x2e, 0xcd, 0x9b, 0x4f, 0xec, 0x16, 0x9c, 0x42, 0x32, 0xa9, 0x7e, + 0x91, 0xae, 0xb0, 0x2b, 0xd0, 0x5d, 0x03, 0xdd, 0x8a, 0x15, 0xc7, 0xe5, 0xf9, 0xa8, 0xf0, 0x67, + 0x98, 0x2b, 0x32, 0xe6, 0xbf, 0xd7, 0xc0, 0x78, 0x40, 0x19, 0xf9, 0xd7, 0x37, 0xc4, 0x0a, 0x5c, + 0x67, 0xc2, 0x78, 0x34, 0xa7, 0x27, 0x7d, 0x44, 0x5a, 0xe6, 0x11, 0x6d, 0xc3, 0x7a, 0x97, 0x44, + 0xc3, 0xd0, 0x0d, 0x98, 0x4b, 0x7d, 0xf9, 0xbe, 0xb2, 0xa2, 0xec, 0xaa, 0xd7, 0x9f, 0xb3, 0xea, + 0x33, 0x7e, 0x7f, 0xc9, 0x41, 0xc1, 0xb4, 0x27, 0x13, 0xca, 0x16, 0x2a, 0xa4, 0x3d, 0xb7, 0x42, + 0xbc, 0x4f, 0x0e, 0x5c, 0xdf, 0x9e, 0xb8, 0xdf, 0xba, 0xbe, 0x23, 0x7f, 0xae, 0xae, 0xd7, 0x27, + 0x59, 0x33, 0xa8, 0x03, 0x1b, 0x81, 0x74, 0xd1, 0x67, 0x36, 0x4b, 0x66, 0xc4, 0xe6, 0xde, 0xcb, + 0x99, 0x64, 0x78, 0xb4, 0x69, 0x44, 0x02, 0x84, 0x17, 0xef, 0xa0, 0x57, 0x21, 0xcf, 0x6b, 0x1a, + 0x55, 0xf3, 0x4b, 0xbb, 0x82, 0x4b, 0x71, 0xa2, 0x6b, 0x7c, 0x00, 0x1b, 0x0b, 0x46, 0x50, 0x19, + 0x4a, 0x27, 0xf8, 0xf8, 0xe4, 0xb8, 0xdf, 0xeb, 0x56, 0xd6, 0xf8, 0xa9, 0xf7, 0x69, 0xaf, 0x73, + 0x6a, 0xf5, 0xba, 0x15, 0x0d, 0x01, 0x14, 0x0e, 0xda, 0x47, 0xf7, 0x7a, 0xdd, 0x4a, 0xce, 0xfc, + 0xf0, 0xfc, 0xb2, 0xa6, 0xfd, 0x76, 0x59, 0xd3, 0xfe, 0xb8, 0xac, 0x69, 0xbf, 0x3e, 0xae, 0x69, + 0xe7, 0x8f, 0x6b, 0xda, 0x67, 0xb7, 0x9e, 0x9d, 0x35, 0x8b, 0xa3, 0x96, 0x8c, 0x62, 0x50, 0x10, + 0x7f, 0xb2, 0x77, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0xb4, 0x05, 0xdb, 0xfe, 0x30, 0x0b, 0x00, + 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1281,6 +1347,53 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { return 0, err } i += n15 + if len(m.Abis) > 0 { + for _, msg := range m.Abis { + dAtA[i] = 0x3a + i++ + i = encodeVarintPayload(dAtA, i, uint64(msg.Size())) + n, err := msg.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n + } + } + if m.XXX_unrecognized != nil { + i += copy(dAtA[i:], m.XXX_unrecognized) + } + return i, nil +} + +func (m *Abis) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalTo(dAtA) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Abis) MarshalTo(dAtA []byte) (int, error) { + var i int + _ = i + var l int + _ = l + dAtA[i] = 0xa + i++ + i = 
encodeVarintPayload(dAtA, i, uint64(m.CodeHash.Size())) + n16, err := m.CodeHash.MarshalTo(dAtA[i:]) + if err != nil { + return 0, err + } + i += n16 + if len(m.Abi) > 0 { + dAtA[i] = 0x12 + i++ + i = encodeVarintPayload(dAtA, i, uint64(len(m.Abi))) + i += copy(dAtA[i:], m.Abi) + } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -1351,20 +1464,20 @@ func (m *PermsTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n16, err := m.Input.MarshalTo(dAtA[i:]) + n17, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n16 + i += n17 } dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermArgs.Size())) - n17, err := m.PermArgs.MarshalTo(dAtA[i:]) + n18, err := m.PermArgs.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n17 + i += n18 if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -1390,11 +1503,11 @@ func (m *NameTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n18, err := m.Input.MarshalTo(dAtA[i:]) + n19, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n18 + i += n19 } if len(m.Name) > 0 { dAtA[i] = 0x12 @@ -1438,11 +1551,11 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n19, err := m.Input.MarshalTo(dAtA[i:]) + n20, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n19 + i += n20 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1469,21 +1582,21 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n20, err := m.Input.MarshalTo(dAtA[i:]) + n21, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n20 + i += n21 } if m.Output != nil { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Output.Size())) - n21, err := m.Output.MarshalTo(dAtA[i:]) + n22, err := m.Output.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n21 + i += n22 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1555,11 +1668,11 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n22, err := m.Input.MarshalTo(dAtA[i:]) + n23, err := m.Input.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n22 + i += n23 } if m.VotingWeight != 0 { dAtA[i] = 0x10 @@ -1570,21 +1683,21 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalHash.Size())) - n23, err := m.ProposalHash.MarshalTo(dAtA[i:]) + n24, err := m.ProposalHash.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n23 + i += n24 } if m.Proposal != nil { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n24, err := m.Proposal.MarshalTo(dAtA[i:]) + n25, err := m.Proposal.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n24 + i += n25 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1655,11 +1768,11 @@ func (m *Vote) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n25, err := m.Address.MarshalTo(dAtA[i:]) + n26, err := m.Address.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n25 + i += n26 if m.VotingWeight != 0 { dAtA[i] 
= 0x10 i++ @@ -1702,11 +1815,11 @@ func (m *Proposal) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n26, err := m.BatchTx.MarshalTo(dAtA[i:]) + n27, err := m.BatchTx.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n26 + i += n27 } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -1733,21 +1846,21 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n27, err := m.Proposal.MarshalTo(dAtA[i:]) + n28, err := m.Proposal.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n27 + i += n28 } if m.FinalizingTx != nil { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.FinalizingTx.Size())) - n28, err := m.FinalizingTx.MarshalTo(dAtA[i:]) + n29, err := m.FinalizingTx.MarshalTo(dAtA[i:]) if err != nil { return 0, err } - i += n28 + i += n29 } if m.ProposalState != 0 { dAtA[i] = 0x20 @@ -1890,6 +2003,30 @@ func (m *CallTx) Size() (n int) { n += 1 + l + sovPayload(uint64(l)) l = m.WASM.Size() n += 1 + l + sovPayload(uint64(l)) + if len(m.Abis) > 0 { + for _, e := range m.Abis { + l = e.Size() + n += 1 + l + sovPayload(uint64(l)) + } + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *Abis) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.CodeHash.Size() + n += 1 + l + sovPayload(uint64(l)) + l = len(m.Abi) + if l > 0 { + n += 1 + l + sovPayload(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3028,6 +3165,159 @@ func (m *CallTx) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Abis", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowPayload + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthPayload + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthPayload + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Abis = append(m.Abis, &Abis{}) + if err := m.Abis[len(m.Abis)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipPayload(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthPayload + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthPayload + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Abis) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowPayload + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Abis: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Abis: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CodeHash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowPayload + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthPayload + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthPayload + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.CodeHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Abi", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowPayload + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthPayload + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthPayload + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Abi = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipPayload(dAtA[iNdEx:]) diff --git a/vent/service/abis.go b/vent/service/abis.go new file mode 100644 index 000000000..4e6631c05 --- /dev/null +++ b/vent/service/abis.go @@ -0,0 +1,66 @@ +package service + +import ( + "context" + "fmt" + + "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/rpc/rpcquery" +) + +// AbiProvider provides a method for loading ABIs from disk, and retrieving them from burrow on-demand +type AbiProvider struct { + abiSpec *abi.Spec + cli rpcquery.QueryClient +} + +// NewAbiProvider loads ABIs from the filesystem. A set of zero or more files or directories can be passed in the path +// argument. If an event is encountered for which no ABI is known, it is retrieved from burrow +func NewAbiProvider(paths []string, cli rpcquery.QueryClient) (provider *AbiProvider, err error) { + abiSpec := &abi.Spec{} + if len(paths) > 0 { + abiSpec, err = abi.LoadPath(paths...) + if err != nil { + return nil, err + } + } + + provider = &AbiProvider{ + abiSpec, + cli, + } + return +} + +// GetEventAbi get the ABI for a particular eventID. 
If it is not known, it is retrieved from the burrow node via +// the address for the contract +func (p *AbiProvider) GetEventAbi(eventID abi.EventID, address crypto.Address, l *logging.Logger) (*abi.EventSpec, error) { + evAbi, ok := p.abiSpec.EventsByID[eventID] + if !ok { + resp, err := p.cli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: address}) + if err != nil { + l.InfoMsg("Error retrieving abi for event", "address", address.String(), "eventid", eventID.String(), "error", err) + return nil, err + } + if resp == nil || resp.Abi == "" { + l.InfoMsg("ABI not found for contract", "address", address.String(), "eventid", eventID.String()) + return nil, fmt.Errorf("No ABI present for contract at address %v", address) + } + a, err := abi.ReadSpec([]byte(resp.Abi)) + if err != nil { + l.InfoMsg("Failed to parse abi", "address", address.String(), "eventid", eventID.String(), "abi", resp.Abi) + return nil, err + } + evAbi, ok = a.EventsByID[eventID] + if !ok { + l.InfoMsg("Event missing from ABI spec for contract", "address", address.String(), "eventid", eventID.String(), "abi", resp.Abi) + return nil, fmt.Errorf("Event missing from ABI spec for contract") + } + + p.abiSpec = abi.MergeSpec([]*abi.Spec{p.abiSpec, a}) + } + + return &evAbi, nil +} diff --git a/vent/service/consumer.go b/vent/service/consumer.go index 430d90956..ab32620b5 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -8,7 +8,6 @@ import ( "github.com/hyperledger/burrow/rpc" - "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/rpc/rpcevents" @@ -55,7 +54,7 @@ func NewConsumer(cfg *config.VentConfig, log *logging.Logger, eventChannel chan // Run connects to a grpc service and subscribes to log events, // then gets tables structures, maps them & parse event data. 
// Store data in SQL event tables, it runs forever -func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.Spec, stream bool) error { +func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { var err error c.Log.InfoMsg("Connecting to Burrow gRPC server") @@ -74,6 +73,11 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.Spec, stream return errors.Wrapf(err, "Error getting chain status") } + abiProvider, err := NewAbiProvider(c.Config.AbiFileOrDirs, rpcquery.NewQueryClient(c.GRPCConnection)) + if err != nil { + return errors.Wrapf(err, "Error loading ABIs") + } + if len(projection.EventSpec) == 0 { c.Log.InfoMsg("No events specifications found") return nil @@ -167,7 +171,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.Spec, stream c.Log.TraceMsg("Waiting for blocks...") - err = rpcevents.ConsumeBlockExecutions(stream, c.makeBlockConsumer(projection, abiSpec, eventCh)) + err = rpcevents.ConsumeBlockExecutions(stream, c.makeBlockConsumer(projection, abiProvider, eventCh)) if err != nil { if err == io.EOF { @@ -211,7 +215,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, abiSpec *abi.Spec, stream } } -func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi.Spec, +func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiProvider *AbiProvider, eventCh chan<- types.EventData) func(blockExecution *exec.BlockExecution) error { return func(blockExecution *exec.BlockExecution) error { @@ -285,7 +289,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiSpec *abi "filter", eventClass.Filter) // unpack, decode & build event data - eventData, err := buildEventData(projection, eventClass, event, origin, abiSpec, c.Log) + eventData, err := buildEventData(projection, eventClass, event, origin, abiProvider, c.Log) if err != nil { return errors.Wrapf(err, "Error building event data") } diff --git a/vent/service/consumer_test.go b/vent/service/consumer_test.go index 93ccf75ca..62723786e 100644 --- a/vent/service/consumer_test.go +++ b/vent/service/consumer_test.go @@ -10,7 +10,6 @@ import ( "time" "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/rpc/rpctransact" "github.com/hyperledger/burrow/vent/config" @@ -204,10 +203,7 @@ func runConsumer(t *testing.T, cfg *config.VentConfig) chan types.EventData { projection, err := sqlsol.SpecLoader(cfg.SpecFileOrDirs, cfg.SpecOpt) require.NoError(t, err) - abiSpec, err := abi.LoadPath(cfg.AbiFileOrDirs...) 
- require.NoError(t, err) - - err = consumer.Run(projection, abiSpec, false) + err = consumer.Run(projection, false) require.NoError(t, err) return consumer.EventsChannel } diff --git a/vent/service/decoder.go b/vent/service/decoder.go index 77f7034fa..5a9844337 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -12,18 +12,10 @@ import ( ) // decodeEvent unpacks & decodes event data -func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, abiSpec *abi.Spec) (map[string]interface{}, error) { +func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, evAbi *abi.EventSpec) (map[string]interface{}, error) { // to prepare decoded data and map to event item name data := make(map[string]interface{}) - var eventID abi.EventID - copy(eventID[:], log.Topics[0].Bytes()) - - evAbi, ok := abiSpec.EventsByID[eventID] - if !ok { - return nil, fmt.Errorf("abi spec not found for event %x", eventID) - } - // decode header to get context data for each event data[types.EventNameLabel] = evAbi.Name data[types.ChainIDLabel] = origin.ChainID @@ -36,8 +28,8 @@ func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, a unpackedData := abi.GetPackingTypes(evAbi.Inputs) // unpack event data (topics & data part) - if err := abi.UnpackEvent(&evAbi, log.Topics, log.Data, unpackedData...); err != nil { - return nil, errors.Wrap(err, "could not unpack event data") + if err := abi.UnpackEvent(evAbi, log.Topics, log.Data, unpackedData...); err != nil { + return nil, errors.Wrap(err, "Could not unpack event data") } // for each decoded item value, stores it in given item name diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index 942a3b35e..3dba2dcf4 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -16,7 +16,7 @@ import ( ) // buildEventData builds event data from transactions -func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiSpec *abi.Spec, +func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiProvider *AbiProvider, l *logging.Logger) (types.EventDataRow, error) { // a fresh new row to store column/value data @@ -26,8 +26,17 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, eventHeader := event.GetHeader() eventLog := event.GetLog() + // Find event spec for this event + var eventID abi.EventID + copy(eventID[:], eventLog.Topics[0].Bytes()) + + evAbi, err := abiProvider.GetEventAbi(eventID, eventLog.Address, l) + if err != nil { + return types.EventDataRow{}, err + } + // decode event data using the provided abi specification - decodedData, err := decodeEvent(eventHeader, eventLog, origin, abiSpec) + decodedData, err := decodeEvent(eventHeader, eventLog, origin, evAbi) if err != nil { return types.EventDataRow{}, errors.Wrapf(err, "Error decoding event (filter: %s)", eventClass.Filter) } diff --git a/vent/service/server_test.go b/vent/service/server_test.go index d7ee91b86..73e3f2698 100644 --- a/vent/service/server_test.go +++ b/vent/service/server_test.go @@ -11,7 +11,6 @@ import ( "testing" "time" - "github.com/hyperledger/burrow/execution/evm/abi" "github.com/hyperledger/burrow/integration" "github.com/hyperledger/burrow/integration/rpctest" "github.com/hyperledger/burrow/logging" @@ -44,13 +43,12 @@ func TestServer(t *testing.T) { log := logging.NewNoopLogger() consumer := service.NewConsumer(cfg, log, make(chan 
types.EventData)) projection, err := sqlsol.SpecLoader(cfg.SpecFileOrDirs, sqlsol.None) - abiSpec, err := abi.LoadPath(cfg.AbiFileOrDirs...) var wg sync.WaitGroup wg.Add(1) go func() { - err := consumer.Run(projection, abiSpec, true) + err := consumer.Run(projection, true) require.NoError(t, err) wg.Done() diff --git a/vent/test/EventsTest.sol.go b/vent/test/EventsTest.sol.go index 9e6d6b72d..9edb1450e 100644 --- a/vent/test/EventsTest.sol.go +++ b/vent/test/EventsTest.sol.go @@ -3,4 +3,5 @@ package test import hex "github.com/tmthrgd/go-hex" var Bytecode_EventsTest = hex.MustDecodeString("608060405234801561001057600080fd5b506107a8806100206000396000f3fe608060405234801561001057600080fd5b5060043610610069576000357c01000000000000000000000000000000000000000000000000000000009004806306661abd1461006e578063882551991461008c578063c1de9c6d1461017e578063dc667a62146101f7575b600080fd5b6100766102c5565b6040518082815260200191505060405180910390f35b610103600480360360208110156100a257600080fd5b81019080803590602001906401000000008111156100bf57600080fd5b8201836020820111156100d157600080fd5b803590602001918460018302840111640100000000831117156100f357600080fd5b90919293919293905050506102ce565b6040518080602001828103825283818151815260200191508051906020019080838360005b83811015610143578082015181840152602081019050610128565b50505050905090810190601f1680156101705780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101f56004803603602081101561019457600080fd5b81019080803590602001906401000000008111156101b157600080fd5b8201836020820111156101c357600080fd5b803590602001918460018302840111640100000000831117156101e557600080fd5b909192939192939050505061039b565b005b6102c36004803603604081101561020d57600080fd5b810190808035906020019064010000000081111561022a57600080fd5b82018360208201111561023c57600080fd5b8035906020019184600183028401116401000000008311171561025e57600080fd5b90919293919293908035906020019064010000000081111561027f57600080fd5b82018360208201111561029157600080fd5b803590602001918460018302840111640100000000831117156102b357600080fd5b90919293919293905050506104f9565b005b60008054905090565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561038e5780601f106103635761010080835404028352916020019161038e565b820191906000526020600020905b81548152906001019060200180831161037157829003601f168201915b5050505050905092915050565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16156104f45760008081548092919060019003919050555060018383604051808383808284378083019250505092505050908152602001604051809103902060008082016000610427919061068f565b600182016000610437919061068f565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006104bb84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16151561054e5760008081548092919060010191905055505b84848260000191906105619291906106d7565b5082828260010191906105759291906106d7565b5060018160020160006101000a81548160ff0219169083151502179055506105e083838080601f01602080910402602001
6040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f544553545f4556454e545300000000000000000000000000000000000000000061064e87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f106106b557506106d4565b601f0160209004906000526020600020908101906106d39190610757565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061071857803560ff1916838001178555610746565b82800160010185558215610746579182015b8281111561074557823582559160200191906001019061072a565b5b5090506107539190610757565b5090565b61077991905b8082111561077557600081600090555060010161075d565b5090565b9056fea165627a7a72305820b80269f52800df97ea8d0eaa20a8d8541f670004f26b92c1fad10c4bd238a6020029") +var DeployedBytecode_EventsTest = hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610069576000357c01000000000000000000000000000000000000000000000000000000009004806306661abd1461006e578063882551991461008c578063c1de9c6d1461017e578063dc667a62146101f7575b600080fd5b6100766102c5565b6040518082815260200191505060405180910390f35b610103600480360360208110156100a257600080fd5b81019080803590602001906401000000008111156100bf57600080fd5b8201836020820111156100d157600080fd5b803590602001918460018302840111640100000000831117156100f357600080fd5b90919293919293905050506102ce565b6040518080602001828103825283818151815260200191508051906020019080838360005b83811015610143578082015181840152602081019050610128565b50505050905090810190601f1680156101705780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101f56004803603602081101561019457600080fd5b81019080803590602001906401000000008111156101b157600080fd5b8201836020820111156101c357600080fd5b803590602001918460018302840111640100000000831117156101e557600080fd5b909192939192939050505061039b565b005b6102c36004803603604081101561020d57600080fd5b810190808035906020019064010000000081111561022a57600080fd5b82018360208201111561023c57600080fd5b8035906020019184600183028401116401000000008311171561025e57600080fd5b90919293919293908035906020019064010000000081111561027f57600080fd5b82018360208201111561029157600080fd5b803590602001918460018302840111640100000000831117156102b357600080fd5b90919293919293905050506104f9565b005b60008054905090565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561038e5780601f106103635761010080835404028352916020019161038e565b820191906000526020600020905b81548152906001019060200180831161037157829003601f168201915b5050505050905092915050565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16156104f45760008081548092919060019003919050555060018383604051808383808284378083019250505092505050908152602001604051809103902060008082016000610427919061068f565b600182016000610437919061068f565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006104bb84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187
828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16151561054e5760008081548092919060010191905055505b84848260000191906105619291906106d7565b5082828260010191906105759291906106d7565b5060018160020160006101000a81548160ff0219169083151502179055506105e083838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f544553545f4556454e545300000000000000000000000000000000000000000061064e87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f106106b557506106d4565b601f0160209004906000526020600020908101906106d39190610757565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061071857803560ff1916838001178555610746565b82800160010185558215610746579182015b8281111561074557823582559160200191906001019061072a565b5b5090506107539190610757565b5090565b61077991905b8082111561077557600081600090555060010161075d565b5090565b9056fea165627a7a72305820b80269f52800df97ea8d0eaa20a8d8541f670004f26b92c1fad10c4bd238a6020029") var Abi_EventsTest = []byte(`[{"constant":true,"inputs":[],"name":"count","outputs":[{"name":"size","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"_name","type":"string"}],"name":"description","outputs":[{"name":"_description","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"removeThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_description","type":"string"}],"name":"addThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":true,"name":"description","type":"bytes32"}],"name":"UpdateTestEvents","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":false,"name":"__DELETE__","type":"int256"}],"name":"DeleteTestEvents","type":"event"}]`) From f1bcef66525dfd96304ba0b78c8b406ef086284d Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 15 Jul 2019 09:58:14 +0100 Subject: [PATCH 44/70] Libraries do not have ABIs Signed-off-by: Sean Young --- deploy/compile/compilers.go | 161 ++++++++++++------ deploy/jobs/jobs_contracts.go | 15 +- .../app09-basic_solidity_imports/deploy.yaml | 1 + .../deploy.yaml | 1 + .../deploy.yaml | 2 + .../deploy.yaml | 2 +- 6 files changed, 128 insertions(+), 54 deletions(-) diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index 11eb945ee..97514be39 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -53,18 +53,22 @@ type SolidityOutput struct { } } +type ContractCode struct { + Object string + LinkReferences json.RawMessage +} + +type AbiMap struct { + DeployedBytecode ContractCode + Abi string +} + // SolidityContract is defined for each contract defined in the solidity 
source code type SolidityContract struct { Abi json.RawMessage Evm struct { - Bytecode struct { - Object string - LinkReferences json.RawMessage - } - DeployedBytecode struct { - Object string - LinkReferences json.RawMessage - } + Bytecode ContractCode + DeployedBytecode ContractCode } EWasm struct { Wasm string @@ -73,8 +77,13 @@ type SolidityContract struct { Userdoc json.RawMessage Metadata string // This is not present in the solidity output, but we add it ourselves - // This is map from CodeHash to ABI - AbiMap map[acmstate.CodeHash]string + // This is map from DeployedBytecode to ABI. A Solidity contract can create any number + // of contracts, which have distinct ABIs. This is a map for the deployed code to abi, + // including the first contract itself. + + // Note that libraries do not have ABIs. Also, the deployedbytecode does not match + // what Solidity tells use it will be. + AbiMap []AbiMap `json:",omitempty"` } type Response struct { @@ -128,40 +137,60 @@ func (contract *SolidityContract) Save(dir, file string) error { return os.Rename(f.Name(), filepath.Join(dir, file)) } -func (contract *SolidityContract) Link(libraries map[string]string) error { - bin := contract.Evm.Bytecode.Object - if !strings.Contains(bin, "_") { - return nil - } +func link(bytecode string, linkReferences json.RawMessage, libraries map[string]string) (string, error) { var links map[string]map[string][]struct{ Start, Length int } - err := json.Unmarshal(contract.Evm.Bytecode.LinkReferences, &links) + err := json.Unmarshal(linkReferences, &links) if err != nil { - return err + return "", err } for _, f := range links { for name, relos := range f { addr, ok := libraries[name] if !ok { - return fmt.Errorf("library %s is not defined", name) + return "", fmt.Errorf("library %s is not defined", name) } for _, relo := range relos { if relo.Length != crypto.AddressLength { - return fmt.Errorf("linkReference should be %d bytes long, not %d", crypto.AddressLength, relo.Length) + return "", fmt.Errorf("linkReference should be %d bytes long, not %d", crypto.AddressLength, relo.Length) } if len(addr) != crypto.AddressHexLength { - return fmt.Errorf("address %s should be %d character long, not %d", addr, crypto.AddressHexLength, len(addr)) + return "", fmt.Errorf("address %s should be %d character long, not %d", addr, crypto.AddressHexLength, len(addr)) } start := relo.Start * 2 end := relo.Start*2 + crypto.AddressHexLength - if bin[start+1] != '_' || bin[end-1] != '_' { - return fmt.Errorf("relocation dummy not found at %d in %s ", relo.Start, bin) + if bytecode[start+1] != '_' || bytecode[end-1] != '_' { + return "", fmt.Errorf("relocation dummy not found at %d in %s ", relo.Start, bytecode) } - bin = bin[:start] + addr + bin[end:] + bytecode = bytecode[:start] + addr + bytecode[end:] } } } - contract.Evm.Bytecode.Object = bin + return bytecode, nil +} + +// Link will replace the unresolved references with the libraries provided +func (contract *SolidityContract) Link(libraries map[string]string) error { + bin := contract.Evm.Bytecode.Object + if strings.Contains(bin, "_") { + bin, err := link(bin, contract.Evm.Bytecode.LinkReferences, libraries) + if err != nil { + return err + } + contract.Evm.Bytecode.Object = bin + } + + if contract.AbiMap != nil { + for _, m := range contract.AbiMap { + if strings.Contains(bin, "_") { + bin, err := link(m.DeployedBytecode.Object, m.DeployedBytecode.LinkReferences, libraries) + if err != nil { + return err + } + m.DeployedBytecode.Object = bin + } + } + } return nil } @@ 
-174,6 +203,28 @@ func (contract *SolidityContract) Code() (code string) { return } +func (contract *SolidityContract) IsLibrary() bool { + // If this contract is an Library, then: + // a) it does not have an externally callable ABI + // b) the deployedBytecode will not match the actually deployedBytecode (thanks Solidity!). It inserts its own address + // into the bytecode before deploying. + + // https://github.com/ethereum/solidity/issues/7101 + + // However Solidity does not make it easy to detect whether a contract is a library or a regular contract. Any suggestions + // for improving this are *very* welcome. + + // A library deploy code always starts with PUSH20 followed by 20 0 bytes. When the code is actually deployed, the address + // of the library itself is populated there. This is done so that there is some code which prevents a library being called + // directly via a transaction, rather than from a solidity contract as it was intended. + + // https://github.com/ethereum/solidity/issues/7102 + + libraryPrefix := "73" + strings.Repeat("00", 20) + + return strings.HasPrefix(contract.Evm.DeployedBytecode.Object, libraryPrefix) +} + func EVM(file string, optimize bool, workDir string, libraries map[string]string, logger *logging.Logger) (*Response, error) { input := SolidityInput{Language: "Solidity", Sources: make(map[string]SolidityInputSource)} @@ -205,16 +256,26 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string return nil, err } - abis, err := output.getAbis(logger) - if err != nil { - return nil, err + // Collect our ABIs + abimap := make([]AbiMap, 0) + for _, src := range output.Contracts { + for _, item := range src { + if !item.IsLibrary() && item.Evm.DeployedBytecode.Object != "" { + abimap = append(abimap, AbiMap{ + DeployedBytecode: item.Evm.DeployedBytecode, + Abi: string(item.Abi), + }) + } + } } respItemArray := make([]ResponseItem, 0) for f, s := range output.Contracts { for contract, item := range s { - item.AbiMap = abis + if !item.IsLibrary() { + item.AbiMap = abimap + } respItem := ResponseItem{ Filename: f, Objectname: objectName(contract), @@ -343,32 +404,30 @@ func PrintResponse(resp Response, cli bool, logger *logging.Logger) { } // GetAbis get the CodeHashes + Abis for the generated Code. 
So, we have a map for all the possible contracts codes hashes to abis -func (sol *SolidityOutput) getAbis(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { +func (contract *SolidityContract) GetAbis(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { res := make(map[acmstate.CodeHash]string) - for filename, src := range sol.Contracts { - for name, contract := range src { - if contract.Evm.DeployedBytecode.Object == "" { - continue - } - - runtime, err := hex.DecodeString(contract.Evm.DeployedBytecode.Object) - if err != nil { - return nil, err - } + if contract.Evm.DeployedBytecode.Object == "" { + return nil, nil + } - hash := sha3.NewKeccak256() - hash.Write(runtime) - var codehash acmstate.CodeHash - copy(codehash[:], hash.Sum(nil)) - logger.TraceMsg("Found ABI", - "contract", name, - "file", filename, - "code", fmt.Sprintf("%X", runtime), - "code hash", fmt.Sprintf("%X", codehash), - "abi", string(contract.Abi)) - res[codehash] = string(contract.Abi) + for _, m := range contract.AbiMap { + if strings.Contains(m.DeployedBytecode.Object, "_") { + continue + } + runtime, err := hex.DecodeString(m.DeployedBytecode.Object) + if err != nil { + return nil, err } - } + hash := sha3.NewKeccak256() + hash.Write(runtime) + var codehash acmstate.CodeHash + copy(codehash[:], hash.Sum(nil)) + logger.TraceMsg("Found ABI", + "code", fmt.Sprintf("%X", runtime), + "code hash", fmt.Sprintf("%X", codehash), + "abi", string(m.Abi)) + res[codehash] = string(m.Abi) + } return res, nil } diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index 53a250d09..3a32c1a61 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -164,7 +164,12 @@ func FormulateDeployJob(deploy *def.Deploy, do *def.DeployArgs, deployScript *de contractCode = contractCode + callData } - tx, err := deployTx(client, deploy, contractName, string(contractCode), "", nil, logger) + abiMap, err := contract.GetAbis(logger) + if err != nil { + return nil, nil, err + } + + tx, err := deployTx(client, deploy, contractName, string(contractCode), "", abiMap, logger) if err != nil { return nil, nil, fmt.Errorf("could not deploy binary contract: %v", err) } @@ -352,6 +357,7 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } } + var abiMap map[acmstate.CodeHash]string wasm := "" data := "" if contract.EWasm.Wasm != "" { @@ -362,6 +368,11 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook return nil, err } data = contract.Evm.Bytecode.Object + + abiMap, err = contract.GetAbis(logger) + if err != nil { + return nil, err + } } if deploy.Data != nil { @@ -388,7 +399,7 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } } - return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, contract.AbiMap, logger) + return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, abiMap, logger) } func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm string, abis map[acmstate.CodeHash]string, logger *logging.Logger) (*payload.CallTx, error) { diff --git a/tests/jobs_fixtures/app09-basic_solidity_imports/deploy.yaml b/tests/jobs_fixtures/app09-basic_solidity_imports/deploy.yaml index ce6ee0b50..1b1c4b0f2 100644 --- a/tests/jobs_fixtures/app09-basic_solidity_imports/deploy.yaml +++ b/tests/jobs_fixtures/app09-basic_solidity_imports/deploy.yaml @@ -8,6 +8,7 @@ jobs: deploy: contract: contracts/storage.sol instance: all + data: [ 0 ] - name: 
setStorage call: diff --git a/tests/jobs_fixtures/app26-factories_single_constructor_and_overwriting_testing/deploy.yaml b/tests/jobs_fixtures/app26-factories_single_constructor_and_overwriting_testing/deploy.yaml index eb07c30d9..73b5a597a 100644 --- a/tests/jobs_fixtures/app26-factories_single_constructor_and_overwriting_testing/deploy.yaml +++ b/tests/jobs_fixtures/app26-factories_single_constructor_and_overwriting_testing/deploy.yaml @@ -17,6 +17,7 @@ jobs: deploy: contract: GSContract.sol instance: GSContract + data: [ 0 ] - name: createGSContract call: diff --git a/tests/jobs_fixtures/app27-multiple_imports_and_multiple_constructor_different_factory_types/deploy.yaml b/tests/jobs_fixtures/app27-multiple_imports_and_multiple_constructor_different_factory_types/deploy.yaml index 31fc9bd2e..4798c3d0c 100644 --- a/tests/jobs_fixtures/app27-multiple_imports_and_multiple_constructor_different_factory_types/deploy.yaml +++ b/tests/jobs_fixtures/app27-multiple_imports_and_multiple_constructor_different_factory_types/deploy.yaml @@ -18,6 +18,7 @@ jobs: deploy: contract: contracts/GSSingle.sol instance: GSSingle + data: [ 0 ] - name: createGSContractSingle call: @@ -79,6 +80,7 @@ jobs: deploy: contract: contracts/GSMulti.sol instance: GSMulti + data: [ 0, 0 ] - name: createGSContractMulti call: diff --git a/tests/jobs_fixtures/app38-deploy_bin_file_with_linkage_to_libraries/deploy.yaml b/tests/jobs_fixtures/app38-deploy_bin_file_with_linkage_to_libraries/deploy.yaml index 3a33cd59d..a1d752839 100644 --- a/tests/jobs_fixtures/app38-deploy_bin_file_with_linkage_to_libraries/deploy.yaml +++ b/tests/jobs_fixtures/app38-deploy_bin_file_with_linkage_to_libraries/deploy.yaml @@ -18,7 +18,7 @@ jobs: deploy: contract: Set.bin -- name: deployContractNormal +- name: deployContractBin deploy: contract: C.bin libraries: Set:$deployLibBin From 4f4610b958e061db3feb27fdd08d06de059bfffd Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 16 Jul 2019 10:26:04 +0100 Subject: [PATCH 45/70] Track libraries ABIs by recalculating the codehash according to what solc does Signed-off-by: Sean Young --- deploy/compile/compilers.go | 54 +++++++++++++--------------- execution/evm/state.go | 13 +++++++ integration/rpctransact/call_test.go | 4 +-- rpc/rpcquery/query_server.go | 13 ++++++- 4 files changed, 51 insertions(+), 33 deletions(-) diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index 97514be39..1ae23a3ae 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -14,6 +14,7 @@ import ( "github.com/hyperledger/burrow/acm/acmstate" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/crypto/sha3" + "github.com/hyperledger/burrow/execution/evm/asm" "github.com/hyperledger/burrow/logging" ) @@ -181,13 +182,19 @@ func (contract *SolidityContract) Link(libraries map[string]string) error { } if contract.AbiMap != nil { - for _, m := range contract.AbiMap { + for i, m := range contract.AbiMap { + bin := m.DeployedBytecode.Object if strings.Contains(bin, "_") { - bin, err := link(m.DeployedBytecode.Object, m.DeployedBytecode.LinkReferences, libraries) + bin, err := link(bin, m.DeployedBytecode.LinkReferences, libraries) + // When compiling a solidity file with many contracts contained it, some of those contracts might + // never be created by the contract we're current linking. However, Solidity does not tell us + // which contracts can be created by a contract. 
+ // See: https://github.com/ethereum/solidity/issues/7111 + // Some of these contracts might have unresolved libraries. We can safely skip those contracts. if err != nil { - return err + continue } - m.DeployedBytecode.Object = bin + contract.AbiMap[i].DeployedBytecode.Object = bin } } } @@ -203,28 +210,6 @@ func (contract *SolidityContract) Code() (code string) { return } -func (contract *SolidityContract) IsLibrary() bool { - // If this contract is an Library, then: - // a) it does not have an externally callable ABI - // b) the deployedBytecode will not match the actually deployedBytecode (thanks Solidity!). It inserts its own address - // into the bytecode before deploying. - - // https://github.com/ethereum/solidity/issues/7101 - - // However Solidity does not make it easy to detect whether a contract is a library or a regular contract. Any suggestions - // for improving this are *very* welcome. - - // A library deploy code always starts with PUSH20 followed by 20 0 bytes. When the code is actually deployed, the address - // of the library itself is populated there. This is done so that there is some code which prevents a library being called - // directly via a transaction, rather than from a solidity contract as it was intended. - - // https://github.com/ethereum/solidity/issues/7102 - - libraryPrefix := "73" + strings.Repeat("00", 20) - - return strings.HasPrefix(contract.Evm.DeployedBytecode.Object, libraryPrefix) -} - func EVM(file string, optimize bool, workDir string, libraries map[string]string, logger *logging.Logger) (*Response, error) { input := SolidityInput{Language: "Solidity", Sources: make(map[string]SolidityInputSource)} @@ -260,7 +245,7 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string abimap := make([]AbiMap, 0) for _, src := range output.Contracts { for _, item := range src { - if !item.IsLibrary() && item.Evm.DeployedBytecode.Object != "" { + if item.Evm.DeployedBytecode.Object != "" { abimap = append(abimap, AbiMap{ DeployedBytecode: item.Evm.DeployedBytecode, Abi: string(item.Abi), @@ -273,9 +258,7 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string for f, s := range output.Contracts { for contract, item := range s { - if !item.IsLibrary() { - item.AbiMap = abimap - } + item.AbiMap = abimap respItem := ResponseItem{ Filename: f, Objectname: objectName(contract), @@ -431,3 +414,14 @@ func (contract *SolidityContract) GetAbis(logger *logging.Logger) (map[acmstate. } return res, nil } + +// GetDeployCodeHash deals with the issue described in https://github.com/ethereum/solidity/issues/7101 +func GetDeployCodeHash(code []byte, address crypto.Address) []byte { + if bytes.HasPrefix(code, append([]byte{byte(asm.PUSH20)}, address.Bytes()...)) { + code = append([]byte{byte(asm.PUSH20)}, append(make([]byte, crypto.AddressLength), code[crypto.AddressLength+1:]...)...) 
+ } + + hash := sha3.NewKeccak256() + hash.Write(code) + return hash.Sum(nil) +} diff --git a/execution/evm/state.go b/execution/evm/state.go index 1f50856d2..d82435fd5 100644 --- a/execution/evm/state.go +++ b/execution/evm/state.go @@ -9,6 +9,7 @@ import ( "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/crypto/sha3" + "github.com/hyperledger/burrow/deploy/compile" "github.com/hyperledger/burrow/execution/errors" "github.com/hyperledger/burrow/permission" ) @@ -247,6 +248,18 @@ func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code [ } } + // Libraries lie about their deployed bytecode + if !found { + deployCodehash := compile.GetDeployCodeHash(code, address) + + for _, m := range metamap { + if bytes.Equal(deployCodehash, m.CodeHash) { + found = true + break + } + } + } + if !found { st.PushError(errors.ErrorCodeInvalidContractCode) return diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index 28f192866..e0c6ddbc9 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -312,7 +312,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) require.NoError(t, err) assert.Equal(t, res.Abi, string(solidity.Abi_A)) // CreateB - spec, err := abi.ReadAbiSpec(solidity.Abi_A) + spec, err := abi.ReadSpec(solidity.Abi_A) require.NoError(t, err) data, _, err := spec.Pack("createB") require.NoError(t, err) @@ -325,7 +325,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) require.NoError(t, err) assert.Equal(t, res.Abi, string(solidity.Abi_B)) // CreateC - spec, err = abi.ReadAbiSpec(solidity.Abi_B) + spec, err = abi.ReadSpec(solidity.Abi_B) require.NoError(t, err) data, _, err = spec.Pack("createC") require.NoError(t, err) diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index 13677bfe5..a00f8c37d 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -10,6 +10,7 @@ import ( "github.com/hyperledger/burrow/acm/validator" "github.com/hyperledger/burrow/bcm" "github.com/hyperledger/burrow/consensus/tendermint" + "github.com/hyperledger/burrow/deploy/compile" "github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/execution/names" "github.com/hyperledger/burrow/execution/proposal" @@ -77,7 +78,17 @@ func (qs *queryServer) GetAbi(ctx context.Context, param *GetAbiParam) (*AbiValu var abihash acmstate.AbiHash copy(abihash[:], m.AbiHash) abi.Abi, err = qs.accounts.GetAbi(abihash) - break + return &abi, err + } + } + + deployCodehash := compile.GetDeployCodeHash(acc.EVMCode, param.Address) + for _, m := range acc.MetaMap { + if bytes.Equal(m.CodeHash, deployCodehash) { + var abihash acmstate.AbiHash + copy(abihash[:], m.AbiHash) + abi.Abi, err = qs.accounts.GetAbi(abihash) + return &abi, err } } } From 06c11e3c16e0c2f197b5a871e941a06336e4b846 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Fri, 19 Jul 2019 10:43:58 +0100 Subject: [PATCH 46/70] Rename metadata and stuff Signed-off-by: Sean Young --- acm/acm.pb.go | 160 +++++++++--------- acm/acmstate/memory_state.go | 20 +-- acm/acmstate/state.go | 36 ++--- acm/acmstate/state_cache.go | 28 ++-- deploy/compile/compilers.go | 91 ++++++----- deploy/compile/compilers_test.go | 2 +- deploy/def/client.go | 32 ++-- deploy/jobs/jobs_contracts.go | 16 +- dump/dump.go | 12 +- dump/load.go | 10 +- execution/contexts/call_context.go | 16 +- execution/evm/fake_app_state.go | 10 +- 
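A minimal, standalone sketch of the normalisation GetDeployCodeHash applies, assuming golang.org/x/crypto/sha3 for Keccak-256 and locally defined constants in place of burrow's crypto/sha3 and execution/evm/asm packages; every identifier below is illustrative rather than burrow's actual API.

package main

import (
	"bytes"
	"fmt"

	"golang.org/x/crypto/sha3"
)

const (
	opPush20      = 0x73 // EVM PUSH20 opcode, the first byte of deployed library code
	addressLength = 20   // EVM addresses are 20 bytes
)

// normalisedDeployCodeHash zeroes the self-address that Solidity embeds after
// PUSH20 in deployed library code (call protection), then returns the
// Keccak-256 hash, so the on-chain code can be matched against the code hash
// the compiler reported.
func normalisedDeployCodeHash(code, selfAddress []byte) []byte {
	prefix := append([]byte{opPush20}, selfAddress...)
	if bytes.HasPrefix(code, prefix) {
		zeroed := append([]byte{opPush20}, make([]byte, addressLength)...)
		code = append(zeroed, code[1+addressLength:]...)
	}
	h := sha3.NewLegacyKeccak256()
	h.Write(code)
	return h.Sum(nil)
}

func main() {
	libAddress := bytes.Repeat([]byte{0xAA}, addressLength)
	tail := []byte{0x30, 0x14, 0x60} // toy remainder of the deployed code
	onChain := append(append([]byte{opPush20}, libAddress...), tail...)
	compiled := append(append([]byte{opPush20}, make([]byte, addressLength)...), tail...)
	// Both forms hash to the same value once the embedded address is zeroed.
	fmt.Printf("%x\n%x\n", normalisedDeployCodeHash(onChain, libAddress), normalisedDeployCodeHash(compiled, libAddress))
}

The normalisation is deliberately one-directional: the compiler-reported bytecode already carries zeroes in that slot, so only the on-chain form needs rewriting before the two hashes can be compared.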
execution/evm/state.go | 12 +- execution/execution.go | 4 +- execution/state/abi.go | 8 +- integration/rpctest/helpers.go | 28 ++-- integration/rpctransact/call_test.go | 14 +- protobuf/acm.proto | 8 +- protobuf/payload.proto | 6 +- protobuf/rpcquery.proto | 8 +- rpc/rpcquery/query_server.go | 31 ++-- rpc/rpcquery/rpcquery.pb.go | 232 +++++++++++++-------------- txs/payload/payload.pb.go | 224 +++++++++++++------------- vent/service/abis.go | 10 +- 24 files changed, 519 insertions(+), 499 deletions(-) diff --git a/acm/acm.pb.go b/acm/acm.pb.go index fa3520945..ca198d56b 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -37,7 +37,7 @@ type Account struct { Permissions permission.AccountPermissions `protobuf:"bytes,6,opt,name=Permissions,proto3" json:"Permissions"` WASMCode Bytecode `protobuf:"bytes,7,opt,name=WASMCode,proto3,customtype=Bytecode" json:",omitempty"` CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,8,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"-"` - MetaMap []*MetaMap `protobuf:"bytes,9,rep,name=MetaMap,proto3" json:"MetaMap,omitempty"` + ContractMeta []*ContractMeta `protobuf:"bytes,9,rep,name=ContractMeta,proto3" json:"ContractMeta,omitempty"` Forebear *github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,10,opt,name=Forebear,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Forebear,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -100,9 +100,9 @@ func (m *Account) GetPermissions() permission.AccountPermissions { return permission.AccountPermissions{} } -func (m *Account) GetMetaMap() []*MetaMap { +func (m *Account) GetContractMeta() []*ContractMeta { if m != nil { - return m.MetaMap + return m.ContractMeta } return nil } @@ -111,26 +111,26 @@ func (*Account) XXX_MessageName() string { return "acm.Account" } -type MetaMap struct { - CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"CodeHash"` - AbiHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,2,opt,name=AbiHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"AbiHash"` +type ContractMeta struct { + CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"CodeHash"` + MetadataHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,2,opt,name=MetadataHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"MetadataHash"` // In the dump format we would like the ABI rather than its hash - Abi string `protobuf:"bytes,3,opt,name=Abi,proto3" json:"Abi,omitempty"` + Metadata string `protobuf:"bytes,3,opt,name=Metadata,proto3" json:"Metadata,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } -func (m *MetaMap) Reset() { *m = MetaMap{} } -func (m *MetaMap) String() string { return proto.CompactTextString(m) } -func (*MetaMap) ProtoMessage() {} -func (*MetaMap) Descriptor() ([]byte, []int) { +func (m *ContractMeta) Reset() { *m = ContractMeta{} } +func (m *ContractMeta) String() string { return proto.CompactTextString(m) } +func (*ContractMeta) ProtoMessage() {} +func (*ContractMeta) Descriptor() ([]byte, []int) { return fileDescriptor_49ed775bc0a6adf6, []int{1} } -func (m *MetaMap) XXX_Unmarshal(b []byte) error { +func (m *ContractMeta) 
XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } -func (m *MetaMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { +func (m *ContractMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { b = b[:cap(b)] n, err := m.MarshalTo(b) if err != nil { @@ -138,71 +138,71 @@ func (m *MetaMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { } return b[:n], nil } -func (m *MetaMap) XXX_Merge(src proto.Message) { - xxx_messageInfo_MetaMap.Merge(m, src) +func (m *ContractMeta) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContractMeta.Merge(m, src) } -func (m *MetaMap) XXX_Size() int { +func (m *ContractMeta) XXX_Size() int { return m.Size() } -func (m *MetaMap) XXX_DiscardUnknown() { - xxx_messageInfo_MetaMap.DiscardUnknown(m) +func (m *ContractMeta) XXX_DiscardUnknown() { + xxx_messageInfo_ContractMeta.DiscardUnknown(m) } -var xxx_messageInfo_MetaMap proto.InternalMessageInfo +var xxx_messageInfo_ContractMeta proto.InternalMessageInfo -func (m *MetaMap) GetAbi() string { +func (m *ContractMeta) GetMetadata() string { if m != nil { - return m.Abi + return m.Metadata } return "" } -func (*MetaMap) XXX_MessageName() string { - return "acm.MetaMap" +func (*ContractMeta) XXX_MessageName() string { + return "acm.ContractMeta" } func init() { proto.RegisterType((*Account)(nil), "acm.Account") golang_proto.RegisterType((*Account)(nil), "acm.Account") - proto.RegisterType((*MetaMap)(nil), "acm.MetaMap") - golang_proto.RegisterType((*MetaMap)(nil), "acm.MetaMap") + proto.RegisterType((*ContractMeta)(nil), "acm.ContractMeta") + golang_proto.RegisterType((*ContractMeta)(nil), "acm.ContractMeta") } func init() { proto.RegisterFile("acm.proto", fileDescriptor_49ed775bc0a6adf6) } func init() { golang_proto.RegisterFile("acm.proto", fileDescriptor_49ed775bc0a6adf6) } var fileDescriptor_49ed775bc0a6adf6 = []byte{ - // 482 bytes of a gzipped FileDescriptorProto + // 483 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0x31, 0x6f, 0xd3, 0x40, - 0x14, 0xee, 0x35, 0xa6, 0x76, 0x2e, 0x19, 0xc2, 0x4d, 0xa7, 0x0c, 0x76, 0xe8, 0x80, 0x22, 0xd4, - 0x38, 0xa8, 0xd0, 0x25, 0x4c, 0x36, 0xa2, 0xaa, 0x04, 0x81, 0xe2, 0x4a, 0x45, 0x62, 0xbb, 0x3b, - 0x1f, 0x89, 0xa5, 0x38, 0x67, 0xce, 0xb6, 0xc0, 0xff, 0x84, 0x91, 0x7f, 0x02, 0x63, 0xc4, 0xc4, - 0x88, 0x18, 0x22, 0xe4, 0x6e, 0xfd, 0x15, 0xc8, 0x97, 0xb3, 0xb1, 0x18, 0x2a, 0x41, 0x27, 0xdf, - 0xbb, 0xef, 0x7d, 0xdf, 0xf7, 0xde, 0xf3, 0x3b, 0xd8, 0x25, 0x2c, 0x76, 0x13, 0x29, 0x32, 0x81, - 0x3a, 0x84, 0xc5, 0xc3, 0xc9, 0x22, 0xca, 0x96, 0x39, 0x75, 0x99, 0x88, 0xa7, 0x0b, 0xb1, 0x10, - 0x53, 0x85, 0xd1, 0xfc, 0x9d, 0x8a, 0x54, 0xa0, 0x4e, 0x3b, 0xce, 0x70, 0x90, 0x70, 0x19, 0x47, - 0x69, 0x1a, 0x89, 0xb5, 0xbe, 0xe9, 0x33, 0x59, 0x24, 0x99, 0xc6, 0x0f, 0xbf, 0x19, 0xd0, 0xf4, - 0x18, 0x13, 0xf9, 0x3a, 0x43, 0x2f, 0xa1, 0xe9, 0x85, 0xa1, 0xe4, 0x69, 0x8a, 0xc1, 0x08, 0x8c, - 0xfb, 0xfe, 0xe3, 0xcd, 0xd6, 0xd9, 0xfb, 0xb9, 0x75, 0x8e, 0x5a, 0x9e, 0xcb, 0x22, 0xe1, 0x72, - 0xc5, 0xc3, 0x05, 0x97, 0x53, 0x9a, 0x4b, 0x29, 0x3e, 0x4c, 0xb5, 0xa0, 0xe6, 0x06, 0xb5, 0x08, - 0x3a, 0x81, 0xdd, 0xf3, 0x9c, 0xae, 0x22, 0xf6, 0x9c, 0x17, 0x78, 0x7f, 0x04, 0xc6, 0xbd, 0xe3, - 0xbb, 0xae, 0x4e, 0x6e, 0x00, 0xdf, 0xa8, 0x4c, 0x82, 0x3f, 0x99, 0x68, 0x08, 0xad, 0x0b, 0xfe, - 0x3e, 0xe7, 0x6b, 0xc6, 0x71, 0x67, 0x04, 0xc6, 0x46, 0xd0, 0xc4, 0x08, 0x43, 0xd3, 0x27, 0x2b, - 0x52, 0x41, 0x86, 0x82, 0xea, 0x10, 0x3d, 0x80, 0xe6, 0xb3, 0xcb, 0xf9, 0x53, 0x11, 0x72, 0x7c, - 0x47, 0x15, 0x3f, 0xd0, 
0xc5, 0x5b, 0x7e, 0x91, 0x71, 0x26, 0x42, 0x1e, 0xd4, 0x09, 0xe8, 0x14, - 0xf6, 0xce, 0x9b, 0xb1, 0xa4, 0xf8, 0x40, 0x95, 0x66, 0xbb, 0xad, 0x51, 0xe9, 0x91, 0xb4, 0xb2, - 0x74, 0x9d, 0x6d, 0x22, 0x9a, 0x41, 0xeb, 0x8d, 0x77, 0xb1, 0x33, 0x35, 0x95, 0xa9, 0xfd, 0xb7, - 0xe9, 0xf5, 0xd6, 0x81, 0x47, 0x22, 0x8e, 0x32, 0x1e, 0x27, 0x59, 0x11, 0x34, 0xf9, 0xe8, 0x12, - 0x5a, 0xd5, 0xf7, 0x8c, 0xa4, 0x4b, 0x6c, 0x29, 0xee, 0x4c, 0x73, 0x27, 0x37, 0x4f, 0x9b, 0x46, - 0x6b, 0x22, 0x0b, 0xf7, 0x8c, 0x7f, 0xac, 0x3c, 0xd2, 0xeb, 0xad, 0x03, 0x26, 0x41, 0xa3, 0x85, - 0xee, 0x43, 0x73, 0xce, 0x33, 0x32, 0x27, 0x09, 0xee, 0x8e, 0x3a, 0xe3, 0xde, 0x71, 0xdf, 0xad, - 0x36, 0x48, 0xdf, 0x05, 0x35, 0x88, 0x5e, 0x40, 0xeb, 0x54, 0x48, 0x4e, 0x39, 0x91, 0x18, 0x2a, - 0xff, 0x87, 0xff, 0xfc, 0xa7, 0x1b, 0x85, 0x99, 0xf1, 0xe9, 0xb3, 0xb3, 0x77, 0xf8, 0x05, 0x34, - 0xe6, 0xe8, 0x75, 0xab, 0xbf, 0xdd, 0x36, 0x9d, 0xfc, 0x57, 0x7f, 0xad, 0xd6, 0x5e, 0x41, 0xd3, - 0xa3, 0x91, 0x52, 0xdc, 0xbf, 0x8d, 0x62, 0xad, 0x82, 0x06, 0xb0, 0xe3, 0xd1, 0x48, 0x2d, 0x59, - 0x37, 0xa8, 0x8e, 0xfe, 0x93, 0x4d, 0x69, 0x83, 0xef, 0xa5, 0x0d, 0x7e, 0x94, 0x36, 0xf8, 0x55, - 0xda, 0xe0, 0xeb, 0x95, 0x0d, 0x36, 0x57, 0x36, 0x78, 0x7b, 0xef, 0x66, 0x0f, 0xc2, 0x62, 0x7a, - 0xa0, 0x9e, 0xd4, 0xa3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x97, 0xe1, 0x06, 0x7b, 0xb3, 0x03, - 0x00, 0x00, + 0x14, 0xee, 0x35, 0xa6, 0x76, 0xae, 0x19, 0xca, 0x4d, 0x56, 0x06, 0x3b, 0x74, 0x8a, 0x50, 0xe3, + 0x20, 0x20, 0x4b, 0x98, 0xe2, 0x8a, 0xaa, 0x12, 0x14, 0x15, 0x57, 0x2a, 0x82, 0xed, 0x7c, 0x7e, + 0x24, 0x96, 0x62, 0x9f, 0x39, 0x9f, 0x05, 0xfe, 0x27, 0x8c, 0xfc, 0x14, 0xc6, 0x8c, 0x8c, 0x85, + 0x21, 0x42, 0xe9, 0xd6, 0x5f, 0x81, 0x7c, 0xb9, 0x18, 0x87, 0x21, 0x12, 0x30, 0x25, 0xcf, 0xdf, + 0xf7, 0xbe, 0xef, 0x7b, 0xcf, 0xcf, 0xb8, 0x4d, 0x59, 0xe2, 0x65, 0x82, 0x4b, 0x4e, 0x5a, 0x94, + 0x25, 0xdd, 0xc1, 0x34, 0x96, 0xb3, 0x22, 0xf4, 0x18, 0x4f, 0x86, 0x53, 0x3e, 0xe5, 0x43, 0x85, + 0x85, 0xc5, 0x7b, 0x55, 0xa9, 0x42, 0xfd, 0x5b, 0xf7, 0x74, 0x8f, 0x32, 0x10, 0x49, 0x9c, 0xe7, + 0x31, 0x4f, 0xf5, 0x93, 0x0e, 0x13, 0x65, 0x26, 0x35, 0x7e, 0xfc, 0xdd, 0xc0, 0xe6, 0x84, 0x31, + 0x5e, 0xa4, 0x92, 0xbc, 0xc2, 0xe6, 0x24, 0x8a, 0x04, 0xe4, 0xb9, 0x8d, 0x7a, 0xa8, 0xdf, 0xf1, + 0x9f, 0x2e, 0x96, 0xee, 0xde, 0x8f, 0xa5, 0x7b, 0xd2, 0xf0, 0x9c, 0x95, 0x19, 0x88, 0x39, 0x44, + 0x53, 0x10, 0xc3, 0xb0, 0x10, 0x82, 0x7f, 0x1c, 0x6a, 0x41, 0xdd, 0x1b, 0x6c, 0x44, 0xc8, 0x08, + 0xb7, 0x2f, 0x8b, 0x70, 0x1e, 0xb3, 0x17, 0x50, 0xda, 0xfb, 0x3d, 0xd4, 0x3f, 0x7c, 0x7c, 0xdf, + 0xd3, 0xe4, 0x1a, 0xf0, 0x8d, 0xca, 0x24, 0xf8, 0xcd, 0x24, 0x5d, 0x6c, 0x5d, 0xc1, 0x87, 0x02, + 0x52, 0x06, 0x76, 0xab, 0x87, 0xfa, 0x46, 0x50, 0xd7, 0xc4, 0xc6, 0xa6, 0x4f, 0xe7, 0xb4, 0x82, + 0x0c, 0x05, 0x6d, 0x4a, 0xf2, 0x10, 0x9b, 0xcf, 0xaf, 0x2f, 0x4e, 0x79, 0x04, 0xf6, 0x3d, 0x15, + 0xfe, 0x48, 0x87, 0xb7, 0xfc, 0x52, 0x02, 0xe3, 0x11, 0x04, 0x1b, 0x02, 0x39, 0xc3, 0x87, 0x97, + 0xf5, 0x5a, 0x72, 0xfb, 0x40, 0x45, 0x73, 0xbc, 0xc6, 0xaa, 0xf4, 0x4a, 0x1a, 0x2c, 0x9d, 0xb3, + 0xd9, 0x48, 0xc6, 0xd8, 0x7a, 0x33, 0xb9, 0x5a, 0x9b, 0x9a, 0xca, 0xd4, 0xf9, 0xd3, 0xf4, 0x6e, + 0xe9, 0xe2, 0x13, 0x9e, 0xc4, 0x12, 0x92, 0x4c, 0x96, 0x41, 0xcd, 0x27, 0xd7, 0xd8, 0xaa, 0x7e, + 0xcf, 0x69, 0x3e, 0xb3, 0x2d, 0xd5, 0x3b, 0xd6, 0xbd, 0x83, 0xdd, 0xdb, 0x0e, 0xe3, 0x94, 0x8a, + 0xd2, 0x3b, 0x87, 0x4f, 0x95, 0x47, 0x7e, 0xb7, 0x74, 0xd1, 0x20, 0xa8, 0xb5, 0xc8, 0x08, 0x77, + 0x4e, 0x79, 0x2a, 0x05, 0x65, 0xf2, 0x02, 0x24, 0xb5, 0xdb, 0xbd, 0x96, 0xda, 0x7b, 0x75, 0x46, + 0x4d, 0x20, 0xd8, 0xa2, 0x91, 0x97, 
0xd8, 0x3a, 0xe3, 0x02, 0x42, 0xa0, 0xc2, 0xc6, 0x2a, 0xce, + 0xa3, 0xbf, 0x7e, 0xf1, 0xb5, 0xc2, 0xd8, 0xf8, 0xfc, 0xc5, 0xdd, 0x3b, 0xbe, 0x41, 0xdb, 0x59, + 0xc8, 0xeb, 0xc6, 0xcc, 0xeb, 0x0b, 0x1b, 0xfd, 0xd3, 0xcc, 0x8d, 0x71, 0xdf, 0xe2, 0x4e, 0x25, + 0x1d, 0x51, 0x49, 0x95, 0xec, 0xfe, 0xff, 0xc8, 0x6e, 0x49, 0x55, 0x77, 0xb8, 0xa9, 0xd5, 0x1d, + 0xb6, 0x83, 0xba, 0xf6, 0x9f, 0x2d, 0x56, 0x0e, 0xfa, 0xb6, 0x72, 0xd0, 0xcd, 0xca, 0x41, 0x3f, + 0x57, 0x0e, 0xfa, 0x7a, 0xeb, 0xa0, 0xc5, 0xad, 0x83, 0xde, 0x3d, 0xd8, 0x6d, 0x49, 0x59, 0x12, + 0x1e, 0xa8, 0x4f, 0xef, 0xc9, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8b, 0x68, 0x6a, 0xd3, 0xdb, + 0x03, 0x00, 0x00, } func (m *Account) Marshal() (dAtA []byte, err error) { @@ -278,8 +278,8 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { return 0, err } i += n6 - if len(m.MetaMap) > 0 { - for _, msg := range m.MetaMap { + if len(m.ContractMeta) > 0 { + for _, msg := range m.ContractMeta { dAtA[i] = 0x4a i++ i = encodeVarintAcm(dAtA, i, uint64(msg.Size())) @@ -306,7 +306,7 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { return i, nil } -func (m *MetaMap) Marshal() (dAtA []byte, err error) { +func (m *ContractMeta) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalTo(dAtA) @@ -316,7 +316,7 @@ func (m *MetaMap) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *MetaMap) MarshalTo(dAtA []byte) (int, error) { +func (m *ContractMeta) MarshalTo(dAtA []byte) (int, error) { var i int _ = i var l int @@ -331,17 +331,17 @@ func (m *MetaMap) MarshalTo(dAtA []byte) (int, error) { i += n8 dAtA[i] = 0x12 i++ - i = encodeVarintAcm(dAtA, i, uint64(m.AbiHash.Size())) - n9, err := m.AbiHash.MarshalTo(dAtA[i:]) + i = encodeVarintAcm(dAtA, i, uint64(m.MetadataHash.Size())) + n9, err := m.MetadataHash.MarshalTo(dAtA[i:]) if err != nil { return 0, err } i += n9 - if len(m.Abi) > 0 { + if len(m.Metadata) > 0 { dAtA[i] = 0x1a i++ - i = encodeVarintAcm(dAtA, i, uint64(len(m.Abi))) - i += copy(dAtA[i:], m.Abi) + i = encodeVarintAcm(dAtA, i, uint64(len(m.Metadata))) + i += copy(dAtA[i:], m.Metadata) } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -382,8 +382,8 @@ func (m *Account) Size() (n int) { n += 1 + l + sovAcm(uint64(l)) l = m.CodeHash.Size() n += 1 + l + sovAcm(uint64(l)) - if len(m.MetaMap) > 0 { - for _, e := range m.MetaMap { + if len(m.ContractMeta) > 0 { + for _, e := range m.ContractMeta { l = e.Size() n += 1 + l + sovAcm(uint64(l)) } @@ -398,7 +398,7 @@ func (m *Account) Size() (n int) { return n } -func (m *MetaMap) Size() (n int) { +func (m *ContractMeta) Size() (n int) { if m == nil { return 0 } @@ -406,9 +406,9 @@ func (m *MetaMap) Size() (n int) { _ = l l = m.CodeHash.Size() n += 1 + l + sovAcm(uint64(l)) - l = m.AbiHash.Size() + l = m.MetadataHash.Size() n += 1 + l + sovAcm(uint64(l)) - l = len(m.Abi) + l = len(m.Metadata) if l > 0 { n += 1 + l + sovAcm(uint64(l)) } @@ -698,7 +698,7 @@ func (m *Account) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 9: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field MetaMap", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ContractMeta", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -725,8 +725,8 @@ func (m *Account) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.MetaMap = append(m.MetaMap, &MetaMap{}) - if err := m.MetaMap[len(m.MetaMap)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + 
m.ContractMeta = append(m.ContractMeta, &ContractMeta{}) + if err := m.ContractMeta[len(m.ContractMeta)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -790,7 +790,7 @@ func (m *Account) Unmarshal(dAtA []byte) error { } return nil } -func (m *MetaMap) Unmarshal(dAtA []byte) error { +func (m *ContractMeta) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -813,10 +813,10 @@ func (m *MetaMap) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: MetaMap: wiretype end group for non-group") + return fmt.Errorf("proto: ContractMeta: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: MetaMap: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ContractMeta: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -854,7 +854,7 @@ func (m *MetaMap) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field AbiHash", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field MetadataHash", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { @@ -881,13 +881,13 @@ func (m *MetaMap) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.AbiHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.MetadataHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Abi", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Metadata", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -915,7 +915,7 @@ func (m *MetaMap) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Abi = string(dAtA[iNdEx:postIndex]) + m.Metadata = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex diff --git a/acm/acmstate/memory_state.go b/acm/acmstate/memory_state.go index 14b1cb718..6e7f78730 100644 --- a/acm/acmstate/memory_state.go +++ b/acm/acmstate/memory_state.go @@ -9,9 +9,9 @@ import ( ) type MemoryState struct { - Accounts map[crypto.Address]*acm.Account - Storage map[crypto.Address]map[binary.Word256][]byte - Abis map[AbiHash]string + Accounts map[crypto.Address]*acm.Account + Storage map[crypto.Address]map[binary.Word256][]byte + Metadatas map[MetadataHash]string } var _ IterableReaderWriter = &MemoryState{} @@ -19,9 +19,9 @@ var _ IterableReaderWriter = &MemoryState{} // Get an in-memory state IterableReader func NewMemoryState() *MemoryState { return &MemoryState{ - Accounts: make(map[crypto.Address]*acm.Account), - Storage: make(map[crypto.Address]map[binary.Word256][]byte), - Abis: make(map[AbiHash]string), + Accounts: make(map[crypto.Address]*acm.Account), + Storage: make(map[crypto.Address]map[binary.Word256][]byte), + Metadatas: make(map[MetadataHash]string), } } @@ -37,12 +37,12 @@ func (ms *MemoryState) UpdateAccount(updatedAccount *acm.Account) error { return nil } -func (ms *MemoryState) GetAbi(abihash AbiHash) (string, error) { - return ms.Abis[abihash], nil +func (ms *MemoryState) GetMetadata(metahash MetadataHash) (string, error) { + return ms.Metadatas[metahash], nil } -func (ms *MemoryState) SetAbi(abihash AbiHash, abi string) error { - ms.Abis[abihash] = abi +func (ms *MemoryState) SetMetadata(metahash MetadataHash, metadata string) error { + 
ms.Metadatas[metahash] = metadata return nil } diff --git a/acm/acmstate/state.go b/acm/acmstate/state.go index aa42b88e6..d32b56ce1 100644 --- a/acm/acmstate/state.go +++ b/acm/acmstate/state.go @@ -9,16 +9,16 @@ import ( "github.com/tmthrgd/go-hex" ) -// AbiHash is the keccak hash for the ABI. This is to make the ABI content-addressed -type AbiHash [32]byte +// MetadataHash is the keccak hash for the metadata. This is to make the metadata content-addressed +type MetadataHash [32]byte -func (h *AbiHash) Bytes() []byte { +func (h *MetadataHash) Bytes() []byte { b := make([]byte, 32) copy(b, h[:]) return b } -func (ch *AbiHash) UnmarshalText(hexBytes []byte) error { +func (ch *MetadataHash) UnmarshalText(hexBytes []byte) error { bs, err := hex.DecodeString(string(hexBytes)) if err != nil { return err @@ -27,23 +27,23 @@ func (ch *AbiHash) UnmarshalText(hexBytes []byte) error { return nil } -func (ch AbiHash) MarshalText() ([]byte, error) { +func (ch MetadataHash) MarshalText() ([]byte, error) { return []byte(ch.String()), nil } -func (ch AbiHash) String() string { +func (ch MetadataHash) String() string { return hex.EncodeUpperToString(ch[:]) } -func GetAbiHash(abi string) (abihash AbiHash) { +func GetMetadataHash(metadata string) (metahash MetadataHash) { hash := sha3.NewKeccak256() - hash.Write([]byte(abi)) - copy(abihash[:], hash.Sum(nil)) + hash.Write([]byte(metadata)) + copy(metahash[:], hash.Sum(nil)) return } // CodeHash is the keccak hash for the code for an account. This is used for the EVM CODEHASH opcode, and to find the -// correct ABI for a contract +// correct Metadata for a contract type CodeHash [32]byte func (h *CodeHash) Bytes() []byte { @@ -107,14 +107,14 @@ type StorageIterable interface { IterateStorage(address crypto.Address, consumer func(key binary.Word256, value []byte) error) (err error) } -type AbiGetter interface { - // Get an ABI by its hash. This is content-addressed - GetAbi(abihash AbiHash) (string, error) +type MetadataGetter interface { + // Get Metadata by its hash. This is content-addressed + GetMetadata(metahash MetadataHash) (string, error) } -type AbiSetter interface { - // Set an ABI according to it keccak-256 hash. - SetAbi(abihash AbiHash, abi string) error +type MetadataSetter interface { + // Set Metadata according to its keccak-256 hash.
+ SetMetadata(metahash MetadataHash, Metadata string) error } type AccountStats struct { @@ -132,7 +132,7 @@ type AccountStatsGetter interface { type Reader interface { AccountGetter StorageGetter - AbiGetter + MetadataGetter } type Iterable interface { @@ -155,7 +155,7 @@ type IterableStatsReader interface { type Writer interface { AccountUpdater StorageSetter - AbiSetter + MetadataSetter } // Read and write account and storage state diff --git a/acm/acmstate/state_cache.go b/acm/acmstate/state_cache.go index df5ad3064..0dd58682c 100644 --- a/acm/acmstate/state_cache.go +++ b/acm/acmstate/state_cache.go @@ -30,7 +30,7 @@ type Cache struct { name string backend Reader accounts map[crypto.Address]*accountInfo - abis map[AbiHash]*abiInfo + metadata map[MetadataHash]*metadataInfo readonly bool } @@ -42,9 +42,9 @@ type accountInfo struct { updated bool } -type abiInfo struct { - abi string - updated bool +type metadataInfo struct { + metadata string + updated bool } type CacheOption func(*Cache) *Cache @@ -55,7 +55,7 @@ func NewCache(backend Reader, options ...CacheOption) *Cache { cache := &Cache{ backend: backend, accounts: make(map[crypto.Address]*accountInfo), - abis: make(map[AbiHash]*abiInfo), + metadata: make(map[MetadataHash]*metadataInfo), } for _, option := range options { option(cache) @@ -110,27 +110,27 @@ func (cache *Cache) UpdateAccount(account *acm.Account) error { return nil } -func (cache *Cache) GetAbi(abihash AbiHash) (string, error) { +func (cache *Cache) GetMetadata(metahash MetadataHash) (string, error) { cache.RLock() defer cache.RUnlock() - abiInfo, ok := cache.abis[abihash] + metadataInfo, ok := cache.metadata[metahash] if ok { - return abiInfo.abi, nil + return metadataInfo.metadata, nil } return "", nil } -func (cache *Cache) SetAbi(abihash AbiHash, abi string) error { +func (cache *Cache) SetMetadata(metahash MetadataHash, metadata string) error { if cache.readonly { - return errors.ErrorCodef(errors.ErrorCodeIllegalWrite, "UpdateAbi called in read-only context on abi hash: %v", abihash) + return errors.ErrorCodef(errors.ErrorCodeIllegalWrite, "SetMetadata called in read-only context on metadata hash: %v", metahash) } cache.Lock() defer cache.Unlock() - cache.abis[abihash] = &abiInfo{updated: true, abi: abi} + cache.metadata[metahash] = &metadataInfo{updated: true, metadata: metadata} return nil } @@ -282,9 +282,9 @@ func (cache *Cache) Sync(st Writer) error { accInfo.RUnlock() } - for abihash, abiInfo := range cache.abis { - if abiInfo.updated { - err := st.SetAbi(abihash, abiInfo.abi) + for metahash, metadataInfo := range cache.metadata { + if metadataInfo.updated { + err := st.SetMetadata(metahash, metadataInfo.metadata) if err != nil { return err } diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index 1ae23a3ae..20523d64b 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -2,7 +2,6 @@ package compile import ( "bytes" - "encoding/hex" "encoding/json" "fmt" "io/ioutil" @@ -16,6 +15,7 @@ import ( "github.com/hyperledger/burrow/crypto/sha3" "github.com/hyperledger/burrow/execution/evm/asm" "github.com/hyperledger/burrow/logging" + hex "github.com/tmthrgd/go-hex" ) // SolidityInput is a structure for the solidity compiler input json form, see: @@ -54,16 +54,6 @@ type SolidityOutput struct { } } -type ContractCode struct { - Object string - LinkReferences json.RawMessage -} - -type AbiMap struct { - DeployedBytecode ContractCode - Abi string -} - // SolidityContract is defined for each contract defined in the solidity 
source code type SolidityContract struct { Abi json.RawMessage @@ -78,13 +68,26 @@ type SolidityContract struct { Userdoc json.RawMessage Metadata string // This is not present in the solidity output, but we add it ourselves - // This is map from DeployedBytecode to ABI. A Solidity contract can create any number - // of contracts, which have distinct ABIs. This is a map for the deployed code to abi, + // This is map from DeployedBytecode to Metadata. A Solidity contract can create any number + // of contracts, which have distinct metadata. This is a map for the deployed code to metdata, // including the first contract itself. + MetadataMap []MetadataMap `json:",omitempty"` +} - // Note that libraries do not have ABIs. Also, the deployedbytecode does not match - // what Solidity tells use it will be. - AbiMap []AbiMap `json:",omitempty"` +type ContractCode struct { + Object string + LinkReferences json.RawMessage +} + +type Metadata struct { + ContractName string + SourceFile string + Abi json.RawMessage +} + +type MetadataMap struct { + DeployedBytecode ContractCode + Metadata Metadata } type Response struct { @@ -181,20 +184,20 @@ func (contract *SolidityContract) Link(libraries map[string]string) error { contract.Evm.Bytecode.Object = bin } - if contract.AbiMap != nil { - for i, m := range contract.AbiMap { + // When compiling a solidity file with many contracts contained it, some of those contracts might + // never be created by the contract we're current linking. However, Solidity does not tell us + // which contracts can be created by a contract. + // See: https://github.com/ethereum/solidity/issues/7111 + // Some of these contracts might have unresolved libraries. We can safely skip those contracts. + if contract.MetadataMap != nil { + for i, m := range contract.MetadataMap { bin := m.DeployedBytecode.Object if strings.Contains(bin, "_") { bin, err := link(bin, m.DeployedBytecode.LinkReferences, libraries) - // When compiling a solidity file with many contracts contained it, some of those contracts might - // never be created by the contract we're current linking. However, Solidity does not tell us - // which contracts can be created by a contract. - // See: https://github.com/ethereum/solidity/issues/7111 - // Some of these contracts might have unresolved libraries. We can safely skip those contracts. 
if err != nil { continue } - contract.AbiMap[i].DeployedBytecode.Object = bin + contract.MetadataMap[i].DeployedBytecode.Object = bin } } } @@ -242,13 +245,17 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string } // Collect our ABIs - abimap := make([]AbiMap, 0) - for _, src := range output.Contracts { - for _, item := range src { + metamap := make([]MetadataMap, 0) + for filename, src := range output.Contracts { + for contractname, item := range src { if item.Evm.DeployedBytecode.Object != "" { - abimap = append(abimap, AbiMap{ + metamap = append(metamap, MetadataMap{ DeployedBytecode: item.Evm.DeployedBytecode, - Abi: string(item.Abi), + Metadata: Metadata{ + ContractName: contractname, + SourceFile: filename, + Abi: item.Abi, + }, }) } } @@ -258,7 +265,7 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string for f, s := range output.Contracts { for contract, item := range s { - item.AbiMap = abimap + item.MetadataMap = metamap respItem := ResponseItem{ Filename: f, Objectname: objectName(contract), @@ -386,14 +393,14 @@ func PrintResponse(resp Response, cli bool, logger *logging.Logger) { } } -// GetAbis get the CodeHashes + Abis for the generated Code. So, we have a map for all the possible contracts codes hashes to abis -func (contract *SolidityContract) GetAbis(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { +// GetMetadatas gets the CodeHashes + metadata for the generated code, so we have a map from all the possible contract code hashes to their metadata +func (contract *SolidityContract) GetMetadatas(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { res := make(map[acmstate.CodeHash]string) if contract.Evm.DeployedBytecode.Object == "" { return nil, nil } - for _, m := range contract.AbiMap { + for _, m := range contract.MetadataMap { if strings.Contains(m.DeployedBytecode.Object, "_") { continue } @@ -402,20 +409,32 @@ func (contract *SolidityContract) GetAbis(logger *logging.Logger) (map[acmstate. return nil, err } + bs, err := json.Marshal(m.Metadata) + if err != nil { + return nil, err + } + hash := sha3.NewKeccak256() hash.Write(runtime) var codehash acmstate.CodeHash copy(codehash[:], hash.Sum(nil)) - logger.TraceMsg("Found ABI", + logger.TraceMsg("Found metadata", "code", fmt.Sprintf("%X", runtime), "code hash", fmt.Sprintf("%X", codehash), - "abi", string(m.Abi)) - res[codehash] = string(m.Abi) + "meta", string(bs)) + res[codehash] = string(bs) } return res, nil } // GetDeployCodeHash deals with the issue described in https://github.com/ethereum/solidity/issues/7101 +// For a library contract (one declared with "library { }" rather than "contract { }"), the deployed code +// will not match what the solidity compiler said it would be. This is done to implement "call protection"; +// library contracts are only supposed to be called from other solidity contracts, not directly. To prevent +// direct calls, the library's deployed code compares the callee address with the contract address itself. If it is equal, +// it calls revert. +// The library contract address is only known post-deploy, so this issue can only be handled post-deploy. This +// is why this is not dealt with during deploy time. func GetDeployCodeHash(code []byte, address crypto.Address) []byte { if bytes.HasPrefix(code, append([]byte{byte(asm.PUSH20)}, address.Bytes()...)) { code = append([]byte{byte(asm.PUSH20)}, append(make([]byte, crypto.AddressLength), code[crypto.AddressLength+1:]...)...)
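A minimal, standalone sketch of the mapping GetMetadatas builds, from the Keccak-256 hash of each deployed bytecode to its serialised metadata. It assumes golang.org/x/crypto/sha3 in place of burrow's crypto/sha3; the struct mirrors the Metadata shape above (ContractName, SourceFile, Abi), and the example name and ABI fragment echo the EventsTest fixture from earlier in this series, so everything here is illustrative rather than burrow's actual API.

package main

import (
	"encoding/hex"
	"encoding/json"
	"fmt"

	"golang.org/x/crypto/sha3"
)

// exampleMetadata mirrors the shape of the Metadata struct introduced above:
// the contract name, the source file it came from, and its ABI.
type exampleMetadata struct {
	ContractName string
	SourceFile   string
	Abi          json.RawMessage
}

// keccak256 is the digest used both for code hashes and for content-addressed
// metadata hashes.
func keccak256(bs []byte) []byte {
	h := sha3.NewLegacyKeccak256()
	h.Write(bs)
	return h.Sum(nil)
}

func main() {
	// Toy deployed (runtime) bytecode standing in for solc output.
	runtime, err := hex.DecodeString("6080604052600080fd")
	if err != nil {
		panic(err)
	}

	meta := exampleMetadata{
		ContractName: "EventsTest",
		SourceFile:   "EventsTest.sol",
		Abi:          json.RawMessage(`[{"constant":true,"inputs":[],"name":"count","outputs":[{"name":"size","type":"int256"}],"type":"function"}]`),
	}
	bs, err := json.Marshal(meta)
	if err != nil {
		panic(err)
	}

	// Deployed-code hash -> serialised metadata: the lookup a node can perform
	// when this contract (or one it creates) appears on chain.
	metadataByCodeHash := map[string]string{
		hex.EncodeToString(keccak256(runtime)): string(bs),
	}
	fmt.Println(metadataByCodeHash)
}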
diff --git a/deploy/compile/compilers_test.go b/deploy/compile/compilers_test.go index 07cf611db..a8aa6cb6c 100644 --- a/deploy/compile/compilers_test.go +++ b/deploy/compile/compilers_test.go @@ -117,7 +117,7 @@ func TestLocalSingle(t *testing.T) { for i := range resp.Objects { resp.Objects[i].Contract.Metadata = "" resp.Objects[i].Contract.Devdoc = nil - resp.Objects[i].Contract.AbiMap = nil + resp.Objects[i].Contract.MetadataMap = nil resp.Objects[i].Contract.Evm.DeployedBytecode.Object = "" resp.Objects[i].Contract.Evm.DeployedBytecode.LinkReferences = nil } diff --git a/deploy/def/client.go b/deploy/def/client.go index 1078b4970..539d8828c 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -141,18 +141,18 @@ func (c *Client) GetAccount(address crypto.Address) (*acm.Account, error) { return c.queryClient.GetAccount(ctx, &rpcquery.GetAccountParam{Address: address}) } -func (c *Client) GetAbiForAccount(address crypto.Address) (string, error) { +func (c *Client) GetMetadataForAccount(address crypto.Address) (string, error) { ctx, cancel := context.WithTimeout(context.Background(), c.timeout) defer cancel() - abi, err := c.queryClient.GetAbi(ctx, &rpcquery.GetAbiParam{Address: address}) + metadata, err := c.queryClient.GetMetadata(ctx, &rpcquery.GetMetadataParam{Address: address}) if err != nil { return "", err } - return abi.Abi, nil + return metadata.Metadata, nil } -func (c *Client) GetAbi(abihash acmstate.AbiHash) (string, error) { +func (c *Client) GetMetadata(metahash acmstate.MetadataHash) (string, error) { panic("not implemented") return "", nil } @@ -446,7 +446,7 @@ type CallArg struct { Gas string Data string WASM string - Abis map[acmstate.CodeHash]string + Metadata map[acmstate.CodeHash]string } func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, error) { @@ -488,22 +488,22 @@ func (c *Client) Call(arg *CallArg, logger *logging.Logger) (*payload.CallTx, er return nil, err } - abis := make([]*payload.Abis, 0) - for codehash, abi := range arg.Abis { - abis = append(abis, &payload.Abis{ + metas := make([]*payload.ContractMeta, 0) + for codehash, metadata := range arg.Metadata { + metas = append(metas, &payload.ContractMeta{ CodeHash: codehash.Bytes(), - Abi: abi, + Meta: metadata, }) } tx := &payload.CallTx{ - Input: input, - Address: contractAddress, - Data: code, - WASM: wasm, - Fee: fee, - GasLimit: gas, - Abis: abis, + Input: input, + Address: contractAddress, + Data: code, + WASM: wasm, + Fee: fee, + GasLimit: gas, + ContractMeta: metas, } return tx, nil diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index 3a32c1a61..3abd4a887 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -164,12 +164,12 @@ func FormulateDeployJob(deploy *def.Deploy, do *def.DeployArgs, deployScript *de contractCode = contractCode + callData } - abiMap, err := contract.GetAbis(logger) + metaMap, err := contract.GetMetadatas(logger) if err != nil { return nil, nil, err } - tx, err := deployTx(client, deploy, contractName, string(contractCode), "", abiMap, logger) + tx, err := deployTx(client, deploy, contractName, string(contractCode), "", metaMap, logger) if err != nil { return nil, nil, fmt.Errorf("could not deploy binary contract: %v", err) } @@ -357,7 +357,7 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } } - var abiMap map[acmstate.CodeHash]string + var metaMap map[acmstate.CodeHash]string wasm := "" data := "" if contract.EWasm.Wasm != "" { @@ -369,7 +369,7 @@ func 
deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } data = contract.Evm.Bytecode.Object - abiMap, err = contract.GetAbis(logger) + metaMap, err = contract.GetMetadatas(logger) if err != nil { return nil, err } @@ -399,10 +399,10 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } } - return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, abiMap, logger) + return deployTx(client, deploy, compilersResponse.Objectname, data, wasm, metaMap, logger) } -func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm string, abis map[acmstate.CodeHash]string, logger *logging.Logger) (*payload.CallTx, error) { +func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm string, metamap map[acmstate.CodeHash]string, logger *logging.Logger) (*payload.CallTx, error) { // Deploy contract logger.TraceMsg("Deploying Contract", "contract", contractName, @@ -419,7 +419,7 @@ func deployTx(client *def.Client, deploy *def.Deploy, contractName, data, wasm s Data: data, WASM: wasm, Sequence: deploy.Sequence, - Abis: abis, + Metadata: metamap, }, logger) } @@ -447,7 +447,7 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play var packedBytes []byte var funcSpec *abi.FunctionSpec - abiJSON, err := client.GetAbiForAccount(address) + abiJSON, err := client.GetMetadataForAccount(address) if abiJSON != "" && err == nil { packedBytes, funcSpec, err = abi.EncodeFunctionCall(abiJSON, call.Function, logger, callDataArray...) if err != nil { diff --git a/dump/dump.go b/dump/dump.go index bcaa459e5..64963aa7c 100644 --- a/dump/dump.go +++ b/dump/dump.go @@ -90,15 +90,15 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt }, } - for _, m := range acc.MetaMap { - var abihash acmstate.AbiHash - copy(abihash[:], m.AbiHash.Bytes()) - abi, err := ds.state.GetAbi(abihash) + for _, m := range acc.ContractMeta { + var metahash acmstate.MetadataHash + copy(metahash[:], m.MetadataHash.Bytes()) + meta, err := ds.state.GetMetadata(metahash) if err != nil { return err } - m.Abi = abi - m.AbiHash = []byte{} + m.Metadata = meta + m.MetadataHash = []byte{} } var storageBytes int diff --git a/dump/load.go b/dump/load.go index 5fd990e4e..17232a8ce 100644 --- a/dump/load.go +++ b/dump/load.go @@ -31,14 +31,14 @@ func Load(source Source, st *state.State) error { if row.Account != nil { if row.Account.Address != acm.GlobalPermissionsAddress { - for _, m := range row.Account.MetaMap { - abihash := acmstate.GetAbiHash(m.Abi) - err = s.SetAbi(abihash, m.Abi) + for _, m := range row.Account.ContractMeta { + metahash := acmstate.GetMetadataHash(m.Metadata) + err = s.SetMetadata(metahash, m.Metadata) if err != nil { return err } - m.AbiHash = abihash.Bytes() - m.Abi = "" + m.MetadataHash = metahash.Bytes() + m.Metadata = "" } err := s.UpdateAccount(row.Account) if err != nil { diff --git a/execution/contexts/call_context.go b/execution/contexts/call_context.go index 7fe1f8efa..3f4131db2 100644 --- a/execution/contexts/call_context.go +++ b/execution/contexts/call_context.go @@ -153,15 +153,15 @@ func (ctx *CallContext) Deliver(inAcc, outAcc *acm.Account, value uint64) error "init_code", code) // store abis - if len(ctx.tx.Abis) > 0 { - metamap := make([]*acm.MetaMap, len(ctx.tx.Abis)) - for i, abi := range ctx.tx.Abis { - abihash := acmstate.GetAbiHash(abi.Abi) - metamap[i] = &acm.MetaMap{ - AbiHash: abihash[:], - CodeHash: abi.CodeHash, + if len(ctx.tx.ContractMeta) > 0 { + 
metamap := make([]*acm.ContractMeta, len(ctx.tx.ContractMeta)) + for i, abi := range ctx.tx.ContractMeta { + metahash := acmstate.GetMetadataHash(abi.Meta) + metamap[i] = &acm.ContractMeta{ + MetadataHash: metahash[:], + CodeHash: abi.CodeHash, } - txCache.SetAbi(abihash, abi.Abi) + txCache.SetMetadata(metahash, abi.Meta) } txCache.UpdateMetaMap(callee, metamap) diff --git a/execution/evm/fake_app_state.go b/execution/evm/fake_app_state.go index a84781270..e12558e67 100644 --- a/execution/evm/fake_app_state.go +++ b/execution/evm/fake_app_state.go @@ -28,7 +28,7 @@ import ( type FakeAppState struct { accounts map[crypto.Address]*acm.Account storage map[string][]byte - abis map[acmstate.AbiHash]string + metadata map[acmstate.MetadataHash]string } var _ acmstate.ReaderWriter = &FakeAppState{} @@ -38,12 +38,12 @@ func (fas *FakeAppState) GetAccount(addr crypto.Address) (*acm.Account, error) { return account, nil } -func (fas *FakeAppState) GetAbi(abihash acmstate.AbiHash) (string, error) { - return fas.abis[abihash], nil +func (fas *FakeAppState) GetMetadata(metahash acmstate.MetadataHash) (string, error) { + return fas.metadata[metahash], nil } -func (fas *FakeAppState) SetAbi(abihash acmstate.AbiHash, abi string) error { - fas.abis[abihash] = abi +func (fas *FakeAppState) SetMetadata(metahash acmstate.MetadataHash, metadata string) error { + fas.metadata[metahash] = metadata return nil } diff --git a/execution/evm/state.go b/execution/evm/state.go index d82435fd5..b8863add9 100644 --- a/execution/evm/state.go +++ b/execution/evm/state.go @@ -224,7 +224,7 @@ func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code [ codehash := hash.Sum(nil) forebear := &address - metamap := acc.MetaMap + metamap := acc.ContractMeta if parent != nil { // find our ancestor, i.e. 
the initial contract that was deployed, from which this contract descends ancestor := st.mustAccount(*parent) @@ -234,7 +234,7 @@ func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code [ } else { forebear = parent } - metamap = ancestor.MetaMap + metamap = ancestor.ContractMeta } // If we have a list of ABIs for this contract, we also know what contract code it is allowed to create @@ -293,19 +293,19 @@ func (st *State) InitWASMCode(address crypto.Address, code []byte) { st.updateAccount(acc) } -func (st *State) UpdateMetaMap(address crypto.Address, mapping []*acm.MetaMap) { +func (st *State) UpdateMetaMap(address crypto.Address, mapping []*acm.ContractMeta) { acc := st.mustAccount(address) if acc == nil { st.PushError(errors.ErrorCodef(errors.ErrorCodeInvalidAddress, "tried to initialise code for an account that does not exist: %v", address)) return } - acc.MetaMap = mapping + acc.ContractMeta = mapping st.updateAccount(acc) } -func (st *State) SetAbi(abihash acmstate.AbiHash, abi string) error { - return st.cache.SetAbi(abihash, abi) +func (st *State) SetMetadata(metahash acmstate.MetadataHash, abi string) error { + return st.cache.SetMetadata(metahash, abi) } func (st *State) RemoveAccount(address crypto.Address) { diff --git a/execution/execution.go b/execution/execution.go index 973190319..97c2909e6 100644 --- a/execution/execution.go +++ b/execution/execution.go @@ -404,10 +404,10 @@ func (exe *executor) GetAccount(address crypto.Address) (*acm.Account, error) { return exe.stateCache.GetAccount(address) } -func (exe *executor) GetAbi(abihash acmstate.AbiHash) (string, error) { +func (exe *executor) GetMetadata(metahash acmstate.MetadataHash) (string, error) { exe.RLock() defer exe.RUnlock() - return exe.stateCache.GetAbi(abihash) + return exe.stateCache.GetMetadata(metahash) } // Storage diff --git a/execution/state/abi.go b/execution/state/abi.go index 2ba9c3e1c..a4fb1fe20 100644 --- a/execution/state/abi.go +++ b/execution/state/abi.go @@ -4,11 +4,11 @@ import ( "github.com/hyperledger/burrow/acm/acmstate" ) -func (s *ReadState) GetAbi(abihash acmstate.AbiHash) (string, error) { - return string(s.Plain.Get(keys.Abi.Key(abihash.Bytes()))), nil +func (s *ReadState) GetMetadata(metahash acmstate.MetadataHash) (string, error) { + return string(s.Plain.Get(keys.Abi.Key(metahash.Bytes()))), nil } -func (ws *writeState) SetAbi(abihash acmstate.AbiHash, abi string) error { - ws.plain.Set(keys.Abi.Key(abihash.Bytes()), []byte(abi)) +func (ws *writeState) SetMetadata(metahash acmstate.MetadataHash, abi string) error { + ws.plain.Set(keys.Abi.Key(metahash.Bytes()), []byte(abi)) return nil } diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index 1902ad64b..efd4e73c6 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -1,5 +1,3 @@ -// +build integration - package rpctest import ( @@ -50,21 +48,21 @@ func NewQueryClient(t testing.TB, listenAddress string) rpcquery.QueryClient { return rpcquery.NewQueryClient(conn) } -type AbiMap struct { +type MetadataMap struct { DeployedCode []byte Abi []byte } -func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, bytecode []byte, abimap []AbiMap) (*exec.TxExecution, error) { - var abis []*payload.Abis - if abimap != nil { - abis = make([]*payload.Abis, len(abimap)) - for i, m := range abimap { +func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, bytecode []byte, metamap []MetadataMap) (*exec.TxExecution, error) { + var 
meta []*payload.ContractMeta + if metamap != nil { + meta = make([]*payload.ContractMeta, len(metamap)) + for i, m := range metamap { hash := sha3.NewKeccak256() hash.Write([]byte(m.DeployedCode)) - abis[i] = &payload.Abis{ + meta[i] = &payload.ContractMeta{ CodeHash: hash.Sum(nil), - Abi: string(m.Abi), + Meta: string(m.Abi), } } } @@ -74,11 +72,11 @@ func CreateContract(cli rpctransact.TransactClient, inputAddress crypto.Address, Address: inputAddress, Amount: 2, }, - Address: nil, - Data: bytecode, - Fee: 2, - GasLimit: 10000, - Abis: abis, + Address: nil, + Data: bytecode, + Fee: 2, + GasLimit: 10000, + ContractMeta: meta, }) if err != nil { return nil, err diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index e0c6ddbc9..cc0286eff 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -299,7 +299,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) t.Run("DeployAbis", func(t *testing.T) { t.Parallel() - createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_A, []rpctest.AbiMap{ + createTxe, err := rpctest.CreateContract(cli, inputAddress, solidity.Bytecode_A, []rpctest.MetadataMap{ {DeployedCode: solidity.DeployedBytecode_A, Abi: solidity.Abi_A}, {DeployedCode: solidity.DeployedBytecode_B, Abi: solidity.Abi_B}, {DeployedCode: solidity.DeployedBytecode_C, Abi: solidity.Abi_C}, @@ -308,9 +308,9 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) addressA := lastCall(createTxe.Events).CallData.Callee // Check ABI for new contract A qcli := rpctest.NewQueryClient(t, kern.GRPCListenAddress().String()) - res, err := qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressA}) + res, err := qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressA}) require.NoError(t, err) - assert.Equal(t, res.Abi, string(solidity.Abi_A)) + assert.Equal(t, res.Metadata, string(solidity.Abi_A)) // CreateB spec, err := abi.ReadSpec(solidity.Abi_A) require.NoError(t, err) @@ -321,9 +321,9 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) var addressB crypto.Address err = spec.Unpack(callTxe.Result.Return, "createB", &addressB) // check ABI for contract B - res, err = qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressB}) + res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressB}) require.NoError(t, err) - assert.Equal(t, res.Abi, string(solidity.Abi_B)) + assert.Equal(t, res.Metadata, string(solidity.Abi_B)) // CreateC spec, err = abi.ReadSpec(solidity.Abi_B) require.NoError(t, err) @@ -334,9 +334,9 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) var addressC crypto.Address err = spec.Unpack(callTxe.Result.Return, "createC", &addressC) // check abi for contract C - res, err = qcli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: addressC}) + res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressC}) require.NoError(t, err) - assert.Equal(t, res.Abi, string(solidity.Abi_C)) + assert.Equal(t, res.Metadata, string(solidity.Abi_C)) return }) diff --git a/protobuf/acm.proto b/protobuf/acm.proto index 67a6250ee..8a18aa2cf 100644 --- a/protobuf/acm.proto +++ b/protobuf/acm.proto @@ -26,13 +26,13 @@ message Account { permission.AccountPermissions Permissions = 6 [(gogoproto.nullable) = false]; bytes WASMCode = 7 [(gogoproto.customtype) 
= "Bytecode", (gogoproto.jsontag) = ",omitempty", (gogoproto.nullable) = false]; bytes CodeHash = 8 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false, (gogoproto.jsontag) = "-"]; - repeated MetaMap MetaMap = 9; + repeated ContractMeta ContractMeta = 9; bytes Forebear = 10 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address"]; } -message MetaMap { +message ContractMeta { bytes CodeHash = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; - bytes AbiHash = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; + bytes MetadataHash = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; // In the dump format we would like the ABI rather than its hash - string Abi = 3; + string Metadata = 3; } diff --git a/protobuf/payload.proto b/protobuf/payload.proto index dcde0cb24..413efcfd2 100644 --- a/protobuf/payload.proto +++ b/protobuf/payload.proto @@ -71,12 +71,12 @@ message CallTx { // WASM bytecode bytes WASM = 6 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false, (gogoproto.jsontag)="tags,omitempty"]; // Set of contracts this code will deploy - repeated Abis Abis = 7; + repeated ContractMeta ContractMeta = 7; } -message Abis { +message ContractMeta { bytes CodeHash = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false]; - string Abi = 2; + string Meta = 2; } // A payment between two sets of parties diff --git a/protobuf/rpcquery.proto b/protobuf/rpcquery.proto index 1f596b814..353646e36 100644 --- a/protobuf/rpcquery.proto +++ b/protobuf/rpcquery.proto @@ -21,7 +21,7 @@ option (gogoproto.messagename_all) = true; service Query { rpc Status (StatusParam) returns (rpc.ResultStatus); rpc GetAccount (GetAccountParam) returns (acm.Account); - rpc GetAbi (GetAbiParam) returns (AbiValue); + rpc GetMetadata (GetMetadataParam) returns (MetadataResult); rpc GetStorage (GetStorageParam) returns (StorageValue); rpc ListAccounts (ListAccountsParam) returns (stream acm.Account); @@ -49,12 +49,12 @@ message GetAccountParam { bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; } -message GetAbiParam { +message GetMetadataParam { bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; } -message AbiValue { - string Abi = 1; +message MetadataResult { + string Metadata = 1; } message GetStorageParam { diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index a00f8c37d..8de7d89e1 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -61,38 +61,41 @@ func (qs *queryServer) GetAccount(ctx context.Context, param *GetAccountParam) ( return acc, err } -func (qs *queryServer) GetAbi(ctx context.Context, param *GetAbiParam) (*AbiValue, error) { - abi := AbiValue{} +func (qs *queryServer) GetMetadata(ctx context.Context, param *GetMetadataParam) (*MetadataResult, error) { + metadata := MetadataResult{} acc, err := qs.accounts.GetAccount(param.Address) + if err != nil { + return &metadata, err + } if acc != nil && acc.CodeHash != nil { codehash := acc.CodeHash if acc.Forebear != nil { acc, err = qs.accounts.GetAccount(*acc.Forebear) if err != nil { - return &abi, err + return &metadata, err } } - for _, 
m := range acc.MetaMap { + for _, m := range acc.ContractMeta { if bytes.Equal(m.CodeHash, codehash) { - var abihash acmstate.AbiHash - copy(abihash[:], m.AbiHash) - abi.Abi, err = qs.accounts.GetAbi(abihash) - return &abi, err + var metahash acmstate.MetadataHash + copy(metahash[:], m.MetadataHash) + metadata.Metadata, err = qs.accounts.GetMetadata(metahash) + return &metadata, err } } deployCodehash := compile.GetDeployCodeHash(acc.EVMCode, param.Address) - for _, m := range acc.MetaMap { + for _, m := range acc.ContractMeta { if bytes.Equal(m.CodeHash, deployCodehash) { - var abihash acmstate.AbiHash - copy(abihash[:], m.AbiHash) - abi.Abi, err = qs.accounts.GetAbi(abihash) - return &abi, err + var metahash acmstate.MetadataHash + copy(metahash[:], m.MetadataHash) + metadata.Metadata, err = qs.accounts.GetMetadata(metahash) + return &metadata, err } } } - return &abi, err + return &metadata, err } func (qs *queryServer) GetStorage(ctx context.Context, param *GetStorageParam) (*StorageValue, error) { diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index d877f4e04..335fa08f4 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -118,81 +118,81 @@ func (*GetAccountParam) XXX_MessageName() string { return "rpcquery.GetAccountParam" } -type GetAbiParam struct { +type GetMetadataParam struct { Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } -func (m *GetAbiParam) Reset() { *m = GetAbiParam{} } -func (m *GetAbiParam) String() string { return proto.CompactTextString(m) } -func (*GetAbiParam) ProtoMessage() {} -func (*GetAbiParam) Descriptor() ([]byte, []int) { +func (m *GetMetadataParam) Reset() { *m = GetMetadataParam{} } +func (m *GetMetadataParam) String() string { return proto.CompactTextString(m) } +func (*GetMetadataParam) ProtoMessage() {} +func (*GetMetadataParam) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{2} } -func (m *GetAbiParam) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetAbiParam.Unmarshal(m, b) +func (m *GetMetadataParam) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMetadataParam.Unmarshal(m, b) } -func (m *GetAbiParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetAbiParam.Marshal(b, m, deterministic) +func (m *GetMetadataParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMetadataParam.Marshal(b, m, deterministic) } -func (m *GetAbiParam) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetAbiParam.Merge(m, src) +func (m *GetMetadataParam) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMetadataParam.Merge(m, src) } -func (m *GetAbiParam) XXX_Size() int { - return xxx_messageInfo_GetAbiParam.Size(m) +func (m *GetMetadataParam) XXX_Size() int { + return xxx_messageInfo_GetMetadataParam.Size(m) } -func (m *GetAbiParam) XXX_DiscardUnknown() { - xxx_messageInfo_GetAbiParam.DiscardUnknown(m) +func (m *GetMetadataParam) XXX_DiscardUnknown() { + xxx_messageInfo_GetMetadataParam.DiscardUnknown(m) } -var xxx_messageInfo_GetAbiParam proto.InternalMessageInfo +var xxx_messageInfo_GetMetadataParam proto.InternalMessageInfo -func (*GetAbiParam) XXX_MessageName() string { - return "rpcquery.GetAbiParam" +func (*GetMetadataParam) XXX_MessageName() string { + return 
"rpcquery.GetMetadataParam" } -type AbiValue struct { - Abi string `protobuf:"bytes,1,opt,name=Abi,proto3" json:"Abi,omitempty"` +type MetadataResult struct { + Metadata string `protobuf:"bytes,1,opt,name=Metadata,proto3" json:"Metadata,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } -func (m *AbiValue) Reset() { *m = AbiValue{} } -func (m *AbiValue) String() string { return proto.CompactTextString(m) } -func (*AbiValue) ProtoMessage() {} -func (*AbiValue) Descriptor() ([]byte, []int) { +func (m *MetadataResult) Reset() { *m = MetadataResult{} } +func (m *MetadataResult) String() string { return proto.CompactTextString(m) } +func (*MetadataResult) ProtoMessage() {} +func (*MetadataResult) Descriptor() ([]byte, []int) { return fileDescriptor_88e25d9b99e39f02, []int{3} } -func (m *AbiValue) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AbiValue.Unmarshal(m, b) +func (m *MetadataResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetadataResult.Unmarshal(m, b) } -func (m *AbiValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AbiValue.Marshal(b, m, deterministic) +func (m *MetadataResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetadataResult.Marshal(b, m, deterministic) } -func (m *AbiValue) XXX_Merge(src proto.Message) { - xxx_messageInfo_AbiValue.Merge(m, src) +func (m *MetadataResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetadataResult.Merge(m, src) } -func (m *AbiValue) XXX_Size() int { - return xxx_messageInfo_AbiValue.Size(m) +func (m *MetadataResult) XXX_Size() int { + return xxx_messageInfo_MetadataResult.Size(m) } -func (m *AbiValue) XXX_DiscardUnknown() { - xxx_messageInfo_AbiValue.DiscardUnknown(m) +func (m *MetadataResult) XXX_DiscardUnknown() { + xxx_messageInfo_MetadataResult.DiscardUnknown(m) } -var xxx_messageInfo_AbiValue proto.InternalMessageInfo +var xxx_messageInfo_MetadataResult proto.InternalMessageInfo -func (m *AbiValue) GetAbi() string { +func (m *MetadataResult) GetMetadata() string { if m != nil { - return m.Abi + return m.Metadata } return "" } -func (*AbiValue) XXX_MessageName() string { - return "rpcquery.AbiValue" +func (*MetadataResult) XXX_MessageName() string { + return "rpcquery.MetadataResult" } type GetStorageParam struct { @@ -825,10 +825,10 @@ func init() { golang_proto.RegisterType((*StatusParam)(nil), "rpcquery.StatusParam") proto.RegisterType((*GetAccountParam)(nil), "rpcquery.GetAccountParam") golang_proto.RegisterType((*GetAccountParam)(nil), "rpcquery.GetAccountParam") - proto.RegisterType((*GetAbiParam)(nil), "rpcquery.GetAbiParam") - golang_proto.RegisterType((*GetAbiParam)(nil), "rpcquery.GetAbiParam") - proto.RegisterType((*AbiValue)(nil), "rpcquery.AbiValue") - golang_proto.RegisterType((*AbiValue)(nil), "rpcquery.AbiValue") + proto.RegisterType((*GetMetadataParam)(nil), "rpcquery.GetMetadataParam") + golang_proto.RegisterType((*GetMetadataParam)(nil), "rpcquery.GetMetadataParam") + proto.RegisterType((*MetadataResult)(nil), "rpcquery.MetadataResult") + golang_proto.RegisterType((*MetadataResult)(nil), "rpcquery.MetadataResult") proto.RegisterType((*GetStorageParam)(nil), "rpcquery.GetStorageParam") golang_proto.RegisterType((*GetStorageParam)(nil), "rpcquery.GetStorageParam") proto.RegisterType((*StorageValue)(nil), "rpcquery.StorageValue") @@ -866,64 +866,64 @@ func init() { golang_proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9 var 
fileDescriptor_88e25d9b99e39f02 = []byte{ // 923 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x8e, 0xdb, 0x44, - 0x14, 0xc6, 0x4d, 0x37, 0x9b, 0x3d, 0xc9, 0x6e, 0xb6, 0xd3, 0x25, 0x2c, 0x6e, 0x49, 0xab, 0x91, - 0xd8, 0x2e, 0x15, 0x75, 0xa2, 0xb4, 0x0b, 0x08, 0x90, 0xd0, 0x06, 0x41, 0xb2, 0x2a, 0xac, 0x16, - 0x07, 0xb5, 0x12, 0x08, 0xa4, 0xb1, 0x3d, 0x24, 0x16, 0x8e, 0xc7, 0x8c, 0xc7, 0x05, 0x3f, 0x12, - 0x57, 0xbc, 0x02, 0x97, 0x7d, 0x04, 0xc4, 0x45, 0x85, 0xda, 0x17, 0x41, 0x9e, 0x1f, 0xc7, 0xce, - 0xa6, 0x55, 0x6f, 0x7a, 0x13, 0x9d, 0x73, 0xfc, 0xcd, 0x77, 0xec, 0xe3, 0xf3, 0x7d, 0x0e, 0xec, - 0xf1, 0xc4, 0xff, 0x2d, 0xa3, 0x3c, 0x77, 0x12, 0xce, 0x04, 0x43, 0x2d, 0x93, 0xdb, 0xf7, 0xe6, - 0xa1, 0x58, 0x64, 0x9e, 0xe3, 0xb3, 0xe5, 0x60, 0xce, 0xe6, 0x6c, 0x20, 0x01, 0x5e, 0xf6, 0x8b, - 0xcc, 0x64, 0x22, 0x23, 0x75, 0xd0, 0xfe, 0xb8, 0x02, 0x17, 0x34, 0x0e, 0x28, 0x5f, 0x86, 0xb1, - 0xa8, 0x86, 0xc4, 0xf3, 0xc3, 0x81, 0xc8, 0x13, 0x9a, 0xaa, 0x5f, 0x7d, 0xb0, 0x1d, 0x93, 0x65, - 0x99, 0xec, 0x10, 0x7f, 0xa9, 0xc3, 0xee, 0x13, 0x12, 0x85, 0x01, 0x11, 0x8c, 0x9b, 0x6b, 0x3c, - 0xf1, 0x75, 0xb8, 0x9b, 0x90, 0x3c, 0x62, 0x24, 0x50, 0x29, 0x0e, 0xa1, 0x3d, 0x13, 0x44, 0x64, - 0xe9, 0x05, 0xe1, 0x64, 0x89, 0x8e, 0xa1, 0x3b, 0x8e, 0x98, 0xff, 0xeb, 0xf7, 0xe1, 0x92, 0x3e, - 0x0e, 0xc5, 0x22, 0x8c, 0x0f, 0xad, 0xdb, 0xd6, 0xf1, 0x8e, 0xbb, 0x5e, 0x46, 0x43, 0xb8, 0x2e, - 0x4b, 0x33, 0x4a, 0xe3, 0x0a, 0xfa, 0x8a, 0x44, 0x6f, 0xba, 0x84, 0x09, 0x74, 0x27, 0x54, 0x9c, - 0xfa, 0x3e, 0xcb, 0x62, 0xa1, 0xda, 0x9d, 0xc3, 0xf6, 0x69, 0x10, 0x70, 0x9a, 0xa6, 0xb2, 0x4d, - 0x67, 0xfc, 0xe0, 0xe9, 0xb3, 0x5b, 0x6f, 0xfd, 0xfb, 0xec, 0xd6, 0x87, 0x95, 0x91, 0x2c, 0xf2, - 0x84, 0xf2, 0x88, 0x06, 0x73, 0xca, 0x07, 0x5e, 0xc6, 0x39, 0xfb, 0x7d, 0xe0, 0xf3, 0x3c, 0x11, - 0xcc, 0xd1, 0x67, 0x5d, 0x43, 0x82, 0x7f, 0x82, 0x76, 0xd1, 0xc2, 0x0b, 0xdf, 0x0c, 0xfd, 0x4d, - 0x68, 0x9d, 0x7a, 0xe1, 0x23, 0x12, 0x65, 0x14, 0xed, 0x43, 0xe3, 0xd4, 0x0b, 0xf5, 0x74, 0x8a, - 0x10, 0xff, 0x69, 0xc9, 0x07, 0x9c, 0x09, 0xc6, 0xc9, 0x9c, 0xbe, 0x91, 0x3b, 0x40, 0x5f, 0x43, - 0xe3, 0x21, 0xcd, 0xe5, 0x94, 0x5f, 0x9b, 0xcb, 0x0b, 0x63, 0xc2, 0x73, 0xe7, 0x31, 0xe3, 0xc1, - 0xe8, 0xe4, 0x23, 0xb7, 0x20, 0xc0, 0x3f, 0x42, 0x47, 0xdf, 0xa7, 0x7a, 0x9a, 0x87, 0xb0, 0x25, - 0x03, 0x7d, 0x97, 0x27, 0x9a, 0xf9, 0xde, 0x6b, 0x31, 0x4f, 0xe9, 0x1f, 0xe3, 0x5c, 0xd0, 0xd4, - 0x55, 0x1c, 0xf8, 0x03, 0xb8, 0xf6, 0x4d, 0x98, 0x9a, 0x37, 0xad, 0x37, 0xeb, 0x00, 0xb6, 0xbe, - 0x2b, 0xc4, 0xa1, 0x27, 0xa6, 0x12, 0x8c, 0xa1, 0x33, 0xa1, 0xe2, 0x9c, 0x2c, 0xf5, 0xbc, 0x10, - 0x5c, 0x2d, 0x12, 0x0d, 0x92, 0x31, 0x3e, 0x82, 0xbd, 0x82, 0xae, 0x88, 0x5f, 0xc9, 0xd5, 0x83, - 0x83, 0x09, 0x15, 0x8f, 0xcc, 0xea, 0xcf, 0xa8, 0x5a, 0x32, 0x3c, 0x81, 0x1b, 0x6b, 0xf5, 0x69, - 0x98, 0x0a, 0xc6, 0xf3, 0x72, 0xe5, 0xcf, 0x62, 0x3f, 0xca, 0x02, 0x7a, 0xc1, 0xe9, 0x93, 0x90, - 0x65, 0xea, 0x55, 0x35, 0xdc, 0xf5, 0x32, 0x9e, 0xc0, 0xf5, 0x0d, 0x2c, 0x68, 0x08, 0xdb, 0x3a, - 0x3c, 0xb4, 0x6e, 0x37, 0x8e, 0xdb, 0xa3, 0x9e, 0x53, 0x3a, 0x43, 0x15, 0xef, 0x1a, 0x18, 0x3e, - 0x87, 0x4e, 0xf5, 0x02, 0xea, 0x41, 0x73, 0x41, 0xc3, 0xf9, 0x42, 0xc8, 0xce, 0x57, 0x5d, 0x9d, - 0xa1, 0x23, 0x68, 0xcc, 0xa8, 0x38, 0xbc, 0x22, 0x59, 0x0f, 0x9c, 0x95, 0xaa, 0xcb, 0xd3, 0x6e, - 0x01, 0xc0, 0x47, 0xb0, 0x3f, 0xa1, 0xe2, 0x82, 0xb3, 0x84, 0xa5, 0x24, 0x2a, 0x27, 0x39, 0x25, - 0xe9, 0x42, 0xbd, 0x50, 0x57, 0xc6, 0x78, 0x08, 0xa8, 0x98, 0xa4, 0x01, 0xea, 0x69, 0xda, 0xd0, - 0x52, 0x15, 0x1a, 0x48, 0x74, 
0xcb, 0x2d, 0x73, 0xfc, 0x2d, 0xec, 0x19, 0xb4, 0x4b, 0xd3, 0x2c, - 0x12, 0x9b, 0x78, 0xd1, 0x1d, 0x68, 0x8e, 0x49, 0x14, 0x31, 0x21, 0x17, 0xb3, 0x3d, 0xea, 0x3a, - 0xc6, 0x64, 0x54, 0xd9, 0xd5, 0x97, 0x71, 0x17, 0x76, 0xa5, 0x42, 0x88, 0xde, 0x0a, 0x4c, 0x61, - 0x4b, 0x66, 0xe8, 0x2e, 0xec, 0x9b, 0x7d, 0x29, 0xec, 0xe2, 0x4b, 0x16, 0x50, 0x3d, 0x8c, 0x4b, - 0xf5, 0xc2, 0x7a, 0xaa, 0x35, 0x96, 0x09, 0x09, 0xbf, 0x22, 0xe1, 0x9b, 0x2e, 0xe1, 0x3b, 0xb2, - 0xaf, 0x34, 0x25, 0xf5, 0xcc, 0x3d, 0x68, 0x4e, 0x6b, 0x13, 0x57, 0xd9, 0xe8, 0xaf, 0xa6, 0x5e, - 0x2d, 0x34, 0x82, 0xa6, 0x32, 0x46, 0xf4, 0xf6, 0xea, 0x75, 0x56, 0xac, 0xd2, 0xbe, 0x56, 0x94, - 0x1d, 0x35, 0x15, 0x8d, 0x3c, 0x01, 0x58, 0x39, 0x1c, 0x7a, 0x77, 0x75, 0x6e, 0xcd, 0xf7, 0xec, - 0x8e, 0x53, 0x98, 0xb5, 0x01, 0xde, 0x87, 0xa6, 0x72, 0xad, 0x6a, 0xab, 0x8a, 0x8f, 0xd9, 0x68, - 0x55, 0x2e, 0xfd, 0xe7, 0x0b, 0xd9, 0x4b, 0x8b, 0x78, 0xad, 0x57, 0xd5, 0x82, 0xec, 0x5e, 0xf5, - 0xf6, 0x2b, 0x92, 0xff, 0x0c, 0x3a, 0x55, 0x95, 0xa2, 0x1b, 0x2b, 0xdc, 0x25, 0xf5, 0xd6, 0x6f, - 0x78, 0x68, 0xa1, 0x01, 0x6c, 0x6b, 0xdd, 0xa2, 0x5e, 0xad, 0x75, 0x29, 0x65, 0xbb, 0xe3, 0xa8, - 0xaf, 0xd3, 0x57, 0xb1, 0xe0, 0x39, 0x3a, 0x81, 0x9d, 0x52, 0xc4, 0xe8, 0xb0, 0xde, 0x6a, 0xa5, - 0xec, 0xfa, 0xa1, 0xa1, 0x85, 0xce, 0xa4, 0xa5, 0xd6, 0xc4, 0xd2, 0xaf, 0xf5, 0xbb, 0x24, 0x77, - 0xfb, 0x25, 0xea, 0x43, 0x3f, 0x43, 0x6f, 0xb3, 0x0d, 0xa0, 0xf7, 0x5f, 0xca, 0x58, 0x35, 0x0a, - 0xfb, 0xbd, 0xcd, 0xc4, 0x86, 0xe5, 0x53, 0xf9, 0xed, 0x31, 0x6a, 0x41, 0x76, 0x8d, 0xb4, 0xa6, - 0x4d, 0x7b, 0x5d, 0x1f, 0xe8, 0x0c, 0x76, 0x6b, 0xc2, 0x44, 0x37, 0xeb, 0x13, 0xaa, 0x2b, 0xd6, - 0xae, 0xcc, 0xaf, 0xae, 0xce, 0xa1, 0x85, 0x1e, 0x40, 0xcb, 0x48, 0x0c, 0xbd, 0xb3, 0xb6, 0x15, - 0x46, 0x76, 0x76, 0xb7, 0xbe, 0xd2, 0x29, 0xfa, 0x04, 0xf6, 0x8c, 0x40, 0xa6, 0x94, 0x04, 0x94, - 0xaf, 0x9d, 0x5d, 0x49, 0xc7, 0xde, 0x75, 0xd4, 0x5f, 0x10, 0x85, 0x1b, 0x7f, 0xfe, 0xcf, 0xf3, - 0xbe, 0xf5, 0xdf, 0xf3, 0xbe, 0xf5, 0xf7, 0x8b, 0xbe, 0xf5, 0xf4, 0x45, 0xdf, 0xfa, 0xe1, 0xee, - 0xab, 0x3f, 0x1a, 0x3c, 0xf1, 0x07, 0x86, 0xda, 0x6b, 0xca, 0x7f, 0x21, 0xf7, 0xff, 0x0f, 0x00, - 0x00, 0xff, 0xff, 0x6f, 0x49, 0x2c, 0xaf, 0x4c, 0x09, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xef, 0x8e, 0xdb, 0x44, + 0x10, 0xc7, 0xbd, 0x5e, 0x2e, 0x37, 0xc9, 0x25, 0xed, 0xf6, 0x08, 0xc1, 0x85, 0xb4, 0x5a, 0x89, + 0xeb, 0x51, 0xb5, 0x4e, 0x14, 0x7a, 0x80, 0x00, 0x09, 0x35, 0x15, 0x24, 0xa7, 0xd2, 0xd3, 0xe1, + 0xa0, 0x56, 0x02, 0x09, 0x69, 0x63, 0x2f, 0x89, 0x85, 0xe3, 0x35, 0xeb, 0x75, 0xc1, 0x8f, 0xc4, + 0x53, 0xc0, 0xc7, 0x3e, 0x02, 0xe2, 0x43, 0x85, 0xda, 0x17, 0x41, 0xde, 0x3f, 0x8e, 0xed, 0x4b, + 0xab, 0x7e, 0xe9, 0x97, 0x68, 0x66, 0xf6, 0x37, 0x33, 0xf6, 0xec, 0xfc, 0x7e, 0x31, 0x74, 0x78, + 0xec, 0xfd, 0x96, 0x52, 0x9e, 0x39, 0x31, 0x67, 0x82, 0xa1, 0xa6, 0xf1, 0xed, 0xbb, 0xcb, 0x40, + 0xac, 0xd2, 0x85, 0xe3, 0xb1, 0xf5, 0x70, 0xc9, 0x96, 0x6c, 0x28, 0x01, 0x8b, 0xf4, 0x17, 0xe9, + 0x49, 0x47, 0x5a, 0x2a, 0xd1, 0xfe, 0xac, 0x04, 0x17, 0x34, 0xf2, 0x29, 0x5f, 0x07, 0x91, 0x28, + 0x9b, 0x64, 0xe1, 0x05, 0x43, 0x91, 0xc5, 0x34, 0x51, 0xbf, 0x3a, 0xb1, 0x15, 0x91, 0x75, 0xe1, + 0xec, 0x13, 0x6f, 0xad, 0xcd, 0xee, 0x53, 0x12, 0x06, 0x3e, 0x11, 0x8c, 0x9b, 0x33, 0x1e, 0x7b, + 0xda, 0x3c, 0x88, 0x49, 0x16, 0x32, 0xe2, 0x2b, 0x17, 0x07, 0xd0, 0x9a, 0x0b, 0x22, 0xd2, 0xe4, + 0x9c, 0x70, 0xb2, 0x46, 0xc7, 0xd0, 0x9d, 0x84, 0xcc, 0xfb, 0xf5, 0x87, 0x60, 0x4d, 0x9f, 0x04, + 0x62, 0x15, 0x44, 0x7d, 0xeb, 0xa6, 0x75, 0xbc, 0xef, 0xd6, 0xc3, 0x68, 0x04, 0xd7, 
0x64, 0x68, + 0x4e, 0x69, 0x54, 0x42, 0x5f, 0x92, 0xe8, 0x6d, 0x47, 0x98, 0x40, 0x77, 0x4a, 0xc5, 0x7d, 0xcf, + 0x63, 0x69, 0x24, 0x54, 0xbb, 0x33, 0xd8, 0xbb, 0xef, 0xfb, 0x9c, 0x26, 0x89, 0x6c, 0xd3, 0x9e, + 0xdc, 0x7b, 0xf6, 0xfc, 0xc6, 0x3b, 0xff, 0x3e, 0xbf, 0x71, 0xa7, 0x34, 0x92, 0x55, 0x16, 0x53, + 0x1e, 0x52, 0x7f, 0x49, 0xf9, 0x70, 0x91, 0x72, 0xce, 0x7e, 0x1f, 0x7a, 0x3c, 0x8b, 0x05, 0x73, + 0x74, 0xae, 0x6b, 0x8a, 0xe0, 0x05, 0x5c, 0x99, 0x52, 0xf1, 0x88, 0x0a, 0xe2, 0x13, 0x41, 0xde, + 0x4e, 0x8f, 0x3b, 0xd0, 0x31, 0x0d, 0x5c, 0x9a, 0xa4, 0xa1, 0x40, 0x36, 0x34, 0x4d, 0x44, 0x4f, + 0xab, 0xf0, 0xf1, 0x9f, 0x96, 0x7c, 0xeb, 0xb9, 0x60, 0x9c, 0x2c, 0xe9, 0x5b, 0x79, 0x22, 0xf4, + 0x2d, 0xec, 0x3c, 0xa4, 0x99, 0x1c, 0xfd, 0x1b, 0xd7, 0x5a, 0x04, 0x11, 0xe1, 0x99, 0xf3, 0x84, + 0x71, 0x7f, 0x7c, 0xf2, 0xa9, 0x9b, 0x17, 0xc0, 0x3f, 0x41, 0x5b, 0x3f, 0xe7, 0x63, 0x12, 0xa6, + 0x14, 0x3d, 0x84, 0x5d, 0x69, 0xe8, 0xa7, 0x3c, 0xd1, 0x95, 0xef, 0xbe, 0x51, 0xe5, 0x19, 0xfd, + 0x63, 0x92, 0x09, 0x9a, 0xb8, 0xaa, 0x06, 0xfe, 0x18, 0xae, 0x7e, 0x17, 0x24, 0xe6, 0xfa, 0xf5, + 0xba, 0x1d, 0xc2, 0xee, 0xf7, 0x39, 0x63, 0xf4, 0xd8, 0x94, 0x83, 0x31, 0xb4, 0xa7, 0x54, 0x9c, + 0x91, 0xb5, 0x9e, 0x17, 0x82, 0xcb, 0xb9, 0xa3, 0x41, 0xd2, 0xc6, 0x47, 0xd0, 0xc9, 0xcb, 0xe5, + 0xf6, 0x6b, 0x6b, 0xf5, 0xe0, 0x70, 0x4a, 0xc5, 0x63, 0xc3, 0x87, 0x39, 0x55, 0x9b, 0x87, 0xa7, + 0x70, 0xbd, 0x16, 0x9f, 0x05, 0x89, 0x60, 0x3c, 0x2b, 0x78, 0x70, 0x1a, 0x79, 0x61, 0xea, 0xd3, + 0x73, 0x4e, 0x9f, 0x06, 0x2c, 0x55, 0x57, 0xb5, 0xe3, 0xd6, 0xc3, 0x78, 0x0a, 0xd7, 0xb6, 0x54, + 0x41, 0x23, 0xd8, 0xd3, 0x66, 0xdf, 0xba, 0xb9, 0x73, 0xdc, 0x1a, 0xf7, 0x9c, 0x42, 0x2e, 0xca, + 0x78, 0xd7, 0xc0, 0xf0, 0x19, 0xb4, 0xcb, 0x07, 0xa8, 0x07, 0x8d, 0x15, 0x0d, 0x96, 0x2b, 0x21, + 0x3b, 0x5f, 0x76, 0xb5, 0x87, 0x8e, 0x60, 0x67, 0x4e, 0x45, 0xff, 0x92, 0xac, 0x7a, 0xe8, 0x6c, + 0xa8, 0x5e, 0x64, 0xbb, 0x39, 0x00, 0x1f, 0x49, 0x2e, 0x9c, 0x73, 0x16, 0xb3, 0x84, 0x84, 0xc5, + 0x24, 0x67, 0x24, 0x59, 0xa9, 0x0b, 0x75, 0xa5, 0x8d, 0x47, 0x80, 0xf2, 0x49, 0x1a, 0xa0, 0x9e, + 0xa6, 0x0d, 0x4d, 0x15, 0xa1, 0xbe, 0x44, 0x37, 0xdd, 0xc2, 0xc7, 0x8f, 0xa0, 0x63, 0xd0, 0x9a, + 0x01, 0x5b, 0xea, 0xa2, 0x5b, 0xd0, 0x98, 0x90, 0x30, 0x64, 0x42, 0x2e, 0x66, 0x6b, 0xdc, 0x75, + 0x8c, 0xf2, 0xa8, 0xb0, 0xab, 0x8f, 0x71, 0x17, 0x0e, 0x24, 0x43, 0x88, 0xde, 0x0a, 0x4c, 0x61, + 0x57, 0x7a, 0xe8, 0x36, 0x5c, 0x31, 0xfb, 0x92, 0x6b, 0xc8, 0x03, 0xe6, 0x53, 0x3d, 0x8c, 0x0b, + 0xf1, 0x5c, 0x8f, 0xca, 0x31, 0x96, 0x0a, 0x09, 0xbf, 0x24, 0xe1, 0xdb, 0x8e, 0xf0, 0x2d, 0xd9, + 0x57, 0x2a, 0x95, 0x7a, 0xe7, 0x1e, 0x34, 0x66, 0x95, 0x89, 0x2b, 0x6f, 0xfc, 0x57, 0x43, 0xaf, + 0x16, 0x1a, 0x43, 0x43, 0xa9, 0x25, 0x7a, 0x77, 0x73, 0x9d, 0x25, 0xfd, 0xb4, 0xaf, 0xe6, 0x61, + 0x47, 0x4d, 0x45, 0x23, 0x4f, 0x00, 0x36, 0xb2, 0x87, 0xde, 0xdf, 0xe4, 0xd5, 0xc4, 0xd0, 0x6e, + 0x3b, 0xb9, 0x82, 0x1b, 0xe0, 0x03, 0x68, 0x95, 0xa4, 0x0c, 0xd9, 0x95, 0xbc, 0x8a, 0xc2, 0xd9, + 0xfd, 0xcd, 0x59, 0x4d, 0x99, 0xbe, 0x96, 0xbd, 0x35, 0xa9, 0x6b, 0xbd, 0xcb, 0x92, 0x64, 0xf7, + 0xca, 0xaf, 0x53, 0x92, 0x80, 0x2f, 0xa1, 0x5d, 0x66, 0x2d, 0xba, 0xbe, 0xc1, 0x5d, 0x60, 0x73, + 0xf5, 0x05, 0x46, 0x16, 0x1a, 0xc2, 0x9e, 0xe6, 0x31, 0xea, 0x55, 0x5a, 0x17, 0xd4, 0xb6, 0xdb, + 0x8e, 0xfa, 0x0b, 0xfb, 0x26, 0x12, 0x3c, 0x43, 0x27, 0xb0, 0x5f, 0x90, 0x1a, 0xf5, 0xab, 0xad, + 0x36, 0x4c, 0xaf, 0x26, 0x8d, 0x2c, 0x74, 0x2a, 0x25, 0xb6, 0x42, 0x9e, 0x41, 0xa5, 0xdf, 0x05, + 0xfa, 0xdb, 0xaf, 0x60, 0x23, 0xfa, 0x19, 0x7a, 0xdb, 0x65, 0x01, 0x7d, 0xf4, 0xca, 0x8a, 0x65, + 0xe1, 0xb0, 
0x3f, 0xdc, 0x5e, 0xd8, 0x54, 0xf9, 0x42, 0xde, 0xaa, 0x61, 0x4f, 0xed, 0x56, 0x2b, + 0x5c, 0xb5, 0xeb, 0x7c, 0x41, 0xa7, 0x70, 0x50, 0x21, 0x2a, 0xfa, 0xa0, 0x3a, 0xa1, 0x2a, 0x83, + 0xcb, 0x5b, 0x51, 0x65, 0xeb, 0xc8, 0x42, 0xf7, 0xa0, 0x69, 0x28, 0x87, 0xde, 0xab, 0x6d, 0x85, + 0xa1, 0xa1, 0xdd, 0xad, 0xae, 0x78, 0x82, 0x3e, 0x87, 0x8e, 0x21, 0xcc, 0x8c, 0x12, 0x9f, 0xf2, + 0x5a, 0xee, 0x86, 0x4a, 0xf6, 0x81, 0xa3, 0xbe, 0x53, 0x14, 0x6e, 0xf2, 0xd5, 0x3f, 0x2f, 0x06, + 0xd6, 0x7f, 0x2f, 0x06, 0xd6, 0xdf, 0x2f, 0x07, 0xd6, 0xb3, 0x97, 0x03, 0xeb, 0xc7, 0xdb, 0xaf, + 0xff, 0x13, 0xe1, 0xb1, 0x37, 0x34, 0xa5, 0x17, 0x0d, 0xf9, 0xa9, 0xf2, 0xc9, 0xff, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x01, 0x12, 0xcc, 0x9d, 0x71, 0x09, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -940,7 +940,7 @@ const _ = grpc.SupportPackageIsVersion4 type QueryClient interface { Status(ctx context.Context, in *StatusParam, opts ...grpc.CallOption) (*rpc.ResultStatus, error) GetAccount(ctx context.Context, in *GetAccountParam, opts ...grpc.CallOption) (*acm.Account, error) - GetAbi(ctx context.Context, in *GetAbiParam, opts ...grpc.CallOption) (*AbiValue, error) + GetMetadata(ctx context.Context, in *GetMetadataParam, opts ...grpc.CallOption) (*MetadataResult, error) GetStorage(ctx context.Context, in *GetStorageParam, opts ...grpc.CallOption) (*StorageValue, error) ListAccounts(ctx context.Context, in *ListAccountsParam, opts ...grpc.CallOption) (Query_ListAccountsClient, error) GetName(ctx context.Context, in *GetNameParam, opts ...grpc.CallOption) (*names.Entry, error) @@ -979,9 +979,9 @@ func (c *queryClient) GetAccount(ctx context.Context, in *GetAccountParam, opts return out, nil } -func (c *queryClient) GetAbi(ctx context.Context, in *GetAbiParam, opts ...grpc.CallOption) (*AbiValue, error) { - out := new(AbiValue) - err := c.cc.Invoke(ctx, "/rpcquery.Query/GetAbi", in, out, opts...) +func (c *queryClient) GetMetadata(ctx context.Context, in *GetMetadataParam, opts ...grpc.CallOption) (*MetadataResult, error) { + out := new(MetadataResult) + err := c.cc.Invoke(ctx, "/rpcquery.Query/GetMetadata", in, out, opts...) 
if err != nil { return nil, err } @@ -1151,7 +1151,7 @@ func (c *queryClient) GetBlockHeader(ctx context.Context, in *GetBlockParam, opt type QueryServer interface { Status(context.Context, *StatusParam) (*rpc.ResultStatus, error) GetAccount(context.Context, *GetAccountParam) (*acm.Account, error) - GetAbi(context.Context, *GetAbiParam) (*AbiValue, error) + GetMetadata(context.Context, *GetMetadataParam) (*MetadataResult, error) GetStorage(context.Context, *GetStorageParam) (*StorageValue, error) ListAccounts(*ListAccountsParam, Query_ListAccountsServer) error GetName(context.Context, *GetNameParam) (*names.Entry, error) @@ -1204,20 +1204,20 @@ func _Query_GetAccount_Handler(srv interface{}, ctx context.Context, dec func(in return interceptor(ctx, in, info, handler) } -func _Query_GetAbi_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetAbiParam) +func _Query_GetMetadata_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMetadataParam) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(QueryServer).GetAbi(ctx, in) + return srv.(QueryServer).GetMetadata(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/rpcquery.Query/GetAbi", + FullMethod: "/rpcquery.Query/GetMetadata", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(QueryServer).GetAbi(ctx, req.(*GetAbiParam)) + return srv.(QueryServer).GetMetadata(ctx, req.(*GetMetadataParam)) } return interceptor(ctx, in, info, handler) } @@ -1424,8 +1424,8 @@ var _Query_serviceDesc = grpc.ServiceDesc{ Handler: _Query_GetAccount_Handler, }, { - MethodName: "GetAbi", - Handler: _Query_GetAbi_Handler, + MethodName: "GetMetadata", + Handler: _Query_GetMetadata_Handler, }, { MethodName: "GetStorage", @@ -1510,7 +1510,7 @@ func (m *GetAccountParam) Size() (n int) { return n } -func (m *GetAbiParam) Size() (n int) { +func (m *GetMetadataParam) Size() (n int) { if m == nil { return 0 } @@ -1524,13 +1524,13 @@ func (m *GetAbiParam) Size() (n int) { return n } -func (m *AbiValue) Size() (n int) { +func (m *MetadataResult) Size() (n int) { if m == nil { return 0 } var l int _ = l - l = len(m.Abi) + l = len(m.Metadata) if l > 0 { n += 1 + l + sovRpcquery(uint64(l)) } diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 9726af56e..0e2c9706a 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -305,10 +305,10 @@ type CallTx struct { // WASM bytecode WASM github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,6,opt,name=WASM,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"tags,omitempty"` // Set of contracts this code will deploy - Abis []*Abis `protobuf:"bytes,7,rep,name=Abis,proto3" json:"Abis,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ContractMeta []*ContractMeta `protobuf:"bytes,7,rep,name=ContractMeta,proto3" json:"ContractMeta,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CallTx) Reset() { *m = CallTx{} } @@ -364,9 +364,9 @@ func (m *CallTx) GetFee() uint64 { return 0 } -func (m *CallTx) GetAbis() []*Abis { +func (m *CallTx) GetContractMeta() []*ContractMeta { if m != nil { - return m.Abis + return m.ContractMeta } 
return nil } @@ -375,26 +375,26 @@ func (*CallTx) XXX_MessageName() string { return "payload.CallTx" } -type Abis struct { +type ContractMeta struct { CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,1,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"CodeHash"` - Abi string `protobuf:"bytes,2,opt,name=Abi,proto3" json:"Abi,omitempty"` + Meta string `protobuf:"bytes,2,opt,name=Meta,proto3" json:"Meta,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } -func (m *Abis) Reset() { *m = Abis{} } -func (m *Abis) String() string { return proto.CompactTextString(m) } -func (*Abis) ProtoMessage() {} -func (*Abis) Descriptor() ([]byte, []int) { +func (m *ContractMeta) Reset() { *m = ContractMeta{} } +func (m *ContractMeta) String() string { return proto.CompactTextString(m) } +func (*ContractMeta) ProtoMessage() {} +func (*ContractMeta) Descriptor() ([]byte, []int) { return fileDescriptor_678c914f1bee6d56, []int{4} } -func (m *Abis) XXX_Unmarshal(b []byte) error { +func (m *ContractMeta) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } -func (m *Abis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { +func (m *ContractMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { - return xxx_messageInfo_Abis.Marshal(b, m, deterministic) + return xxx_messageInfo_ContractMeta.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalTo(b) @@ -404,27 +404,27 @@ func (m *Abis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return b[:n], nil } } -func (m *Abis) XXX_Merge(src proto.Message) { - xxx_messageInfo_Abis.Merge(m, src) +func (m *ContractMeta) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContractMeta.Merge(m, src) } -func (m *Abis) XXX_Size() int { +func (m *ContractMeta) XXX_Size() int { return m.Size() } -func (m *Abis) XXX_DiscardUnknown() { - xxx_messageInfo_Abis.DiscardUnknown(m) +func (m *ContractMeta) XXX_DiscardUnknown() { + xxx_messageInfo_ContractMeta.DiscardUnknown(m) } -var xxx_messageInfo_Abis proto.InternalMessageInfo +var xxx_messageInfo_ContractMeta proto.InternalMessageInfo -func (m *Abis) GetAbi() string { +func (m *ContractMeta) GetMeta() string { if m != nil { - return m.Abi + return m.Meta } return "" } -func (*Abis) XXX_MessageName() string { - return "payload.Abis" +func (*ContractMeta) XXX_MessageName() string { + return "payload.ContractMeta" } // A payment between two sets of parties @@ -1004,8 +1004,8 @@ func init() { golang_proto.RegisterType((*TxOutput)(nil), "payload.TxOutput") proto.RegisterType((*CallTx)(nil), "payload.CallTx") golang_proto.RegisterType((*CallTx)(nil), "payload.CallTx") - proto.RegisterType((*Abis)(nil), "payload.Abis") - golang_proto.RegisterType((*Abis)(nil), "payload.Abis") + proto.RegisterType((*ContractMeta)(nil), "payload.ContractMeta") + golang_proto.RegisterType((*ContractMeta)(nil), "payload.ContractMeta") proto.RegisterType((*SendTx)(nil), "payload.SendTx") golang_proto.RegisterType((*SendTx)(nil), "payload.SendTx") proto.RegisterType((*PermsTx)(nil), "payload.PermsTx") @@ -1034,72 +1034,72 @@ func init() { proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d5 func init() { golang_proto.RegisterFile("payload.proto", fileDescriptor_678c914f1bee6d56) } var fileDescriptor_678c914f1bee6d56 = []byte{ - // 1025 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcf, 0x6f, 
0xe3, 0xc4, - 0x17, 0xaf, 0x63, 0xe7, 0xc7, 0xbe, 0xa6, 0xfd, 0x66, 0xe7, 0x0b, 0x28, 0xaa, 0x44, 0x52, 0x02, - 0x82, 0x2e, 0x6c, 0x13, 0xe8, 0xf2, 0x43, 0xea, 0x05, 0xc5, 0x49, 0xda, 0x2d, 0x5a, 0xb6, 0x65, - 0xe2, 0xee, 0x22, 0x10, 0x07, 0x27, 0x19, 0x1c, 0x8b, 0xd8, 0x63, 0xec, 0xc9, 0xe2, 0x70, 0xe2, - 0xc0, 0x81, 0x2b, 0xe2, 0xc2, 0xb1, 0xff, 0x02, 0xff, 0x01, 0xc7, 0x1e, 0x39, 0x73, 0xa8, 0x50, - 0xf7, 0x82, 0xf8, 0x17, 0xb8, 0xa0, 0x19, 0xcf, 0x38, 0x4e, 0x76, 0xb5, 0x9b, 0x56, 0x88, 0x9b, - 0xe7, 0xbd, 0xcf, 0xbc, 0x1f, 0x9f, 0xf7, 0xe6, 0x3d, 0xc3, 0x46, 0x60, 0xcf, 0x26, 0xd4, 0x1e, - 0x35, 0x83, 0x90, 0x32, 0x8a, 0x8a, 0xf2, 0xb8, 0xb5, 0xeb, 0xb8, 0x6c, 0x3c, 0x1d, 0x34, 0x87, - 0xd4, 0x6b, 0x39, 0xd4, 0xa1, 0x2d, 0xa1, 0x1f, 0x4c, 0xbf, 0x14, 0x27, 0x71, 0x10, 0x5f, 0xc9, - 0xbd, 0xad, 0x4a, 0x40, 0x42, 0xcf, 0x8d, 0x22, 0x97, 0xfa, 0x52, 0x02, 0x51, 0x40, 0x86, 0xc9, - 0x77, 0xe3, 0x47, 0x1d, 0xf4, 0xb6, 0x3f, 0x43, 0x6f, 0x40, 0xa1, 0x63, 0x4f, 0x26, 0x56, 0x5c, - 0xd5, 0xb6, 0xb5, 0x9d, 0xf5, 0xbd, 0xff, 0x35, 0x95, 0xf7, 0x44, 0x8c, 0xa5, 0x9a, 0x03, 0xfb, - 0xc4, 0x1f, 0x59, 0x71, 0x35, 0xb7, 0x04, 0x4c, 0xc4, 0x58, 0xaa, 0x39, 0xf0, 0xbe, 0xed, 0x11, - 0x2b, 0xae, 0xea, 0x4b, 0xc0, 0x44, 0x8c, 0xa5, 0x1a, 0xbd, 0x09, 0xc5, 0x13, 0x12, 0x7a, 0x91, - 0x15, 0x57, 0x0d, 0x81, 0xac, 0xa4, 0x48, 0x29, 0xc7, 0x0a, 0x80, 0x5e, 0x83, 0xfc, 0x21, 0x7d, - 0x64, 0xc5, 0xd5, 0xbc, 0x40, 0x6e, 0xa6, 0x48, 0x21, 0xc5, 0x89, 0x92, 0xbb, 0x36, 0xa9, 0x88, - 0xb1, 0xb0, 0xe4, 0x3a, 0x11, 0x63, 0xa9, 0x46, 0xbb, 0x50, 0x3a, 0xf5, 0x07, 0x09, 0xb4, 0x28, - 0xa0, 0x37, 0x53, 0xa8, 0x52, 0xe0, 0x14, 0xc2, 0x23, 0x35, 0x6d, 0x36, 0x1c, 0x5b, 0x71, 0xb5, - 0xb4, 0x14, 0xa9, 0x94, 0x63, 0x05, 0x40, 0x77, 0x00, 0x4e, 0x42, 0x1a, 0xd0, 0xc8, 0xe6, 0xa4, - 0xde, 0x10, 0xf0, 0xff, 0xcf, 0x13, 0x4b, 0x55, 0x38, 0x03, 0xdb, 0x37, 0xce, 0xcf, 0xea, 0x5a, - 0xe3, 0x27, 0x0d, 0x8a, 0x56, 0x7c, 0xe4, 0x07, 0x53, 0x86, 0xee, 0x43, 0xb1, 0x3d, 0x1a, 0x85, - 0x24, 0x8a, 0x44, 0x61, 0xca, 0xe6, 0xbb, 0xe7, 0x17, 0xf5, 0xb5, 0xdf, 0x2f, 0xea, 0xb7, 0x33, - 0x5d, 0x30, 0x9e, 0x05, 0x24, 0x9c, 0x90, 0x91, 0x43, 0xc2, 0xd6, 0x60, 0x1a, 0x86, 0xf4, 0x9b, - 0xd6, 0x30, 0x9c, 0x05, 0x8c, 0x36, 0xe5, 0x5d, 0xac, 0x8c, 0xa0, 0x97, 0xa0, 0xd0, 0xf6, 0xe8, - 0xd4, 0x67, 0xa2, 0x7c, 0x06, 0x96, 0x27, 0xb4, 0x05, 0xa5, 0x3e, 0xf9, 0x7a, 0x4a, 0xfc, 0x21, - 0x11, 0xf5, 0x32, 0x70, 0x7a, 0xde, 0x37, 0x7e, 0x3e, 0xab, 0xaf, 0x35, 0x62, 0x28, 0x59, 0xf1, - 0xf1, 0x94, 0xfd, 0x87, 0x51, 0x49, 0xcf, 0x7f, 0xe7, 0x54, 0x73, 0xa2, 0xd7, 0x21, 0x2f, 0x78, - 0x91, 0x5d, 0x3a, 0xe7, 0x5f, 0xf2, 0x85, 0x13, 0x35, 0xfa, 0x68, 0x1e, 0x60, 0x4e, 0x04, 0xf8, - 0xf6, 0xf5, 0x83, 0xdb, 0x82, 0xd2, 0xa1, 0x1d, 0xdd, 0x73, 0x3d, 0x97, 0x29, 0x6a, 0xd4, 0x19, - 0x55, 0x40, 0x3f, 0x20, 0x44, 0xf4, 0xad, 0x81, 0xf9, 0x27, 0x3a, 0x02, 0xa3, 0x6b, 0x33, 0x5b, - 0x34, 0x68, 0xd9, 0x7c, 0x4f, 0xf2, 0xb2, 0xfb, 0x6c, 0xd7, 0x03, 0xd7, 0xb7, 0xc3, 0x59, 0xf3, - 0x2e, 0x89, 0xcd, 0x19, 0x23, 0x11, 0x16, 0x26, 0xd0, 0xe7, 0x60, 0x3c, 0x6c, 0xf7, 0x3f, 0x16, - 0x4d, 0x5c, 0x36, 0x0f, 0xaf, 0x65, 0xea, 0xaf, 0x8b, 0xfa, 0x26, 0xb3, 0x9d, 0xe8, 0x36, 0xf5, - 0x5c, 0x46, 0xbc, 0x80, 0xcd, 0xb0, 0x30, 0x8a, 0x5e, 0x01, 0xa3, 0x3d, 0x70, 0xa3, 0x6a, 0x71, - 0x5b, 0xdf, 0x59, 0xdf, 0xdb, 0x48, 0x89, 0xe4, 0x42, 0x2c, 0x54, 0x92, 0xfd, 0xaf, 0x12, 0x20, - 0xfa, 0x04, 0x4a, 0x1d, 0x3a, 0x22, 0x77, 0xed, 0x68, 0x2c, 0x8b, 0x7e, 0xcd, 0xe4, 0x52, 0x33, - 0x9c, 0xbd, 0xf6, 0xc0, 0x15, 0x15, 0xba, 0x81, 0xf9, 0x67, 0xc3, 0x55, 0xd3, 0x05, 0xed, 0x40, - 0x41, 0x94, 
0x92, 0x77, 0x98, 0xfe, 0xd4, 0x52, 0x4b, 0x3d, 0x7a, 0x0b, 0x8a, 0x49, 0x5b, 0xf2, - 0x5a, 0xeb, 0x0b, 0x6f, 0x58, 0x35, 0x2c, 0x56, 0x88, 0xfd, 0xd2, 0x0f, 0x67, 0xf5, 0x35, 0x91, - 0x17, 0x4d, 0xc7, 0xce, 0xca, 0x5d, 0xf5, 0x3e, 0x94, 0xf8, 0x95, 0x76, 0xe8, 0x44, 0x72, 0xfa, - 0xbd, 0xd0, 0xcc, 0x4c, 0x57, 0xa5, 0x33, 0x0d, 0x4e, 0x0c, 0x4e, 0xb1, 0x92, 0xc8, 0x40, 0x0d, - 0xc4, 0x95, 0xfd, 0x21, 0x30, 0xf8, 0x0d, 0x49, 0x90, 0xf8, 0xe6, 0x32, 0xd1, 0x5f, 0x7a, 0x22, - 0x13, 0x8d, 0xf2, 0x44, 0x17, 0x4a, 0x8f, 0xfb, 0x6a, 0x0e, 0xae, 0xea, 0x31, 0x43, 0x8f, 0x33, - 0x1f, 0x8d, 0x2b, 0xc7, 0x7b, 0x0b, 0x0a, 0x09, 0xcf, 0x92, 0x9d, 0xa7, 0x14, 0x42, 0x02, 0x32, - 0x8e, 0xbe, 0xd3, 0xe4, 0x4c, 0xbf, 0x42, 0xc9, 0x3b, 0xb0, 0xd9, 0x1e, 0x0e, 0xf9, 0x88, 0x38, - 0x0d, 0x46, 0x36, 0x23, 0xaa, 0xf2, 0x2f, 0x36, 0xc5, 0x6a, 0xb3, 0x88, 0x17, 0x4c, 0x6c, 0x46, - 0x24, 0x46, 0xd4, 0x43, 0xc3, 0x4b, 0x57, 0x32, 0x21, 0xfc, 0xa9, 0x65, 0x87, 0xf5, 0xca, 0xe9, - 0x36, 0xa0, 0xfc, 0x80, 0x32, 0xd7, 0x77, 0x1e, 0x12, 0xd7, 0x19, 0x27, 0x49, 0xeb, 0x78, 0x41, - 0x86, 0x4e, 0xa1, 0xac, 0x2c, 0x8b, 0x97, 0xa3, 0x8b, 0x97, 0xf3, 0xce, 0xd5, 0x5f, 0xcd, 0x82, - 0x19, 0xbe, 0xb8, 0xd4, 0x59, 0x2e, 0xcd, 0x9b, 0x4f, 0xec, 0x16, 0x9c, 0x42, 0x32, 0xa9, 0x7e, - 0x91, 0xae, 0xb0, 0x2b, 0xd0, 0x5d, 0x03, 0xdd, 0x8a, 0x15, 0xc7, 0xe5, 0xf9, 0xa8, 0xf0, 0x67, - 0x98, 0x2b, 0x32, 0xe6, 0xbf, 0xd7, 0xc0, 0x78, 0x40, 0x19, 0xf9, 0xd7, 0x37, 0xc4, 0x0a, 0x5c, - 0x67, 0xc2, 0x78, 0x34, 0xa7, 0x27, 0x7d, 0x44, 0x5a, 0xe6, 0x11, 0x6d, 0xc3, 0x7a, 0x97, 0x44, - 0xc3, 0xd0, 0x0d, 0x98, 0x4b, 0x7d, 0xf9, 0xbe, 0xb2, 0xa2, 0xec, 0xaa, 0xd7, 0x9f, 0xb3, 0xea, - 0x33, 0x7e, 0x7f, 0xc9, 0x41, 0xc1, 0xb4, 0x27, 0x13, 0xca, 0x16, 0x2a, 0xa4, 0x3d, 0xb7, 0x42, - 0xbc, 0x4f, 0x0e, 0x5c, 0xdf, 0x9e, 0xb8, 0xdf, 0xba, 0xbe, 0x23, 0x7f, 0xae, 0xae, 0xd7, 0x27, - 0x59, 0x33, 0xa8, 0x03, 0x1b, 0x81, 0x74, 0xd1, 0x67, 0x36, 0x4b, 0x66, 0xc4, 0xe6, 0xde, 0xcb, - 0x99, 0x64, 0x78, 0xb4, 0x69, 0x44, 0x02, 0x84, 0x17, 0xef, 0xa0, 0x57, 0x21, 0xcf, 0x6b, 0x1a, - 0x55, 0xf3, 0x4b, 0xbb, 0x82, 0x4b, 0x71, 0xa2, 0x6b, 0x7c, 0x00, 0x1b, 0x0b, 0x46, 0x50, 0x19, - 0x4a, 0x27, 0xf8, 0xf8, 0xe4, 0xb8, 0xdf, 0xeb, 0x56, 0xd6, 0xf8, 0xa9, 0xf7, 0x69, 0xaf, 0x73, - 0x6a, 0xf5, 0xba, 0x15, 0x0d, 0x01, 0x14, 0x0e, 0xda, 0x47, 0xf7, 0x7a, 0xdd, 0x4a, 0xce, 0xfc, - 0xf0, 0xfc, 0xb2, 0xa6, 0xfd, 0x76, 0x59, 0xd3, 0xfe, 0xb8, 0xac, 0x69, 0xbf, 0x3e, 0xae, 0x69, - 0xe7, 0x8f, 0x6b, 0xda, 0x67, 0xb7, 0x9e, 0x9d, 0x35, 0x8b, 0xa3, 0x96, 0x8c, 0x62, 0x50, 0x10, - 0x7f, 0xb2, 0x77, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0xb4, 0x05, 0xdb, 0xfe, 0x30, 0x0b, 0x00, - 0x00, + // 1033 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0xcf, 0x66, 0xd7, 0x7f, 0xfa, 0xe2, 0x04, 0x77, 0xa0, 0xc8, 0x8a, 0x84, 0x1d, 0x19, 0x04, + 0x29, 0x34, 0x36, 0xa4, 0xfc, 0x11, 0xb9, 0x20, 0xff, 0x4b, 0x1a, 0xd4, 0x36, 0x61, 0xbc, 0x69, + 0x11, 0x88, 0xc3, 0xd8, 0x1e, 0xd6, 0x2b, 0xd9, 0x3b, 0xcb, 0xee, 0xb8, 0xac, 0x39, 0x71, 0xe0, + 0xc0, 0x15, 0xf5, 0xc2, 0x31, 0x5f, 0x81, 0x6f, 0xc0, 0x31, 0x47, 0xce, 0x1c, 0x22, 0x94, 0x5e, + 0x10, 0x9f, 0x02, 0xcd, 0xec, 0xcc, 0x7a, 0xed, 0x56, 0x8d, 0x13, 0x21, 0x6e, 0x33, 0xef, 0xfd, + 0xe6, 0xbd, 0x37, 0xef, 0xfd, 0xde, 0x9b, 0x81, 0x75, 0x9f, 0x4c, 0x47, 0x8c, 0x0c, 0x6a, 0x7e, + 0xc0, 0x38, 0x43, 0x39, 0xb5, 0xdd, 0xdc, 0x71, 0x5c, 0x3e, 0x9c, 0xf4, 0x6a, 0x7d, 0x36, 0xae, + 0x3b, 0xcc, 0x61, 0x75, 0xa9, 0xef, 0x4d, 0xbe, 0x95, 0x3b, 0xb9, 0x91, 0xab, 
0xf8, 0xdc, 0x66, + 0xd1, 0xa7, 0xc1, 0xd8, 0x0d, 0x43, 0x97, 0x79, 0x4a, 0x02, 0xa1, 0x4f, 0xfb, 0xf1, 0xba, 0xfa, + 0x8b, 0x09, 0x66, 0xc3, 0x9b, 0xa2, 0x77, 0x20, 0xdb, 0x22, 0xa3, 0x91, 0x1d, 0x95, 0x8c, 0x2d, + 0x63, 0x7b, 0x6d, 0xf7, 0x95, 0x9a, 0xf6, 0x1e, 0x8b, 0xb1, 0x52, 0x0b, 0x60, 0x97, 0x7a, 0x03, + 0x3b, 0x2a, 0xad, 0x2e, 0x00, 0x63, 0x31, 0x56, 0x6a, 0x01, 0x7c, 0x48, 0xc6, 0xd4, 0x8e, 0x4a, + 0xe6, 0x02, 0x30, 0x16, 0x63, 0xa5, 0x46, 0xef, 0x42, 0xee, 0x98, 0x06, 0xe3, 0xd0, 0x8e, 0x4a, + 0x96, 0x44, 0x16, 0x13, 0xa4, 0x92, 0x63, 0x0d, 0x40, 0x6f, 0x41, 0xe6, 0x80, 0x3d, 0xb1, 0xa3, + 0x52, 0x46, 0x22, 0x37, 0x12, 0xa4, 0x94, 0xe2, 0x58, 0x29, 0x5c, 0x37, 0x99, 0x8c, 0x31, 0xbb, + 0xe0, 0x3a, 0x16, 0x63, 0xa5, 0x46, 0x3b, 0x90, 0x3f, 0xf1, 0x7a, 0x31, 0x34, 0x27, 0xa1, 0x37, + 0x13, 0xa8, 0x56, 0xe0, 0x04, 0x22, 0x22, 0x6d, 0x12, 0xde, 0x1f, 0xda, 0x51, 0x29, 0xbf, 0x10, + 0xa9, 0x92, 0x63, 0x0d, 0x40, 0x77, 0x01, 0x8e, 0x03, 0xe6, 0xb3, 0x90, 0x88, 0xa4, 0xde, 0x90, + 0xf0, 0x57, 0x67, 0x17, 0x4b, 0x54, 0x38, 0x05, 0xdb, 0xb3, 0xce, 0x4e, 0x2b, 0x46, 0xf5, 0xa9, + 0x01, 0x39, 0x3b, 0x3a, 0xf4, 0xfc, 0x09, 0x47, 0x0f, 0x21, 0xd7, 0x18, 0x0c, 0x02, 0x1a, 0x86, + 0xb2, 0x30, 0x85, 0xe6, 0x87, 0x67, 0xe7, 0x95, 0x95, 0x3f, 0xcf, 0x2b, 0x77, 0x52, 0x2c, 0x18, + 0x4e, 0x7d, 0x1a, 0x8c, 0xe8, 0xc0, 0xa1, 0x41, 0xbd, 0x37, 0x09, 0x02, 0xf6, 0x7d, 0xbd, 0x1f, + 0x4c, 0x7d, 0xce, 0x6a, 0xea, 0x2c, 0xd6, 0x46, 0xd0, 0xeb, 0x90, 0x6d, 0x8c, 0xd9, 0xc4, 0xe3, + 0xb2, 0x7c, 0x16, 0x56, 0x3b, 0xb4, 0x09, 0xf9, 0x2e, 0xfd, 0x6e, 0x42, 0xbd, 0x3e, 0x95, 0xf5, + 0xb2, 0x70, 0xb2, 0xdf, 0xb3, 0x7e, 0x3d, 0xad, 0xac, 0x54, 0x23, 0xc8, 0xdb, 0xd1, 0xd1, 0x84, + 0xff, 0x8f, 0x51, 0x29, 0xcf, 0x4f, 0x4d, 0x4d, 0x4e, 0xf4, 0x36, 0x64, 0x64, 0x5e, 0x14, 0x4b, + 0x67, 0xf9, 0x57, 0xf9, 0xc2, 0xb1, 0x1a, 0x7d, 0x3e, 0x0b, 0x70, 0x55, 0x06, 0xf8, 0xfe, 0xf5, + 0x83, 0xdb, 0x84, 0xfc, 0x01, 0x09, 0xef, 0xbb, 0x63, 0x97, 0xeb, 0xd4, 0xe8, 0x3d, 0x2a, 0x82, + 0xb9, 0x4f, 0xa9, 0xe4, 0xad, 0x85, 0xc5, 0x12, 0x1d, 0x82, 0xd5, 0x26, 0x9c, 0x48, 0x82, 0x16, + 0x9a, 0x1f, 0xa9, 0xbc, 0xec, 0xbc, 0xdc, 0x75, 0xcf, 0xf5, 0x48, 0x30, 0xad, 0xdd, 0xa3, 0x51, + 0x73, 0xca, 0x69, 0x88, 0xa5, 0x09, 0xf4, 0x35, 0x58, 0x8f, 0x1b, 0xdd, 0x07, 0x92, 0xc4, 0x85, + 0xe6, 0xc1, 0xb5, 0x4c, 0xfd, 0x73, 0x5e, 0xd9, 0xe0, 0xc4, 0x09, 0xef, 0xb0, 0xb1, 0xcb, 0xe9, + 0xd8, 0xe7, 0x53, 0x2c, 0x8d, 0xa2, 0x4f, 0xa1, 0xd0, 0x62, 0x1e, 0x0f, 0x48, 0x9f, 0x3f, 0xa0, + 0x9c, 0x94, 0x72, 0x5b, 0xe6, 0xf6, 0xda, 0xee, 0xad, 0x59, 0xdb, 0xa7, 0x94, 0x78, 0x0e, 0xaa, + 0xaa, 0x32, 0x99, 0x37, 0x80, 0xbe, 0x80, 0x7c, 0x8b, 0x0d, 0xe8, 0x3d, 0x12, 0x0e, 0x15, 0x29, + 0xae, 0x79, 0xf9, 0xc4, 0x0c, 0x42, 0x60, 0xc9, 0xd8, 0x44, 0x09, 0x6f, 0x60, 0xb9, 0xae, 0xba, + 0x7a, 0xfe, 0xa0, 0x6d, 0xc8, 0xca, 0x62, 0x0b, 0x0e, 0x9a, 0x2f, 0x24, 0x83, 0xd2, 0xa3, 0xf7, + 0x20, 0x17, 0x13, 0x57, 0xb0, 0xc1, 0x9c, 0xeb, 0x72, 0x4d, 0x69, 0xac, 0x11, 0x7b, 0xf9, 0x9f, + 0x4f, 0x2b, 0x2b, 0xf2, 0x86, 0x2c, 0x19, 0x4c, 0x4b, 0xf3, 0xee, 0x63, 0xc8, 0x8b, 0x23, 0x8d, + 0xc0, 0x09, 0xd5, 0x7c, 0x7c, 0xad, 0x96, 0x9a, 0xbf, 0x5a, 0xd7, 0xb4, 0x44, 0x6a, 0x70, 0x82, + 0x55, 0x29, 0xf5, 0xf5, 0xc8, 0x5c, 0xda, 0x1f, 0x02, 0x4b, 0x9c, 0xd0, 0x19, 0x12, 0x6b, 0x21, + 0x93, 0x0c, 0x34, 0x63, 0x99, 0xa4, 0xd2, 0x73, 0x3c, 0x55, 0x1e, 0xf7, 0xf4, 0xa4, 0x5c, 0xd6, + 0x63, 0x2a, 0x3d, 0xce, 0x6c, 0x78, 0x2e, 0x1d, 0xef, 0x6d, 0xc8, 0xc6, 0x79, 0x56, 0xd9, 0x79, + 0x41, 0x21, 0x14, 0x20, 0xe5, 0xe8, 0x47, 0x43, 0x4d, 0xfd, 0x2b, 0x94, 0xbc, 0x05, 0x1b, 0x8d, + 0x7e, 
0x5f, 0x0c, 0x91, 0x13, 0x7f, 0x40, 0x38, 0xd5, 0x95, 0xbf, 0x55, 0x93, 0x8f, 0x9f, 0x4d, + 0xc7, 0xfe, 0x88, 0x70, 0xaa, 0x30, 0xb2, 0x1e, 0x06, 0x5e, 0x38, 0x92, 0x0a, 0xe1, 0x6f, 0x23, + 0x3d, 0xce, 0x97, 0xbe, 0x6e, 0x15, 0x0a, 0x8f, 0x18, 0x77, 0x3d, 0xe7, 0x31, 0x75, 0x9d, 0x61, + 0x7c, 0x69, 0x13, 0xcf, 0xc9, 0xd0, 0x09, 0x14, 0xb4, 0x65, 0xd9, 0x3b, 0xa6, 0xec, 0x9d, 0x0f, + 0xae, 0xde, 0x37, 0x73, 0x66, 0xc4, 0xd3, 0xa6, 0xf7, 0xea, 0x59, 0xbd, 0xf9, 0xdc, 0xeb, 0x83, + 0x13, 0x48, 0xea, 0xaa, 0xdf, 0x24, 0x8f, 0xdc, 0x15, 0xd2, 0x5d, 0x06, 0xd3, 0x8e, 0x74, 0x8e, + 0x0b, 0x09, 0xac, 0xe1, 0x4d, 0xb1, 0x50, 0xa4, 0xcc, 0xff, 0x64, 0x80, 0xf5, 0x88, 0x71, 0xfa, + 0x9f, 0xbf, 0x21, 0x4b, 0xe4, 0x3a, 0x15, 0xc6, 0x93, 0x59, 0x7a, 0x92, 0x26, 0x32, 0x52, 0x4d, + 0xb4, 0x05, 0x6b, 0x6d, 0x1a, 0xf6, 0x03, 0xd7, 0xe7, 0x2e, 0xf3, 0x54, 0x7f, 0xa5, 0x45, 0xe9, + 0xcf, 0x80, 0x79, 0xc9, 0x67, 0x20, 0xe5, 0xf7, 0xb7, 0x55, 0xc8, 0x36, 0xc9, 0x68, 0xc4, 0xf8, + 0x5c, 0x85, 0x8c, 0x4b, 0x2b, 0x24, 0x78, 0xb2, 0xef, 0x7a, 0x64, 0xe4, 0xfe, 0xe0, 0x7a, 0x8e, + 0xfa, 0x7e, 0x5d, 0x8f, 0x27, 0x69, 0x33, 0xa8, 0x05, 0xeb, 0xbe, 0x72, 0xd1, 0xe5, 0x84, 0xc7, + 0x33, 0x62, 0x63, 0xf7, 0x8d, 0xd4, 0x65, 0x44, 0xb4, 0x49, 0x44, 0x12, 0x84, 0xe7, 0xcf, 0xa0, + 0x37, 0x21, 0x23, 0x6a, 0x1a, 0x96, 0x32, 0x92, 0x00, 0xeb, 0xc9, 0x61, 0x21, 0xc5, 0xb1, 0xae, + 0xfa, 0x09, 0xac, 0xcf, 0x19, 0x41, 0x05, 0xc8, 0x1f, 0xe3, 0xa3, 0xe3, 0xa3, 0x6e, 0xa7, 0x5d, + 0x5c, 0x11, 0xbb, 0xce, 0x97, 0x9d, 0xd6, 0x89, 0xdd, 0x69, 0x17, 0x0d, 0x04, 0x90, 0xdd, 0x6f, + 0x1c, 0xde, 0xef, 0xb4, 0x8b, 0xab, 0xcd, 0xcf, 0xce, 0x2e, 0xca, 0xc6, 0x1f, 0x17, 0x65, 0xe3, + 0xaf, 0x8b, 0xb2, 0xf1, 0xfb, 0xb3, 0xb2, 0x71, 0xf6, 0xac, 0x6c, 0x7c, 0x75, 0xfb, 0xe5, 0xb7, + 0xe6, 0x51, 0x58, 0x57, 0x51, 0xf4, 0xb2, 0xf2, 0xaf, 0x7b, 0xf7, 0xdf, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x8c, 0x43, 0x16, 0xe2, 0x52, 0x0b, 0x00, 0x00, } func (m *Any) Marshal() (dAtA []byte, err error) { @@ -1347,8 +1347,8 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { return 0, err } i += n15 - if len(m.Abis) > 0 { - for _, msg := range m.Abis { + if len(m.ContractMeta) > 0 { + for _, msg := range m.ContractMeta { dAtA[i] = 0x3a i++ i = encodeVarintPayload(dAtA, i, uint64(msg.Size())) @@ -1365,7 +1365,7 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { return i, nil } -func (m *Abis) Marshal() (dAtA []byte, err error) { +func (m *ContractMeta) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalTo(dAtA) @@ -1375,7 +1375,7 @@ func (m *Abis) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *Abis) MarshalTo(dAtA []byte) (int, error) { +func (m *ContractMeta) MarshalTo(dAtA []byte) (int, error) { var i int _ = i var l int @@ -1388,11 +1388,11 @@ func (m *Abis) MarshalTo(dAtA []byte) (int, error) { return 0, err } i += n16 - if len(m.Abi) > 0 { + if len(m.Meta) > 0 { dAtA[i] = 0x12 i++ - i = encodeVarintPayload(dAtA, i, uint64(len(m.Abi))) - i += copy(dAtA[i:], m.Abi) + i = encodeVarintPayload(dAtA, i, uint64(len(m.Meta))) + i += copy(dAtA[i:], m.Meta) } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) @@ -2003,8 +2003,8 @@ func (m *CallTx) Size() (n int) { n += 1 + l + sovPayload(uint64(l)) l = m.WASM.Size() n += 1 + l + sovPayload(uint64(l)) - if len(m.Abis) > 0 { - for _, e := range m.Abis { + if len(m.ContractMeta) > 0 { + for _, e := range m.ContractMeta { l = e.Size() n += 1 + l + sovPayload(uint64(l)) } @@ -2015,7 +2015,7 @@ func (m *CallTx) Size() (n int) 
{ return n } -func (m *Abis) Size() (n int) { +func (m *ContractMeta) Size() (n int) { if m == nil { return 0 } @@ -2023,7 +2023,7 @@ func (m *Abis) Size() (n int) { _ = l l = m.CodeHash.Size() n += 1 + l + sovPayload(uint64(l)) - l = len(m.Abi) + l = len(m.Meta) if l > 0 { n += 1 + l + sovPayload(uint64(l)) } @@ -3167,7 +3167,7 @@ func (m *CallTx) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 7: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Abis", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ContractMeta", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -3194,8 +3194,8 @@ func (m *CallTx) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Abis = append(m.Abis, &Abis{}) - if err := m.Abis[len(m.Abis)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.ContractMeta = append(m.ContractMeta, &ContractMeta{}) + if err := m.ContractMeta[len(m.ContractMeta)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -3224,7 +3224,7 @@ func (m *CallTx) Unmarshal(dAtA []byte) error { } return nil } -func (m *Abis) Unmarshal(dAtA []byte) error { +func (m *ContractMeta) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -3247,10 +3247,10 @@ func (m *Abis) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Abis: wiretype end group for non-group") + return fmt.Errorf("proto: ContractMeta: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Abis: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ContractMeta: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -3288,7 +3288,7 @@ func (m *Abis) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Abi", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Meta", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -3316,7 +3316,7 @@ func (m *Abis) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Abi = string(dAtA[iNdEx:postIndex]) + m.Meta = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex diff --git a/vent/service/abis.go b/vent/service/abis.go index 4e6631c05..84d80dde9 100644 --- a/vent/service/abis.go +++ b/vent/service/abis.go @@ -39,23 +39,23 @@ func NewAbiProvider(paths []string, cli rpcquery.QueryClient) (provider *AbiProv func (p *AbiProvider) GetEventAbi(eventID abi.EventID, address crypto.Address, l *logging.Logger) (*abi.EventSpec, error) { evAbi, ok := p.abiSpec.EventsByID[eventID] if !ok { - resp, err := p.cli.GetAbi(context.Background(), &rpcquery.GetAbiParam{Address: address}) + resp, err := p.cli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: address}) if err != nil { l.InfoMsg("Error retrieving abi for event", "address", address.String(), "eventid", eventID.String(), "error", err) return nil, err } - if resp == nil || resp.Abi == "" { + if resp == nil || resp.Metadata == "" { l.InfoMsg("ABI not found for contract", "address", address.String(), "eventid", eventID.String()) return nil, fmt.Errorf("No ABI present for contract at address %v", address) } - a, err := abi.ReadSpec([]byte(resp.Abi)) + a, err := abi.ReadSpec([]byte(resp.Metadata)) if err != nil { - l.InfoMsg("Failed to parse abi", "address", 
address.String(), "eventid", eventID.String(), "abi", resp.Abi) + l.InfoMsg("Failed to parse abi", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) return nil, err } evAbi, ok = a.EventsByID[eventID] if !ok { - l.InfoMsg("Event missing from ABI spec for contract", "address", address.String(), "eventid", eventID.String(), "abi", resp.Abi) + l.InfoMsg("Event missing from ABI spec for contract", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) return nil, fmt.Errorf("Event missing from ABI spec for contract") } From 5bb22a515475eca57a84c9b6613ba2742376b0f2 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Fri, 19 Jul 2019 13:37:13 +0100 Subject: [PATCH 47/70] Introduce "burrow accounts" This lists all the accounts in a burrow server and metadata: Account: EFF6A1B21885946E18238A64F1004E398F2ADD5E Sequence: 0 EVM Code: 6080604052348015600F57600080FD5B50600436106045576000357C0100000000000000000000000000000000000000000000000000000000900480630188436814604A575B600080FD5B60506052565B005B7F4648813AACFA2D88DE47275BCC22E03BEDC6070DAA3E85BEAC3C31B1A30D162C30604051808273FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1673FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF16815260200180602001828103825260128152602001807F54686973206973206269672068756D70657200000000000000000000000000008152506020019250505060405180910390A156FEA165627A7A723058200272B57592245C750F51B3EF6CBF6CD85761E1C4D7E99E5D99A1045DEFCD58540029 Contract Name: Foo Source File: foo.sol Functions: foo() returns () Events: FooEvent(address,string) Signed-off-by: Sean Young --- cmd/burrow/commands/accounts.go | 95 +++++++++++++++++++++++++++++++++ cmd/burrow/main.go | 3 ++ execution/evm/abi/abi.go | 35 +++++++++--- 3 files changed, 126 insertions(+), 7 deletions(-) create mode 100644 cmd/burrow/commands/accounts.go diff --git a/cmd/burrow/commands/accounts.go b/cmd/burrow/commands/accounts.go new file mode 100644 index 000000000..31e52e606 --- /dev/null +++ b/cmd/burrow/commands/accounts.go @@ -0,0 +1,95 @@ +package commands + +import ( + "context" + "encoding/json" + "time" + + "github.com/hyperledger/burrow/deploy/compile" + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/rpc/rpcquery" + cli "github.com/jawher/mow.cli" + "google.golang.org/grpc" +) + +// Keys runs as either client or server +func Accounts(output Output) func(cmd *cli.Cmd) { + return func(cmd *cli.Cmd) { + chainURLOpt := cmd.StringOpt("c chain", "127.0.0.1:10997", "chain to be used in IP:PORT format") + timeoutOpt := cmd.IntOpt("t timeout", 0, "Timeout in seconds") + + cmd.Action = func() { + ctx, cancel := context.WithCancel(context.Background()) + if *timeoutOpt != 0 { + timeout := time.Duration(*timeoutOpt) * time.Second + ctx, cancel = context.WithTimeout(context.Background(), timeout) + } + defer cancel() + + var opts []grpc.DialOption + opts = append(opts, grpc.WithInsecure()) + conn, err := grpc.DialContext(ctx, *chainURLOpt, opts...) 
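			// Note: grpc.DialContext without grpc.WithBlock is non-blocking, so an
			// unreachable --chain address typically surfaces on the first RPC
			// (ListAccounts below) rather than from DialContext itself.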
+ if err != nil { + output.Fatalf("failed to connect: %v", err) + } + + qCli := rpcquery.NewQueryClient(conn) + + stream, err := qCli.ListAccounts(context.Background(), &rpcquery.ListAccountsParam{}) + if err != nil { + output.Fatalf("failed to list accounts: %v", err) + } + + for acc, err := stream.Recv(); err == nil; acc, err = stream.Recv() { + output.Printf("Account: %s\n Sequence: %d", + acc.Address.String(), acc.Sequence) + + if len(acc.PublicKey.PublicKey) > 0 { + output.Printf(" Public Key: %s\n", acc.PublicKey.String()) + } + if acc.WASMCode != nil && len(acc.WASMCode) > 0 { + output.Printf(" WASM Code: %s", acc.WASMCode.String()) + } + if acc.EVMCode != nil && len(acc.EVMCode) > 0 { + output.Printf(" EVM Code: %s", acc.EVMCode.String()) + } + + meta, err := qCli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: acc.Address}) + if err != nil { + output.Fatalf("failed to get metadata for %s: %v", acc.Address, err) + } + if meta.Metadata != "" { + var metadata compile.Metadata + err = json.Unmarshal([]byte(meta.Metadata), &metadata) + if err != nil { + output.Fatalf("failed to unmarshal metadata %s: %v", meta.Metadata, err) + } + + output.Printf(" Contract Name: %s", metadata.ContractName) + output.Printf(" Source File: %s", metadata.SourceFile) + + spec, err := abi.ReadSpec(metadata.Abi) + if err != nil { + output.Fatalf("failed to unmarshall abi %s: %v", string(metadata.Abi), err) + } + + if len(spec.Functions) > 0 { + output.Printf(" Functions:") + for name, f := range spec.Functions { + output.Printf(" %s", f.String(name)) + } + } + + if len(spec.EventsByID) > 0 { + output.Printf(" Events:") + for _, e := range spec.EventsByID { + output.Printf(" %s", e.String()) + } + } + } + + output.Printf("") + } + } + } +} diff --git a/cmd/burrow/main.go b/cmd/burrow/main.go index 208433ab3..508e827ac 100644 --- a/cmd/burrow/main.go +++ b/cmd/burrow/main.go @@ -78,6 +78,9 @@ func burrow(output commands.Output) *cli.Cli { app.Command("restore", "Restore new chain from backup", commands.Restore(output)) + app.Command("accounts", "List accounts and metadata", + commands.Accounts(output)) + return app } diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index f7c30b1a8..8bec5601a 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -573,30 +573,51 @@ func SpecFromFunctionReflect(fname string, v reflect.Value, skipIn, skipOut int) return &s } -func Signature(name string, args []Argument) (sig string) { - sig = name + "(" +func argsToSignature(args []Argument) (str string) { + str = "(" for i, a := range args { if i > 0 { - sig += "," + str += "," + } + str += a.EVM.GetSignature() + if a.Indexed { + str += " indexed" } - sig += a.EVM.GetSignature() if a.IsArray { if a.ArrayLength > 0 { - sig += fmt.Sprintf("[%d]", a.ArrayLength) + str += fmt.Sprintf("[%d]", a.ArrayLength) } else { - sig += "[]" + str += "[]" } } } - sig += ")" + str += ")" return } +func Signature(name string, args []Argument) string { + return name + argsToSignature(args) +} + func (functionSpec *FunctionSpec) SetFunctionID(functionName string) { sig := Signature(functionName, functionSpec.Inputs) functionSpec.FunctionID = GetFunctionID(sig) } +func (f *FunctionSpec) String(name string) string { + return name + argsToSignature(f.Inputs) + + " returns " + argsToSignature(f.Outputs) +} + +func (e *EventSpec) String() string { + str := e.Name + argsToSignature(e.Inputs) + if e.Anonymous { + str += " anonymous" + } + + return str +} + func (fs FunctionID) Bytes() []byte { return 
fs[:] } From 34503e3a7ce5eddd352d7c8f06717ac61e6bfbba Mon Sep 17 00:00:00 2001 From: Sean Young Date: Fri, 19 Jul 2019 14:30:03 +0100 Subject: [PATCH 48/70] Add review comments Signed-off-by: Sean Young --- deploy/def/client.go | 1 + execution/contexts/call_context.go | 2 +- execution/evm/abi/abi.go | 12 ++++----- execution/evm/state.go | 40 ++++++++++++++++++------------ execution/evm/state_test.go | 2 +- execution/evm/vm.go | 2 +- execution/evm/vm_test.go | 8 +++--- rpc/rpcquery/query_server.go | 1 + 8 files changed, 39 insertions(+), 29 deletions(-) diff --git a/deploy/def/client.go b/deploy/def/client.go index 539d8828c..4a5a70d71 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -152,6 +152,7 @@ func (c *Client) GetMetadataForAccount(address crypto.Address) (string, error) { return metadata.Metadata, nil } +// GetMetadata is required for us to implement acmstate.Reader, but it is not used func (c *Client) GetMetadata(metahash acmstate.MetadataHash) (string, error) { panic("not implemented") return "", nil diff --git a/execution/contexts/call_context.go b/execution/contexts/call_context.go index 3f4131db2..8fb38193a 100644 --- a/execution/contexts/call_context.go +++ b/execution/contexts/call_context.go @@ -240,7 +240,7 @@ func (ctx *CallContext) Deliver(inAcc, outAcc *acm.Account, value uint64) error } else { ctx.Logger.TraceMsg("Successful execution") if createContract { - txCache.InitCode(callee, nil, ret) + txCache.InitCode(callee, ret) } err := txCache.Sync() if err != nil { diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index 8bec5601a..f5552584f 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -573,14 +573,14 @@ func SpecFromFunctionReflect(fname string, v reflect.Value, skipIn, skipOut int) return &s } -func argsToSignature(args []Argument) (str string) { +func argsToSignature(args []Argument, addIndexed bool) (str string) { str = "(" for i, a := range args { if i > 0 { str += "," } str += a.EVM.GetSignature() - if a.Indexed { + if addIndexed && a.Indexed { str += " indexed" } if a.IsArray { @@ -596,7 +596,7 @@ func argsToSignature(args []Argument) (str string) { } func Signature(name string, args []Argument) string { - return name + argsToSignature(args) + return name + argsToSignature(args, false) } func (functionSpec *FunctionSpec) SetFunctionID(functionName string) { @@ -605,12 +605,12 @@ func (functionSpec *FunctionSpec) SetFunctionID(functionName string) { } func (f *FunctionSpec) String(name string) string { - return name + argsToSignature(f.Inputs) + - " returns " + argsToSignature(f.Outputs) + return name + argsToSignature(f.Inputs, false) + + " returns " + argsToSignature(f.Outputs, false) } func (e *EventSpec) String() string { - str := e.Name + argsToSignature(e.Inputs) + str := e.Name + argsToSignature(e.Inputs, true) if e.Anonymous { str += " anonymous" } diff --git a/execution/evm/state.go b/execution/evm/state.go index b8863add9..d9db08f68 100644 --- a/execution/evm/state.go +++ b/execution/evm/state.go @@ -43,7 +43,8 @@ type Reader interface { type Writer interface { CreateAccount(address crypto.Address) InitWASMCode(address crypto.Address, code []byte) - InitCode(address crypto.Address, forebear *crypto.Address, code []byte) + InitCode(address crypto.Address, code []byte) + InitChildCode(address crypto.Address, forebear crypto.Address, code []byte) RemoveAccount(address crypto.Address) SetStorage(address crypto.Address, key binary.Word256, value []byte) AddToBalance(address crypto.Address, amount uint64) 
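// A minimal sketch of how callers use the split API above, where st is a
// *State (or any Writer) and child, parent and code are illustrative values:
//
//	st.InitCode(child, code)              // plain deployment: no forebear recorded
//	st.InitChildCode(child, parent, code) // contract-created contract: parent becomes the forebear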
@@ -203,7 +204,16 @@ func (st *State) CreateAccount(address crypto.Address) { st.updateAccount(&acm.Account{Address: address}) } -func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code []byte) { +func (st *State) InitCode(address crypto.Address, code []byte) { + st.initCode(address, nil, code) +} + +func (st *State) InitChildCode(address crypto.Address, parent crypto.Address, code []byte) { + st.initCode(address, &parent, code) + +} + +func (st *State) initCode(address crypto.Address, parent *crypto.Address, code []byte) { acc := st.mustAccount(address) if acc == nil { st.PushError(errors.ErrorCodef(errors.ErrorCodeInvalidAddress, @@ -240,24 +250,12 @@ func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code [ // If we have a list of ABIs for this contract, we also know what contract code it is allowed to create // For compatibility with older contracts, allow any contract to be created if we have no mappings if metamap != nil && len(metamap) > 0 { - found := false - for _, m := range metamap { - if bytes.Equal(codehash, m.CodeHash) { - found = true - break - } - } + found := codehashPermitted(codehash, metamap) // Libraries lie about their deployed bytecode if !found { deployCodehash := compile.GetDeployCodeHash(code, address) - - for _, m := range metamap { - if bytes.Equal(deployCodehash, m.CodeHash) { - found = true - break - } - } + found = codehashPermitted(deployCodehash, metamap) } if !found { @@ -272,6 +270,16 @@ func (st *State) InitCode(address crypto.Address, parent *crypto.Address, code [ st.updateAccount(acc) } +func codehashPermitted(codehash []byte, metamap []*acm.ContractMeta) bool { + for _, m := range metamap { + if bytes.Equal(codehash, m.CodeHash) { + return true + } + } + + return false +} + func (st *State) InitWASMCode(address crypto.Address, code []byte) { acc := st.mustAccount(address) if acc == nil { diff --git a/execution/evm/state_test.go b/execution/evm/state_test.go index dd2035c7c..dfe7b2780 100644 --- a/execution/evm/state_test.go +++ b/execution/evm/state_test.go @@ -35,7 +35,7 @@ func TestState_CreateAccount(t *testing.T) { st = NewState(newAppState(), blockHashGetter) st.CreateAccount(address) require.Nil(t, st.Error()) - st.InitCode(address, nil, []byte{1, 2, 3}) + st.InitCode(address, []byte{1, 2, 3}) require.Nil(t, st.Error()) } diff --git a/execution/evm/vm.go b/execution/evm/vm.go index 23ce1898e..f140e3407 100644 --- a/execution/evm/vm.go +++ b/execution/evm/vm.go @@ -814,7 +814,7 @@ func (vm *VM) execute(callState Interface, eventSink EventSink, caller, callee c } else { // Update the account with its initialised contract code forebear := callState.GetForebear(callee) - childCallState.InitCode(newAccount, &forebear, ret) + childCallState.InitChildCode(newAccount, forebear, ret) callState.PushError(childCallState.Sync()) stack.PushAddress(newAccount) } diff --git a/execution/evm/vm_test.go b/execution/evm/vm_test.go index 4cb1fdfb2..8a1af4a41 100644 --- a/execution/evm/vm_test.go +++ b/execution/evm/vm_test.go @@ -94,7 +94,7 @@ func newAccount(st Interface, name string) crypto.Address { func makeAccountWithCode(st Interface, name string, code []byte) crypto.Address { address := newAddress(name) st.CreateAccount(address) - st.InitCode(address, nil, code) + st.InitCode(address, code) st.AddToBalance(address, 9999999) return address } @@ -999,7 +999,7 @@ func TestMsgSender(t *testing.T) { // Not needed for this test (since contract code is passed as argument to vm), but this is what an execution // 
framework must do - cache.InitCode(account2, nil, contractCode) + cache.InitCode(account2, contractCode) // Input is the function hash of `get()` input := hex.MustDecodeString("6d4ce63c") @@ -1429,8 +1429,8 @@ func TestCallStackOverflow(t *testing.T) { contractCode, err := ourVm.Call(cache, NewNoopEventSink(), account1, account2, code, code, 0, &gas) require.NoError(t, err) - cache.InitCode(account1, nil, contractCode) - cache.InitCode(account2, nil, contractCode) + cache.InitCode(account1, contractCode) + cache.InitCode(account2, contractCode) // keccak256 hash of 'callMeBack()' input, err := hex.DecodeString("692c3b7c") diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index 8de7d89e1..16c6d337d 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -61,6 +61,7 @@ func (qs *queryServer) GetAccount(ctx context.Context, param *GetAccountParam) ( return acc, err } +// GetMetadata returns empty metadata string if not found func (qs *queryServer) GetMetadata(ctx context.Context, param *GetMetadataParam) (*MetadataResult, error) { metadata := MetadataResult{} acc, err := qs.accounts.GetAccount(param.Address) From 56d45b45f3cac3084242477ebc655d3cb29b9409 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Fri, 19 Jul 2019 15:25:56 +0100 Subject: [PATCH 49/70] Add compiler version Signed-off-by: Sean Young --- cmd/burrow/commands/accounts.go | 1 + deploy/compile/compilers.go | 33 +++++++++++++++++++++++++++------ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/cmd/burrow/commands/accounts.go b/cmd/burrow/commands/accounts.go index 31e52e606..3558d0e39 100644 --- a/cmd/burrow/commands/accounts.go +++ b/cmd/burrow/commands/accounts.go @@ -67,6 +67,7 @@ func Accounts(output Output) func(cmd *cli.Cmd) { output.Printf(" Contract Name: %s", metadata.ContractName) output.Printf(" Source File: %s", metadata.SourceFile) + output.Printf(" Compiler version: %s", metadata.CompilerVersion) spec, err := abi.ReadSpec(metadata.Abi) if err != nil { diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index 20523d64b..0ff22ab9a 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -79,10 +79,28 @@ type ContractCode struct { LinkReferences json.RawMessage } +// SolidityMetadata is the json field metadata +type SolidityMetadata struct { + Version string + // The solidity compiler needs to tell us it compiles solidity + Language string + Compiler struct { + Version string + Keccak256 string + } + Sources map[string]struct { + Keccak256 string + Content string + Urls []string + } + // Other fields elided, see https://solidity.readthedocs.io/en/v0.5.10/metadata.html +} + type Metadata struct { - ContractName string - SourceFile string - Abi json.RawMessage + ContractName string + SourceFile string + CompilerVersion string + Abi json.RawMessage } type MetadataMap struct { @@ -248,13 +266,16 @@ func EVM(file string, optimize bool, workDir string, libraries map[string]string metamap := make([]MetadataMap, 0) for filename, src := range output.Contracts { for contractname, item := range src { + var meta SolidityMetadata + _ = json.Unmarshal([]byte(item.Metadata), &meta) if item.Evm.DeployedBytecode.Object != "" { metamap = append(metamap, MetadataMap{ DeployedBytecode: item.Evm.DeployedBytecode, Metadata: Metadata{ - ContractName: contractname, - SourceFile: filename, - Abi: item.Abi, + ContractName: contractname, + SourceFile: filename, + CompilerVersion: meta.Compiler.Version, + Abi: item.Abi, }, }) } From 
642bceedcf4f011b9c011bb0b0ac48a2e7a39721 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Fri, 19 Jul 2019 15:45:49 +0100 Subject: [PATCH 50/70] Don't use burrow-side ABI if --local-abi is specified at deploy time Signed-off-by: Sean Young --- cmd/burrow/commands/deploy.go | 3 +++ deploy/def/deploy.go | 1 + deploy/jobs/jobs_contracts.go | 6 +++++- tests/jobs_fixtures/app47-constructor-arguments/deploy.yaml | 3 +++ 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/cmd/burrow/commands/deploy.go b/cmd/burrow/commands/deploy.go index ec78ae590..f1ec956c9 100644 --- a/cmd/burrow/commands/deploy.go +++ b/cmd/burrow/commands/deploy.go @@ -59,6 +59,8 @@ func Deploy(output Output) func(cmd *cli.Cmd) { verboseOpt := cmd.BoolOpt("v verbose", false, "verbose output") + localAbiOpt := cmd.BoolOpt("local-abi", false, "use local ABIs rather than fetching them from burrow") + wasmOpt := cmd.BoolOpt("wasm", false, "Compile to WASM using solang (experimental)") debugOpt := cmd.BoolOpt("d debug", false, "debug level output") @@ -97,6 +99,7 @@ func Deploy(output Output) func(cmd *cli.Cmd) { args.MempoolSign = *mempoolSigningOpt args.Timeout = *timeoutSecondsOpt args.Path = *pathOpt + args.LocalABI = *localAbiOpt args.Wasm = *wasmOpt args.DefaultOutput = *defaultOutputOpt args.DefaultSets = *defaultSetsOpt diff --git a/deploy/def/deploy.go b/deploy/def/deploy.go index 7dc1b9fe1..f43eeadad 100644 --- a/deploy/def/deploy.go +++ b/deploy/def/deploy.go @@ -11,6 +11,7 @@ type DeployArgs struct { Chain string `mapstructure:"," json:"," yaml:"," toml:","` KeysService string `mapstructure:"," json:"," yaml:"," toml:","` MempoolSign bool `mapstructure:"," json:"," yaml:"," toml:","` + LocalABI bool `mapstructure:"," json:"," yaml:"," toml:","` Wasm bool `mapstructure:"," json:"," yaml:"," toml:","` Timeout int `mapstructure:"," json:"," yaml:"," toml:","` Address string `mapstructure:"," json:"," yaml:"," toml:","` diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index 3abd4a887..cd6a8e085 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -446,8 +446,12 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play // formulate call var packedBytes []byte var funcSpec *abi.FunctionSpec + var abiJSON string + + if !do.LocalABI { + abiJSON, err = client.GetMetadataForAccount(address) + } - abiJSON, err := client.GetMetadataForAccount(address) if abiJSON != "" && err == nil { packedBytes, funcSpec, err = abi.EncodeFunctionCall(abiJSON, call.Function, logger, callDataArray...) if err != nil { diff --git a/tests/jobs_fixtures/app47-constructor-arguments/deploy.yaml b/tests/jobs_fixtures/app47-constructor-arguments/deploy.yaml index a98356e72..7f6da8ebb 100644 --- a/tests/jobs_fixtures/app47-constructor-arguments/deploy.yaml +++ b/tests/jobs_fixtures/app47-constructor-arguments/deploy.yaml @@ -1,3 +1,6 @@ +# This uses a bin file without any metadata to test deployment. +# This means uses local ABIs rather than burrow-side ABIs. 
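# For example, an illustrative invocation of this fixture with the new
# --local-abi flag (assuming default chain and key settings):
#   burrow deploy --local-abi deploy.yaml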
+ jobs: - name: setStorageBaseString1 From ba8efb4e883504803f5fa9ae69b8ef341fbfa211 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 22 Jul 2019 07:11:58 +0100 Subject: [PATCH 51/70] Review comments, round 2 Signed-off-by: Sean Young --- acm/acmstate/memory_state.go | 16 +-- acm/acmstate/state_cache.go | 34 +++++-- cmd/burrow/commands/accounts.go | 4 +- deploy/compile/compilers.go | 4 +- deploy/def/client.go | 14 ++- deploy/jobs/jobs_contracts.go | 4 +- integration/rpctransact/call_test.go | 6 +- protobuf/acm.proto | 2 + protobuf/rpcquery.proto | 3 +- rpc/rpcquery/query_server.go | 60 +++++++----- rpc/rpcquery/rpcquery.pb.go | 139 ++++++++++++++------------- vent/service/abis.go | 2 +- 12 files changed, 167 insertions(+), 121 deletions(-) diff --git a/acm/acmstate/memory_state.go b/acm/acmstate/memory_state.go index 6e7f78730..300163792 100644 --- a/acm/acmstate/memory_state.go +++ b/acm/acmstate/memory_state.go @@ -9,9 +9,9 @@ import ( ) type MemoryState struct { - Accounts map[crypto.Address]*acm.Account - Storage map[crypto.Address]map[binary.Word256][]byte - Metadatas map[MetadataHash]string + Accounts map[crypto.Address]*acm.Account + Storage map[crypto.Address]map[binary.Word256][]byte + Metadata map[MetadataHash]string } var _ IterableReaderWriter = &MemoryState{} @@ -19,9 +19,9 @@ var _ IterableReaderWriter = &MemoryState{} // Get an in-memory state IterableReader func NewMemoryState() *MemoryState { return &MemoryState{ - Accounts: make(map[crypto.Address]*acm.Account), - Storage: make(map[crypto.Address]map[binary.Word256][]byte), - Metadatas: make(map[MetadataHash]string), + Accounts: make(map[crypto.Address]*acm.Account), + Storage: make(map[crypto.Address]map[binary.Word256][]byte), + Metadata: make(map[MetadataHash]string), } } @@ -38,11 +38,11 @@ func (ms *MemoryState) UpdateAccount(updatedAccount *acm.Account) error { } func (ms *MemoryState) GetMetadata(metahash MetadataHash) (string, error) { - return ms.Metadatas[metahash], nil + return ms.Metadata[metahash], nil } func (ms *MemoryState) SetMetadata(metahash MetadataHash, metadata string) error { - ms.Metadatas[metahash] = metadata + ms.Metadata[metahash] = metadata return nil } diff --git a/acm/acmstate/state_cache.go b/acm/acmstate/state_cache.go index 0dd58682c..ef069a24d 100644 --- a/acm/acmstate/state_cache.go +++ b/acm/acmstate/state_cache.go @@ -111,15 +111,12 @@ func (cache *Cache) UpdateAccount(account *acm.Account) error { } func (cache *Cache) GetMetadata(metahash MetadataHash) (string, error) { - cache.RLock() - defer cache.RUnlock() - - metadataInfo, ok := cache.metadata[metahash] - if ok { - return metadataInfo.metadata, nil + metaInfo, err := cache.getMetadata(metahash) + if err != nil { + return "", err } - return "", nil + return metaInfo.metadata, nil } func (cache *Cache) SetMetadata(metahash MetadataHash, metadata string) error { @@ -341,3 +338,26 @@ func (cache *Cache) get(address crypto.Address) (*accountInfo, error) { } return accInfo, nil } + +// Get the cache accountInfo item creating it if necessary +func (cache *Cache) getMetadata(metahash MetadataHash) (*metadataInfo, error) { + cache.RLock() + metaInfo := cache.metadata[metahash] + cache.RUnlock() + if metaInfo == nil { + cache.Lock() + defer cache.Unlock() + metaInfo = cache.metadata[metahash] + if metaInfo == nil { + metadata, err := cache.backend.GetMetadata(metahash) + if err != nil { + return nil, err + } + metaInfo = &metadataInfo{ + metadata: metadata, + } + cache.metadata[metahash] = metaInfo + } + } + return metaInfo, nil +} diff 
--git a/cmd/burrow/commands/accounts.go b/cmd/burrow/commands/accounts.go index 3558d0e39..7e9ff0262 100644 --- a/cmd/burrow/commands/accounts.go +++ b/cmd/burrow/commands/accounts.go @@ -12,7 +12,7 @@ import ( "google.golang.org/grpc" ) -// Keys runs as either client or server +// Accounts lists all the accounts in a chain, alongside with any metadata like contract name and ABI func Accounts(output Output) func(cmd *cli.Cmd) { return func(cmd *cli.Cmd) { chainURLOpt := cmd.StringOpt("c chain", "127.0.0.1:10997", "chain to be used in IP:PORT format") @@ -54,7 +54,7 @@ func Accounts(output Output) func(cmd *cli.Cmd) { output.Printf(" EVM Code: %s", acc.EVMCode.String()) } - meta, err := qCli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: acc.Address}) + meta, err := qCli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &acc.Address}) if err != nil { output.Fatalf("failed to get metadata for %s: %v", acc.Address, err) } diff --git a/deploy/compile/compilers.go b/deploy/compile/compilers.go index 0ff22ab9a..5772a22eb 100644 --- a/deploy/compile/compilers.go +++ b/deploy/compile/compilers.go @@ -414,8 +414,8 @@ func PrintResponse(resp Response, cli bool, logger *logging.Logger) { } } -// GetMetadatas get the CodeHashes + Abis for the generated Code. So, we have a map for all the possible contracts codes hashes to abis -func (contract *SolidityContract) GetMetadatas(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { +// GetMetadata get the CodeHashes + Abis for the generated Code. So, we have a map for all the possible contracts codes hashes to abis +func (contract *SolidityContract) GetMetadata(logger *logging.Logger) (map[acmstate.CodeHash]string, error) { res := make(map[acmstate.CodeHash]string) if contract.Evm.DeployedBytecode.Object == "" { return nil, nil diff --git a/deploy/def/client.go b/deploy/def/client.go index 4a5a70d71..f0023f96b 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -144,7 +144,7 @@ func (c *Client) GetAccount(address crypto.Address) (*acm.Account, error) { func (c *Client) GetMetadataForAccount(address crypto.Address) (string, error) { ctx, cancel := context.WithTimeout(context.Background(), c.timeout) defer cancel() - metadata, err := c.queryClient.GetMetadata(ctx, &rpcquery.GetMetadataParam{Address: address}) + metadata, err := c.queryClient.GetMetadata(ctx, &rpcquery.GetMetadataParam{Address: &address}) if err != nil { return "", err } @@ -152,10 +152,16 @@ func (c *Client) GetMetadataForAccount(address crypto.Address) (string, error) { return metadata.Metadata, nil } -// GetMetadata is required for us to implement acmstate.Reader, but it is not used func (c *Client) GetMetadata(metahash acmstate.MetadataHash) (string, error) { - panic("not implemented") - return "", nil + ctx, cancel := context.WithTimeout(context.Background(), c.timeout) + defer cancel() + var bs binary.HexBytes = metahash.Bytes() + metadata, err := c.queryClient.GetMetadata(ctx, &rpcquery.GetMetadataParam{MetadataHash: &bs}) + if err != nil { + return "", err + } + + return metadata.Metadata, nil } func (c *Client) GetStorage(address crypto.Address, key binary.Word256) ([]byte, error) { diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index cd6a8e085..4c46954b1 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -164,7 +164,7 @@ func FormulateDeployJob(deploy *def.Deploy, do *def.DeployArgs, deployScript *de contractCode = contractCode + callData } - metaMap, err := 
contract.GetMetadatas(logger) + metaMap, err := contract.GetMetadata(logger) if err != nil { return nil, nil, err } @@ -369,7 +369,7 @@ func deployContract(deploy *def.Deploy, do *def.DeployArgs, script *def.Playbook } data = contract.Evm.Bytecode.Object - metaMap, err = contract.GetMetadatas(logger) + metaMap, err = contract.GetMetadata(logger) if err != nil { return nil, err } diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index cc0286eff..6f98b4d28 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -308,7 +308,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) addressA := lastCall(createTxe.Events).CallData.Callee // Check ABI for new contract A qcli := rpctest.NewQueryClient(t, kern.GRPCListenAddress().String()) - res, err := qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressA}) + res, err := qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &addressA}) require.NoError(t, err) assert.Equal(t, res.Metadata, string(solidity.Abi_A)) // CreateB @@ -321,7 +321,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) var addressB crypto.Address err = spec.Unpack(callTxe.Result.Return, "createB", &addressB) // check ABI for contract B - res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressB}) + res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &addressB}) require.NoError(t, err) assert.Equal(t, res.Metadata, string(solidity.Abi_B)) // CreateC @@ -334,7 +334,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) var addressC crypto.Address err = spec.Unpack(callTxe.Result.Return, "createC", &addressC) // check abi for contract C - res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: addressC}) + res, err = qcli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &addressC}) require.NoError(t, err) assert.Equal(t, res.Metadata, string(solidity.Abi_C)) return diff --git a/protobuf/acm.proto b/protobuf/acm.proto index 8a18aa2cf..8df8ef6fa 100644 --- a/protobuf/acm.proto +++ b/protobuf/acm.proto @@ -27,6 +27,8 @@ message Account { bytes WASMCode = 7 [(gogoproto.customtype) = "Bytecode", (gogoproto.jsontag) = ",omitempty", (gogoproto.nullable) = false]; bytes CodeHash = 8 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes", (gogoproto.nullable) = false, (gogoproto.jsontag) = "-"]; repeated ContractMeta ContractMeta = 9; + // The metadata is stored in the deployed account. When the deployed account creates new account (from Solidity/EVM), they point to the original deployed + // account where the metadata is stored. This original account is called the forebear. 
bytes Forebear = 10 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address"]; } diff --git a/protobuf/rpcquery.proto b/protobuf/rpcquery.proto index 353646e36..f92e1931e 100644 --- a/protobuf/rpcquery.proto +++ b/protobuf/rpcquery.proto @@ -50,7 +50,8 @@ message GetAccountParam { } message GetMetadataParam { - bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address", (gogoproto.nullable) = false]; + bytes Address = 1 [(gogoproto.customtype) = "github.com/hyperledger/burrow/crypto.Address"]; + bytes MetadataHash = 2 [(gogoproto.customtype) = "github.com/hyperledger/burrow/binary.HexBytes"]; } message MetadataResult { diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index 16c6d337d..e20274c8c 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -61,41 +61,49 @@ func (qs *queryServer) GetAccount(ctx context.Context, param *GetAccountParam) ( return acc, err } -// GetMetadata returns empty metadata string if not found +// GetMetadata returns empty metadata string if not found. Metadata can be retrieved by account, or +// by metadata hash func (qs *queryServer) GetMetadata(ctx context.Context, param *GetMetadataParam) (*MetadataResult, error) { metadata := MetadataResult{} - acc, err := qs.accounts.GetAccount(param.Address) - if err != nil { - return &metadata, err - } - if acc != nil && acc.CodeHash != nil { - codehash := acc.CodeHash - if acc.Forebear != nil { - acc, err = qs.accounts.GetAccount(*acc.Forebear) - if err != nil { - return &metadata, err - } + var metahash acmstate.MetadataHash + var err error + if param.Address != nil { + acc, err := qs.accounts.GetAccount(*param.Address) + if err != nil { + return &metadata, err } + if acc != nil && acc.CodeHash != nil { + codehash := acc.CodeHash + if acc.Forebear != nil { + acc, err = qs.accounts.GetAccount(*acc.Forebear) + if err != nil { + return &metadata, err + } + } - for _, m := range acc.ContractMeta { - if bytes.Equal(m.CodeHash, codehash) { - var metahash acmstate.MetadataHash - copy(metahash[:], m.MetadataHash) - metadata.Metadata, err = qs.accounts.GetMetadata(metahash) - return &metadata, err + found := false + for _, m := range acc.ContractMeta { + if bytes.Equal(m.CodeHash, codehash) { + copy(metahash[:], m.MetadataHash) + found = true + break + } } - } - deployCodehash := compile.GetDeployCodeHash(acc.EVMCode, param.Address) - for _, m := range acc.ContractMeta { - if bytes.Equal(m.CodeHash, deployCodehash) { - var metahash acmstate.MetadataHash - copy(metahash[:], m.MetadataHash) - metadata.Metadata, err = qs.accounts.GetMetadata(metahash) - return &metadata, err + if !found { + deployCodehash := compile.GetDeployCodeHash(acc.EVMCode, *param.Address) + for _, m := range acc.ContractMeta { + if bytes.Equal(m.CodeHash, deployCodehash) { + copy(metahash[:], m.MetadataHash) + break + } + } } } + } else if param.MetadataHash != nil { + copy(metahash[:], *param.MetadataHash) } + metadata.Metadata, err = qs.accounts.GetMetadata(metahash) return &metadata, err } diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index 335fa08f4..ee44c1ed5 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -119,10 +119,11 @@ func (*GetAccountParam) XXX_MessageName() string { } type GetMetadataParam struct { - Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` - XXX_NoUnkeyedLiteral 
struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Address *github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address,omitempty"` + MetadataHash *github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,2,opt,name=MetadataHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"MetadataHash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *GetMetadataParam) Reset() { *m = GetMetadataParam{} } @@ -865,65 +866,67 @@ func init() { proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f func init() { golang_proto.RegisterFile("rpcquery.proto", fileDescriptor_88e25d9b99e39f02) } var fileDescriptor_88e25d9b99e39f02 = []byte{ - // 923 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xef, 0x8e, 0xdb, 0x44, - 0x10, 0xc7, 0xbd, 0x5e, 0x2e, 0x37, 0xc9, 0x25, 0xed, 0xf6, 0x08, 0xc1, 0x85, 0xb4, 0x5a, 0x89, - 0xeb, 0x51, 0xb5, 0x4e, 0x14, 0x7a, 0x80, 0x00, 0x09, 0x35, 0x15, 0x24, 0xa7, 0xd2, 0xd3, 0xe1, - 0xa0, 0x56, 0x02, 0x09, 0x69, 0x63, 0x2f, 0x89, 0x85, 0xe3, 0x35, 0xeb, 0x75, 0xc1, 0x8f, 0xc4, - 0x53, 0xc0, 0xc7, 0x3e, 0x02, 0xe2, 0x43, 0x85, 0xda, 0x17, 0x41, 0xde, 0x3f, 0x8e, 0xed, 0x4b, - 0xab, 0x7e, 0xe9, 0x97, 0x68, 0x66, 0xf6, 0x37, 0x33, 0xf6, 0xec, 0xfc, 0x7e, 0x31, 0x74, 0x78, - 0xec, 0xfd, 0x96, 0x52, 0x9e, 0x39, 0x31, 0x67, 0x82, 0xa1, 0xa6, 0xf1, 0xed, 0xbb, 0xcb, 0x40, - 0xac, 0xd2, 0x85, 0xe3, 0xb1, 0xf5, 0x70, 0xc9, 0x96, 0x6c, 0x28, 0x01, 0x8b, 0xf4, 0x17, 0xe9, - 0x49, 0x47, 0x5a, 0x2a, 0xd1, 0xfe, 0xac, 0x04, 0x17, 0x34, 0xf2, 0x29, 0x5f, 0x07, 0x91, 0x28, - 0x9b, 0x64, 0xe1, 0x05, 0x43, 0x91, 0xc5, 0x34, 0x51, 0xbf, 0x3a, 0xb1, 0x15, 0x91, 0x75, 0xe1, - 0xec, 0x13, 0x6f, 0xad, 0xcd, 0xee, 0x53, 0x12, 0x06, 0x3e, 0x11, 0x8c, 0x9b, 0x33, 0x1e, 0x7b, - 0xda, 0x3c, 0x88, 0x49, 0x16, 0x32, 0xe2, 0x2b, 0x17, 0x07, 0xd0, 0x9a, 0x0b, 0x22, 0xd2, 0xe4, - 0x9c, 0x70, 0xb2, 0x46, 0xc7, 0xd0, 0x9d, 0x84, 0xcc, 0xfb, 0xf5, 0x87, 0x60, 0x4d, 0x9f, 0x04, - 0x62, 0x15, 0x44, 0x7d, 0xeb, 0xa6, 0x75, 0xbc, 0xef, 0xd6, 0xc3, 0x68, 0x04, 0xd7, 0x64, 0x68, - 0x4e, 0x69, 0x54, 0x42, 0x5f, 0x92, 0xe8, 0x6d, 0x47, 0x98, 0x40, 0x77, 0x4a, 0xc5, 0x7d, 0xcf, - 0x63, 0x69, 0x24, 0x54, 0xbb, 0x33, 0xd8, 0xbb, 0xef, 0xfb, 0x9c, 0x26, 0x89, 0x6c, 0xd3, 0x9e, - 0xdc, 0x7b, 0xf6, 0xfc, 0xc6, 0x3b, 0xff, 0x3e, 0xbf, 0x71, 0xa7, 0x34, 0x92, 0x55, 0x16, 0x53, - 0x1e, 0x52, 0x7f, 0x49, 0xf9, 0x70, 0x91, 0x72, 0xce, 0x7e, 0x1f, 0x7a, 0x3c, 0x8b, 0x05, 0x73, - 0x74, 0xae, 0x6b, 0x8a, 0xe0, 0x05, 0x5c, 0x99, 0x52, 0xf1, 0x88, 0x0a, 0xe2, 0x13, 0x41, 0xde, - 0x4e, 0x8f, 0x3b, 0xd0, 0x31, 0x0d, 0x5c, 0x9a, 0xa4, 0xa1, 0x40, 0x36, 0x34, 0x4d, 0x44, 0x4f, - 0xab, 0xf0, 0xf1, 0x9f, 0x96, 0x7c, 0xeb, 0xb9, 0x60, 0x9c, 0x2c, 0xe9, 0x5b, 0x79, 0x22, 0xf4, - 0x2d, 0xec, 0x3c, 0xa4, 0x99, 0x1c, 0xfd, 0x1b, 0xd7, 0x5a, 0x04, 0x11, 0xe1, 0x99, 0xf3, 0x84, - 0x71, 0x7f, 0x7c, 0xf2, 0xa9, 0x9b, 0x17, 0xc0, 0x3f, 0x41, 0x5b, 0x3f, 0xe7, 0x63, 0x12, 0xa6, - 0x14, 0x3d, 0x84, 0x5d, 0x69, 0xe8, 0xa7, 0x3c, 0xd1, 0x95, 0xef, 0xbe, 0x51, 0xe5, 0x19, 0xfd, - 0x63, 0x92, 0x09, 0x9a, 0xb8, 0xaa, 0x06, 0xfe, 0x18, 0xae, 0x7e, 0x17, 0x24, 0xe6, 0xfa, 0xf5, - 0xba, 0x1d, 0xc2, 0xee, 0xf7, 0x39, 0x63, 0xf4, 0xd8, 0x94, 0x83, 0x31, 0xb4, 0xa7, 0x54, 0x9c, - 0x91, 0xb5, 0x9e, 0x17, 0x82, 0xcb, 0xb9, 0xa3, 0x41, 0xd2, 0xc6, 
0x47, 0xd0, 0xc9, 0xcb, 0xe5, - 0xf6, 0x6b, 0x6b, 0xf5, 0xe0, 0x70, 0x4a, 0xc5, 0x63, 0xc3, 0x87, 0x39, 0x55, 0x9b, 0x87, 0xa7, - 0x70, 0xbd, 0x16, 0x9f, 0x05, 0x89, 0x60, 0x3c, 0x2b, 0x78, 0x70, 0x1a, 0x79, 0x61, 0xea, 0xd3, - 0x73, 0x4e, 0x9f, 0x06, 0x2c, 0x55, 0x57, 0xb5, 0xe3, 0xd6, 0xc3, 0x78, 0x0a, 0xd7, 0xb6, 0x54, - 0x41, 0x23, 0xd8, 0xd3, 0x66, 0xdf, 0xba, 0xb9, 0x73, 0xdc, 0x1a, 0xf7, 0x9c, 0x42, 0x2e, 0xca, - 0x78, 0xd7, 0xc0, 0xf0, 0x19, 0xb4, 0xcb, 0x07, 0xa8, 0x07, 0x8d, 0x15, 0x0d, 0x96, 0x2b, 0x21, - 0x3b, 0x5f, 0x76, 0xb5, 0x87, 0x8e, 0x60, 0x67, 0x4e, 0x45, 0xff, 0x92, 0xac, 0x7a, 0xe8, 0x6c, - 0xa8, 0x5e, 0x64, 0xbb, 0x39, 0x00, 0x1f, 0x49, 0x2e, 0x9c, 0x73, 0x16, 0xb3, 0x84, 0x84, 0xc5, - 0x24, 0x67, 0x24, 0x59, 0xa9, 0x0b, 0x75, 0xa5, 0x8d, 0x47, 0x80, 0xf2, 0x49, 0x1a, 0xa0, 0x9e, - 0xa6, 0x0d, 0x4d, 0x15, 0xa1, 0xbe, 0x44, 0x37, 0xdd, 0xc2, 0xc7, 0x8f, 0xa0, 0x63, 0xd0, 0x9a, - 0x01, 0x5b, 0xea, 0xa2, 0x5b, 0xd0, 0x98, 0x90, 0x30, 0x64, 0x42, 0x2e, 0x66, 0x6b, 0xdc, 0x75, - 0x8c, 0xf2, 0xa8, 0xb0, 0xab, 0x8f, 0x71, 0x17, 0x0e, 0x24, 0x43, 0x88, 0xde, 0x0a, 0x4c, 0x61, - 0x57, 0x7a, 0xe8, 0x36, 0x5c, 0x31, 0xfb, 0x92, 0x6b, 0xc8, 0x03, 0xe6, 0x53, 0x3d, 0x8c, 0x0b, - 0xf1, 0x5c, 0x8f, 0xca, 0x31, 0x96, 0x0a, 0x09, 0xbf, 0x24, 0xe1, 0xdb, 0x8e, 0xf0, 0x2d, 0xd9, - 0x57, 0x2a, 0x95, 0x7a, 0xe7, 0x1e, 0x34, 0x66, 0x95, 0x89, 0x2b, 0x6f, 0xfc, 0x57, 0x43, 0xaf, - 0x16, 0x1a, 0x43, 0x43, 0xa9, 0x25, 0x7a, 0x77, 0x73, 0x9d, 0x25, 0xfd, 0xb4, 0xaf, 0xe6, 0x61, - 0x47, 0x4d, 0x45, 0x23, 0x4f, 0x00, 0x36, 0xb2, 0x87, 0xde, 0xdf, 0xe4, 0xd5, 0xc4, 0xd0, 0x6e, - 0x3b, 0xb9, 0x82, 0x1b, 0xe0, 0x03, 0x68, 0x95, 0xa4, 0x0c, 0xd9, 0x95, 0xbc, 0x8a, 0xc2, 0xd9, - 0xfd, 0xcd, 0x59, 0x4d, 0x99, 0xbe, 0x96, 0xbd, 0x35, 0xa9, 0x6b, 0xbd, 0xcb, 0x92, 0x64, 0xf7, - 0xca, 0xaf, 0x53, 0x92, 0x80, 0x2f, 0xa1, 0x5d, 0x66, 0x2d, 0xba, 0xbe, 0xc1, 0x5d, 0x60, 0x73, - 0xf5, 0x05, 0x46, 0x16, 0x1a, 0xc2, 0x9e, 0xe6, 0x31, 0xea, 0x55, 0x5a, 0x17, 0xd4, 0xb6, 0xdb, - 0x8e, 0xfa, 0x0b, 0xfb, 0x26, 0x12, 0x3c, 0x43, 0x27, 0xb0, 0x5f, 0x90, 0x1a, 0xf5, 0xab, 0xad, - 0x36, 0x4c, 0xaf, 0x26, 0x8d, 0x2c, 0x74, 0x2a, 0x25, 0xb6, 0x42, 0x9e, 0x41, 0xa5, 0xdf, 0x05, - 0xfa, 0xdb, 0xaf, 0x60, 0x23, 0xfa, 0x19, 0x7a, 0xdb, 0x65, 0x01, 0x7d, 0xf4, 0xca, 0x8a, 0x65, - 0xe1, 0xb0, 0x3f, 0xdc, 0x5e, 0xd8, 0x54, 0xf9, 0x42, 0xde, 0xaa, 0x61, 0x4f, 0xed, 0x56, 0x2b, - 0x5c, 0xb5, 0xeb, 0x7c, 0x41, 0xa7, 0x70, 0x50, 0x21, 0x2a, 0xfa, 0xa0, 0x3a, 0xa1, 0x2a, 0x83, - 0xcb, 0x5b, 0x51, 0x65, 0xeb, 0xc8, 0x42, 0xf7, 0xa0, 0x69, 0x28, 0x87, 0xde, 0xab, 0x6d, 0x85, - 0xa1, 0xa1, 0xdd, 0xad, 0xae, 0x78, 0x82, 0x3e, 0x87, 0x8e, 0x21, 0xcc, 0x8c, 0x12, 0x9f, 0xf2, - 0x5a, 0xee, 0x86, 0x4a, 0xf6, 0x81, 0xa3, 0xbe, 0x53, 0x14, 0x6e, 0xf2, 0xd5, 0x3f, 0x2f, 0x06, - 0xd6, 0x7f, 0x2f, 0x06, 0xd6, 0xdf, 0x2f, 0x07, 0xd6, 0xb3, 0x97, 0x03, 0xeb, 0xc7, 0xdb, 0xaf, - 0xff, 0x13, 0xe1, 0xb1, 0x37, 0x34, 0xa5, 0x17, 0x0d, 0xf9, 0xa9, 0xf2, 0xc9, 0xff, 0x01, 0x00, - 0x00, 0xff, 0xff, 0x01, 0x12, 0xcc, 0x9d, 0x71, 0x09, 0x00, 0x00, + // 955 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0x66, 0x93, 0xc6, 0x71, 0x8e, 0x1d, 0xbb, 0x9d, 0x06, 0x63, 0xb6, 0xe0, 0x54, 0x23, 0x91, + 0x86, 0xaa, 0x5d, 0x1b, 0xd3, 0x00, 0x02, 0x24, 0x54, 0x57, 0x60, 0x87, 0xd2, 0x28, 0xac, 0xa1, + 0x95, 0x40, 0x42, 0x1a, 0xef, 0x0e, 0xf6, 0x8a, 0xb5, 0xc7, 0xcc, 0xce, 0x16, 0xf6, 0x91, 0x78, + 0x00, 0xae, 0xe1, 0xb2, 0x8f, 0x80, 0x7a, 0x11, 0xa1, 0xf6, 0x45, 0xd0, 
0xce, 0xcf, 0x7a, 0x77, + 0xe3, 0x44, 0x80, 0xd4, 0x9b, 0x68, 0xce, 0x99, 0x6f, 0xbe, 0x93, 0x39, 0x73, 0xbe, 0x6f, 0x0d, + 0x0d, 0xbe, 0xf4, 0x7e, 0x8e, 0x29, 0x4f, 0x9c, 0x25, 0x67, 0x82, 0xa1, 0xaa, 0x89, 0xed, 0xbb, + 0xd3, 0x40, 0xcc, 0xe2, 0x89, 0xe3, 0xb1, 0x79, 0x77, 0xca, 0xa6, 0xac, 0x2b, 0x01, 0x93, 0xf8, + 0x47, 0x19, 0xc9, 0x40, 0xae, 0xd4, 0x41, 0xfb, 0xc3, 0x1c, 0x5c, 0xd0, 0x85, 0x4f, 0xf9, 0x3c, + 0x58, 0x88, 0xfc, 0x92, 0x4c, 0xbc, 0xa0, 0x2b, 0x92, 0x25, 0x8d, 0xd4, 0x5f, 0x7d, 0xb0, 0xb6, + 0x20, 0xf3, 0x2c, 0xd8, 0x21, 0xde, 0x5c, 0x2f, 0x9b, 0x4f, 0x49, 0x18, 0xf8, 0x44, 0x30, 0x6e, + 0xf6, 0xf8, 0xd2, 0xd3, 0xcb, 0xdd, 0x25, 0x49, 0x42, 0x46, 0x7c, 0x15, 0xe2, 0x00, 0x6a, 0x63, + 0x41, 0x44, 0x1c, 0x9d, 0x12, 0x4e, 0xe6, 0xe8, 0x10, 0x9a, 0x83, 0x90, 0x79, 0x3f, 0x7d, 0x13, + 0xcc, 0xe9, 0x93, 0x40, 0xcc, 0x82, 0x45, 0xdb, 0xba, 0x69, 0x1d, 0xee, 0xb8, 0xe5, 0x34, 0xea, + 0xc1, 0x75, 0x99, 0x1a, 0x53, 0xba, 0xc8, 0xa1, 0x37, 0x24, 0x7a, 0xdd, 0x16, 0x26, 0xd0, 0x1c, + 0x52, 0x71, 0xdf, 0xf3, 0x58, 0xbc, 0x10, 0xaa, 0xdc, 0x09, 0x6c, 0xdf, 0xf7, 0x7d, 0x4e, 0xa3, + 0x48, 0x96, 0xa9, 0x0f, 0xee, 0x3d, 0x3b, 0xdb, 0x7f, 0xed, 0xf9, 0xd9, 0xfe, 0x9d, 0x5c, 0x4b, + 0x66, 0xc9, 0x92, 0xf2, 0x90, 0xfa, 0x53, 0xca, 0xbb, 0x93, 0x98, 0x73, 0xf6, 0x4b, 0xd7, 0xe3, + 0xc9, 0x52, 0x30, 0x47, 0x9f, 0x75, 0x0d, 0x09, 0xfe, 0xdd, 0x82, 0xab, 0x43, 0x2a, 0x1e, 0x51, + 0x41, 0x7c, 0x22, 0x88, 0x2a, 0xf2, 0x65, 0xb9, 0x48, 0xef, 0x7f, 0x17, 0x40, 0xdf, 0x42, 0xdd, + 0x90, 0x8f, 0x48, 0x34, 0x93, 0xd7, 0xad, 0x0f, 0xde, 0x7b, 0x7e, 0xb6, 0x7f, 0xf7, 0x72, 0xc2, + 0x49, 0xb0, 0x20, 0x3c, 0x71, 0x46, 0xf4, 0xd7, 0x41, 0x22, 0x68, 0xe4, 0x16, 0x68, 0xf0, 0x1d, + 0x68, 0x98, 0xd8, 0xa5, 0x51, 0x1c, 0x0a, 0x64, 0x43, 0xd5, 0x64, 0xf4, 0x0b, 0x64, 0x31, 0xfe, + 0xcd, 0x92, 0x9d, 0x1c, 0x0b, 0xc6, 0xc9, 0x94, 0xbe, 0x92, 0x4e, 0xa2, 0x2f, 0x60, 0xf3, 0x21, + 0x4d, 0xf4, 0xfd, 0xfe, 0x25, 0x97, 0xbe, 0xe3, 0x13, 0xc6, 0xfd, 0xfe, 0xd1, 0x07, 0x6e, 0x4a, + 0x80, 0xbf, 0x87, 0xba, 0xfe, 0x3f, 0x1f, 0x93, 0x30, 0xa6, 0xe8, 0x21, 0x6c, 0xc9, 0x85, 0xfe, + 0x2f, 0x8f, 0x34, 0xf3, 0x7f, 0xec, 0x9e, 0xe2, 0xc0, 0xef, 0xc2, 0xb5, 0xaf, 0x82, 0xc8, 0x8c, + 0x94, 0x1e, 0xe1, 0x3d, 0xd8, 0xfa, 0x3a, 0x55, 0xa1, 0x6e, 0x9b, 0x0a, 0x30, 0x86, 0xfa, 0x90, + 0x8a, 0x13, 0x32, 0xd7, 0xfd, 0x42, 0x70, 0x25, 0x0d, 0x34, 0x48, 0xae, 0xf1, 0x01, 0x34, 0x52, + 0xba, 0x74, 0x7d, 0x29, 0x57, 0x0b, 0xf6, 0x86, 0x54, 0x3c, 0x36, 0x1a, 0x1b, 0x53, 0x35, 0xcd, + 0x78, 0x08, 0x37, 0x4a, 0xf9, 0x51, 0x10, 0x09, 0xc6, 0x93, 0x4c, 0x5b, 0xc7, 0x0b, 0x2f, 0x8c, + 0x7d, 0x7a, 0xca, 0xe9, 0xd3, 0x80, 0xc5, 0xea, 0xa9, 0x36, 0xdd, 0x72, 0x1a, 0x0f, 0xe1, 0xfa, + 0x1a, 0x16, 0xd4, 0x83, 0x6d, 0xbd, 0x6c, 0x5b, 0x37, 0x37, 0x0f, 0x6b, 0xfd, 0x96, 0x93, 0x59, + 0x50, 0x1e, 0xef, 0x1a, 0x18, 0x3e, 0x81, 0x7a, 0x7e, 0x03, 0xb5, 0xa0, 0x32, 0xa3, 0xc1, 0x74, + 0x26, 0x64, 0xe5, 0x2b, 0xae, 0x8e, 0xd0, 0x01, 0x6c, 0x8e, 0xa9, 0x68, 0x6f, 0x48, 0xd6, 0x3d, + 0x67, 0x65, 0x1f, 0xd9, 0x69, 0x37, 0x05, 0xe0, 0x03, 0x29, 0xaf, 0x53, 0xce, 0x96, 0x2c, 0x22, + 0x61, 0xd6, 0x49, 0x29, 0x05, 0xf9, 0xa0, 0xae, 0x5c, 0xe3, 0x1e, 0xa0, 0xb4, 0x93, 0x06, 0xa8, + 0xbb, 0x69, 0x43, 0x55, 0x65, 0xa8, 0x2f, 0xd1, 0x55, 0x37, 0x8b, 0xf1, 0x23, 0x68, 0x18, 0xb4, + 0x56, 0xc0, 0x1a, 0x5e, 0x74, 0x0b, 0x2a, 0x03, 0x12, 0x86, 0x4c, 0xc8, 0xc1, 0xac, 0xf5, 0x9b, + 0x8e, 0x71, 0x33, 0x95, 0x76, 0xf5, 0x36, 0x6e, 0xc2, 0xae, 0x54, 0x08, 0xd1, 0x53, 0x81, 0x29, + 0x6c, 0xc9, 0x08, 0xdd, 0x86, 0xab, 0x66, 0x5e, 0x52, 0x5f, 0x7a, 0xc0, 0x7c, 0xaa, 0x9b, 0x71, + 
0x2e, 0x9f, 0x7a, 0x5c, 0x3e, 0xc7, 0x62, 0x21, 0xe1, 0x1b, 0x12, 0xbe, 0x6e, 0x0b, 0xdf, 0x92, + 0x75, 0xa5, 0xfb, 0xa9, 0x3b, 0xb7, 0xa0, 0x32, 0x2a, 0x74, 0x5c, 0x45, 0xfd, 0x3f, 0x2a, 0x7a, + 0xb4, 0x50, 0x1f, 0x2a, 0xca, 0x81, 0xd1, 0xeb, 0xab, 0xe7, 0xcc, 0x79, 0xb2, 0x7d, 0x2d, 0x4d, + 0x3b, 0xaa, 0x2b, 0x1a, 0x79, 0x04, 0xb0, 0xb2, 0x52, 0xf4, 0xe6, 0xea, 0x5c, 0xc9, 0x60, 0xed, + 0xba, 0x93, 0x7e, 0x15, 0x0c, 0xf0, 0x01, 0xd4, 0x72, 0xee, 0x88, 0xec, 0xc2, 0xb9, 0x82, 0x69, + 0xda, 0xed, 0xd5, 0x5e, 0xc9, 0x99, 0x3e, 0x93, 0xb5, 0xb5, 0xa8, 0x4b, 0xb5, 0xf3, 0x96, 0x64, + 0xb7, 0xf2, 0xd7, 0xc9, 0x59, 0xc0, 0x27, 0x50, 0xcf, 0xab, 0x16, 0xdd, 0x58, 0xe1, 0xce, 0xa9, + 0xb9, 0x78, 0x81, 0x9e, 0x85, 0xba, 0xb0, 0xad, 0x75, 0x8c, 0x5a, 0x85, 0xd2, 0x99, 0xb4, 0xed, + 0xba, 0xa3, 0x3e, 0x8b, 0x9f, 0x2f, 0x04, 0x4f, 0xd0, 0x11, 0xec, 0x64, 0xa2, 0x46, 0xed, 0x62, + 0xa9, 0x95, 0xd2, 0x8b, 0x87, 0x7a, 0x16, 0x3a, 0x96, 0x16, 0x5b, 0x10, 0x4f, 0xa7, 0x50, 0xef, + 0x9c, 0xfc, 0xed, 0x0b, 0xd4, 0x88, 0x7e, 0x80, 0xd6, 0x7a, 0x5b, 0x40, 0xef, 0x5c, 0xc8, 0x98, + 0x37, 0x0e, 0xfb, 0xed, 0xf5, 0xc4, 0x86, 0xe5, 0x63, 0xf9, 0xaa, 0x46, 0x3d, 0xa5, 0x57, 0x2d, + 0x68, 0xd5, 0x2e, 0xeb, 0x05, 0x1d, 0xc3, 0x6e, 0x41, 0xa8, 0xe8, 0xad, 0x62, 0x87, 0x8a, 0x0a, + 0xce, 0x4f, 0x45, 0x51, 0xad, 0x3d, 0x0b, 0xdd, 0x83, 0xaa, 0x91, 0x1c, 0x7a, 0xa3, 0x34, 0x15, + 0x46, 0x86, 0x76, 0xb3, 0x38, 0xe2, 0x11, 0xfa, 0x08, 0x1a, 0x46, 0x30, 0x23, 0x4a, 0x7c, 0xca, + 0x4b, 0x67, 0x57, 0x52, 0xb2, 0x77, 0x1d, 0xf5, 0xdb, 0x47, 0xe1, 0x06, 0x9f, 0xfe, 0xf5, 0xa2, + 0x63, 0xfd, 0xfd, 0xa2, 0x63, 0xfd, 0xf9, 0xb2, 0x63, 0x3d, 0x7b, 0xd9, 0xb1, 0xbe, 0xbb, 0x7d, + 0xf9, 0x47, 0x84, 0x2f, 0xbd, 0xae, 0xa1, 0x9e, 0x54, 0xe4, 0xcf, 0x9f, 0xf7, 0xff, 0x09, 0x00, + 0x00, 0xff, 0xff, 0xac, 0x34, 0xcd, 0x8f, 0xc5, 0x09, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
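// The reworked GetMetadataParam is used in two mutually exclusive ways
// elsewhere in this patch; a brief sketch, where qcli, addr and metahash are
// illustrative (addr a crypto.Address, metahash a binary.HexBytes):
//
//	qcli.GetMetadata(ctx, &rpcquery.GetMetadataParam{Address: &addr})
//	qcli.GetMetadata(ctx, &rpcquery.GetMetadataParam{MetadataHash: &metahash})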
@@ -1516,8 +1519,14 @@ func (m *GetMetadataParam) Size() (n int) { } var l int _ = l - l = m.Address.Size() - n += 1 + l + sovRpcquery(uint64(l)) + if m.Address != nil { + l = m.Address.Size() + n += 1 + l + sovRpcquery(uint64(l)) + } + if m.MetadataHash != nil { + l = m.MetadataHash.Size() + n += 1 + l + sovRpcquery(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } diff --git a/vent/service/abis.go b/vent/service/abis.go index 84d80dde9..b63f77703 100644 --- a/vent/service/abis.go +++ b/vent/service/abis.go @@ -39,7 +39,7 @@ func NewAbiProvider(paths []string, cli rpcquery.QueryClient) (provider *AbiProv func (p *AbiProvider) GetEventAbi(eventID abi.EventID, address crypto.Address, l *logging.Logger) (*abi.EventSpec, error) { evAbi, ok := p.abiSpec.EventsByID[eventID] if !ok { - resp, err := p.cli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: address}) + resp, err := p.cli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &address}) if err != nil { l.InfoMsg("Error retrieving abi for event", "address", address.String(), "eventid", eventID.String(), "error", err) return nil, err From ad85e420defe974c6939695443a21e99d7ce4967 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 22 Jul 2019 07:22:29 +0100 Subject: [PATCH 52/70] Add top level package file To prevent this warning: $ go get -d github.com/hyperledger/burrow can't load package: package github.com/hyperledger/burrow: no Go files in /home/vtp/go/src/github.com/hyperledger/burrow Signed-off-by: Sean Young --- burrow.go | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 burrow.go diff --git a/burrow.go b/burrow.go new file mode 100644 index 000000000..1152472f3 --- /dev/null +++ b/burrow.go @@ -0,0 +1,5 @@ +package burrow + +// This file exists to prevent go get from saying: +// $ go get -d github.com/hyperledger/burrow +// can't load package: package github.com/hyperledger/burrow: no Go files in .../go/src/github.com/hyperledger/burrow From 601c0acf461d2dd3f69d91d93e9ae1303436cc6b Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 22 Jul 2019 08:14:22 +0100 Subject: [PATCH 53/70] Improve burrow accounts output Signed-off-by: Sean Young --- cmd/burrow/commands/accounts.go | 18 +++++++++--------- cmd/burrow/commands/deploy.go | 2 +- execution/evm/abi/abi.go | 11 +++++++---- 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/cmd/burrow/commands/accounts.go b/cmd/burrow/commands/accounts.go index 7e9ff0262..4b34fb008 100644 --- a/cmd/burrow/commands/accounts.go +++ b/cmd/burrow/commands/accounts.go @@ -48,10 +48,10 @@ func Accounts(output Output) func(cmd *cli.Cmd) { output.Printf(" Public Key: %s\n", acc.PublicKey.String()) } if acc.WASMCode != nil && len(acc.WASMCode) > 0 { - output.Printf(" WASM Code: %s", acc.WASMCode.String()) + output.Printf(" WASM Code Hash: %s", acc.CodeHash.String()) } if acc.EVMCode != nil && len(acc.EVMCode) > 0 { - output.Printf(" EVM Code: %s", acc.EVMCode.String()) + output.Printf(" EVM Code Hash: %s", acc.CodeHash.String()) } meta, err := qCli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &acc.Address}) @@ -65,9 +65,9 @@ func Accounts(output Output) func(cmd *cli.Cmd) { output.Fatalf("failed to unmarshal metadata %s: %v", meta.Metadata, err) } - output.Printf(" Contract Name: %s", metadata.ContractName) - output.Printf(" Source File: %s", metadata.SourceFile) - output.Printf(" Compiler version: %s", metadata.CompilerVersion) + output.Printf(" Contract Name: %s", metadata.ContractName) + 
output.Printf(" Source File: %s", metadata.SourceFile) + output.Printf(" Compiler version: %s", metadata.CompilerVersion) spec, err := abi.ReadSpec(metadata.Abi) if err != nil { @@ -75,16 +75,16 @@ func Accounts(output Output) func(cmd *cli.Cmd) { } if len(spec.Functions) > 0 { - output.Printf(" Functions:") + output.Printf(" Functions:") for name, f := range spec.Functions { - output.Printf(" %s", f.String(name)) + output.Printf(" %s", f.String(name)) } } if len(spec.EventsByID) > 0 { - output.Printf(" Events:") + output.Printf(" Events:") for _, e := range spec.EventsByID { - output.Printf(" %s", e.String()) + output.Printf(" %s", e.String()) } } } diff --git a/cmd/burrow/commands/deploy.go b/cmd/burrow/commands/deploy.go index f1ec956c9..7d93266c2 100644 --- a/cmd/burrow/commands/deploy.go +++ b/cmd/burrow/commands/deploy.go @@ -77,7 +77,7 @@ func Deploy(output Output) func(cmd *cli.Cmd) { cmd.Spec = "[--chain=] [--keys=] [--mempool-signing] [--dir=] " + "[--output=] [--wasm] [--set=]... [--bin-path=] [--gas=] " + - "[--jobs=] [--address=
] [--fee=] [--amount=] " + + "[--jobs=] [--address=
] [--fee=] [--amount=] [--local-abi] " + "[--verbose] [--debug] [--timeout=] [--proposal-create|--proposal-verify|--proposal-create] FILE..." cmd.Action = func() { diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index f5552584f..3fbc9f64f 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -573,14 +573,14 @@ func SpecFromFunctionReflect(fname string, v reflect.Value, skipIn, skipOut int) return &s } -func argsToSignature(args []Argument, addIndexed bool) (str string) { +func argsToSignature(args []Argument, addIndexedName bool) (str string) { str = "(" for i, a := range args { if i > 0 { str += "," } str += a.EVM.GetSignature() - if addIndexed && a.Indexed { + if addIndexedName && a.Indexed { str += " indexed" } if a.IsArray { @@ -590,6 +590,9 @@ func argsToSignature(args []Argument, addIndexed bool) (str string) { str += "[]" } } + if addIndexedName && a.Name != "" { + str += " " + a.Name + } } str += ")" return @@ -605,8 +608,8 @@ func (functionSpec *FunctionSpec) SetFunctionID(functionName string) { } func (f *FunctionSpec) String(name string) string { - return name + argsToSignature(f.Inputs, false) + - " returns " + argsToSignature(f.Outputs, false) + return name + argsToSignature(f.Inputs, true) + + " returns " + argsToSignature(f.Outputs, true) } func (e *EventSpec) String() string { From 2fed39607d31ba5ccaff48f4e54c955c2704e995 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 22 Jul 2019 08:55:46 +0100 Subject: [PATCH 54/70] Fallback to local ABIs if needed Signed-off-by: Sean Young --- deploy/jobs/jobs_contracts.go | 26 +++++++++++--------------- execution/evm/abi/abi.go | 16 ++++++---------- 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index 4c46954b1..fcf20b94e 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -450,34 +450,30 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play if !do.LocalABI { abiJSON, err = client.GetMetadataForAccount(address) + if abiJSON != "" && err == nil { + packedBytes, funcSpec, err = abi.EncodeFunctionCall(abiJSON, call.Function, logger, callDataArray...) + } } - if abiJSON != "" && err == nil { - packedBytes, funcSpec, err = abi.EncodeFunctionCall(abiJSON, call.Function, logger, callDataArray...) - if err != nil { - return - } - callData = hex.EncodeToString(packedBytes) - } else { + // Sometimes the ABI for the contract needs to be overriden. For example, we might have a proxy contract which + // calls another contract via delegatecall from the fallback function. For example: + // https://github.com/agreements-network/blackstone/blob/develop/contracts/src/commons-management/AbstractDelegateProxy.sol#L26 + if funcSpec == nil { logger.TraceMsg("Looking for ABI in", "path", deployScript.BinPath, "bin", call.Bin, "dest", call.Destination) if call.Bin != "" { packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Bin, deployScript.BinPath, call.Function, logger, callDataArray...) - callData = hex.EncodeToString(packedBytes) } if call.Bin == "" || err != nil { packedBytes, funcSpec, err = abi.EncodeFunctionCallFromFile(call.Destination, deployScript.BinPath, call.Function, logger, callDataArray...) 
- callData = hex.EncodeToString(packedBytes) } if err != nil { - if call.Function == "()" { - logger.InfoMsg("Calling the fallback function") - } else { - err = util.ABIErrorHandler(err, call, nil, logger) - return - } + err = util.ABIErrorHandler(err, call, nil, logger) + return } } + callData = hex.EncodeToString(packedBytes) + if funcSpec.Constant { logger.InfoMsg("Function call to constant function, query-contract type job will be faster than call") } diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index 3fbc9f64f..62a44807e 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -736,22 +736,18 @@ func (abiSpec *Spec) Pack(fname string, args ...interface{}) ([]byte, *FunctionS if _, ok := abiSpec.Functions[fname]; ok { funcSpec = abiSpec.Functions[fname] } else { - funcSpec = abiSpec.Fallback + return nil, nil, fmt.Errorf("Unknown function %s", fname) } } else { - funcSpec = abiSpec.Constructor - } - - argSpec = funcSpec.Inputs - - if argSpec == nil { - if fname == "" { + if abiSpec.Constructor.Inputs != nil { + funcSpec = abiSpec.Constructor + } else { return nil, nil, fmt.Errorf("Contract does not have a constructor") } - - return nil, nil, fmt.Errorf("Unknown function %s", fname) } + argSpec = funcSpec.Inputs + packed := make([]byte, 0) if fname != "" { From 4280e7ef1ae18f2285866e80f3379acf7bc7eec5 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Mon, 22 Jul 2019 09:06:55 +0100 Subject: [PATCH 55/70] Rename GetKeyAddress to ParseAddress Signed-off-by: Sean Young --- deploy/def/client.go | 10 +++++----- deploy/jobs/jobs_contracts.go | 2 +- deploy/util/chains_info.go | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deploy/def/client.go b/deploy/def/client.go index f0023f96b..612abd916 100644 --- a/deploy/def/client.go +++ b/deploy/def/client.go @@ -123,7 +123,7 @@ func (c *Client) Status(logger *logging.Logger) (*rpc.ResultStatus, error) { return c.queryClient.Status(ctx, &rpcquery.StatusParam{}) } -func (c *Client) GetKeyAddress(key string, logger *logging.Logger) (crypto.Address, error) { +func (c *Client) ParseAddress(key string, logger *logging.Logger) (crypto.Address, error) { address, err := crypto.AddressFromHexString(key) if err == nil { return address, nil @@ -363,7 +363,7 @@ func (c *Client) UpdateAccount(arg *GovArg, logger *logging.Logger) (*payload.Go Roles: arg.Permissions, } if arg.Address != "" { - addr, err := c.GetKeyAddress(arg.Address, logger) + addr, err := c.ParseAddress(arg.Address, logger) if err != nil { return nil, fmt.Errorf("could not parse address: %v", err) } @@ -529,7 +529,7 @@ func (c *Client) Send(arg *SendArg, logger *logging.Logger) (*payload.SendTx, er if err != nil { return nil, err } - outputAddress, err := c.GetKeyAddress(arg.Output, logger) + outputAddress, err := c.ParseAddress(arg.Output, logger) if err != nil { return nil, err } @@ -638,7 +638,7 @@ func (c *Client) Permissions(arg *PermArg, logger *logging.Logger) (*payload.Per Action: action, } if arg.Target != "" { - target, err := c.GetKeyAddress(arg.Target, logger) + target, err := c.ParseAddress(arg.Target, logger) if err != nil { return nil, err } @@ -679,7 +679,7 @@ func (c *Client) TxInput(inputString, amountString, sequenceString string, allow var err error var inputAddress crypto.Address if inputString != "" { - inputAddress, err = c.GetKeyAddress(inputString, logger) + inputAddress, err = c.ParseAddress(inputString, logger) if err != nil { return nil, fmt.Errorf("TxInput(): could not obtain input address from '%s': %v", 
inputString, err) } diff --git a/deploy/jobs/jobs_contracts.go b/deploy/jobs/jobs_contracts.go index fcf20b94e..bece7dffc 100644 --- a/deploy/jobs/jobs_contracts.go +++ b/deploy/jobs/jobs_contracts.go @@ -438,7 +438,7 @@ func FormulateCallJob(call *def.Call, do *def.DeployArgs, deployScript *def.Play call.Gas = FirstOf(call.Gas, do.DefaultGas) // Get address (possibly via key) - address, err := client.GetKeyAddress(call.Destination, logger) + address, err := client.ParseAddress(call.Destination, logger) if err != nil { return nil, err } diff --git a/deploy/util/chains_info.go b/deploy/util/chains_info.go index 5e178fb0f..8f54c0bc2 100644 --- a/deploy/util/chains_info.go +++ b/deploy/util/chains_info.go @@ -23,7 +23,7 @@ func GetBlockHeight(client *def.Client, logger *logging.Logger) (latestBlockHeig } func AccountsInfo(account, field string, client *def.Client, logger *logging.Logger) (string, error) { - address, err := client.GetKeyAddress(account, logger) + address, err := client.ParseAddress(account, logger) if err != nil { return "", err } From 1521010b85fad3446030c2000f24d73daef7f8c3 Mon Sep 17 00:00:00 2001 From: Sean Young Date: Tue, 23 Jul 2019 08:38:45 +0100 Subject: [PATCH 56/70] Preserve origin height and index Signed-off-by: Silas Davis Signed-off-by: Sean Young --- acm/acm.pb.go | 20 ++++----- dump/dump.go | 16 +++++--- dump/dump.pb.go | 99 +++++++++++++++++++++++++++++++-------------- dump/load.go | 1 + dump/load_test.go | 42 +++++++++++++++++++ dump/sink.go | 17 +++++++- dump/test_dump.json | 5 +++ protobuf/dump.proto | 3 +- 8 files changed, 155 insertions(+), 48 deletions(-) create mode 100644 dump/test_dump.json diff --git a/acm/acm.pb.go b/acm/acm.pb.go index ca198d56b..5084917ee 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -29,15 +29,17 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package type Account struct { - Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` - PublicKey crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey"` - Sequence uint64 `protobuf:"varint,3,opt,name=Sequence,proto3" json:"Sequence,omitempty"` - Balance uint64 `protobuf:"varint,4,opt,name=Balance,proto3" json:"Balance,omitempty"` - EVMCode Bytecode `protobuf:"bytes,5,opt,name=EVMCode,proto3,customtype=Bytecode" json:"EVMCode"` - Permissions permission.AccountPermissions `protobuf:"bytes,6,opt,name=Permissions,proto3" json:"Permissions"` - WASMCode Bytecode `protobuf:"bytes,7,opt,name=WASMCode,proto3,customtype=Bytecode" json:",omitempty"` - CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,8,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"-"` - ContractMeta []*ContractMeta `protobuf:"bytes,9,rep,name=ContractMeta,proto3" json:"ContractMeta,omitempty"` + Address github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,1,opt,name=Address,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Address"` + PublicKey crypto.PublicKey `protobuf:"bytes,2,opt,name=PublicKey,proto3" json:"PublicKey"` + Sequence uint64 `protobuf:"varint,3,opt,name=Sequence,proto3" json:"Sequence,omitempty"` + Balance uint64 `protobuf:"varint,4,opt,name=Balance,proto3" json:"Balance,omitempty"` + EVMCode Bytecode `protobuf:"bytes,5,opt,name=EVMCode,proto3,customtype=Bytecode" json:"EVMCode"` + Permissions permission.AccountPermissions 
`protobuf:"bytes,6,opt,name=Permissions,proto3" json:"Permissions"` + WASMCode Bytecode `protobuf:"bytes,7,opt,name=WASMCode,proto3,customtype=Bytecode" json:",omitempty"` + CodeHash github_com_hyperledger_burrow_binary.HexBytes `protobuf:"bytes,8,opt,name=CodeHash,proto3,customtype=github.com/hyperledger/burrow/binary.HexBytes" json:"-"` + ContractMeta []*ContractMeta `protobuf:"bytes,9,rep,name=ContractMeta,proto3" json:"ContractMeta,omitempty"` + // The metadata is stored in the deployed account. When the deployed account creates new account (from Solidity/EVM), they point to the original deployed + // account where the metadata is stored. This original account is called the forebear. Forebear *github_com_hyperledger_burrow_crypto.Address `protobuf:"bytes,10,opt,name=Forebear,proto3,customtype=github.com/hyperledger/burrow/crypto.Address" json:"Forebear,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` diff --git a/dump/dump.go b/dump/dump.go index 64963aa7c..f5301cdd4 100644 --- a/dump/dump.go +++ b/dump/dump.go @@ -174,17 +174,21 @@ func (ds *Dumper) Transmit(sink Sink, startHeight, endHeight uint64, options Opt case ev.BeginTx != nil: origin = ev.BeginTx.TxHeader.Origin case ev.Event != nil && ev.Event.Log != nil: - evmevent := EVMEvent{Event: ev.Event.Log} + row := &Dump{EVMEvent: &EVMEvent{Event: ev.Event.Log}} if origin != nil { // this event was already restored - evmevent.ChainID = origin.ChainID - evmevent.Time = origin.Time + row.EVMEvent.ChainID = origin.ChainID + row.EVMEvent.Time = origin.Time + row.EVMEvent.Index = origin.Index + row.Height = origin.Height } else { // this event was generated on this chain - evmevent.ChainID = ds.blockchain.ChainID() - evmevent.Time = blockTime + row.EVMEvent.ChainID = ds.blockchain.ChainID() + row.EVMEvent.Time = blockTime + row.EVMEvent.Index = ev.Event.Header.Index + row.Height = ev.Event.Header.Height } - err := sink.Send(&Dump{Height: ev.Event.Header.Height, EVMEvent: &evmevent}) + err := sink.Send(row) if err != nil { return err } diff --git a/dump/dump.pb.go b/dump/dump.pb.go index 75e01e226..ad3cfb437 100644 --- a/dump/dump.pb.go +++ b/dump/dump.pb.go @@ -125,6 +125,8 @@ func (*AccountStorage) XXX_MessageName() string { type EVMEvent struct { // The original ChainID from for this event ChainID string `protobuf:"bytes,1,opt,name=ChainID,proto3" json:"ChainID,omitempty"` + // The original index for this event + Index uint64 `protobuf:"varint,4,opt,name=Index,proto3" json:"Index,omitempty"` // The original block time for this transaction Time time.Time `protobuf:"bytes,2,opt,name=Time,proto3,stdtime" json:"Time"` // The event itself @@ -170,6 +172,13 @@ func (m *EVMEvent) GetChainID() string { return "" } +func (m *EVMEvent) GetIndex() uint64 { + if m != nil { + return m.Index + } + return 0 +} + func (m *EVMEvent) GetTime() time.Time { if m != nil { return m.Time @@ -281,37 +290,38 @@ func init() { proto.RegisterFile("dump.proto", fileDescriptor_58418148159c29a6) func init() { golang_proto.RegisterFile("dump.proto", fileDescriptor_58418148159c29a6) } var fileDescriptor_58418148159c29a6 = []byte{ - // 479 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0xbf, 0x6f, 0x13, 0x31, - 0x14, 0xc6, 0xcd, 0xa5, 0x69, 0x9d, 0xd2, 0xc1, 0x42, 0xe8, 0x94, 0xe1, 0x12, 0x9d, 0x10, 0x54, - 0x88, 0x3a, 0x52, 0xa0, 0x88, 0xa1, 0x4b, 0x43, 0x83, 0x8a, 0x0a, 0x1d, 0x4c, 0x55, 0x24, 0xb6, - 0xfb, 0xf1, 0x70, 0x4e, 0xca, 0x9d, 0x4f, 0x3e, 0x1f, 0xf4, 
0x76, 0x16, 0x36, 0xfe, 0x02, 0xfe, - 0x16, 0xc6, 0x8c, 0x88, 0x09, 0x31, 0x14, 0x74, 0xfd, 0x47, 0xd0, 0xf9, 0x6c, 0x02, 0x1d, 0x10, - 0x6c, 0xef, 0xbd, 0xcf, 0xef, 0xf3, 0xe7, 0xef, 0x33, 0xc6, 0x71, 0x99, 0xe6, 0x34, 0x97, 0x42, - 0x09, 0xe2, 0x34, 0xf5, 0x60, 0x97, 0x27, 0x6a, 0x5e, 0x86, 0x34, 0x12, 0xe9, 0x98, 0x0b, 0x2e, - 0xc6, 0x1a, 0x0c, 0xcb, 0xd7, 0xba, 0xd3, 0x8d, 0xae, 0xda, 0xa5, 0xc1, 0x90, 0x0b, 0xc1, 0x17, - 0xb0, 0x3a, 0xa5, 0x92, 0x14, 0x0a, 0x15, 0x58, 0xd6, 0xc1, 0x66, 0x10, 0xa5, 0xa6, 0xc4, 0x70, - 0x0e, 0x91, 0xa9, 0xfb, 0x59, 0x90, 0x42, 0xd1, 0x36, 0xfe, 0x47, 0x84, 0x7b, 0x2f, 0x94, 0x90, - 0x01, 0x07, 0xf2, 0x04, 0x77, 0x8e, 0xa1, 0x72, 0xd1, 0x08, 0xed, 0x6c, 0x4d, 0x1f, 0x2c, 0x2f, - 0x86, 0xd7, 0xbe, 0x5d, 0x0c, 0xef, 0xfd, 0x26, 0x6a, 0x5e, 0xe5, 0x20, 0x17, 0x10, 0x73, 0x90, - 0xe3, 0xb0, 0x94, 0x52, 0xbc, 0x1d, 0x87, 0x49, 0x16, 0xc8, 0x8a, 0xbe, 0x14, 0x32, 0x9e, 0xec, - 0x3d, 0x64, 0x0d, 0x01, 0x39, 0xc6, 0xdd, 0xb3, 0x60, 0x51, 0x82, 0xbb, 0xa6, 0x99, 0xf6, 0x0c, - 0xd3, 0xee, 0x3f, 0x31, 0x1d, 0xc1, 0xf9, 0xb4, 0x52, 0x50, 0xb0, 0x96, 0xc3, 0x7f, 0x8f, 0xf0, - 0xf6, 0x41, 0x14, 0x89, 0x32, 0x53, 0x56, 0xe7, 0x09, 0xee, 0x1d, 0xc4, 0xb1, 0x84, 0xa2, 0xf8, - 0x3f, 0xad, 0x91, 0xac, 0x72, 0x25, 0xa8, 0xd9, 0x65, 0x96, 0x84, 0xdc, 0xf9, 0x65, 0x81, 0xbb, - 0x36, 0xea, 0xec, 0xf4, 0x27, 0xd7, 0xa9, 0xce, 0xc6, 0x0c, 0x99, 0x45, 0xfd, 0x77, 0x08, 0x6f, - 0xcc, 0xce, 0x9e, 0xcf, 0xde, 0x40, 0xa6, 0x88, 0x8b, 0x7b, 0x8f, 0xe7, 0x41, 0x92, 0x3d, 0x3d, - 0xd4, 0x2a, 0x36, 0x99, 0x6d, 0xc9, 0x23, 0xec, 0x9c, 0x26, 0x69, 0xfb, 0xfc, 0xfe, 0x64, 0x40, - 0xdb, 0x9c, 0xa8, 0xcd, 0x89, 0x9e, 0xda, 0x9c, 0xa6, 0x1b, 0x8d, 0xf0, 0x0f, 0xdf, 0x87, 0x88, - 0xe9, 0x0d, 0x72, 0x0b, 0x77, 0x35, 0xb9, 0xdb, 0xd1, 0xab, 0xdb, 0x54, 0xc7, 0xf6, 0x4c, 0x70, - 0x3d, 0x65, 0x2d, 0xe8, 0x7f, 0x41, 0xd8, 0x39, 0x2c, 0xd3, 0x9c, 0xdc, 0xc4, 0xeb, 0x47, 0x90, - 0xf0, 0xb9, 0xd2, 0x0a, 0x1c, 0x66, 0x3a, 0x72, 0x1b, 0xf7, 0x8c, 0x65, 0x46, 0xc3, 0x16, 0x6d, - 0xbe, 0x82, 0x99, 0x31, 0x0b, 0x92, 0xfd, 0xab, 0xd6, 0x9a, 0x7b, 0x6f, 0xb4, 0xef, 0xff, 0x13, - 0x63, 0x57, 0x63, 0xb8, 0xbb, 0x32, 0xc3, 0x75, 0x8c, 0x5e, 0xbd, 0x67, 0xa7, 0x6c, 0x65, 0xd6, - 0x08, 0x3b, 0x27, 0x41, 0x0a, 0x6e, 0xd7, 0xc8, 0x69, 0xbf, 0xe0, 0x2c, 0x53, 0xb2, 0x62, 0x1a, - 0x99, 0xee, 0x2f, 0x6b, 0x0f, 0x7d, 0xae, 0x3d, 0xf4, 0xb5, 0xf6, 0xd0, 0x8f, 0xda, 0x43, 0x9f, - 0x2e, 0x3d, 0xb4, 0xbc, 0xf4, 0xd0, 0x2b, 0xff, 0xef, 0xa9, 0x36, 0x57, 0x86, 0xeb, 0xda, 0xdc, - 0xfb, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe9, 0xd0, 0x90, 0x5d, 0x55, 0x03, 0x00, 0x00, + // 493 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0x65, 0x5b, 0xa7, 0x69, 0x37, 0xa5, 0x87, 0x55, 0x85, 0xac, 0x1c, 0x9c, 0xc8, 0x42, 0x10, + 0x21, 0xba, 0x91, 0x02, 0x45, 0x1c, 0x7a, 0x69, 0x68, 0x50, 0xab, 0x42, 0x0f, 0x4b, 0x55, 0x24, + 0x6e, 0xfe, 0x18, 0x1c, 0x4b, 0xb1, 0xd7, 0x5a, 0xaf, 0x21, 0xfe, 0x09, 0xdc, 0x38, 0x73, 0xe0, + 0xb7, 0x70, 0xcc, 0x11, 0x71, 0x42, 0x1c, 0x0a, 0x4a, 0xff, 0x08, 0xf2, 0x7e, 0x10, 0xe8, 0x01, + 0xc1, 0x6d, 0x66, 0x9e, 0xe7, 0xcd, 0xf3, 0x7b, 0x8b, 0x71, 0x5c, 0x65, 0x05, 0x2d, 0x04, 0x97, + 0x9c, 0x38, 0x4d, 0xdd, 0xdd, 0x4b, 0x52, 0x39, 0xad, 0x42, 0x1a, 0xf1, 0x6c, 0x98, 0xf0, 0x84, + 0x0f, 0x15, 0x18, 0x56, 0xaf, 0x55, 0xa7, 0x1a, 0x55, 0xe9, 0xa5, 0x6e, 0x2f, 0xe1, 0x3c, 0x99, + 0xc1, 0xea, 0x2b, 0x99, 0x66, 0x50, 0xca, 0xc0, 0xb2, 0x76, 0xb7, 0x82, 0x28, 0x33, 0x25, 0x86, + 0x39, 0x44, 0xa6, 0xee, 0xe4, 0x41, 0x06, 
0xa5, 0x6e, 0xfc, 0x8f, 0x08, 0xb7, 0x5f, 0x48, 0x2e, + 0x82, 0x04, 0xc8, 0x53, 0xbc, 0x7e, 0x0a, 0xb5, 0x8b, 0xfa, 0x68, 0xb0, 0x3d, 0x7e, 0xb8, 0xb8, + 0xec, 0xdd, 0xf8, 0x76, 0xd9, 0xbb, 0xff, 0x9b, 0xa8, 0x69, 0x5d, 0x80, 0x98, 0x41, 0x9c, 0x80, + 0x18, 0x86, 0x95, 0x10, 0xfc, 0xed, 0x30, 0x4c, 0xf3, 0x40, 0xd4, 0xf4, 0x25, 0x17, 0xf1, 0x68, + 0xff, 0x11, 0x6b, 0x08, 0xc8, 0x29, 0x6e, 0x5d, 0x04, 0xb3, 0x0a, 0xdc, 0x35, 0xc5, 0xb4, 0x6f, + 0x98, 0xf6, 0xfe, 0x89, 0xe9, 0x18, 0xe6, 0xe3, 0x5a, 0x42, 0xc9, 0x34, 0x87, 0xff, 0x0e, 0xe1, + 0x9d, 0xc3, 0x28, 0xe2, 0x55, 0x2e, 0xad, 0xce, 0x33, 0xdc, 0x3e, 0x8c, 0x63, 0x01, 0x65, 0xf9, + 0x7f, 0x5a, 0x23, 0x51, 0x17, 0x92, 0x53, 0xb3, 0xcb, 0x2c, 0x09, 0xb9, 0xfb, 0xcb, 0x02, 0x77, + 0xad, 0xbf, 0x3e, 0xe8, 0x8c, 0x6e, 0x52, 0x95, 0x8d, 0x19, 0x32, 0x8b, 0xfa, 0x1f, 0x10, 0xde, + 0x9c, 0x5c, 0x3c, 0x9f, 0xbc, 0x81, 0x5c, 0x12, 0x17, 0xb7, 0x9f, 0x4c, 0x83, 0x34, 0x3f, 0x39, + 0x52, 0x2a, 0xb6, 0x98, 0x6d, 0xc9, 0x2e, 0x6e, 0x9d, 0xe4, 0x31, 0xcc, 0x5d, 0xa7, 0x8f, 0x06, + 0x0e, 0xd3, 0x0d, 0x79, 0x8c, 0x9d, 0xf3, 0x34, 0xd3, 0xa6, 0x74, 0x46, 0x5d, 0xaa, 0xd3, 0xa3, + 0x36, 0x3d, 0x7a, 0x6e, 0xd3, 0x1b, 0x6f, 0x36, 0xbf, 0xf3, 0xfe, 0x7b, 0x0f, 0x31, 0xb5, 0x41, + 0x6e, 0xe3, 0x96, 0x3a, 0xe9, 0xae, 0xab, 0xd5, 0x1d, 0xaa, 0xc2, 0x7c, 0xc6, 0x13, 0x35, 0x65, + 0x1a, 0xf4, 0xbf, 0x20, 0xec, 0x1c, 0x55, 0x59, 0x41, 0x6e, 0xe1, 0x8d, 0x63, 0x48, 0x93, 0xa9, + 0x54, 0xba, 0x1c, 0x66, 0x3a, 0x72, 0x07, 0xb7, 0x8d, 0x91, 0x46, 0xc3, 0x36, 0x6d, 0x1e, 0x88, + 0x99, 0x31, 0x0b, 0x92, 0x83, 0xeb, 0x86, 0x9b, 0xbb, 0xbb, 0xda, 0x95, 0x3f, 0x31, 0x76, 0x3d, + 0x9c, 0x7b, 0x2b, 0x8b, 0xd4, 0xff, 0x37, 0x7a, 0xd5, 0x9e, 0x9d, 0xb2, 0x95, 0x85, 0x7d, 0xec, + 0x9c, 0x05, 0x19, 0xb8, 0x2d, 0x23, 0x47, 0x3f, 0xcc, 0x49, 0x2e, 0x45, 0xcd, 0x14, 0x32, 0x3e, + 0x58, 0x2c, 0x3d, 0xf4, 0x79, 0xe9, 0xa1, 0xaf, 0x4b, 0x0f, 0xfd, 0x58, 0x7a, 0xe8, 0xd3, 0x95, + 0x87, 0x16, 0x57, 0x1e, 0x7a, 0xe5, 0xff, 0x3d, 0xeb, 0xe6, 0x64, 0xb8, 0xa1, 0xcc, 0x7d, 0xf0, + 0x33, 0x00, 0x00, 0xff, 0xff, 0x30, 0xe1, 0x0a, 0x94, 0x6b, 0x03, 0x00, 0x00, } func (m *Storage) Marshal() (dAtA []byte, err error) { @@ -431,6 +441,11 @@ func (m *EVMEvent) MarshalTo(dAtA []byte) (int, error) { } i += n5 } + if m.Index != 0 { + dAtA[i] = 0x20 + i++ + i = encodeVarintDump(dAtA, i, uint64(m.Index)) + } if m.XXX_unrecognized != nil { i += copy(dAtA[i:], m.XXX_unrecognized) } @@ -564,6 +579,9 @@ func (m *EVMEvent) Size() (n int) { l = m.Event.Size() n += 1 + l + sovDump(uint64(l)) } + if m.Index != 0 { + n += 1 + sovDump(uint64(m.Index)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -985,6 +1003,25 @@ func (m *EVMEvent) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Index", wireType) + } + m.Index = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowDump + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Index |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipDump(dAtA[iNdEx:]) diff --git a/dump/load.go b/dump/load.go index 17232a8ce..531e01849 100644 --- a/dump/load.go +++ b/dump/load.go @@ -77,6 +77,7 @@ func Load(source Source, st *state.State) error { ChainID: row.EVMEvent.ChainID, Height: row.Height, Time: row.EVMEvent.Time, + Index: row.EVMEvent.Index, }, }, } diff --git a/dump/load_test.go b/dump/load_test.go index 7715efd50..eb54979c5 100644 --- 
a/dump/load_test.go +++ b/dump/load_test.go @@ -2,8 +2,12 @@ package dump import ( "fmt" + "os" "testing" + "github.com/hyperledger/burrow/bcm" + + "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/execution/state" "github.com/hyperledger/burrow/genesis" "github.com/hyperledger/burrow/permission" @@ -32,3 +36,41 @@ func testLoad(t testing.TB, mock *MockSource) *state.State { require.NoError(t, err) return st } + +func TestLoadAndDump(t *testing.T) { + st, err := state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{GlobalPermissions: permission.DefaultAccountPermissions}) + require.NoError(t, err) + src, err := NewFileReader(os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/dump/test_dump.json") + require.NoError(t, err) + err = Load(src, st) + require.NoError(t, err) + + // dump and recreate + for i := 1; i < 10; i++ { + dumper := NewDumper(st, &bcm.Blockchain{}) + sink := CollectSink{ + Rows: make([]string, 0), + } + err = dumper.Transmit(&sink, 0, 0, All) + require.NoError(t, err) + + st, err = state.MakeGenesisState(testDB(t), &genesis.GenesisDoc{GlobalPermissions: permission.DefaultAccountPermissions, ChainName: fmt.Sprintf("CHAIN #%d", i)}) + require.NoError(t, err) + + err = Load(&sink, st) + require.NoError(t, err) + } + + err = st.IterateStreamEvents(nil, nil, func(se *exec.StreamEvent) error { + if se.BeginTx != nil { + require.Equal(t, se.BeginTx.TxHeader.Origin.Height, uint64(5)) + require.Equal(t, se.BeginTx.TxHeader.Origin.Index, uint64(2)) + require.Equal(t, se.BeginTx.TxHeader.Origin.ChainID, "BurrowChain_7DB5BD-5BCE58") + } + if se.Event != nil { + require.Equal(t, se.Event.Header.Height, uint64(5)) + } + return nil + }) + require.NoError(t, err) +} diff --git a/dump/sink.go b/dump/sink.go index 9b4fa9ef5..c42e9ccfc 100644 --- a/dump/sink.go +++ b/dump/sink.go @@ -2,6 +2,7 @@ package dump import ( "encoding/json" + "io" ) type NullSink struct{} @@ -11,7 +12,8 @@ func (NullSink) Send(*Dump) error { } type CollectSink struct { - Rows []string + Rows []string + Current int } func (c *CollectSink) Send(d *Dump) error { @@ -21,3 +23,16 @@ func (c *CollectSink) Send(d *Dump) error { return nil } + +func (c *CollectSink) Recv() (d *Dump, err error) { + if c.Current >= len(c.Rows) { + c.Current = 0 + return nil, io.EOF + } + d = new(Dump) + err = json.Unmarshal([]byte(c.Rows[c.Current]), d) + if err == nil { + c.Current++ + } + return +} diff --git a/dump/test_dump.json b/dump/test_dump.json new file mode 100644 index 000000000..32c86df6a --- /dev/null +++ b/dump/test_dump.json @@ -0,0 +1,5 @@ +{"Height":6,"Account":{"Address":"0000000000000000000000000000000000000000","PublicKey":{"CurveType":"","PublicKey":""},"Balance":1337,"EVMCode":"","Permissions":{"Base":{"Perms":"send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | hasRole","SetBit":"root | send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | setBase | unsetBase | setGlobal | hasRole | addRole | removeRole"}}}} +{"Height":6,"Account":{"Address":"4ABD4279D51DB2EDD2DB0BA0B07AF56ED78CDAC0","PublicKey":{"CurveType":"","PublicKey":""},"Balance":99999999999999,"EVMCode":"","Permissions":{"Base":{"Perms":"root | send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | setBase | unsetBase | setGlobal | hasRole | addRole | removeRole","SetBit":"root | send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | setBase | unsetBase 
| setGlobal | hasRole | addRole | removeRole"}}}} +{"Height":6,"Account":{"Address":"674A8125610CAF8B9969DC38AAC573691DF926EC","PublicKey":{"CurveType":"","PublicKey":""},"EVMCode":"6080604052600436106049576000357C0100000000000000000000000000000000000000000000000000000000900463FFFFFFFF1680635197C7AA14604E578063F38BD57A146082575B600080FD5B348015605957600080FD5B5060606096565B604051808263FFFFFFFF1663FFFFFFFF16815260200191505060405180910390F35B348015608D57600080FD5B50609460AF565B005B60008060009054906101000A900463FFFFFFFF16905090565B7F2B69880ACA8C98F8F8CA0C47711FEB5DBC892B1D4614F2D9A9DF8D5F6E3A960B60666040518082815260200191505060405180910390A15600A165627A7A723058205121C5ED3D588194B76A15A35AA065F79D0C22EB50FCD97A24AAA60E01E191740029","Permissions":{"Base":{"Perms":"","SetBit":""}},"ContractMeta":[{"CodeHash":"139CBA1935D25D8E42741DA38F353577EB40E0F0E5EAB38F69A4478100BF3504","MetadataHash":"","Metadata":"{\"ContractName\":\"Maker\",\"SourceFile\":\"const.sol\",\"CompilerVersion\":\"0.4.25+commit.59dbf8f1\",\"Abi\":[{\"constant\":false,\"inputs\":[],\"name\":\"getX\",\"outputs\":[{\"name\":\"\",\"type\":\"uint32\"}],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[],\"name\":\"emitMaker\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"name\":\"a\",\"type\":\"uint32\"}],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"MakerEvent\",\"type\":\"event\"}]}"}],"Forebear":"674A8125610CAF8B9969DC38AAC573691DF926EC"},"AccountStorage":{"Address":"674A8125610CAF8B9969DC38AAC573691DF926EC","Storage":[{"Key":"0000000000000000000000000000000000000000000000000000000000000000","Value":"0000000000000000000000000000000000000000000000000000000000000066"}]}} +{"Height":6,"Account":{"Address":"787FEB2434C96FB3D33FB6A23DA82B39FD4D466A","PublicKey":{"CurveType":"ed25519","PublicKey":"CCF9DF46B8496A96C6266A4FE29166C5F3FA020CA240D1B237BC3ABB77228357"},"Sequence":3,"Balance":9999970002,"EVMCode":"","Permissions":{"Base":{"Perms":"bond","SetBit":"bond"}}}} +{"Height":5,"EVMEvent":{"ChainID":"BurrowChain_7DB5BD-5BCE58","Index":2,"Time":"2019-07-23T07:23:41.701845678Z","Event":{"Address":"674A8125610CAF8B9969DC38AAC573691DF926EC","Data":"0000000000000000000000000000000000000000000000000000000000000066","Topics":["2B69880ACA8C98F8F8CA0C47711FEB5DBC892B1D4614F2D9A9DF8D5F6E3A960B"]}}} diff --git a/protobuf/dump.proto b/protobuf/dump.proto index ef5c84a2e..a0de7df67 100644 --- a/protobuf/dump.proto +++ b/protobuf/dump.proto @@ -36,11 +36,12 @@ message AccountStorage { message EVMEvent { // The original ChainID from for this event string ChainID = 1; + // The original index for this event + uint64 Index = 4; // The original block time for this transaction google.protobuf.Timestamp Time = 2 [(gogoproto.nullable)=false, (gogoproto.stdtime)=true]; // The event itself exec.LogEvent Event = 3; - } message Dump { From 41a3bd83cbc948b87f178bf357c574d3709ea034 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Sun, 28 Jul 2019 17:38:59 +0100 Subject: [PATCH 57/70] add event index column Signed-off-by: Gregory Hill --- go.mod | 1 - go.sum | 1 - vent/service/consumer_test.go | 8 +++++++- vent/service/decoder.go | 1 + vent/sqlsol/projection.go | 11 +++++++++++ vent/sqlsol/projection_test.go | 2 ++ vent/types/sql_table.go | 7 +++++-- 7 files changed, 26 insertions(+), 5 deletions(-) diff --git 
a/go.mod b/go.mod index 1c4e30d4d..84f8efa0b 100644 --- a/go.mod +++ b/go.mod @@ -37,7 +37,6 @@ require ( github.com/jmoiron/sqlx v1.2.0 github.com/kr/pretty v0.1.0 // indirect github.com/lib/pq v1.1.1 - github.com/magiconair/properties v1.8.0 github.com/mattn/go-colorable v0.1.2 // indirect github.com/mattn/go-sqlite3 v1.10.0 github.com/monax/relic v2.0.0+incompatible diff --git a/go.sum b/go.sum index e5c6abbc6..7aaab95b6 100644 --- a/go.sum +++ b/go.sum @@ -96,7 +96,6 @@ github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= diff --git a/vent/service/consumer_test.go b/vent/service/consumer_test.go index 62723786e..00ae76677 100644 --- a/vent/service/consumer_test.go +++ b/vent/service/consumer_test.go @@ -3,6 +3,7 @@ package service_test import ( + "fmt" "math/rand" "path" "runtime" @@ -55,7 +56,6 @@ func testConsumer(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpc blockID := txeA.Height eventData, err := db.GetBlock(chainID, blockID) - require.NoError(t, err) require.Equal(t, blockID, eventData.BlockHeight) require.Equal(t, 3, len(eventData.Tables)) @@ -64,6 +64,9 @@ func testConsumer(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpc require.Equal(t, 1, len(tblData)) require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) + for i := range tblData { + require.Equal(t, fmt.Sprintf("%d", i), tblData[i].RowData["_eventindex"].(string)) + } blockID = txeB.Height eventData, err = db.GetBlock(chainID, blockID) @@ -75,6 +78,9 @@ func testConsumer(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpc require.Equal(t, 1, len(tblData)) require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) + for i := range tblData { + require.Equal(t, fmt.Sprintf("%d", i), tblData[i].RowData["_eventindex"].(string)) + } // block & tx raw data also persisted if cfg.SpecOpt&sqlsol.Block > 0 { diff --git a/vent/service/decoder.go b/vent/service/decoder.go index 5a9844337..92e2f842e 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -21,6 +21,7 @@ func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, e data[types.ChainIDLabel] = origin.ChainID data[types.BlockHeightLabel] = fmt.Sprintf("%v", origin.GetHeight()) data[types.BlockIndexLabel] = fmt.Sprintf("%v", origin.GetIndex()) + data[types.EventIndexLabel] = fmt.Sprintf("%v", header.Index) data[types.EventTypeLabel] = header.GetEventType().String() data[types.TxTxHashLabel] = header.TxHash.String() diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index 5955702c1..44ca13741 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -293,6 +293,11 @@ func getGlobalFieldMappings() 
[]*types.EventFieldMapping { Field: types.BlockIndexLabel, Type: types.EventFieldTypeUInt, }, + { + ColumnName: columns.EventIndex, + Field: types.EventIndexLabel, + Type: types.EventFieldTypeUInt, + }, { ColumnName: columns.TxHash, Field: types.TxTxHashLabel, @@ -333,6 +338,12 @@ func getGlobalFieldMappingsLogMode() []*types.EventFieldMapping { Type: types.EventFieldTypeUInt, Primary: true, }, + { + ColumnName: columns.EventIndex, + Field: types.EventIndexLabel, + Type: types.EventFieldTypeUInt, + Primary: true, + }, { ColumnName: columns.TxHash, Field: types.TxTxHashLabel, diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index 6774655b2..1c536ac20 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -281,6 +281,8 @@ func TestWithNoPrimaryKey(t *testing.T) { require.Equal(t, true, c.Primary) case "_index": require.Equal(t, true, c.Primary) + case "_eventindex": + require.Equal(t, true, c.Primary) default: require.Equal(t, false, c.Primary) } diff --git a/vent/types/sql_table.go b/vent/types/sql_table.go index ec96015a1..78126a112 100644 --- a/vent/types/sql_table.go +++ b/vent/types/sql_table.go @@ -108,6 +108,7 @@ type SQLColumnNames struct { ChainID string // context Index string + EventIndex string EventType string BlockHeader string TxType string @@ -143,6 +144,7 @@ var DefaultSQLColumnNames = SQLColumnNames{ ChainID: "_chainid", // context, Index: "_index", + EventIndex: "_eventindex", EventType: "_eventtype", BlockHeader: "_blockheader", TxType: "_txtype", @@ -157,8 +159,9 @@ var DefaultSQLColumnNames = SQLColumnNames{ // labels for column mapping const ( // event related - EventNameLabel = "eventName" - EventTypeLabel = "eventType" + EventNameLabel = "eventName" + EventTypeLabel = "eventType" + EventIndexLabel = "eventIndex" // block related ChainIDLabel = "chainid" From a4afaccba1cbff09baf0a3d4d47ccf5771695611 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 31 Jul 2019 14:31:50 +0100 Subject: [PATCH 58/70] better tests for duplicate events, add more pks Signed-off-by: Gregory Hill --- acm/acmstate/state_cache_test.go | 4 +- vent/service/consumer_test.go | 91 ++++++++++++------------- vent/sqldb/adapters/postgres_adapter.go | 6 +- vent/sqlsol/projection.go | 3 + vent/sqlsol/projection_test.go | 4 +- vent/test/EventsTest.sol | 22 ++++++ vent/test/EventsTest.sol.go | 6 +- vent/test/events.go | 9 +++ 8 files changed, 88 insertions(+), 57 deletions(-) diff --git a/acm/acmstate/state_cache_test.go b/acm/acmstate/state_cache_test.go index 3ac275f53..09ce0b59d 100644 --- a/acm/acmstate/state_cache_test.go +++ b/acm/acmstate/state_cache_test.go @@ -1,7 +1,6 @@ package acmstate import ( - "fmt" "testing" "github.com/hyperledger/burrow/acm" @@ -111,8 +110,7 @@ func TestStateCache_UpdateAccount(t *testing.T) { require.NoError(t, err) assert.Equal(t, balance, accNewOut.Balance) - fmt.Println(accNewOut == accNew) - fmt.Println(accNewOut == accNew) + require.Equal(t, accNewOut == accNew, false) } func TestStateCache_RemoveAccount(t *testing.T) { diff --git a/vent/service/consumer_test.go b/vent/service/consumer_test.go index 00ae76677..1678f252f 100644 --- a/vent/service/consumer_test.go +++ b/vent/service/consumer_test.go @@ -15,6 +15,7 @@ import ( "github.com/hyperledger/burrow/rpc/rpctransact" "github.com/hyperledger/burrow/vent/config" "github.com/hyperledger/burrow/vent/service" + "github.com/hyperledger/burrow/vent/sqldb" "github.com/hyperledger/burrow/vent/sqlsol" "github.com/hyperledger/burrow/vent/test" 
"github.com/hyperledger/burrow/vent/types" @@ -53,49 +54,29 @@ func testConsumer(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpc // test data stored in database for two different block ids eventColumnName := "EventTest" - - blockID := txeA.Height - eventData, err := db.GetBlock(chainID, blockID) - require.NoError(t, err) - require.Equal(t, blockID, eventData.BlockHeight) - require.Equal(t, 3, len(eventData.Tables)) - - tblData := eventData.Tables[eventColumnName] - require.Equal(t, 1, len(tblData)) - require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) - require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) - for i := range tblData { - require.Equal(t, fmt.Sprintf("%d", i), tblData[i].RowData["_eventindex"].(string)) - } - - blockID = txeB.Height - eventData, err = db.GetBlock(chainID, blockID) - require.NoError(t, err) - require.Equal(t, blockID, eventData.BlockHeight) - require.Equal(t, 3, len(eventData.Tables)) - - tblData = eventData.Tables[eventColumnName] - require.Equal(t, 1, len(tblData)) - require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) - require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) - for i := range tblData { - require.Equal(t, fmt.Sprintf("%d", i), tblData[i].RowData["_eventindex"].(string)) - } + ensureEvents(t, db, chainID, eventColumnName, txeA.Height, 1) + eventData := ensureEvents(t, db, chainID, eventColumnName, txeB.Height, 1) // block & tx raw data also persisted if cfg.SpecOpt&sqlsol.Block > 0 { - tblData = eventData.Tables[tables.Block] + tblData := eventData.Tables[tables.Block] require.Equal(t, 1, len(tblData)) } if cfg.SpecOpt&sqlsol.Tx > 0 { - tblData = eventData.Tables[tables.Tx] + tblData := eventData.Tables[tables.Tx] require.Equal(t, 1, len(tblData)) require.Equal(t, txeB.TxHash.String(), tblData[0].RowData["_txhash"].(string)) } - //Restore - err = db.RestoreDB(time.Time{}, "RESTORED") + name = "TestEvent5" + description = "Description of TestEvent5" + txeC := test.CallAddEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + runConsumer(t, cfg) + ensureEvents(t, db, chainID, eventColumnName, txeC.Height, 2) + + // Restore + err := db.RestoreDB(time.Time{}, "RESTORED") require.NoError(t, err) } @@ -118,29 +99,45 @@ func testDeleteEvent(t *testing.T, chainID string, cfg *config.VentConfig, tcli runConsumer(t, cfg) // Expect block table, tx table, and EventTest table - eventData, err := db.GetBlock(chainID, txeAdd.Height) - require.NoError(t, err) - require.Equal(t, txeAdd.Height, eventData.BlockHeight) - require.Equal(t, 3, len(eventData.Tables)) - - // Expect data in the EventTest table - tblData := eventData.Tables[eventColumnName] - require.Equal(t, 1, len(tblData)) - require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) - require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) + ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 1) // Now emit a deletion event for that table test.CallRemoveEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name) runConsumer(t, cfg) + ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 0) + + // do the same as above but for duplicate events + txeAdd = test.CallAddEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + runConsumer(t, cfg) + ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 2) + + test.CallRemoveEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, 
name) + runConsumer(t, cfg) + ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 0) +} - eventData, err = db.GetBlock(chainID, txeAdd.Height) +func ensureEvents(t *testing.T, db *sqldb.SQLDB, chainID, column string, height, numEvents uint64) types.EventData { + eventData, err := db.GetBlock(chainID, height) require.NoError(t, err) - require.Equal(t, txeAdd.Height, eventData.BlockHeight) + require.Equal(t, height, eventData.BlockHeight) require.Equal(t, 3, len(eventData.Tables)) - // Check the row was deleted - tblData = eventData.Tables[eventColumnName] - require.Equal(t, 0, len(tblData)) + // Check the number of rows + tblData := eventData.Tables[column] + require.Equal(t, numEvents, uint64(len(tblData))) + + if numEvents > 0 && len(tblData) > 0 { + // Expect data in the EventTest table + require.Equal(t, "LogEvent", tblData[0].RowData["_eventtype"].(string)) + require.Equal(t, "UpdateTestEvents", tblData[0].RowData["_eventname"].(string)) + for i := 0; i < len(tblData); i++ { + require.Equal(t, fmt.Sprintf("%d", i), tblData[i].RowData["_eventindex"].(string)) + } + } else if numEvents > 0 && len(tblData) == 0 { + require.Failf(t, "no events found", "expected %d", numEvents) + } + + return eventData } func testResume(t *testing.T, cfg *config.VentConfig) { diff --git a/vent/sqldb/adapters/postgres_adapter.go b/vent/sqldb/adapters/postgres_adapter.go index a2a8065ae..7599f958e 100644 --- a/vent/sqldb/adapters/postgres_adapter.go +++ b/vent/sqldb/adapters/postgres_adapter.go @@ -314,9 +314,9 @@ func (pa *PostgresAdapter) UpsertQuery(table *types.SQLTable, row types.EventDat columns += secureColumn insValues += "$" + Cleanf("%d", i) - //find data for column + // find data for column if value, ok := row.RowData[column.Name]; ok { - //load hash value + // load hash value if column.Name == pa.Columns.TxHash { txHash = value } @@ -327,7 +327,7 @@ func (pa *PostgresAdapter) UpsertQuery(table *types.SQLTable, row types.EventDat values += fmt.Sprint(value) if !column.Primary { - // column is no PK + // column is not PK // add to update list // INSERT........... 
ON CONFLICT......DO UPDATE (*updValues) if updValues != "" { diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index 44ca13741..dff57a1b5 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -282,6 +282,7 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.ChainID, Field: types.ChainIDLabel, Type: types.EventFieldTypeString, + Primary: true, }, { ColumnName: columns.Height, @@ -292,11 +293,13 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.Index, Field: types.BlockIndexLabel, Type: types.EventFieldTypeUInt, + Primary: true, }, { ColumnName: columns.EventIndex, Field: types.EventIndexLabel, Type: types.EventFieldTypeUInt, + Primary: true, }, { ColumnName: columns.TxHash, diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index 1c536ac20..fbc0b7f4f 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -326,6 +326,8 @@ func TestWithNoPrimaryKey(t *testing.T) { require.NoError(t, err, "projection with primary key should be allowed") for _, c := range projection.Tables[tableName].Columns { - require.Equal(t, c.Name == "name", c.Primary) + if c.Name == "name" { + require.Equal(t, true, c.Primary) + } } } diff --git a/vent/test/EventsTest.sol b/vent/test/EventsTest.sol index 58f66f7cc..c09c1c13c 100644 --- a/vent/test/EventsTest.sol +++ b/vent/test/EventsTest.sol @@ -34,6 +34,18 @@ contract EventsTest { emit UpdateTestEvents(prefix32(_name), TABLE_EVENTS_TEST, prefix32(_description)); } + function addThings(string calldata _name, string calldata _description) external { + Thing storage thing = things[_name]; + if (!thing.exists) { + length++; + } + thing.name = _name; + thing.description = _description; + thing.exists = true; + emit UpdateTestEvents(prefix32(_name), TABLE_EVENTS_TEST, prefix32(_description)); + emit UpdateTestEvents(prefix32(_name), TABLE_EVENTS_TEST, prefix32(_description)); + } + function removeThing(string calldata _name) external { Thing storage thing = things[_name]; if (thing.exists) { @@ -43,6 +55,16 @@ contract EventsTest { } } + function removeThings(string calldata _name) external { + Thing storage thing = things[_name]; + if (thing.exists) { + length--; + delete things[_name]; + emit DeleteTestEvents(prefix32(_name), TABLE_EVENTS_TEST, 0); + emit DeleteTestEvents(prefix32(_name), TABLE_EVENTS_TEST, 0); + } + } + function count() external view returns (int size) { return length; } diff --git a/vent/test/EventsTest.sol.go b/vent/test/EventsTest.sol.go index 9edb1450e..37b406173 100644 --- a/vent/test/EventsTest.sol.go +++ b/vent/test/EventsTest.sol.go @@ -2,6 +2,6 @@ package test import hex "github.com/tmthrgd/go-hex" -var Bytecode_EventsTest = 
hex.MustDecodeString("608060405234801561001057600080fd5b506107a8806100206000396000f3fe608060405234801561001057600080fd5b5060043610610069576000357c01000000000000000000000000000000000000000000000000000000009004806306661abd1461006e578063882551991461008c578063c1de9c6d1461017e578063dc667a62146101f7575b600080fd5b6100766102c5565b6040518082815260200191505060405180910390f35b610103600480360360208110156100a257600080fd5b81019080803590602001906401000000008111156100bf57600080fd5b8201836020820111156100d157600080fd5b803590602001918460018302840111640100000000831117156100f357600080fd5b90919293919293905050506102ce565b6040518080602001828103825283818151815260200191508051906020019080838360005b83811015610143578082015181840152602081019050610128565b50505050905090810190601f1680156101705780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101f56004803603602081101561019457600080fd5b81019080803590602001906401000000008111156101b157600080fd5b8201836020820111156101c357600080fd5b803590602001918460018302840111640100000000831117156101e557600080fd5b909192939192939050505061039b565b005b6102c36004803603604081101561020d57600080fd5b810190808035906020019064010000000081111561022a57600080fd5b82018360208201111561023c57600080fd5b8035906020019184600183028401116401000000008311171561025e57600080fd5b90919293919293908035906020019064010000000081111561027f57600080fd5b82018360208201111561029157600080fd5b803590602001918460018302840111640100000000831117156102b357600080fd5b90919293919293905050506104f9565b005b60008054905090565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561038e5780601f106103635761010080835404028352916020019161038e565b820191906000526020600020905b81548152906001019060200180831161037157829003601f168201915b5050505050905092915050565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16156104f45760008081548092919060019003919050555060018383604051808383808284378083019250505092505050908152602001604051809103902060008082016000610427919061068f565b600182016000610437919061068f565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006104bb84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16151561054e5760008081548092919060010191905055505b84848260000191906105619291906106d7565b5082828260010191906105759291906106d7565b5060018160020160006101000a81548160ff0219169083151502179055506105e083838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f544553545f4556454e545300000000000000000000000000000000000000000061064e87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f106106b557506106d4565b601f01602090049060005260206000209
08101906106d39190610757565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061071857803560ff1916838001178555610746565b82800160010185558215610746579182015b8281111561074557823582559160200191906001019061072a565b5b5090506107539190610757565b5090565b61077991905b8082111561077557600081600090555060010161075d565b5090565b9056fea165627a7a72305820b80269f52800df97ea8d0eaa20a8d8541f670004f26b92c1fad10c4bd238a6020029") -var DeployedBytecode_EventsTest = hex.MustDecodeString("608060405234801561001057600080fd5b5060043610610069576000357c01000000000000000000000000000000000000000000000000000000009004806306661abd1461006e578063882551991461008c578063c1de9c6d1461017e578063dc667a62146101f7575b600080fd5b6100766102c5565b6040518082815260200191505060405180910390f35b610103600480360360208110156100a257600080fd5b81019080803590602001906401000000008111156100bf57600080fd5b8201836020820111156100d157600080fd5b803590602001918460018302840111640100000000831117156100f357600080fd5b90919293919293905050506102ce565b6040518080602001828103825283818151815260200191508051906020019080838360005b83811015610143578082015181840152602081019050610128565b50505050905090810190601f1680156101705780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101f56004803603602081101561019457600080fd5b81019080803590602001906401000000008111156101b157600080fd5b8201836020820111156101c357600080fd5b803590602001918460018302840111640100000000831117156101e557600080fd5b909192939192939050505061039b565b005b6102c36004803603604081101561020d57600080fd5b810190808035906020019064010000000081111561022a57600080fd5b82018360208201111561023c57600080fd5b8035906020019184600183028401116401000000008311171561025e57600080fd5b90919293919293908035906020019064010000000081111561027f57600080fd5b82018360208201111561029157600080fd5b803590602001918460018302840111640100000000831117156102b357600080fd5b90919293919293905050506104f9565b005b60008054905090565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561038e5780601f106103635761010080835404028352916020019161038e565b820191906000526020600020905b81548152906001019060200180831161037157829003601f168201915b5050505050905092915050565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16156104f45760008081548092919060019003919050555060018383604051808383808284378083019250505092505050908152602001604051809103902060008082016000610427919061068f565b600182016000610437919061068f565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006104bb84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16151561054e5760008081548092919060010191905055505b84848260000191906105619291906106d7565b5082828260010191906105759291906106d7565b5060018160020160006101000a81548160ff0219169083151502179055506105e083838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f544553545f4556454e54530000000000
0000000000000000000000000000000061064e87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610681565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f106106b557506106d4565b601f0160209004906000526020600020908101906106d39190610757565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061071857803560ff1916838001178555610746565b82800160010185558215610746579182015b8281111561074557823582559160200191906001019061072a565b5b5090506107539190610757565b5090565b61077991905b8082111561077557600081600090555060010161075d565b5090565b9056fea165627a7a72305820b80269f52800df97ea8d0eaa20a8d8541f670004f26b92c1fad10c4bd238a6020029") -var Abi_EventsTest = []byte(`[{"constant":true,"inputs":[],"name":"count","outputs":[{"name":"size","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"_name","type":"string"}],"name":"description","outputs":[{"name":"_description","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"removeThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_description","type":"string"}],"name":"addThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":true,"name":"description","type":"bytes32"}],"name":"UpdateTestEvents","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":false,"name":"__DELETE__","type":"int256"}],"name":"DeleteTestEvents","type":"event"}]`) +var Bytecode_EventsTest = 
hex.MustDecodeString("608060405234801561001057600080fd5b50610d60806100206000396000f3fe608060405234801561001057600080fd5b50600436106100625760003560e01c806306661abd146100675780632cda56b01461008557806388255199146100fe578063c1de9c6d146101f0578063d4a0c25314610269578063dc667a6214610337575b600080fd5b61006f610405565b6040518082815260200191505060405180910390f35b6100fc6004803603602081101561009b57600080fd5b81019080803590602001906401000000008111156100b857600080fd5b8201836020820111156100ca57600080fd5b803590602001918460018302840111640100000000831117156100ec57600080fd5b909192939192939050505061040e565b005b6101756004803603602081101561011457600080fd5b810190808035906020019064010000000081111561013157600080fd5b82018360208201111561014357600080fd5b8035906020019184600183028401116401000000008311171561016557600080fd5b9091929391929390505050610612565b6040518080602001828103825283818151815260200191508051906020019080838360005b838110156101b557808201518184015260208101905061019a565b50505050905090810190601f1680156101e25780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6102676004803603602081101561020657600080fd5b810190808035906020019064010000000081111561022357600080fd5b82018360208201111561023557600080fd5b8035906020019184600183028401116401000000008311171561025757600080fd5b90919293919293905050506106df565b005b6103356004803603604081101561027f57600080fd5b810190808035906020019064010000000081111561029c57600080fd5b8201836020820111156102ae57600080fd5b803590602001918460018302840111640100000000831117156102d057600080fd5b9091929391929390803590602001906401000000008111156102f157600080fd5b82018360208201111561030357600080fd5b8035906020019184600183028401116401000000008311171561032557600080fd5b909192939192939050505061083d565b005b6104036004803603604081101561034d57600080fd5b810190808035906020019064010000000081111561036a57600080fd5b82018360208201111561037c57600080fd5b8035906020019184600183028401116401000000008311171561039e57600080fd5b9091929391929390803590602001906401000000008111156103bf57600080fd5b8201836020820111156103d157600080fd5b803590602001918460018302840111640100000000831117156103f357600080fd5b9091929391929390505050610aaa565b005b60008054905090565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff161561060d576000808154809291906001900391905055506001838360405180838380828437808301925050509250505090815260200160405180910390206000808201600061049a9190610c3e565b6001820160006104aa9190610c3e565b6002820160006101000a81549060ff021916905550507f544553545f4556454e545300000000000000000000000000000000000000000061052e84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a37f544553545f4556454e54530000000000000000000000000000000000000000006105d484848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156106d25780601f106106a7576101008083540402835291602001916106d2565b820191906000526020600020905b815481529060010190602001808311610
6b557829003601f168201915b5050505050905092915050565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff1615610838576000808154809291906001900391905055506001838360405180838380828437808301925050509250505090815260200160405180910390206000808201600061076b9190610c3e565b60018201600061077b9190610c3e565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006107ff84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff166108905760008081548092919060010191905055505b84848260000191906108a3929190610c86565b5082828260010191906108b7929190610c86565b5060018160020160006101000a81548160ff02191690831515021790555061092283838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e545300000000000000000000000000000000000000000061099087878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a4610a0983838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e5453000000000000000000000000000000000000000000610a7787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16610afd5760008081548092919060010191905055505b8484826000019190610b10929190610c86565b508282826001019190610b24929190610c86565b5060018160020160006101000a81548160ff021916908315150217905550610b8f83838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e5453000000000000000000000000000000000000000000610bfd87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f10610c645750610c83565b601f016020900490600052602060002090810190610c829190610d06565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f10610cc757803560ff1916838001178555610cf5565b82800160010185558215610cf5579182015b82811115610cf4578235825591602001919060010190610cd9565b5b509050610d029190610d06565b5090565b610d2891905b80821115610d24576000816000905550600101610d0c565b5090565b9056fea265627a7a72305820429a1b2031fb00f9be80be95e568f9aabac617b6b634c5d23aa72693e9ec1cbb64736f6c634300050a0032") +var DeployedBytecode_EventsTest = 
hex.MustDecodeString("608060405234801561001057600080fd5b50600436106100625760003560e01c806306661abd146100675780632cda56b01461008557806388255199146100fe578063c1de9c6d146101f0578063d4a0c25314610269578063dc667a6214610337575b600080fd5b61006f610405565b6040518082815260200191505060405180910390f35b6100fc6004803603602081101561009b57600080fd5b81019080803590602001906401000000008111156100b857600080fd5b8201836020820111156100ca57600080fd5b803590602001918460018302840111640100000000831117156100ec57600080fd5b909192939192939050505061040e565b005b6101756004803603602081101561011457600080fd5b810190808035906020019064010000000081111561013157600080fd5b82018360208201111561014357600080fd5b8035906020019184600183028401116401000000008311171561016557600080fd5b9091929391929390505050610612565b6040518080602001828103825283818151815260200191508051906020019080838360005b838110156101b557808201518184015260208101905061019a565b50505050905090810190601f1680156101e25780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6102676004803603602081101561020657600080fd5b810190808035906020019064010000000081111561022357600080fd5b82018360208201111561023557600080fd5b8035906020019184600183028401116401000000008311171561025757600080fd5b90919293919293905050506106df565b005b6103356004803603604081101561027f57600080fd5b810190808035906020019064010000000081111561029c57600080fd5b8201836020820111156102ae57600080fd5b803590602001918460018302840111640100000000831117156102d057600080fd5b9091929391929390803590602001906401000000008111156102f157600080fd5b82018360208201111561030357600080fd5b8035906020019184600183028401116401000000008311171561032557600080fd5b909192939192939050505061083d565b005b6104036004803603604081101561034d57600080fd5b810190808035906020019064010000000081111561036a57600080fd5b82018360208201111561037c57600080fd5b8035906020019184600183028401116401000000008311171561039e57600080fd5b9091929391929390803590602001906401000000008111156103bf57600080fd5b8201836020820111156103d157600080fd5b803590602001918460018302840111640100000000831117156103f357600080fd5b9091929391929390505050610aaa565b005b60008054905090565b600060018383604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff161561060d576000808154809291906001900391905055506001838360405180838380828437808301925050509250505090815260200160405180910390206000808201600061049a9190610c3e565b6001820160006104aa9190610c3e565b6002820160006101000a81549060ff021916905550507f544553545f4556454e545300000000000000000000000000000000000000000061052e84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a37f544553545f4556454e54530000000000000000000000000000000000000000006105d484848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b60606001838360405180838380828437808301925050509250505090815260200160405180910390206001018054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156106d25780601f106106a7576101008083540402835291602001916106d2565b820191906000526020600020905b8154815290600101906020018083116106b557829003601f168201915b5050505050905092915050565b6000600183836
04051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff1615610838576000808154809291906001900391905055506001838360405180838380828437808301925050509250505090815260200160405180910390206000808201600061076b9190610c3e565b60018201600061077b9190610c3e565b6002820160006101000a81549060ff021916905550507f544553545f4556454e54530000000000000000000000000000000000000000006107ff84848080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7fb64cbe0d18263bbda93ed76420a5e44f12291ff1187828c94336f06dcb61017860006040518082815260200191505060405180910390a35b505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff166108905760008081548092919060010191905055505b84848260000191906108a3929190610c86565b5082828260010191906108b7929190610c86565b5060018160020160006101000a81548160ff02191690831515021790555061092283838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e545300000000000000000000000000000000000000000061099087878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a4610a0983838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e5453000000000000000000000000000000000000000000610a7787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060018585604051808383808284378083019250505092505050908152602001604051809103902090508060020160009054906101000a900460ff16610afd5760008081548092919060010191905055505b8484826000019190610b10929190610c86565b508282826001019190610b24929190610c86565b5060018160020160006101000a81548160ff021916908315150217905550610b8f83838080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f544553545f4556454e5453000000000000000000000000000000000000000000610bfd87878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050610c30565b7f6f50070fb9de82a81ea57052fbdf4459d17a1a9d68083b6f326b47bf17441e2960405160405180910390a45050505050565b600060208201519050919050565b50805460018160011615610100020316600290046000825580601f10610c645750610c83565b601f016020900490600052602060002090810190610c829190610d06565b5b50565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f10610cc757803560ff1916838001178555610cf5565b82800160010185558215610cf5579182015b82811115610cf4578235825591602001919060010190610cd9565b5b509050610d029190610d06565b5090565b610d2891905b80821115610d24576000816000905550600101610d0c565b5090565b9056fea265627a7a72305820429a1b2031fb00f9be80be95e568f9aabac617b6b634c5d23aa72693e9ec1cbb64736f6c634300050a0032") +var Abi_EventsTest = 
[]byte(`[{"constant":true,"inputs":[],"name":"count","outputs":[{"name":"size","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"removeThings","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"_name","type":"string"}],"name":"description","outputs":[{"name":"_description","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"removeThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_description","type":"string"}],"name":"addThings","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_description","type":"string"}],"name":"addThing","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":true,"name":"description","type":"bytes32"}],"name":"UpdateTestEvents","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":false,"name":"__DELETE__","type":"int256"}],"name":"DeleteTestEvents","type":"event"}]`) diff --git a/vent/test/events.go b/vent/test/events.go index 51148226e..34955d697 100644 --- a/vent/test/events.go +++ b/vent/test/events.go @@ -46,7 +46,11 @@ func CreateContract(t testing.TB, cli rpctransact.TransactClient, inputAddress c func CallRemoveEvent(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAddress crypto.Address, name string) *exec.TxExecution { return Call(t, cli, inputAddress, contractAddress, "removeThing", name) +} +func CallRemoveEvents(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAddress crypto.Address, + name string) *exec.TxExecution { + return Call(t, cli, inputAddress, contractAddress, "removeThings", name) } func CallAddEvent(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAddress crypto.Address, @@ -54,6 +58,11 @@ func CallAddEvent(t testing.TB, cli rpctransact.TransactClient, inputAddress, co return Call(t, cli, inputAddress, contractAddress, "addThing", name, description) } +func CallAddEvents(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAddress crypto.Address, + name, description string) *exec.TxExecution { + return Call(t, cli, inputAddress, contractAddress, "addThings", name, description) +} + func Call(t testing.TB, cli rpctransact.TransactClient, inputAddress, contractAddress crypto.Address, functionName string, args ...interface{}) *exec.TxExecution { t.Helper() From 1935e6022fe36308be065775e682221f615eac70 Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 31 Jul 2019 17:24:15 +0100 Subject: [PATCH 59/70] split log and view specs Signed-off-by: Gregory Hill --- vent/service/consumer_postgres_test.go | 3 +- vent/service/consumer_test.go | 128 ++++++++++-------- vent/service/server_test.go | 2 +- vent/sqlsol/projection.go | 3 - vent/sqlsol/projection_test.go | 4 +- vent/sqlsol/spec_loader_test.go | 2 +- vent/test/sqlsol_log.json | 48 +++++++ .../{sqlsol_example.json => sqlsol_view.json} | 0 vent/test/test.sh | 2 +- 9 files changed, 123 insertions(+), 69 deletions(-) create 
mode 100644 vent/test/sqlsol_log.json rename vent/test/{sqlsol_example.json => sqlsol_view.json} (100%) diff --git a/vent/service/consumer_postgres_test.go b/vent/service/consumer_postgres_test.go index 9ad3b5362..6d7a3870f 100644 --- a/vent/service/consumer_postgres_test.go +++ b/vent/service/consumer_postgres_test.go @@ -67,7 +67,7 @@ func TestPostgresConsumer(t *testing.T) { require.NoError(t, err) }) - // These are defined n sqlsol_example.json + // These are defined in sqlsol_view.json err := listener.Listen("meta") require.NoError(t, err) @@ -101,6 +101,7 @@ func TestPostgresConsumer(t *testing.T) { } } }() + resolveSpec(cfg, testViewSpec) runConsumer(t, cfg) // Give events a chance diff --git a/vent/service/consumer_test.go b/vent/service/consumer_test.go index 1678f252f..80437cf01 100644 --- a/vent/service/consumer_test.go +++ b/vent/service/consumer_test.go @@ -23,76 +23,93 @@ import ( "github.com/stretchr/testify/require" ) +const ( + testViewSpec = "sqlsol_view.json" + testLogSpec = "sqlsol_log.json" +) + var tables = types.DefaultSQLTableNames func testConsumer(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpctransact.TransactClient, inputAddress crypto.Address) { create := test.CreateContract(t, tcli, inputAddress) + eventColumnName := "EventTest" - // generate events - name := "TestEvent1" - description := "Description of TestEvent1" - txeA := test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + t.Run("view mode", func(t *testing.T) { + // create test db + db, closeDB := test.NewTestDB(t, cfg) + defer closeDB() + resolveSpec(cfg, testViewSpec) - name = "TestEvent2" - description = "Description of TestEvent2" - test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + // generate events + name := "TestEvent1" + description := "Description of TestEvent1" + txeA := test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - name = "TestEvent3" - description = "Description of TestEvent3" - test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + name = "TestEvent2" + description = "Description of TestEvent2" + test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - name = "TestEvent4" - description = "Description of TestEvent4" - txeB := test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + name = "TestEvent3" + description = "Description of TestEvent3" + test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - // create test db - db, closeDB := test.NewTestDB(t, cfg) - defer closeDB() + name = "TestEvent4" + description = "Description of TestEvent4" + txeB := test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - // Run the consumer - runConsumer(t, cfg) + // Run the consumer + runConsumer(t, cfg) - // test data stored in database for two different block ids - eventColumnName := "EventTest" - ensureEvents(t, db, chainID, eventColumnName, txeA.Height, 1) - eventData := ensureEvents(t, db, chainID, eventColumnName, txeB.Height, 1) + // test data stored in database for two different block ids + ensureEvents(t, db, chainID, eventColumnName, txeA.Height, 1) + eventData := ensureEvents(t, db, chainID, eventColumnName, txeB.Height, 1) - // block & tx raw data also persisted - if cfg.SpecOpt&sqlsol.Block > 0 { - tblData := eventData.Tables[tables.Block] - require.Equal(t, 1, 
len(tblData)) + // block & tx raw data also persisted + if cfg.SpecOpt&sqlsol.Block > 0 { + tblData := eventData.Tables[tables.Block] + require.Equal(t, 1, len(tblData)) - } - if cfg.SpecOpt&sqlsol.Tx > 0 { - tblData := eventData.Tables[tables.Tx] - require.Equal(t, 1, len(tblData)) - require.Equal(t, txeB.TxHash.String(), tblData[0].RowData["_txhash"].(string)) - } + } + if cfg.SpecOpt&sqlsol.Tx > 0 { + tblData := eventData.Tables[tables.Tx] + require.Equal(t, 1, len(tblData)) + require.Equal(t, txeB.TxHash.String(), tblData[0].RowData["_txhash"].(string)) + } - name = "TestEvent5" - description = "Description of TestEvent5" - txeC := test.CallAddEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - runConsumer(t, cfg) - ensureEvents(t, db, chainID, eventColumnName, txeC.Height, 2) + // Restore + err := db.RestoreDB(time.Time{}, "RESTORED") + require.NoError(t, err) + }) + + t.Run("log mode", func(t *testing.T) { + db, closeDB := test.NewTestDB(t, cfg) + defer closeDB() + resolveSpec(cfg, testLogSpec) + + name := "TestEvent5" + description := "Description of TestEvent5" + txeC := test.CallAddEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) + runConsumer(t, cfg) + ensureEvents(t, db, chainID, eventColumnName, txeC.Height, 2) + }) - // Restore - err := db.RestoreDB(time.Time{}, "RESTORED") - require.NoError(t, err) } func testDeleteEvent(t *testing.T, chainID string, cfg *config.VentConfig, tcli rpctransact.TransactClient, inputAddress crypto.Address) { create := test.CreateContract(t, tcli, inputAddress) + eventColumnName := "EventTest" + name := "TestEventForDeletion" + description := "to be deleted" + + // test data stored in database for two different block ids + // create test db db, closeDB := test.NewTestDB(t, cfg) defer closeDB() - - // test data stored in database for two different block ids - eventColumnName := "EventTest" + resolveSpec(cfg, testViewSpec) // Add a test event - name := "TestEventForDeletion" - description := "to be deleted" txeAdd := test.CallAddEvent(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) // Spin the consumer @@ -106,14 +123,7 @@ func testDeleteEvent(t *testing.T, chainID string, cfg *config.VentConfig, tcli runConsumer(t, cfg) ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 0) - // do the same as above but for duplicate events - txeAdd = test.CallAddEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name, description) - runConsumer(t, cfg) - ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 2) - - test.CallRemoveEvents(t, tcli, inputAddress, create.Receipt.ContractAddress, name) - runConsumer(t, cfg) - ensureEvents(t, db, chainID, eventColumnName, txeAdd.Height, 0) + // delete not allowed on log mode } func ensureEvents(t *testing.T, db *sqldb.SQLDB, chainID, column string, height, numEvents uint64) types.EventData { @@ -143,6 +153,7 @@ func ensureEvents(t *testing.T, db *sqldb.SQLDB, chainID, column string, height, func testResume(t *testing.T, cfg *config.VentConfig) { _, closeDB := test.NewTestDB(t, cfg) defer closeDB() + resolveSpec(cfg, testViewSpec) numRestarts := 6 // Add some pseudo-random timings @@ -177,6 +188,7 @@ func testInvalidUTF8(t *testing.T, cfg *config.VentConfig, tcli rpctransact.Tran // create test db _, closeDB := test.NewTestDB(t, cfg) defer closeDB() + resolveSpec(cfg, testViewSpec) // Run the consumer with this event - this used to create an error on UPSERT runConsumer(t, cfg) @@ -186,22 +198,20 @@ func 
testInvalidUTF8(t *testing.T, cfg *config.VentConfig, tcli rpctransact.Tran //require.Contains(t, err.Error(), "pq: invalid byte sequence for encoding \"UTF8\": 0xf3 0x6e") } -func newConsumer(t *testing.T, cfg *config.VentConfig) *service.Consumer { +func resolveSpec(cfg *config.VentConfig, specFile string) { // Resolve relative path to test dir _, testFile, _, _ := runtime.Caller(0) testDir := path.Join(path.Dir(testFile), "..", "test") - cfg.SpecFileOrDirs = []string{path.Join(testDir, "sqlsol_example.json")} + cfg.SpecFileOrDirs = []string{path.Join(testDir, specFile)} cfg.AbiFileOrDirs = []string{path.Join(testDir, "EventsTest.abi")} cfg.SpecOpt = sqlsol.BlockTx - - ch := make(chan types.EventData, 100) - return service.NewConsumer(cfg, logging.NewNoopLogger(), ch) } // Run consumer to listen to events func runConsumer(t *testing.T, cfg *config.VentConfig) chan types.EventData { - consumer := newConsumer(t, cfg) + ch := make(chan types.EventData, 100) + consumer := service.NewConsumer(cfg, logging.NewNoopLogger(), ch) projection, err := sqlsol.SpecLoader(cfg.SpecFileOrDirs, cfg.SpecOpt) require.NoError(t, err) diff --git a/vent/service/server_test.go b/vent/service/server_test.go index 73e3f2698..e50df96e4 100644 --- a/vent/service/server_test.go +++ b/vent/service/server_test.go @@ -36,7 +36,7 @@ func TestServer(t *testing.T) { _, closeDB := test.NewTestDB(t, cfg) defer closeDB() - cfg.SpecFileOrDirs = []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/sqlsol_example.json"} + cfg.SpecFileOrDirs = []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/sqlsol_view.json"} cfg.AbiFileOrDirs = []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/EventsTest.abi"} cfg.GRPCAddr = kern.GRPCListenAddress().String() diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index dff57a1b5..44ca13741 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -282,7 +282,6 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.ChainID, Field: types.ChainIDLabel, Type: types.EventFieldTypeString, - Primary: true, }, { ColumnName: columns.Height, @@ -293,13 +292,11 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.Index, Field: types.BlockIndexLabel, Type: types.EventFieldTypeUInt, - Primary: true, }, { ColumnName: columns.EventIndex, Field: types.EventIndexLabel, Type: types.EventFieldTypeUInt, - Primary: true, }, { ColumnName: columns.TxHash, diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index fbc0b7f4f..1c536ac20 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -326,8 +326,6 @@ func TestWithNoPrimaryKey(t *testing.T) { require.NoError(t, err, "projection with primary key should be allowed") for _, c := range projection.Tables[tableName].Columns { - if c.Name == "name" { - require.Equal(t, true, c.Primary) - } + require.Equal(t, c.Name == "name", c.Primary) } } diff --git a/vent/sqlsol/spec_loader_test.go b/vent/sqlsol/spec_loader_test.go index c77a0fb81..77d0367ed 100644 --- a/vent/sqlsol/spec_loader_test.go +++ b/vent/sqlsol/spec_loader_test.go @@ -12,7 +12,7 @@ import ( var tables = types.DefaultSQLTableNames func TestSpecLoader(t *testing.T) { - specFile := []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/sqlsol_example.json"} + specFile := []string{os.Getenv("GOPATH") + "/src/github.com/hyperledger/burrow/vent/test/sqlsol_view.json"} 
t.Run("successfully add block and transaction tables to event structures", func(t *testing.T) { projection, err := sqlsol.SpecLoader(specFile, sqlsol.BlockTx) require.NoError(t, err) diff --git a/vent/test/sqlsol_log.json b/vent/test/sqlsol_log.json new file mode 100644 index 000000000..bb11812d9 --- /dev/null +++ b/vent/test/sqlsol_log.json @@ -0,0 +1,48 @@ +[ + { + "TableName": "EventTest", + "Filter": "EventType = 'LogEvent'", + "FieldMappings": [ + { + "Field": "key", + "ColumnName": "testkey", + "Type": "bytes32", + "Primary": false, + "Notify": ["keyed_meta"] + }, + { + "Field": "name", + "ColumnName": "testname", + "Type": "bytes32", + "Primary": false, + "BytesToString": true, + "Notify": ["meta", "keyed_meta"] + }, + { + "Field": "description", + "ColumnName": "testdescription", + "Type": "bytes32", + "Primary": false, + "Notify": ["meta", "keyed_meta"] + } + ] + }, + { + "TableName": "UserAccounts", + "Filter": "LOG1 = 'UserAccounts'", + "FieldMappings": [ + { + "Field": "userAddress", + "ColumnName": "address", + "Type": "address", + "Primary": false + }, + { + "Field": "userName", + "ColumnName": "username", + "Type": "string", + "Primary": false + } + ] + } +] diff --git a/vent/test/sqlsol_example.json b/vent/test/sqlsol_view.json similarity index 100% rename from vent/test/sqlsol_example.json rename to vent/test/sqlsol_view.json diff --git a/vent/test/test.sh b/vent/test/test.sh index 4aa3653bd..30bb06989 100755 --- a/vent/test/test.sh +++ b/vent/test/test.sh @@ -8,7 +8,7 @@ vent_test_dir="$REPO/vent/test" [[ ! -f burrow.toml ]] && burrow spec -f1 | burrow configure -s- > burrow.toml && rm -rf .burrow burrow start -v0 &> burrow.log & sleep 2s -burrow vent start --db-block --abi "$vent_test_dir/EventsTest.abi" --spec "$vent_test_dir/sqlsol_example.json" +burrow vent start --db-block --abi "$vent_test_dir/EventsTest.abi" --spec "$vent_test_dir/sqlsol_view.json" # Now: # psql -h 127.0.0.1 -p 5432 -U postgres # LISTEN height; From f10a306df71728afdaa8487ffca1fe57460c84fe Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 31 Jul 2019 14:36:04 +0100 Subject: [PATCH 60/70] remove duplicate height field from test spec Signed-off-by: Gregory Hill --- vent/sqlsol/projection.go | 1 + vent/test/sqlsol_view.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index 44ca13741..3aa461dcc 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -287,6 +287,7 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.Height, Field: types.BlockHeightLabel, Type: types.EventFieldTypeUInt, + Primary: true, }, { ColumnName: columns.Index, diff --git a/vent/test/sqlsol_view.json b/vent/test/sqlsol_view.json index 24a0c119b..1c116410a 100644 --- a/vent/test/sqlsol_view.json +++ b/vent/test/sqlsol_view.json @@ -32,12 +32,6 @@ "TableName": "UserAccounts", "Filter": "LOG1 = 'UserAccounts'", "FieldMappings": [ - { - "Field": "height", - "ColumnName": "_height", - "Type": "uint", - "Notify": ["meta", "keyed_meta"] - }, { "Field": "userAddress", "ColumnName": "address", From 4a18b03db8e7be51efc5e2ade344fbfb6102e7c7 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Wed, 31 Jul 2019 15:27:59 +0100 Subject: [PATCH 61/70] Fix indices Signed-off-by: Silas Davis --- vent/service/consumer.go | 13 ++++++++----- vent/service/decoder.go | 16 ++++++++-------- vent/service/rowbuilder.go | 8 ++++---- vent/sqlsol/projection.go | 9 ++++----- vent/sqlsol/projection_test.go | 8 ++++---- 
vent/sqlsol/spec_loader.go | 2 +- vent/types/sql_table.go | 8 ++++---- 7 files changed, 33 insertions(+), 31 deletions(-) diff --git a/vent/service/consumer.go b/vent/service/consumer.go index ab32620b5..7687729a7 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -261,11 +261,14 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiProvider // so check that condition to filter them if txe.Exception == nil { - origin := txe.Origin - if origin == nil { - origin = &exec.Origin{ + txOrigin := txe.Origin + if txOrigin == nil { + // This is an original transaction from the current chain so we build its origin from context + txOrigin = &exec.Origin{ + Time: blockExecution.GetHeader().GetTime(), ChainID: c.Burrow.ChainID, - Height: txe.Height, + Height: txe.GetHeight(), + Index: txe.GetIndex(), } } @@ -289,7 +292,7 @@ func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiProvider "filter", eventClass.Filter) // unpack, decode & build event data - eventData, err := buildEventData(projection, eventClass, event, origin, abiProvider, c.Log) + eventData, err := buildEventData(projection, eventClass, event, txOrigin, abiProvider, c.Log) if err != nil { return errors.Wrapf(err, "Error building event data") } diff --git a/vent/service/decoder.go b/vent/service/decoder.go index 92e2f842e..91ad9d83a 100644 --- a/vent/service/decoder.go +++ b/vent/service/decoder.go @@ -1,8 +1,8 @@ package service import ( - "fmt" "math/big" + "strconv" "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/execution/evm/abi" @@ -12,18 +12,18 @@ import ( ) // decodeEvent unpacks & decodes event data -func decodeEvent(header *exec.Header, log *exec.LogEvent, origin *exec.Origin, evAbi *abi.EventSpec) (map[string]interface{}, error) { +func decodeEvent(eventHeader *exec.Header, log *exec.LogEvent, txOrigin *exec.Origin, evAbi *abi.EventSpec) (map[string]interface{}, error) { // to prepare decoded data and map to event item name data := make(map[string]interface{}) // decode header to get context data for each event data[types.EventNameLabel] = evAbi.Name - data[types.ChainIDLabel] = origin.ChainID - data[types.BlockHeightLabel] = fmt.Sprintf("%v", origin.GetHeight()) - data[types.BlockIndexLabel] = fmt.Sprintf("%v", origin.GetIndex()) - data[types.EventIndexLabel] = fmt.Sprintf("%v", header.Index) - data[types.EventTypeLabel] = header.GetEventType().String() - data[types.TxTxHashLabel] = header.TxHash.String() + data[types.ChainIDLabel] = txOrigin.ChainID + data[types.BlockHeightLabel] = strconv.FormatUint(txOrigin.GetHeight(), 10) + data[types.TxIndexLabel] = strconv.FormatUint(txOrigin.GetIndex(), 10) + data[types.EventIndexLabel] = strconv.FormatUint(eventHeader.GetIndex(), 10) + data[types.EventTypeLabel] = eventHeader.GetEventType().String() + data[types.TxTxHashLabel] = eventHeader.TxHash.String() // build expected interface type array to get log event values unpackedData := abi.GetPackingTypes(evAbi.Inputs) diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index 3dba2dcf4..6a7774315 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -16,8 +16,8 @@ import ( ) // buildEventData builds event data from transactions -func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, origin *exec.Origin, abiProvider *AbiProvider, - l *logging.Logger) (types.EventDataRow, error) { +func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event 
*exec.Event, + txOrigin *exec.Origin, abiProvider *AbiProvider, l *logging.Logger) (types.EventDataRow, error) { // a fresh new row to store column/value data row := make(map[string]interface{}) @@ -36,7 +36,7 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, } // decode event data using the provided abi specification - decodedData, err := decodeEvent(eventHeader, eventLog, origin, evAbi) + decodedData, err := decodeEvent(eventHeader, eventLog, txOrigin, evAbi) if err != nil { return types.EventDataRow{}, errors.Wrapf(err, "Error decoding event (filter: %s)", eventClass.Filter) } @@ -134,7 +134,7 @@ func buildTxData(txe *exec.TxExecution) (types.EventDataRow, error) { RowData: map[string]interface{}{ columns.Height: txe.Height, columns.TxHash: txe.TxHash.String(), - columns.Index: txe.Index, + columns.TxIndex: txe.Index, columns.TxType: txe.TxType.String(), columns.Envelope: string(envelope), columns.Events: string(events), diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index 3aa461dcc..2e11b4f65 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -287,11 +287,10 @@ func getGlobalFieldMappings() []*types.EventFieldMapping { ColumnName: columns.Height, Field: types.BlockHeightLabel, Type: types.EventFieldTypeUInt, - Primary: true, }, { - ColumnName: columns.Index, - Field: types.BlockIndexLabel, + ColumnName: columns.TxIndex, + Field: types.TxIndexLabel, Type: types.EventFieldTypeUInt, }, { @@ -334,8 +333,8 @@ func getGlobalFieldMappingsLogMode() []*types.EventFieldMapping { Primary: true, }, { - ColumnName: columns.Index, - Field: types.BlockIndexLabel, + ColumnName: columns.TxIndex, + Field: types.TxIndexLabel, Type: types.EventFieldTypeUInt, Primary: true, }, diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index 1c536ac20..c740ae9d3 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -275,13 +275,13 @@ func TestWithNoPrimaryKey(t *testing.T) { for _, c := range projection.Tables[tableName].Columns { switch c.Name { - case "_chainid": + case columns.ChainID: require.Equal(t, true, c.Primary) - case "_height": + case columns.Height: require.Equal(t, true, c.Primary) - case "_index": + case columns.TxIndex: require.Equal(t, true, c.Primary) - case "_eventindex": + case columns.EventIndex: require.Equal(t, true, c.Primary) default: require.Equal(t, false, c.Primary) diff --git a/vent/sqlsol/spec_loader.go b/vent/sqlsol/spec_loader.go index 7bbb16c29..f3ea0261e 100644 --- a/vent/sqlsol/spec_loader.go +++ b/vent/sqlsol/spec_loader.go @@ -89,7 +89,7 @@ func txTables() types.EventTables { Primary: true, }, { - Name: columns.Index, + Name: columns.TxIndex, Type: types.SQLColumnTypeNumeric, Length: 0, Primary: false, diff --git a/vent/types/sql_table.go b/vent/types/sql_table.go index 78126a112..4c3670740 100644 --- a/vent/types/sql_table.go +++ b/vent/types/sql_table.go @@ -107,7 +107,7 @@ type SQLColumnNames struct { BurrowVersion string ChainID string // context - Index string + TxIndex string EventIndex string EventType string BlockHeader string @@ -143,7 +143,7 @@ var DefaultSQLColumnNames = SQLColumnNames{ BurrowVersion: "_burrowversion", ChainID: "_chainid", // context, - Index: "_index", + TxIndex: "_txindex", EventIndex: "_eventindex", EventType: "_eventtype", BlockHeader: "_blockheader", @@ -164,9 +164,9 @@ const ( EventIndexLabel = "eventIndex" // block related - ChainIDLabel = "chainid" + ChainIDLabel = "chainID" BlockHeightLabel = "height" - 
BlockIndexLabel = "index" + TxIndexLabel = "txIndex" // transaction related TxTxHashLabel = "txHash" From f484ea954d574cf76557b8815bdedd953e8f5020 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Fri, 2 Aug 2019 10:06:12 +0100 Subject: [PATCH 62/70] Fix casing of node id in --pool Allow use of --separate-genesis-doc and --pool together Revert filtering of tendermint logging - hurts debug and our log volume is already vastly reduced with no empty blocks Signed-off-by: Silas Davis --- cmd/burrow/commands/configure.go | 54 ++++++++++++++++++-------------- logging/logconfig/config.go | 5 +-- 2 files changed, 31 insertions(+), 28 deletions(-) diff --git a/cmd/burrow/commands/configure.go b/cmd/burrow/commands/configure.go index e8a57b8f2..73ca79bab 100644 --- a/cmd/burrow/commands/configure.go +++ b/cmd/burrow/commands/configure.go @@ -20,7 +20,7 @@ import ( "github.com/hyperledger/burrow/logging/logconfig/presets" "github.com/hyperledger/burrow/rpc" cli "github.com/jawher/mow.cli" - amino "github.com/tendermint/go-amino" + "github.com/tendermint/go-amino" cryptoAmino "github.com/tendermint/tendermint/crypto/encoding/amino" "github.com/tendermint/tendermint/libs/db" ) @@ -141,11 +141,11 @@ func Configure(output Output) func(cmd *cli.Cmd) { if err != nil { output.Fatalf("failed to get key: %s: %v", k, err) } - json, err := json.Marshal(key) + bs, err := json.Marshal(key) if err != nil { output.Fatalf("failed to json marshal key: %s: %v", k, err) } - pkg.Keys[addr] = deployment.Key{Name: k, Address: addr, KeyJSON: json} + pkg.Keys[addr] = deployment.Key{Name: k, Address: addr, KeyJSON: bs} } } else { keyClient, err := keys.NewRemoteKeyClient(conf.Keys.RemoteAddress, logging.NewNoopLogger()) @@ -213,21 +213,6 @@ func Configure(output Output) func(cmd *cli.Cmd) { } } - if *separateGenesisDoc != "" { - if conf.GenesisDoc == nil { - output.Fatalf("cannot write separate genesis doc since no GenesisDoc/GenesisSpec was provided") - } - genesisDocJSON, err := conf.GenesisDoc.JSONBytes() - if err != nil { - output.Fatalf("could not form GenesisDoc JSON: %v", err) - } - err = ioutil.WriteFile(*separateGenesisDoc, genesisDocJSON, 0644) - if err != nil { - output.Fatalf("could not write GenesisDoc JSON: %v", err) - } - conf.GenesisDoc = nil - } - if *emptyBlocksOpt != "" { conf.Tendermint.CreateEmptyBlocks = *emptyBlocksOpt } @@ -243,11 +228,11 @@ func Configure(output Output) func(cmd *cli.Cmd) { nodeKey := tendermint.NewNodeKey() nodeAddress, _ := crypto.AddressFromHexString(string(nodeKey.ID())) - json, err := cdc.MarshalJSON(nodeKey) + bs, err := cdc.MarshalJSON(nodeKey) if err != nil { output.Fatalf("go-amino failed to json marshal private key: %v", err) } - pkg.Keys[nodeAddress] = deployment.Key{Name: val.Name, Address: nodeAddress, KeyJSON: json} + pkg.Keys[nodeAddress] = deployment.Key{Name: val.Name, Address: nodeAddress, KeyJSON: bs} pkg.Validators = append(pkg.Validators, deployment.Validator{ Name: val.Name, @@ -264,6 +249,23 @@ func Configure(output Output) func(cmd *cli.Cmd) { } } + // Store this for use in pool + genesisDoc := conf.GenesisDoc + if *separateGenesisDoc != "" { + if conf.GenesisDoc == nil { + output.Fatalf("cannot write separate genesis doc since no GenesisDoc/GenesisSpec was provided") + } + genesisDocJSON, err := conf.GenesisDoc.JSONBytes() + if err != nil { + output.Fatalf("could not form GenesisDoc JSON: %v", err) + } + err = ioutil.WriteFile(*separateGenesisDoc, genesisDocJSON, 0644) + if err != nil { + output.Fatalf("could not write GenesisDoc JSON: %v", err) + } + 
conf.GenesisDoc = nil + } + if *pool { for i, val := range pkg.Validators { tmConf, err := conf.Tendermint.Config(fmt.Sprintf(".burrow%03d", i), conf.Execution.TimeoutFactor) @@ -275,9 +277,10 @@ func Configure(output Output) func(cmd *cli.Cmd) { if err != nil { output.Fatalf("failed to create node key for %03d: %v", i, err) } - peers = append(peers, fmt.Sprintf("tcp://%s@127.0.0.1:%d", nodeKey.Address.String(), 26656+i)) + peers = append(peers, fmt.Sprintf("tcp://%s@127.0.0.1:%d", + strings.ToLower(nodeKey.Address.String()), 26656+i)) } - for i, acc := range conf.GenesisDoc.Accounts { + for i, acc := range genesisDoc.Accounts { // set stuff conf.Address = &acc.Address conf.Tendermint.PersistentPeers = strings.Join(peers, ",") @@ -294,9 +297,12 @@ func Configure(output Output) func(cmd *cli.Cmd) { conf.Logging.RootSink.Output.FileConfig = &logconfig.FileConfig{Path: fmt.Sprintf("burrow%03d.log", i)} if *jsonOutOpt { - ioutil.WriteFile(fmt.Sprintf("burrow%03d.json", i), []byte(conf.JSONString()), 0644) + err = ioutil.WriteFile(fmt.Sprintf("burrow%03d.json", i), []byte(conf.JSONString()), 0644) } else { - ioutil.WriteFile(fmt.Sprintf("burrow%03d.toml", i), []byte(conf.TOMLString()), 0644) + err = ioutil.WriteFile(fmt.Sprintf("burrow%03d.toml", i), []byte(conf.TOMLString()), 0644) + } + if err != nil { + output.Fatalf("Could not write Burrow config file: %v", err) } } } else if *jsonOutOpt { diff --git a/logging/logconfig/config.go b/logging/logconfig/config.go index cb0739668..49341b6ef 100644 --- a/logging/logconfig/config.go +++ b/logging/logconfig/config.go @@ -2,14 +2,12 @@ package logconfig import ( "bytes" + "encoding/json" "fmt" "github.com/eapache/channels" "github.com/go-kit/kit/log" "github.com/hyperledger/burrow/logging" - "github.com/hyperledger/burrow/logging/structure" - - "encoding/json" "github.com/BurntSushi/toml" "github.com/hyperledger/burrow/logging/loggers" @@ -33,7 +31,6 @@ func DefaultNodeLoggingConfig() *LoggingConfig { // Output only Burrow messages on stdout return &LoggingConfig{ RootSink: Sink(). - SetTransform(FilterTransform(ExcludeWhenAnyMatches, structure.ComponentKey, structure.Tendermint)). 
SetOutput(StdoutOutput().SetFormat(loggers.JSONFormat)), } } From 64984bbb1dd73ddacf98bccd6ccf8bf834fcaaad Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Fri, 2 Aug 2019 13:29:13 +0100 Subject: [PATCH 63/70] Fix panic in RPC/Info /names, add regex param Signed-off-by: Silas Davis --- integration/rpcinfo/info_server_test.go | 50 ++++++++++++++++++++++- integration/rpcquery/query_server_test.go | 6 ++- integration/rpctest/helpers.go | 8 ++-- integration/rpctransact/name_test.go | 9 ++-- project/history.go | 1 + rpc/lib/server/http_server.go | 8 ++-- rpc/rpcinfo/infoclient/client.go | 9 ++++ rpc/rpcinfo/methods.go | 44 +++++++++++++++++--- rpc/service.go | 8 ++-- 9 files changed, 118 insertions(+), 25 deletions(-) diff --git a/integration/rpcinfo/info_server_test.go b/integration/rpcinfo/info_server_test.go index e34a0e5f0..06bc695b7 100644 --- a/integration/rpcinfo/info_server_test.go +++ b/integration/rpcinfo/info_server_test.go @@ -20,10 +20,12 @@ package rpcinfo import ( "context" "encoding/json" + "sort" "testing" "time" "github.com/hyperledger/burrow/integration" + "github.com/hyperledger/burrow/txs/payload" "github.com/hyperledger/burrow/core" @@ -47,6 +49,7 @@ const timeout = 5 * time.Second func TestInfoServer(t *testing.T) { kern, shutdown := integration.RunNode(t, rpctest.GenesisDoc, rpctest.PrivateAccounts) defer shutdown() + inputAddress := rpctest.PrivateAccounts[0].GetAddress() infoAddress := kern.InfoListenAddress().String() var clients = map[string]infoclient.RPCClient{ "JSON RPC": client.NewJSONRPCClient(infoAddress), @@ -81,8 +84,16 @@ func TestInfoServer(t *testing.T) { amt, gasLim, fee := uint64(1100), uint64(1000), uint64(1000) code := []byte{0x60, 0x5, 0x60, 0x1, 0x55} // Call with nil address will create a contract - tx := rpctest.MakeDefaultCallTx(t, rpcClient, nil, code, amt, gasLim, fee) - txe := broadcastTxSync(t, cli, tx) + txe, err := cli.CallTxSync(context.Background(), &payload.CallTx{ + Input: &payload.TxInput{ + Address: inputAddress, + Amount: amt, + }, + Data: code, + GasLimit: gasLim, + Fee: fee, + }) + require.NoError(t, err) assert.Equal(t, true, txe.Receipt.CreatesContract, "This transaction should"+ " create a contract") assert.NotEqual(t, 0, len(txe.TxHash), "Receipt should contain a"+ @@ -219,6 +230,41 @@ func TestInfoServer(t *testing.T) { assert.Equal(t, rs.Validators.Validators[0].Address, rs.Validators.Proposer.Address) }) + + t.Run("Names", func(t *testing.T) { + t.Parallel() + names := []string{"bib", "flub", "flib"} + sort.Strings(names) + for _, name := range names { + _, err := rpctest.UpdateName(cli, inputAddress, name, name, 99999) + require.NoError(t, err) + } + + entry, err := infoclient.Name(rpcClient, names[0]) + require.NoError(t, err) + assert.Equal(t, names[0], entry.Name) + assert.Equal(t, names[0], entry.Data) + + entry, err = infoclient.Name(rpcClient, "asdasdas") + require.NoError(t, err) + require.Nil(t, entry) + + var namesOut []string + entries, err := infoclient.Names(rpcClient, "") + require.NoError(t, err) + for _, entry := range entries { + namesOut = append(namesOut, entry.Name) + } + require.Equal(t, names, namesOut) + + namesOut = namesOut[:0] + entries, err = infoclient.Names(rpcClient, "fl") + require.NoError(t, err) + for _, entry := range entries { + namesOut = append(namesOut, entry.Name) + } + require.Equal(t, []string{"flib", "flub"}, namesOut) + }) }) } diff --git a/integration/rpcquery/query_server_test.go b/integration/rpcquery/query_server_test.go index b1f636598..5bf6e1fee 100644 --- 
a/integration/rpcquery/query_server_test.go +++ b/integration/rpcquery/query_server_test.go @@ -94,9 +94,11 @@ func TestQueryServer(t *testing.T) { for i := 0; i < n; i++ { name := fmt.Sprintf("Flub/%v", i) if i%2 == 0 { - rpctest.UpdateName(t, tcli, rpctest.PrivateAccounts[0].GetAddress(), name, dataA, 200) + _, err := rpctest.UpdateName(tcli, rpctest.PrivateAccounts[0].GetAddress(), name, dataA, 200) + require.NoError(t, err) } else { - rpctest.UpdateName(t, tcli, rpctest.PrivateAccounts[1].GetAddress(), name, dataB, 200) + _, err := rpctest.UpdateName(tcli, rpctest.PrivateAccounts[1].GetAddress(), name, dataB, 200) + require.NoError(t, err) } } qcli := rpctest.NewQueryClient(t, kern.GRPCListenAddress().String()) diff --git a/integration/rpctest/helpers.go b/integration/rpctest/helpers.go index efd4e73c6..b68603a01 100644 --- a/integration/rpctest/helpers.go +++ b/integration/rpctest/helpers.go @@ -101,10 +101,10 @@ func CallContract(cli rpctransact.TransactClient, inputAddress, contractAddress return txe, nil } -func UpdateName(t testing.TB, cli rpctransact.TransactClient, inputAddress crypto.Address, name, data string, - expiresIn uint64) *exec.TxExecution { +func UpdateName(cli rpctransact.TransactClient, inputAddress crypto.Address, name, data string, + expiresIn uint64) (*exec.TxExecution, error) { - txe, err := cli.NameTxSync(context.Background(), &payload.NameTx{ + return cli.NameTxSync(context.Background(), &payload.NameTx{ Input: &payload.TxInput{ Address: inputAddress, Amount: names.NameCostForExpiryIn(name, data, expiresIn), @@ -112,8 +112,6 @@ func UpdateName(t testing.TB, cli rpctransact.TransactClient, inputAddress crypt Name: name, Data: data, }) - require.NoError(t, err) - return txe } //------------------------------------------------------------------------------- diff --git a/integration/rpctransact/name_test.go b/integration/rpctransact/name_test.go index 5d94f51f4..6281ce2f2 100644 --- a/integration/rpctransact/name_test.go +++ b/integration/rpctransact/name_test.go @@ -75,7 +75,8 @@ func TestNameTx(t *testing.T) { const data = "if not now, when" numDesiredBlocks := uint64(2) - txe := rpctest.UpdateName(t, tcli, inputAddress, name, data, numDesiredBlocks) + txe, err := rpctest.UpdateName(tcli, inputAddress, name, data, numDesiredBlocks) + require.NoError(t, err) entry := txe.Result.NameEntry assert.NotNil(t, entry, "name should return") @@ -95,7 +96,8 @@ func TestNameTx(t *testing.T) { const updatedData = "these are amongst the things I wish to bestow upon " + "the youth of generations come: a safe supply of honey, and a better " + "money. 
For what else shall they need" - rpctest.UpdateName(t, tcli, inputAddress, name, updatedData, numDesiredBlocks) + _, err = rpctest.UpdateName(tcli, inputAddress, name, updatedData, numDesiredBlocks) + require.NoError(t, err) entry, err = qcli.GetName(context.Background(), &rpcquery.GetNameParam{Name: name}) require.NoError(t, err) @@ -118,7 +120,8 @@ func TestNameTx(t *testing.T) { //now the entry should be expired, so we can update as non owner const data2 = "this is not my beautiful house" owner := rpctest.PrivateAccounts[3].GetAddress() - txe = rpctest.UpdateName(t, tcli, owner, name, data2, numDesiredBlocks) + txe, err = rpctest.UpdateName(tcli, owner, name, data2, numDesiredBlocks) + require.NoError(t, err) entry = txe.Result.NameEntry entryQuery, err = qcli.GetName(context.Background(), &rpcquery.GetNameParam{Name: name}) diff --git a/project/history.go b/project/history.go index 1c49f30ab..2e491a4f6 100644 --- a/project/history.go +++ b/project/history.go @@ -52,6 +52,7 @@ var History relic.ImmutableHistory = relic.NewHistory("Hyperledger Burrow", "htt `### Fixed - [Dump] Fix dump missing events emitted at end height provided - [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline +- [RPC/Info] Fix panic in /names and implement properly - now accepts a 'regex' parameter which is a regular expression to match names. Empty for all names. ### Changed - [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) avoid bug-prone conversion diff --git a/rpc/lib/server/http_server.go b/rpc/lib/server/http_server.go index 1db169089..baecf81ac 100644 --- a/rpc/lib/server/http_server.go +++ b/rpc/lib/server/http_server.go @@ -4,6 +4,7 @@ package server import ( "bufio" "encoding/json" + "errors" "fmt" "net" "net/http" @@ -61,16 +62,17 @@ func RecoverAndLogHandler(handler http.Handler, logger *logging.Logger) http.Han // Without this, Chrome & Firefox were retrying aborted ajax requests, // at least to my localhost. 
if e := recover(); e != nil { - // If RPCResponse if res, ok := e.(types.RPCResponse); ok { WriteRPCResponseHTTP(rww, res) } else { // For the rest, - logger.TraceMsg("Panic in RPC HTTP handler", structure.ErrorKey, e, + err := errors.New(fmt.Sprint(e)) + logger.TraceMsg("Panic in RPC HTTP handler", + structure.ErrorKey, err, "stack", string(debug.Stack())) rww.WriteHeader(http.StatusInternalServerError) - WriteRPCResponseHTTP(rww, types.RPCInternalError("", e.(error))) + WriteRPCResponseHTTP(rww, types.RPCInternalError("", err)) } } diff --git a/rpc/rpcinfo/infoclient/client.go b/rpc/rpcinfo/infoclient/client.go index 68ea24042..292eeb732 100644 --- a/rpc/rpcinfo/infoclient/client.go +++ b/rpc/rpcinfo/infoclient/client.go @@ -83,6 +83,15 @@ func Name(client RPCClient, name string) (*names.Entry, error) { return res.Entry, nil } +func Names(client RPCClient, regex string) ([]*names.Entry, error) { + res := new(rpc.ResultNames) + _, err := client.Call(rpcinfo.Names, pmap("regex", regex), res) + if err != nil { + return nil, err + } + return res.Names, nil +} + func Blocks(client RPCClient, minHeight, maxHeight int) (*rpc.ResultBlocks, error) { res := new(rpc.ResultBlocks) _, err := client.Call(rpcinfo.Blocks, pmap("minHeight", minHeight, "maxHeight", maxHeight), res) diff --git a/rpc/rpcinfo/methods.go b/rpc/rpcinfo/methods.go index 1309aecfc..bea4c66c1 100644 --- a/rpc/rpcinfo/methods.go +++ b/rpc/rpcinfo/methods.go @@ -1,7 +1,11 @@ package rpcinfo import ( + "fmt" + "regexp" + "github.com/hyperledger/burrow/acm" + "github.com/hyperledger/burrow/execution/names" "github.com/hyperledger/burrow/rpc" "github.com/hyperledger/burrow/rpc/lib/server" ) @@ -20,10 +24,6 @@ const ( GetAccountHuman = "account_human" AccountStats = "account_stats" - // Simulated call - Call = "call" - CallCode = "call_code" - // Names Name = "name" Names = "names" @@ -40,7 +40,24 @@ const ( Consensus = "consensus" ) +const maxRegexLength = 255 + +// The methods below all get mounted at the info server address (specified in config at RPC/Info) in the following form: +// +// http://:/?=&=[&...] +// +// For example: +// http://0.0.0.0:26658/status?block_time_within=10m&block_seen_time_within=1h +// http://0.0.0.0:26658/names?regex= +// +// They keys in the route map below are the endpoint name, and the comma separated values are the url query params +// +// They info endpoint also all be called with a JSON-RPC payload like: +// +// curl -X POST -d '{"method": "names", "id": "foo", "params": ["loves"]}' http://0.0.0.0:26658 +// func GetRoutes(service *rpc.Service) map[string]*server.RPCFunc { + // TODO: overhaul this with gRPC-gateway / swagger return map[string]*server.RPCFunc{ // Status Status: server.NewRPCFunc(service.StatusWithin, "block_time_within,block_seen_time_within"), @@ -71,7 +88,22 @@ func GetRoutes(service *rpc.Service) map[string]*server.RPCFunc { Consensus: server.NewRPCFunc(service.ConsensusState, ""), // Names - Name: server.NewRPCFunc(service.Name, "name"), - Names: server.NewRPCFunc(service.Names, ""), + Name: server.NewRPCFunc(service.Name, "name"), + Names: server.NewRPCFunc(func(regex string) (*rpc.ResultNames, error) { + if regex == "" { + return service.Names(func(*names.Entry) bool { return true }) + } + // Regex attacks... 
+ if len(regex) > maxRegexLength { + return nil, fmt.Errorf("regular expression longer than maximum length %d", maxRegexLength) + } + re, err := regexp.Compile(regex) + if err != nil { + return nil, fmt.Errorf("could not compile '%s' as regular expression: %v", regex, err) + } + return service.Names(func(entry *names.Entry) bool { + return re.MatchString(entry.Name) + }) + }, "regex"), } } diff --git a/rpc/service.go b/rpc/service.go index eb366bd8a..7757a6f49 100644 --- a/rpc/service.go +++ b/rpc/service.go @@ -255,20 +255,20 @@ func (s *Service) Name(name string) (*ResultName, error) { if err != nil { return nil, err } - if entry == nil { - return nil, fmt.Errorf("name %s not found", name) - } return &ResultName{Entry: entry}, nil } func (s *Service) Names(predicate func(*names.Entry) bool) (*ResultNames, error) { var nms []*names.Entry - s.nameReg.IterateNames(func(entry *names.Entry) error { + err := s.nameReg.IterateNames(func(entry *names.Entry) error { if predicate(entry) { nms = append(nms, entry) } return nil }) + if err != nil { + return nil, fmt.Errorf("could not iterate names: %v", err) + } return &ResultNames{ BlockHeight: s.blockchain.LastBlockHeight(), Names: nms, From 612126db59655deb533a6e1ab8f93dc70b5d0b38 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Thu, 8 Aug 2019 19:01:37 +0100 Subject: [PATCH 64/70] Reimplement query grammar to support OR And also boolean logic with nesting and precedence. The matching interface is cleaner and _should_ also be faster. Stopped erasing type information for tagged objects by converting everything to strings (only having to parse it back later). With this grammar and interpreter it would be easy to add other helpers and operations. Signed-off-by: Silas Davis --- Makefile | 14 + acm/account_test.go | 9 +- crypto/address.go | 10 +- event/convention.go | 4 + event/emitter_test.go | 24 +- event/pubsub/pubsub.go | 16 + event/query/builder.go | 5 - event/query/empty.go | 4 + event/query/expression.go | 307 ++++ event/query/expression_test.go | 33 + event/query/query.go | 327 +--- event/query/query.peg | 69 +- event/query/query.peg.go | 2210 +++++++++++++++++----------- event/query/query_test.go | 50 +- event/query/reflect_tagged.go | 4 +- event/query/reflect_tagged_test.go | 8 +- event/query/tags.go | 18 +- execution/exec/event_test.go | 7 + execution/exec/log_event.go | 10 +- txs/tx_test.go | 8 +- 20 files changed, 1876 insertions(+), 1261 deletions(-) create mode 100644 event/query/expression.go create mode 100644 event/query/expression_test.go diff --git a/Makefile b/Makefile index 059992661..ef158c612 100644 --- a/Makefile +++ b/Makefile @@ -95,6 +95,20 @@ protobuf: $(PROTO_GO_FILES) clean_protobuf: @rm -f $(PROTO_GO_FILES_REAL) + +### PEG query grammar + +# This allows us to filter tagged objects with things like (EventID = 'foo' OR Height > 10) AND EventName CONTAINS 'frog' + +.PHONY: peg_deps +peg_deps: + go get -u github.com/pointlander/peg + +# regenerate the parser +.PHONY: peg +peg: + peg event/query/query.peg + ### Building github.com/hyperledger/burrow # Output commit_hash but only if we have the git repo (e.g. 
not in docker build diff --git a/acm/account_test.go b/acm/account_test.go index 06d94350f..639eb3551 100644 --- a/acm/account_test.go +++ b/acm/account_test.go @@ -78,10 +78,11 @@ func TestAccountTags(t *testing.T) { } tagged := acc.Tagged() assert.Equal(t, []string{"Address", "Balance", "Sequence", "EVMCode", "Permissions", "Roles"}, tagged.Keys()) - str, _ := tagged.Get("Permissions") - assert.Equal(t, "send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | hasRole", str) - str, _ = tagged.Get("Roles") - assert.Equal(t, "frogs;dogs", str) + flag, _ := tagged.Get("Permissions") + permString := permission.String(flag.(permission.PermFlag)) + assert.Equal(t, "send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | hasRole", permString) + roles, _ := tagged.Get("Roles") + assert.Equal(t, []string{"frogs", "dogs"}, roles) tagged.Get("EVMCode") qry, err := query.New("EVMCode CONTAINS '0116002556001600360006101000A815'") require.NoError(t, err) diff --git a/crypto/address.go b/crypto/address.go index 3936ca3b2..631c1dcc6 100644 --- a/crypto/address.go +++ b/crypto/address.go @@ -92,10 +92,18 @@ func AddressFromHexString(str string) (Address, error) { return AddressFromBytes(bs) } +func MustAddressFromHexString(str string) Address { + address, err := AddressFromHexString(str) + if err != nil { + panic(fmt.Errorf("error reading address from hex string: %s", err)) + } + return address +} + func MustAddressFromBytes(addr []byte) Address { address, err := AddressFromBytes(addr) if err != nil { - panic(fmt.Errorf("error reading address from bytes that caller does not expect: %s", err)) + panic(fmt.Errorf("error reading address from bytes: %s", err)) } return address } diff --git a/event/convention.go b/event/convention.go index b8b60cc48..cf520cb2d 100644 --- a/event/convention.go +++ b/event/convention.go @@ -31,6 +31,10 @@ func (eid EventID) String() string { return fmt.Sprintf("%s = %s", EventIDKey, string(eid)) } +func (eid EventID) MatchError() error { + return nil +} + // Get a query that matches events with a specific eventID func QueryForEventID(eventID string) query.Queryable { // Since we're accepting external output here there is a chance it won't parse... 
diff --git a/event/emitter_test.go b/event/emitter_test.go index 669fd347c..9be922aca 100644 --- a/event/emitter_test.go +++ b/event/emitter_test.go @@ -2,23 +2,25 @@ package event import ( "context" + "strings" "testing" "time" - "strings" - "github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/logging" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +const timeout = 2 * time.Second + func TestEmitter(t *testing.T) { em := NewEmitter() em.SetLogger(logging.NewNoopLogger()) ctx := context.Background() - out, err := em.Subscribe(ctx, "TestEmitter", query.NewBuilder().AndStrictlyGreaterThan("foo", 10), 1) + qry := query.NewBuilder().AndStrictlyGreaterThan("foo", 10) + out, err := em.Subscribe(ctx, "TestEmitter", qry, 1) require.NoError(t, err) msgMiss := struct{ flob string }{"flib"} @@ -32,7 +34,7 @@ func TestEmitter(t *testing.T) { select { case msg := <-out: assert.Equal(t, msgHit, msg) - case <-time.After(time.Second): + case <-time.After(time.Second * 100000): t.Errorf("timed out before receiving message matching subscription query") } } @@ -71,9 +73,19 @@ func TestOrdering(t *testing.T) { for _, msg := range msgs { str := msg[0].(string) if strings.HasPrefix(str, "bar") { - assert.Equal(t, str, <-out1) + assert.Equal(t, str, getTimeout(t, out1)) } else { - assert.Equal(t, str, <-out2) + assert.Equal(t, str, getTimeout(t, out2)) } } } + +func getTimeout(t *testing.T, out <-chan interface{}) interface{} { + select { + case <-time.After(timeout): + t.Fatalf("timed out waiting on channel after %v", timeout) + case msg := <-out: + return msg + } + return nil +} diff --git a/event/pubsub/pubsub.go b/event/pubsub/pubsub.go index 41904fed7..6ff16ba40 100644 --- a/event/pubsub/pubsub.go +++ b/event/pubsub/pubsub.go @@ -19,6 +19,8 @@ import ( "sync" "github.com/hyperledger/burrow/event/query" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/logging/structure" "github.com/tendermint/tendermint/libs/common" ) @@ -60,6 +62,7 @@ type Server struct { mtx sync.RWMutex subscriptions map[string]map[string]query.Query // subscriber -> query (string) -> query.Query + logger *logging.Logger } // Option sets a parameter for the server. @@ -71,6 +74,7 @@ type Option func(*Server) func NewServer(options ...Option) *Server { s := &Server{ subscriptions: make(map[string]map[string]query.Query), + logger: logging.NewNoopLogger(), } s.BaseService = *common.NewBaseService(nil, "PubSub", s) @@ -96,6 +100,12 @@ func BufferCapacity(cap int) Option { } } +func WithLogger(logger *logging.Logger) Option { + return func(s *Server) { + s.logger = logger.WithScope("PubSub") + } +} + // BufferCapacity returns capacity of the internal server's queue. func (s *Server) BufferCapacity() int { return s.cmdsCap @@ -210,6 +220,7 @@ type state struct { queries map[query.Query]map[string]chan interface{} // client -> query -> struct{} clients map[string]map[query.Query]struct{} + logger *logging.Logger } // OnStart implements Service.OnStart by starting the server. 
@@ -217,6 +228,7 @@ func (s *Server) OnStart() error { go s.loop(state{ queries: make(map[query.Query]map[string]chan interface{}), clients: make(map[string]map[query.Query]struct{}), + logger: s.logger, }) return nil } @@ -326,5 +338,9 @@ func (state *state) send(msg interface{}, tags query.Tagged) { } } } + err := q.MatchError() + if err != nil { + state.logger.InfoMsg("pubsub Server could not execute query", structure.ErrorKey, err) + } } } diff --git a/event/query/builder.go b/event/query/builder.go index 4e8d3dd3c..3c4cb7cd5 100644 --- a/event/query/builder.go +++ b/event/query/builder.go @@ -31,11 +31,6 @@ const ( dateString = "DATE" ) -type Query interface { - Matches(tags Tagged) bool - String() string -} - type Queryable interface { Query() (Query, error) } diff --git a/event/query/empty.go b/event/query/empty.go index 5d10ce4c7..91d2195e5 100644 --- a/event/query/empty.go +++ b/event/query/empty.go @@ -19,3 +19,7 @@ func (Empty) String() string { func (Empty) Query() (Query, error) { return Empty{}, nil } + +func (empty Empty) MatchError() error { + return nil +} diff --git a/event/query/expression.go b/event/query/expression.go new file mode 100644 index 000000000..445b652f7 --- /dev/null +++ b/event/query/expression.go @@ -0,0 +1,307 @@ +package query + +import ( + "fmt" + "math/big" + "strings" + "time" + + "github.com/hyperledger/burrow/logging/errors" +) + +const ( + // DateLayout defines a layout for all dates (`DATE date`) + DateLayout = "2006-01-02" + // TimeLayout defines a layout for all times (`TIME time`) + TimeLayout = time.RFC3339 +) + +// Operator is an operator that defines some kind of relation between tag and +// operand (equality, etc.). +type Operator uint8 + +const ( + OpTerminal Operator = iota + OpAnd + OpOr + OpLessEqual + OpGreaterEqual + OpLess + OpGreater + OpEqual + OpContains +) + +var opNames = map[Operator]string{ + OpAnd: "AND", + OpOr: "OR", + OpLessEqual: "<=", + OpGreaterEqual: ">=", + OpLess: "<", + OpGreater: ">", + OpEqual: "=", + OpContains: "CONTAINS", +} + +func (op Operator) String() string { + return opNames[op] +} + +// Instruction is a container suitable for the code tape and the stack to hold values an operations +type instruction struct { + op Operator + tag *string + string *string + time *time.Time + number *big.Float + match bool +} + +func (in *instruction) String() string { + switch { + case in.op != OpTerminal: + return in.op.String() + case in.tag != nil: + return *in.tag + case in.string != nil: + return "'" + *in.string + "'" + case in.time != nil: + return in.time.String() + case in.number != nil: + return in.number.String() + default: + if in.match { + return "true" + } + return "false" + } +} + +// A Boolean expression for the query grammar +type Expression struct { + // This is our 'bytecode' + code []*instruction + errors errors.MultipleErrors + explainer func(fmt string, args ...interface{}) +} + +// Evaluate expects an Execute() to have filled the code of the Expression so it can be run in the little stack machine +// below +func (e *Expression) Evaluate(getTagValue func(tag string) (interface{}, bool)) (bool, error) { + if len(e.errors) > 0 { + return false, e.errors + } + var left, right *instruction + stack := make([]*instruction, 0, len(e.code)) + for _, in := range e.code { + if in.op == OpTerminal { + // just push terminals on to the stack + stack = append(stack, in) + continue + } + + if len(stack) < 2 { + return false, fmt.Errorf("cannot pop from stack for query expression [%v] because stack has "+ + "fewer 
than 2 elements", e) + } + stack, left, right = pop(stack) + ins := &instruction{} + switch in.op { + case OpAnd: + ins.match = left.match && right.match + case OpOr: + ins.match = left.match || right.match + default: + // We have a a non-terminal, non-connective operation + tagValue, ok := getTagValue(*left.tag) + // No match if we can't get tag value + if ok { + switch { + case right.string != nil: + ins.match = compareString(in.op, tagValue, *right.string) + case right.number != nil: + ins.match = compareNumber(in.op, tagValue, right.number) + case right.time != nil: + ins.match = compareTime(in.op, tagValue, *right.time) + } + } + // Uncomment this for a little bit of debug: + //e.explainf("%v := %v\n", left, tagValue) + } + // Uncomment this for a little bit of debug: + //e.explainf("%v %v %v => %v\n", left, in.op, right, ins.match) + + // Push whether this was a match back on to stack + stack = append(stack, ins) + } + if len(stack) != 1 { + return false, fmt.Errorf("stack for query expression [%v] should have exactly one element after "+ + "evaulation but has %d", e, len(stack)) + } + return stack[0].match, nil +} + +func (e *Expression) explainf(fmt string, args ...interface{}) { + if e.explainer != nil { + e.explainer(fmt, args...) + } +} + +func pop(stack []*instruction) ([]*instruction, *instruction, *instruction) { + return stack[:len(stack)-2], stack[len(stack)-2], stack[len(stack)-1] +} + +func compareString(op Operator, tagValue interface{}, value string) bool { + tagString := StringFromValue(tagValue) + switch op { + case OpContains: + return strings.Contains(tagString, value) + case OpEqual: + return tagString == value + } + return false +} + +func compareNumber(op Operator, tagValue interface{}, value *big.Float) bool { + tagNumber := new(big.Float) + switch n := tagValue.(type) { + case string: + f, _, err := big.ParseFloat(n, 10, 64, big.ToNearestEven) + if err != nil { + return false + } + tagNumber.Set(f) + case *big.Float: + tagNumber.Set(n) + case *big.Int: + tagNumber.SetInt(n) + case float32: + tagNumber.SetFloat64(float64(n)) + case float64: + tagNumber.SetFloat64(n) + case int: + tagNumber.SetInt64(int64(n)) + case int32: + tagNumber.SetInt64(int64(n)) + case int64: + tagNumber.SetInt64(n) + case uint: + tagNumber.SetUint64(uint64(n)) + case uint32: + tagNumber.SetUint64(uint64(n)) + case uint64: + tagNumber.SetUint64(n) + default: + return false + } + cmp := tagNumber.Cmp(value) + switch op { + case OpLessEqual: + return cmp < 1 + case OpGreaterEqual: + return cmp > -1 + case OpLess: + return cmp == -1 + case OpGreater: + return cmp == 1 + case OpEqual: + return cmp == 0 + } + return false +} + +func compareTime(op Operator, tagValue interface{}, value time.Time) bool { + var tagTime time.Time + var err error + switch t := tagValue.(type) { + case time.Time: + tagTime = t + case int64: + // Hmmm, should we? 
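+		// A bare int64 tag value is interpreted as seconds since the Unix epoch;
+		// time.Time values and RFC3339/DATE strings are handled by the cases below.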
+ tagTime = time.Unix(t, 0) + case string: + tagTime, err = time.Parse(TimeLayout, t) + if err != nil { + tagTime, err = time.Parse(DateLayout, t) + if err != nil { + return false + } + } + default: + return false + } + switch op { + case OpLessEqual: + return tagTime.Before(value) || tagTime.Equal(value) + case OpGreaterEqual: + return tagTime.Equal(value) || tagTime.After(value) + case OpLess: + return tagTime.Before(value) + case OpGreater: + return tagTime.After(value) + case OpEqual: + return tagTime.Equal(value) + } + return false +} + +// These methods implement the various visitors that are called in the PEG grammar with statements like +// { p.Operator(OpEqual) } + +func (e *Expression) String() string { + strs := make([]string, len(e.code)) + for i, in := range e.code { + strs[i] = in.String() + } + return strings.Join(strs, ", ") +} + +func (e *Expression) Operator(operator Operator) { + e.code = append(e.code, &instruction{ + op: operator, + }) +} + +// Terminals... + +func (e *Expression) Tag(value string) { + e.code = append(e.code, &instruction{ + tag: &value, + }) +} + +func (e *Expression) Time(value string) { + t, err := time.Parse(TimeLayout, value) + e.pushErr(err) + e.code = append(e.code, &instruction{ + time: &t, + }) + +} +func (e *Expression) Date(value string) { + date, err := time.Parse(DateLayout, value) + e.pushErr(err) + e.code = append(e.code, &instruction{ + time: &date, + }) +} + +func (e *Expression) Number(value string) { + number, _, err := big.ParseFloat(value, 10, 64, big.ToNearestEven) + e.pushErr(err) + e.code = append(e.code, &instruction{ + number: number, + }) +} + +func (e *Expression) Value(value string) { + e.code = append(e.code, &instruction{ + string: &value, + }) +} + +func (e *Expression) pushErr(err error) { + if err != nil { + e.errors = append(e.errors, err) + } +} diff --git a/event/query/expression_test.go b/event/query/expression_test.go new file mode 100644 index 000000000..5704d8e55 --- /dev/null +++ b/event/query/expression_test.go @@ -0,0 +1,33 @@ +package query + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestExpression(t *testing.T) { + t.Run("Basic OR AND", func(t *testing.T) { + qry, err := New("(something = 'awful' OR something = 'nice') AND another_thing = 'OKAY'") + require.NoError(t, err) + out := qry.parser.String() + require.Equal(t, "something, 'awful', =, something, 'nice', =, OR, another_thing, 'OKAY', =, AND", out) + + getter := func(key string) (interface{}, bool) { + switch key { + case "something": + return "awful", true + + case "another_thing": + return "OKAY", true + + default: + return "", false + } + } + + matches, err := qry.parser.Evaluate(getter) + require.NoError(t, err) + require.True(t, matches) + }) +} diff --git a/event/query/query.go b/event/query/query.go index f8d29ee2c..a44ce974f 100644 --- a/event/query/query.go +++ b/event/query/query.go @@ -10,19 +10,33 @@ package query import ( "fmt" - "reflect" - "strconv" "strings" - "time" ) -var _ Query = &query{} -var _ Queryable = &query{} +type Query interface { + Matches(tags Tagged) bool + String() string + MatchError() error +} + +var _ Query = &PegQuery{} +var _ Queryable = &PegQuery{} // Query holds the query string and the query parser. 
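A minimal sketch (not part of this patch) of consuming the widened Query interface: Matches keeps returning a bare bool, while MatchError surfaces any evaluation failure that silently produced a false, as the pubsub send loop above now logs. The helper name and package are illustrative.

package example

import "github.com/hyperledger/burrow/event/query"

// matched reports whether the tags satisfy the query, promoting a swallowed
// evaluation error into a returned error.
func matched(qry query.Query, tags query.Tagged) (bool, error) {
	ok := qry.Matches(tags)
	if err := qry.MatchError(); err != nil {
		return false, err
	}
	return ok, nil
}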
-type query struct { +type PegQuery struct { str string parser *QueryParser + error *MatchError +} + +type MatchError struct { + Tagged Tagged + Cause error +} + +func (m *MatchError) Error() string { + keys := strings.Join(m.Tagged.Keys(), ", ") + return fmt.Sprintf("matching error occurred with tagged with keys [%s]: %v", keys, m.Cause) } // Condition represents a single condition within a query and consists of tag @@ -35,18 +49,23 @@ type Condition struct { // New parses the given string and returns a query or error if the string is // invalid. -func New(s string) (*query, error) { - p := &QueryParser{Buffer: fmt.Sprintf(`"%s"`, s)} - p.Init() - if err := p.Parse(); err != nil { +func New(s string) (*PegQuery, error) { + p := &QueryParser{Buffer: s} + err := p.Init() + if err != nil { + return nil, err + } + err = p.Parse() + if err != nil { return nil, err } - return &query{str: s, parser: p}, nil + p.Execute() + return &PegQuery{str: s, parser: p}, nil } // MustParse turns the given string into a query or panics; for tests or others // cases where you know the string is valid. -func MustParse(s string) *query { +func MustParse(s string) *PegQuery { q, err := New(s) if err != nil { panic(fmt.Sprintf("failed to parse %s: %v", s, err)) @@ -55,290 +74,38 @@ func MustParse(s string) *query { } // String returns the original string. -func (q *query) String() string { +func (q *PegQuery) String() string { return q.str } -func (q *query) Query() (Query, error) { +func (q *PegQuery) Query() (Query, error) { return q, nil } -// Operator is an operator that defines some kind of relation between tag and -// operand (equality, etc.). -type Operator uint8 - -const ( - // "<=" - OpLessEqual Operator = iota - // ">=" - OpGreaterEqual - // "<" - OpLess - // ">" - OpGreater - // "=" - OpEqual - // "CONTAINS"; used to check if a string contains a certain sub string. - OpContains -) - -const ( - // DateLayout defines a layout for all dates (`DATE date`) - DateLayout = "2006-01-02" - // TimeLayout defines a layout for all times (`TIME time`) - TimeLayout = time.RFC3339 -) - -// Conditions returns a list of conditions. -func (q *query) Conditions() []Condition { - conditions := make([]Condition, 0) - - buffer, begin, end := q.parser.Buffer, 0, 0 - - var tag string - var op Operator - - // tokens must be in the following order: tag ("tx.gas") -> operator ("=") -> operand ("7") - for _, token := range q.parser.Tokens() { - switch token.pegRule { - - case rulePegText: - begin, end = int(token.begin), int(token.end) - case ruletag: - tag = buffer[begin:end] - case rulele: - op = OpLessEqual - case rulege: - op = OpGreaterEqual - case rulel: - op = OpLess - case ruleg: - op = OpGreater - case ruleequal: - op = OpEqual - case rulecontains: - op = OpContains - case rulevalue: - // strip single quotes from value (i.e. 
"'NewBlock'" -> "NewBlock") - valueWithoutSingleQuotes := buffer[begin+1 : end-1] - conditions = append(conditions, Condition{tag, op, valueWithoutSingleQuotes}) - case rulenumber: - number := buffer[begin:end] - if strings.ContainsAny(number, ".") { // if it looks like a floating-point number - value, err := strconv.ParseFloat(number, 64) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as float64 (should never happen if the grammar is correct)", err, number)) - } - conditions = append(conditions, Condition{tag, op, value}) - } else { - value, err := strconv.ParseInt(number, 10, 64) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as int64 (should never happen if the grammar is correct)", err, number)) - } - conditions = append(conditions, Condition{tag, op, value}) - } - case ruletime: - value, err := time.Parse(TimeLayout, buffer[begin:end]) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as time.Time / RFC3339 (should never happen if the grammar is correct)", err, buffer[begin:end])) - } - conditions = append(conditions, Condition{tag, op, value}) - case ruledate: - value, err := time.Parse("2006-01-02", buffer[begin:end]) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as time.Time / '2006-01-02' (should never happen if the grammar is correct)", err, buffer[begin:end])) - } - conditions = append(conditions, Condition{tag, op, value}) - } - } - - return conditions -} - // Matches returns true if the query matches the given set of tags, false otherwise. // // For example, query "name=John" matches tags = {"name": "John"}. More // examples could be found in parser_test.go and query_test.go. -func (q *query) Matches(tags Tagged) bool { +func (q *PegQuery) Matches(tags Tagged) bool { if tags.Len() == 0 { return false } - - buffer, begin, end := q.parser.Buffer, 0, 0 - - var tag string - var op Operator - - // tokens must be in the following order: tag ("tx.gas") -> operator ("=") -> operand ("7") - for _, token := range q.parser.Tokens() { - switch token.pegRule { - - case rulePegText: - begin, end = int(token.begin), int(token.end) - case ruletag: - tag = buffer[begin:end] - case rulele: - op = OpLessEqual - case rulege: - op = OpGreaterEqual - case rulel: - op = OpLess - case ruleg: - op = OpGreater - case ruleequal: - op = OpEqual - case rulecontains: - op = OpContains - case rulevalue: - // strip single quotes from value (i.e. 
"'NewBlock'"tagged -> "NewBlock") - valueWithoutSingleQuotes := buffer[begin+1 : end-1] - - // see if the triplet (tag, operator, operand) matches any tag - // "tx.gas", "=", "7", { "tx.gas": 7, "tx.ID": "4AE393495334" } - if !match(tag, op, reflect.ValueOf(valueWithoutSingleQuotes), tags) { - return false - } - case rulenumber: - number := buffer[begin:end] - if strings.ContainsAny(number, ".") { // if it looks like a floating-point number - value, err := strconv.ParseFloat(number, 64) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as float64 (should never happen if the grammar is correct)", err, number)) - } - if !match(tag, op, reflect.ValueOf(value), tags) { - return false - } - } else { - value, err := strconv.ParseInt(number, 10, 64) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as int64 (should never happen if the grammar is correct)", err, number)) - } - if !match(tag, op, reflect.ValueOf(value), tags) { - return false - } - } - case ruletime: - value, err := time.Parse(TimeLayout, buffer[begin:end]) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as time.Time / RFC3339 (should never happen if the grammar is correct)", err, buffer[begin:end])) - } - if !match(tag, op, reflect.ValueOf(value), tags) { - return false - } - case ruledate: - value, err := time.Parse("2006-01-02", buffer[begin:end]) - if err != nil { - panic(fmt.Sprintf("got %v while trying to parse %s as time.Time / '2006-01-02' (should never happen if the grammar is correct)", err, buffer[begin:end])) - } - if !match(tag, op, reflect.ValueOf(value), tags) { - return false - } - } + match, err := q.parser.Evaluate(tags.Get) + if err != nil { + q.error = &MatchError{Cause: err, Tagged: tags} + return false } - - return true + return match } -// match returns true if the given triplet (tag, operator, operand) matches any tag. -// -// First, it looks up the tag in tags and if it finds one, tries to compare the -// value from it to the operand using the operator. 
-// -// "tx.gas", "=", "7", { "tx.gas": 7, "tx.ID": "4AE393495334" } -func match(tag string, op Operator, operand reflect.Value, tags Tagged) bool { - // look up the tag from the query in tags - value, ok := tags.Get(tag) - if !ok { - return false - } - switch operand.Kind() { - case reflect.Struct: // time - operandAsTime := operand.Interface().(time.Time) - // try our best to convert value from tags to time.Time - var ( - v time.Time - err error - ) - if strings.ContainsAny(value, "T") { - v, err = time.Parse(TimeLayout, value) - } else { - v, err = time.Parse(DateLayout, value) - } - if err != nil { - panic(fmt.Sprintf("Failed to convert value %v from tag to time.Time: %v", value, err)) - } - switch op { - case OpLessEqual: - return v.Before(operandAsTime) || v.Equal(operandAsTime) - case OpGreaterEqual: - return v.Equal(operandAsTime) || v.After(operandAsTime) - case OpLess: - return v.Before(operandAsTime) - case OpGreater: - return v.After(operandAsTime) - case OpEqual: - return v.Equal(operandAsTime) - } - case reflect.Float64: - operandFloat64 := operand.Interface().(float64) - var v float64 - // try our best to convert value from tags to float64 - v, err := strconv.ParseFloat(value, 64) - if err != nil { - panic(fmt.Sprintf("Failed to convert value %v from tag to float64: %v", value, err)) - } - switch op { - case OpLessEqual: - return v <= operandFloat64 - case OpGreaterEqual: - return v >= operandFloat64 - case OpLess: - return v < operandFloat64 - case OpGreater: - return v > operandFloat64 - case OpEqual: - return v == operandFloat64 - } - case reflect.Int64: - operandInt := operand.Interface().(int64) - var v int64 - // if value looks like float, we try to parse it as float - if strings.ContainsAny(value, ".") { - v1, err := strconv.ParseFloat(value, 64) - if err != nil { - panic(fmt.Sprintf("Failed to convert value %v from tag to float64: %v", value, err)) - } - v = int64(v1) - } else { - var err error - // try our best to convert value from tags to int64 - v, err = strconv.ParseInt(value, 10, 64) - if err != nil { - panic(fmt.Sprintf("Failed to convert value %v from tag to int64: %v", value, err)) - } - } - switch op { - case OpLessEqual: - return v <= operandInt - case OpGreaterEqual: - return v >= operandInt - case OpLess: - return v < operandInt - case OpGreater: - return v > operandInt - case OpEqual: - return v == operandInt - } - case reflect.String: - switch op { - case OpEqual: - return value == operand.String() - case OpContains: - return strings.Contains(value, operand.String()) - } - default: - panic(fmt.Sprintf("Unknown kind of operand %v", operand.Kind())) +// Returns whether a matching error occurred (which would result in a false from Matches) +func (q *PegQuery) MatchError() error { + if q.error == nil { + return nil } + return q.error +} - return false +func (q *PegQuery) ExplainTo(explainer func(fmt string, args ...interface{})) { + q.parser.explainer = explainer } diff --git a/event/query/query.peg b/event/query/query.peg index 739892e4f..7363a6a57 100644 --- a/event/query/query.peg +++ b/event/query/query.peg @@ -1,33 +1,58 @@ package query +# We specify the name of the generated parse to be QueryParser then Expression is a struct type that we are expected +# to define to provide parse internal state when we run parser.Execute() + type QueryParser Peg { + Expression } -e <- '\"' condition ( ' '+ and ' '+ condition )* '\"' !. +# By recursing through OR then AND AND gets stronger precedent. 
PEG goes depth first so the hooks that are deeper +# in the AST get run first - this allows us to naturally form code for a stack machine (implemented in Expression) + +e <- eor !. + +eor <- eand ( or eand { p.Operator(OpOr) })* + +eand <- condition ( and condition { p.Operator(OpAnd) })* + +condition <- tag sp (le (number / time / date) { p.Operator(OpLessEqual) } + / ge (number / time / date) { p.Operator(OpGreaterEqual) } + / l (number / time / date) { p.Operator(OpLess) } + / g (number / time / date) { p.Operator(OpGreater) } + / equal (number / time / date / qvalue) { p.Operator(OpEqual) } + / contains qvalue { p.Operator(OpContains) } + ) sp / open eor close + +## Terminals -condition <- tag ' '* (le ' '* (number / time / date) - / ge ' '* (number / time / date) - / l ' '* (number / time / date) - / g ' '* (number / time / date) - / equal ' '* (number / time / date / value) - / contains ' '* value - ) +tag <- < (![ \t\n\r\\()"'=><] .)+ > sp { p.Tag(buffer[begin:end]) } -tag <- < (![ \t\n\r\\()"'=><] .)+ > -value <- < '\'' (!["'] .)* '\''> -number <- < ('0' - / [1-9] digit* ('.' digit*)?) > +qvalue <- '\'' value '\'' sp +value <- < (!["'] .)* > { p.Value(buffer[begin:end]) } + +number <- < ('0' / [1-9] digit* ('.' digit*)?) > { p.Number(buffer[begin:end]) } digit <- [0-9] -time <- "TIME " < year '-' month '-' day 'T' digit digit ':' digit digit ':' digit digit (('-' / '+') digit digit ':' digit digit / 'Z') > -date <- "DATE " < year '-' month '-' day > + +time <- "TIME " < year '-' month '-' day 'T' digit digit ':' digit digit ':' digit digit (('-' / '+') digit digit ':' digit digit / 'Z') > { p.Time(buffer[begin:end]) } + +date <- "DATE " < year '-' month '-' day > { p.Date(buffer[begin:end]) } year <- ('1' / '2') digit digit digit month <- ('0' / '1') digit day <- ('0' / '1' / '2' / '3') digit -and <- "AND" - -equal <- "=" -contains <- "CONTAINS" -le <- "<=" -ge <- ">=" -l <- "<" -g <- ">" + +## Operators + +and <- "AND" sp +or <- "OR" sp +equal <- "=" sp +contains <- "CONTAINS" sp +le <- "<=" sp +ge <- ">=" sp +l <- "<" sp +g <- ">" sp + +# Whitespace and grouping +open <- '(' sp +close <- ')' sp +sp <- (' ' / '\t')* diff --git a/event/query/query.peg.go b/event/query/query.peg.go index c86e4a47f..d9a83190b 100644 --- a/event/query/query.peg.go +++ b/event/query/query.peg.go @@ -1,9 +1,11 @@ -// nolint package query +// Code generated by peg event/query/query.peg DO NOT EDIT. 
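The file below is the parser that peg generates from the grammar above. As a worked illustration of the precedence that grammar encodes, here is a sketch in the style of expression_test.go (not part of this patch; it assumes the same in-package access to the unexported parser field, and the tag names are illustrative):

package query

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestExpressionPrecedence(t *testing.T) {
	// AND is parsed below OR, so the right-hand AND groups first.
	qry, err := New("a = 'x' OR b = 'y' AND c = 'z'")
	require.NoError(t, err)
	require.Equal(t, "a, 'x', =, b, 'y', =, c, 'z', =, AND, OR", qry.parser.String())

	// Only tag a resolves, so the query is true because it reads as
	// a = 'x' OR (b = 'y' AND c = 'z'); were OR to bind tighter it would be false.
	getter := func(key string) (interface{}, bool) {
		if key == "a" {
			return "x", true
		}
		return "", false
	}
	matches, err := qry.parser.Evaluate(getter)
	require.NoError(t, err)
	require.True(t, matches)
}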
+ import ( "fmt" - "math" + "io" + "os" "sort" "strconv" ) @@ -16,8 +18,11 @@ type pegRule uint8 const ( ruleUnknown pegRule = iota rulee + ruleeor + ruleeand rulecondition ruletag + ruleqvalue rulevalue rulenumber ruledigit @@ -27,20 +32,40 @@ const ( rulemonth ruleday ruleand + ruleor ruleequal rulecontains rulele rulege rulel ruleg + ruleopen + ruleclose + rulesp + ruleAction0 + ruleAction1 + ruleAction2 + ruleAction3 + ruleAction4 + ruleAction5 + ruleAction6 + ruleAction7 rulePegText + ruleAction8 + ruleAction9 + ruleAction10 + ruleAction11 + ruleAction12 ) var rul3s = [...]string{ "Unknown", "e", + "eor", + "eand", "condition", "tag", + "qvalue", "value", "number", "digit", @@ -50,13 +75,30 @@ var rul3s = [...]string{ "month", "day", "and", + "or", "equal", "contains", "le", "ge", "l", "g", + "open", + "close", + "sp", + "Action0", + "Action1", + "Action2", + "Action3", + "Action4", + "Action5", + "Action6", + "Action7", "PegText", + "Action8", + "Action9", + "Action10", + "Action11", + "Action12", } type token32 struct { @@ -73,19 +115,19 @@ type node32 struct { up, next *node32 } -func (node *node32) print(pretty bool, buffer string) { +func (node *node32) print(w io.Writer, pretty bool, buffer string) { var print func(node *node32, depth int) print = func(node *node32, depth int) { for node != nil { for c := 0; c < depth; c++ { - fmt.Printf(" ") + fmt.Fprintf(w, " ") } rule := rul3s[node.pegRule] quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) if !pretty { - fmt.Printf("%v %v\n", rule, quote) + fmt.Fprintf(w, "%v %v\n", rule, quote) } else { - fmt.Printf("\x1B[34m%v\x1B[m %v\n", rule, quote) + fmt.Fprintf(w, "\x1B[34m%v\x1B[m %v\n", rule, quote) } if node.up != nil { print(node.up, depth+1) @@ -96,12 +138,12 @@ func (node *node32) print(pretty bool, buffer string) { print(node, 0) } -func (node *node32) Print(buffer string) { - node.print(false, buffer) +func (node *node32) Print(w io.Writer, buffer string) { + node.print(w, false, buffer) } -func (node *node32) PrettyPrint(buffer string) { - node.print(true, buffer) +func (node *node32) PrettyPrint(w io.Writer, buffer string) { + node.print(w, true, buffer) } type tokens32 struct { @@ -144,24 +186,24 @@ func (t *tokens32) AST() *node32 { } func (t *tokens32) PrintSyntaxTree(buffer string) { - t.AST().Print(buffer) + t.AST().Print(os.Stdout, buffer) +} + +func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { + t.AST().Print(w, buffer) } func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { - t.AST().PrettyPrint(buffer) + t.AST().PrettyPrint(os.Stdout, buffer) } func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { - if tree := t.tree; int(index) >= len(tree) { - expanded := make([]token32, 2*len(tree)) - copy(expanded, tree) - t.tree = expanded - } - t.tree[index] = token32{ - pegRule: rule, - begin: begin, - end: end, + tree, i := t.tree, int(index) + if i >= len(tree) { + t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) + return } + tree[i] = token32{pegRule: rule, begin: begin, end: end} } func (t *tokens32) Tokens() []token32 { @@ -169,9 +211,11 @@ func (t *tokens32) Tokens() []token32 { } type QueryParser struct { + Expression + Buffer string buffer []rune - rules [20]func() bool + rules [40]func() bool parse func(rule ...int) error reset func() Pretty bool @@ -223,7 +267,7 @@ type parseError struct { } func (e *parseError) Error() string { - tokens, error := []token32{e.max}, "\n" + tokens, err := []token32{e.max}, "\n" positions, p := make([]int, 
2*len(tokens)), 0 for _, token := range tokens { positions[p], p = int(token.begin), p+1 @@ -236,14 +280,14 @@ func (e *parseError) Error() string { } for _, token := range tokens { begin, end := int(token.begin), int(token.end) - error += fmt.Sprintf(format, + err += fmt.Sprintf(format, rul3s[token.pegRule], translations[begin].line, translations[begin].symbol, translations[end].line, translations[end].symbol, strconv.Quote(string(e.p.buffer[begin:end]))) } - return error + return err } func (p *QueryParser) PrintSyntaxTree() { @@ -254,12 +298,76 @@ func (p *QueryParser) PrintSyntaxTree() { } } -func (p *QueryParser) Init() { +func (p *QueryParser) WriteSyntaxTree(w io.Writer) { + p.tokens32.WriteSyntaxTree(w, p.Buffer) +} + +func (p *QueryParser) Execute() { + buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 + for _, token := range p.Tokens() { + switch token.pegRule { + + case rulePegText: + begin, end = int(token.begin), int(token.end) + text = string(_buffer[begin:end]) + + case ruleAction0: + p.Operator(OpOr) + case ruleAction1: + p.Operator(OpAnd) + case ruleAction2: + p.Operator(OpLessEqual) + case ruleAction3: + p.Operator(OpGreaterEqual) + case ruleAction4: + p.Operator(OpLess) + case ruleAction5: + p.Operator(OpGreater) + case ruleAction6: + p.Operator(OpEqual) + case ruleAction7: + p.Operator(OpContains) + case ruleAction8: + p.Tag(buffer[begin:end]) + case ruleAction9: + p.Value(buffer[begin:end]) + case ruleAction10: + p.Number(buffer[begin:end]) + case ruleAction11: + p.Time(buffer[begin:end]) + case ruleAction12: + p.Date(buffer[begin:end]) + + } + } + _, _, _, _, _ = buffer, _buffer, text, begin, end +} + +func Pretty(pretty bool) func(*QueryParser) error { + return func(p *QueryParser) error { + p.Pretty = pretty + return nil + } +} + +func Size(size int) func(*QueryParser) error { + return func(p *QueryParser) error { + p.tokens32 = tokens32{tree: make([]token32, 0, size)} + return nil + } +} +func (p *QueryParser) Init(options ...func(*QueryParser) error) error { var ( max token32 position, tokenIndex uint32 buffer []rune ) + for _, option := range options { + err := option(p) + if err != nil { + return err + } + } p.reset = func() { max = token32{} position, tokenIndex = 0, 0 @@ -273,7 +381,7 @@ func (p *QueryParser) Init() { p.reset() _rules := p.rules - tree := tokens32{tree: make([]token32, math.MaxInt16)} + tree := p.tokens32 p.parse = func(rule ...int) error { r := 1 if len(rule) > 0 { @@ -322,1232 +430,1548 @@ func (p *QueryParser) Init() { _rules = [...]func() bool{ nil, - /* 0 e <- <('"' condition (' '+ and ' '+ condition)* '"' !.)> */ + /* 0 e <- <(eor !.)> */ func() bool { position0, tokenIndex0 := position, tokenIndex { position1 := position - if buffer[position] != rune('"') { + if !_rules[ruleeor]() { goto l0 } - position++ - if !_rules[rulecondition]() { + { + position2, tokenIndex2 := position, tokenIndex + if !matchDot() { + goto l2 + } goto l0 + l2: + position, tokenIndex = position2, tokenIndex2 + } + add(rulee, position1) + } + return true + l0: + position, tokenIndex = position0, tokenIndex0 + return false + }, + /* 1 eor <- <(eand (or eand Action0)*)> */ + func() bool { + position3, tokenIndex3 := position, tokenIndex + { + position4 := position + if !_rules[ruleeand]() { + goto l3 } - l2: + l5: { - position3, tokenIndex3 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l3 - } - position++ - l4: - { - position5, tokenIndex5 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l5 - } - position++ - 
goto l4 - l5: - position, tokenIndex = position5, tokenIndex5 - } - { - position6 := position - { - position7, tokenIndex7 := position, tokenIndex - if buffer[position] != rune('a') { - goto l8 - } - position++ - goto l7 - l8: - position, tokenIndex = position7, tokenIndex7 - if buffer[position] != rune('A') { - goto l3 - } - position++ - } - l7: - { - position9, tokenIndex9 := position, tokenIndex - if buffer[position] != rune('n') { - goto l10 - } - position++ - goto l9 - l10: - position, tokenIndex = position9, tokenIndex9 - if buffer[position] != rune('N') { - goto l3 - } - position++ - } - l9: - { - position11, tokenIndex11 := position, tokenIndex - if buffer[position] != rune('d') { - goto l12 - } - position++ - goto l11 - l12: - position, tokenIndex = position11, tokenIndex11 - if buffer[position] != rune('D') { - goto l3 - } - position++ - } - l11: - add(ruleand, position6) + position6, tokenIndex6 := position, tokenIndex + if !_rules[ruleor]() { + goto l6 } - if buffer[position] != rune(' ') { - goto l3 - } - position++ - l13: - { - position14, tokenIndex14 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l14 - } - position++ - goto l13 - l14: - position, tokenIndex = position14, tokenIndex14 + if !_rules[ruleeand]() { + goto l6 } - if !_rules[rulecondition]() { - goto l3 + if !_rules[ruleAction0]() { + goto l6 } - goto l2 - l3: - position, tokenIndex = position3, tokenIndex3 + goto l5 + l6: + position, tokenIndex = position6, tokenIndex6 } - if buffer[position] != rune('"') { - goto l0 + add(ruleeor, position4) + } + return true + l3: + position, tokenIndex = position3, tokenIndex3 + return false + }, + /* 2 eand <- <(condition (and condition Action1)*)> */ + func() bool { + position7, tokenIndex7 := position, tokenIndex + { + position8 := position + if !_rules[rulecondition]() { + goto l7 } - position++ + l9: { - position15, tokenIndex15 := position, tokenIndex - if !matchDot() { - goto l15 + position10, tokenIndex10 := position, tokenIndex + if !_rules[ruleand]() { + goto l10 } - goto l0 - l15: - position, tokenIndex = position15, tokenIndex15 + if !_rules[rulecondition]() { + goto l10 + } + if !_rules[ruleAction1]() { + goto l10 + } + goto l9 + l10: + position, tokenIndex = position10, tokenIndex10 } - add(rulee, position1) + add(ruleeand, position8) } return true - l0: - position, tokenIndex = position0, tokenIndex0 + l7: + position, tokenIndex = position7, tokenIndex7 return false }, - /* 1 condition <- <(tag ' '* ((le ' '* ((&('D' | 'd') date) | (&('T' | 't') time) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') number))) / (ge ' '* ((&('D' | 'd') date) | (&('T' | 't') time) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') number))) / ((&('=') (equal ' '* ((&('\'') value) | (&('D' | 'd') date) | (&('T' | 't') time) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') number)))) | (&('>') (g ' '* ((&('D' | 'd') date) | (&('T' | 't') time) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') number)))) | (&('<') (l ' '* ((&('D' | 'd') date) | (&('T' | 't') time) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') number)))) | (&('C' | 'c') (contains ' '* value)))))> */ + /* 3 condition <- <((tag sp ((le (number / time / date) Action2) / (ge (number / time / date) Action3) / (l (number / time / date) Action4) / (g (number / time / date) Action5) / (equal (number / time / date / qvalue) Action6) / (contains qvalue Action7)) sp) / (open eor close))> */ func() bool { - position16, tokenIndex16 := 
position, tokenIndex + position11, tokenIndex11 := position, tokenIndex { - position17 := position + position12 := position { - position18 := position + position13, tokenIndex13 := position, tokenIndex + if !_rules[ruletag]() { + goto l14 + } + if !_rules[rulesp]() { + goto l14 + } { - position19 := position + position15, tokenIndex15 := position, tokenIndex + if !_rules[rulele]() { + goto l16 + } { - position22, tokenIndex22 := position, tokenIndex - { - switch buffer[position] { - case '<': - if buffer[position] != rune('<') { - goto l22 - } - position++ - break - case '>': - if buffer[position] != rune('>') { - goto l22 - } - position++ - break - case '=': - if buffer[position] != rune('=') { - goto l22 - } - position++ - break - case '\'': - if buffer[position] != rune('\'') { - goto l22 - } - position++ - break - case '"': - if buffer[position] != rune('"') { - goto l22 - } - position++ - break - case ')': - if buffer[position] != rune(')') { - goto l22 - } - position++ - break - case '(': - if buffer[position] != rune('(') { - goto l22 - } - position++ - break - case '\\': - if buffer[position] != rune('\\') { - goto l22 - } - position++ - break - case '\r': - if buffer[position] != rune('\r') { - goto l22 - } - position++ - break - case '\n': - if buffer[position] != rune('\n') { - goto l22 - } - position++ - break - case '\t': - if buffer[position] != rune('\t') { - goto l22 - } - position++ - break - default: - if buffer[position] != rune(' ') { - goto l22 - } - position++ - break - } + position17, tokenIndex17 := position, tokenIndex + if !_rules[rulenumber]() { + goto l18 + } + goto l17 + l18: + position, tokenIndex = position17, tokenIndex17 + if !_rules[ruletime]() { + goto l19 + } + goto l17 + l19: + position, tokenIndex = position17, tokenIndex17 + if !_rules[ruledate]() { + goto l16 } - - goto l16 - l22: - position, tokenIndex = position22, tokenIndex22 } - if !matchDot() { + l17: + if !_rules[ruleAction2]() { goto l16 } - l20: + goto l15 + l16: + position, tokenIndex = position15, tokenIndex15 + if !_rules[rulege]() { + goto l20 + } { position21, tokenIndex21 := position, tokenIndex - { - position24, tokenIndex24 := position, tokenIndex - { - switch buffer[position] { - case '<': - if buffer[position] != rune('<') { - goto l24 - } - position++ - break - case '>': - if buffer[position] != rune('>') { - goto l24 - } - position++ - break - case '=': - if buffer[position] != rune('=') { - goto l24 - } - position++ - break - case '\'': - if buffer[position] != rune('\'') { - goto l24 - } - position++ - break - case '"': - if buffer[position] != rune('"') { - goto l24 - } - position++ - break - case ')': - if buffer[position] != rune(')') { - goto l24 - } - position++ - break - case '(': - if buffer[position] != rune('(') { - goto l24 - } - position++ - break - case '\\': - if buffer[position] != rune('\\') { - goto l24 - } - position++ - break - case '\r': - if buffer[position] != rune('\r') { - goto l24 - } - position++ - break - case '\n': - if buffer[position] != rune('\n') { - goto l24 - } - position++ - break - case '\t': - if buffer[position] != rune('\t') { - goto l24 - } - position++ - break - default: - if buffer[position] != rune(' ') { - goto l24 - } - position++ - break - } - } - - goto l21 - l24: - position, tokenIndex = position24, tokenIndex24 + if !_rules[rulenumber]() { + goto l22 } - if !matchDot() { - goto l21 + goto l21 + l22: + position, tokenIndex = position21, tokenIndex21 + if !_rules[ruletime]() { + goto l23 } - goto l20 - l21: + goto l21 + l23: 
position, tokenIndex = position21, tokenIndex21 + if !_rules[ruledate]() { + goto l20 + } } - add(rulePegText, position19) - } - add(ruletag, position18) - } - l26: - { - position27, tokenIndex27 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l27 - } - position++ - goto l26 - l27: - position, tokenIndex = position27, tokenIndex27 - } - { - position28, tokenIndex28 := position, tokenIndex - { - position30 := position - if buffer[position] != rune('<') { - goto l29 + l21: + if !_rules[ruleAction3]() { + goto l20 } - position++ - if buffer[position] != rune('=') { + goto l15 + l20: + position, tokenIndex = position15, tokenIndex15 + if !_rules[rulel]() { + goto l24 + } + { + position25, tokenIndex25 := position, tokenIndex + if !_rules[rulenumber]() { + goto l26 + } + goto l25 + l26: + position, tokenIndex = position25, tokenIndex25 + if !_rules[ruletime]() { + goto l27 + } + goto l25 + l27: + position, tokenIndex = position25, tokenIndex25 + if !_rules[ruledate]() { + goto l24 + } + } + l25: + if !_rules[ruleAction4]() { + goto l24 + } + goto l15 + l24: + position, tokenIndex = position15, tokenIndex15 + if !_rules[ruleg]() { + goto l28 + } + { + position29, tokenIndex29 := position, tokenIndex + if !_rules[rulenumber]() { + goto l30 + } + goto l29 + l30: + position, tokenIndex = position29, tokenIndex29 + if !_rules[ruletime]() { + goto l31 + } goto l29 + l31: + position, tokenIndex = position29, tokenIndex29 + if !_rules[ruledate]() { + goto l28 + } } - position++ - add(rulele, position30) - } - l31: - { - position32, tokenIndex32 := position, tokenIndex - if buffer[position] != rune(' ') { + l29: + if !_rules[ruleAction5]() { + goto l28 + } + goto l15 + l28: + position, tokenIndex = position15, tokenIndex15 + if !_rules[ruleequal]() { goto l32 } - position++ - goto l31 - l32: - position, tokenIndex = position32, tokenIndex32 - } - { - switch buffer[position] { - case 'D', 'd': - if !_rules[ruledate]() { - goto l29 + { + position33, tokenIndex33 := position, tokenIndex + if !_rules[rulenumber]() { + goto l34 } - break - case 'T', 't': + goto l33 + l34: + position, tokenIndex = position33, tokenIndex33 if !_rules[ruletime]() { - goto l29 + goto l35 } - break - default: - if !_rules[rulenumber]() { - goto l29 + goto l33 + l35: + position, tokenIndex = position33, tokenIndex33 + if !_rules[ruledate]() { + goto l36 + } + goto l33 + l36: + position, tokenIndex = position33, tokenIndex33 + if !_rules[ruleqvalue]() { + goto l32 } - break } - } - - goto l28 - l29: - position, tokenIndex = position28, tokenIndex28 - { - position35 := position - if buffer[position] != rune('>') { - goto l34 + l33: + if !_rules[ruleAction6]() { + goto l32 } - position++ - if buffer[position] != rune('=') { - goto l34 + goto l15 + l32: + position, tokenIndex = position15, tokenIndex15 + if !_rules[rulecontains]() { + goto l14 } - position++ - add(rulege, position35) - } - l36: - { - position37, tokenIndex37 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l37 + if !_rules[ruleqvalue]() { + goto l14 } - position++ - goto l36 - l37: - position, tokenIndex = position37, tokenIndex37 + if !_rules[ruleAction7]() { + goto l14 + } + } + l15: + if !_rules[rulesp]() { + goto l14 + } + goto l13 + l14: + position, tokenIndex = position13, tokenIndex13 + if !_rules[ruleopen]() { + goto l11 } + if !_rules[ruleeor]() { + goto l11 + } + if !_rules[ruleclose]() { + goto l11 + } + } + l13: + add(rulecondition, position12) + } + return true + l11: + position, tokenIndex = position11, tokenIndex11 
+ return false + }, + /* 4 tag <- <(<(!(' ' / '\t' / '\n' / '\r' / '\\' / '(' / ')' / '"' / '\'' / '=' / '>' / '<') .)+> sp Action8)> */ + func() bool { + position37, tokenIndex37 := position, tokenIndex + { + position38 := position + { + position39 := position { - switch buffer[position] { - case 'D', 'd': - if !_rules[ruledate]() { - goto l34 + position42, tokenIndex42 := position, tokenIndex + { + position43, tokenIndex43 := position, tokenIndex + if buffer[position] != rune(' ') { + goto l44 } - break - case 'T', 't': - if !_rules[ruletime]() { - goto l34 + position++ + goto l43 + l44: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('\t') { + goto l45 } - break - default: - if !_rules[rulenumber]() { - goto l34 + position++ + goto l43 + l45: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('\n') { + goto l46 + } + position++ + goto l43 + l46: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('\r') { + goto l47 + } + position++ + goto l43 + l47: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('\\') { + goto l48 + } + position++ + goto l43 + l48: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('(') { + goto l49 + } + position++ + goto l43 + l49: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune(')') { + goto l50 + } + position++ + goto l43 + l50: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('"') { + goto l51 + } + position++ + goto l43 + l51: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('\'') { + goto l52 + } + position++ + goto l43 + l52: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('=') { + goto l53 + } + position++ + goto l43 + l53: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('>') { + goto l54 + } + position++ + goto l43 + l54: + position, tokenIndex = position43, tokenIndex43 + if buffer[position] != rune('<') { + goto l42 } - break + position++ } + l43: + goto l37 + l42: + position, tokenIndex = position42, tokenIndex42 } - - goto l28 - l34: - position, tokenIndex = position28, tokenIndex28 + if !matchDot() { + goto l37 + } + l40: { - switch buffer[position] { - case '=': + position41, tokenIndex41 := position, tokenIndex + { + position55, tokenIndex55 := position, tokenIndex { - position40 := position - if buffer[position] != rune('=') { - goto l16 + position56, tokenIndex56 := position, tokenIndex + if buffer[position] != rune(' ') { + goto l57 } position++ - add(ruleequal, position40) - } - l41: - { - position42, tokenIndex42 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l42 + goto l56 + l57: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('\t') { + goto l58 } position++ - goto l41 - l42: - position, tokenIndex = position42, tokenIndex42 - } - { - switch buffer[position] { - case '\'': - if !_rules[rulevalue]() { - goto l16 - } - break - case 'D', 'd': - if !_rules[ruledate]() { - goto l16 - } - break - case 'T', 't': - if !_rules[ruletime]() { - goto l16 - } - break - default: - if !_rules[rulenumber]() { - goto l16 - } - break + goto l56 + l58: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('\n') { + goto l59 } - } - - break - case '>': - { - position44 := position - if buffer[position] != rune('>') { - goto l16 + position++ + goto l56 + l59: + position, tokenIndex = 
position56, tokenIndex56 + if buffer[position] != rune('\r') { + goto l60 } position++ - add(ruleg, position44) - } - l45: - { - position46, tokenIndex46 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l46 + goto l56 + l60: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('\\') { + goto l61 } position++ - goto l45 - l46: - position, tokenIndex = position46, tokenIndex46 - } - { - switch buffer[position] { - case 'D', 'd': - if !_rules[ruledate]() { - goto l16 - } - break - case 'T', 't': - if !_rules[ruletime]() { - goto l16 - } - break - default: - if !_rules[rulenumber]() { - goto l16 - } - break - } - } - - break - case '<': - { - position48 := position - if buffer[position] != rune('<') { - goto l16 - } - position++ - add(rulel, position48) - } - l49: - { - position50, tokenIndex50 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l50 + goto l56 + l61: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('(') { + goto l62 } position++ - goto l49 - l50: - position, tokenIndex = position50, tokenIndex50 - } - { - switch buffer[position] { - case 'D', 'd': - if !_rules[ruledate]() { - goto l16 - } - break - case 'T', 't': - if !_rules[ruletime]() { - goto l16 - } - break - default: - if !_rules[rulenumber]() { - goto l16 - } - break - } - } - - break - default: - { - position52 := position - { - position53, tokenIndex53 := position, tokenIndex - if buffer[position] != rune('c') { - goto l54 - } - position++ - goto l53 - l54: - position, tokenIndex = position53, tokenIndex53 - if buffer[position] != rune('C') { - goto l16 - } - position++ - } - l53: - { - position55, tokenIndex55 := position, tokenIndex - if buffer[position] != rune('o') { - goto l56 - } - position++ - goto l55 - l56: - position, tokenIndex = position55, tokenIndex55 - if buffer[position] != rune('O') { - goto l16 - } - position++ - } - l55: - { - position57, tokenIndex57 := position, tokenIndex - if buffer[position] != rune('n') { - goto l58 - } - position++ - goto l57 - l58: - position, tokenIndex = position57, tokenIndex57 - if buffer[position] != rune('N') { - goto l16 - } - position++ - } - l57: - { - position59, tokenIndex59 := position, tokenIndex - if buffer[position] != rune('t') { - goto l60 - } - position++ - goto l59 - l60: - position, tokenIndex = position59, tokenIndex59 - if buffer[position] != rune('T') { - goto l16 - } - position++ - } - l59: - { - position61, tokenIndex61 := position, tokenIndex - if buffer[position] != rune('a') { - goto l62 - } - position++ - goto l61 - l62: - position, tokenIndex = position61, tokenIndex61 - if buffer[position] != rune('A') { - goto l16 - } - position++ - } - l61: - { - position63, tokenIndex63 := position, tokenIndex - if buffer[position] != rune('i') { - goto l64 - } - position++ + goto l56 + l62: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune(')') { goto l63 - l64: - position, tokenIndex = position63, tokenIndex63 - if buffer[position] != rune('I') { - goto l16 - } - position++ } + position++ + goto l56 l63: - { - position65, tokenIndex65 := position, tokenIndex - if buffer[position] != rune('n') { - goto l66 - } - position++ + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('"') { + goto l64 + } + position++ + goto l56 + l64: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('\'') { goto l65 - l66: - position, tokenIndex = position65, tokenIndex65 - if buffer[position] != 
rune('N') { - goto l16 - } - position++ } + position++ + goto l56 l65: - { - position67, tokenIndex67 := position, tokenIndex - if buffer[position] != rune('s') { - goto l68 - } - position++ + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('=') { + goto l66 + } + position++ + goto l56 + l66: + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('>') { goto l67 - l68: - position, tokenIndex = position67, tokenIndex67 - if buffer[position] != rune('S') { - goto l16 - } - position++ } + position++ + goto l56 l67: - add(rulecontains, position52) - } - l69: - { - position70, tokenIndex70 := position, tokenIndex - if buffer[position] != rune(' ') { - goto l70 + position, tokenIndex = position56, tokenIndex56 + if buffer[position] != rune('<') { + goto l55 } position++ - goto l69 - l70: - position, tokenIndex = position70, tokenIndex70 - } - if !_rules[rulevalue]() { - goto l16 } - break + l56: + goto l41 + l55: + position, tokenIndex = position55, tokenIndex55 } + if !matchDot() { + goto l41 + } + goto l40 + l41: + position, tokenIndex = position41, tokenIndex41 } - + add(rulePegText, position39) + } + if !_rules[rulesp]() { + goto l37 } - l28: - add(rulecondition, position17) + if !_rules[ruleAction8]() { + goto l37 + } + add(ruletag, position38) } return true - l16: - position, tokenIndex = position16, tokenIndex16 + l37: + position, tokenIndex = position37, tokenIndex37 return false }, - /* 2 tag <- <<(!((&('<') '<') | (&('>') '>') | (&('=') '=') | (&('\'') '\'') | (&('"') '"') | (&(')') ')') | (&('(') '(') | (&('\\') '\\') | (&('\r') '\r') | (&('\n') '\n') | (&('\t') '\t') | (&(' ') ' ')) .)+>> */ - nil, - /* 3 value <- <<('\'' (!('"' / '\'') .)* '\'')>> */ + /* 5 qvalue <- <('\'' value '\'' sp)> */ func() bool { - position72, tokenIndex72 := position, tokenIndex + position68, tokenIndex68 := position, tokenIndex { - position73 := position + position69 := position + if buffer[position] != rune('\'') { + goto l68 + } + position++ + if !_rules[rulevalue]() { + goto l68 + } + if buffer[position] != rune('\'') { + goto l68 + } + position++ + if !_rules[rulesp]() { + goto l68 + } + add(ruleqvalue, position69) + } + return true + l68: + position, tokenIndex = position68, tokenIndex68 + return false + }, + /* 6 value <- <(<(!('"' / '\'') .)*> Action9)> */ + func() bool { + position70, tokenIndex70 := position, tokenIndex + { + position71 := position { - position74 := position - if buffer[position] != rune('\'') { - goto l72 - } - position++ - l75: + position72 := position + l73: { - position76, tokenIndex76 := position, tokenIndex + position74, tokenIndex74 := position, tokenIndex { - position77, tokenIndex77 := position, tokenIndex + position75, tokenIndex75 := position, tokenIndex { - position78, tokenIndex78 := position, tokenIndex + position76, tokenIndex76 := position, tokenIndex if buffer[position] != rune('"') { - goto l79 + goto l77 } position++ - goto l78 - l79: - position, tokenIndex = position78, tokenIndex78 + goto l76 + l77: + position, tokenIndex = position76, tokenIndex76 if buffer[position] != rune('\'') { - goto l77 + goto l75 } position++ } - l78: - goto l76 - l77: - position, tokenIndex = position77, tokenIndex77 + l76: + goto l74 + l75: + position, tokenIndex = position75, tokenIndex75 } if !matchDot() { - goto l76 + goto l74 } - goto l75 - l76: - position, tokenIndex = position76, tokenIndex76 + goto l73 + l74: + position, tokenIndex = position74, tokenIndex74 } - if buffer[position] != rune('\'') { - goto l72 - } - 
position++ - add(rulePegText, position74) + add(rulePegText, position72) } - add(rulevalue, position73) + if !_rules[ruleAction9]() { + goto l70 + } + add(rulevalue, position71) } return true - l72: - position, tokenIndex = position72, tokenIndex72 + l70: + position, tokenIndex = position70, tokenIndex70 return false }, - /* 4 number <- <<('0' / ([1-9] digit* ('.' digit*)?))>> */ + /* 7 number <- <(<('0' / ([1-9] digit* ('.' digit*)?))> Action10)> */ func() bool { - position80, tokenIndex80 := position, tokenIndex + position78, tokenIndex78 := position, tokenIndex { - position81 := position + position79 := position { - position82 := position + position80 := position { - position83, tokenIndex83 := position, tokenIndex + position81, tokenIndex81 := position, tokenIndex if buffer[position] != rune('0') { - goto l84 + goto l82 } position++ - goto l83 - l84: - position, tokenIndex = position83, tokenIndex83 + goto l81 + l82: + position, tokenIndex = position81, tokenIndex81 if c := buffer[position]; c < rune('1') || c > rune('9') { - goto l80 + goto l78 } position++ - l85: + l83: { - position86, tokenIndex86 := position, tokenIndex + position84, tokenIndex84 := position, tokenIndex if !_rules[ruledigit]() { - goto l86 + goto l84 } - goto l85 - l86: - position, tokenIndex = position86, tokenIndex86 + goto l83 + l84: + position, tokenIndex = position84, tokenIndex84 } { - position87, tokenIndex87 := position, tokenIndex + position85, tokenIndex85 := position, tokenIndex if buffer[position] != rune('.') { - goto l87 + goto l85 } position++ - l89: + l87: { - position90, tokenIndex90 := position, tokenIndex + position88, tokenIndex88 := position, tokenIndex if !_rules[ruledigit]() { - goto l90 + goto l88 } - goto l89 - l90: - position, tokenIndex = position90, tokenIndex90 + goto l87 + l88: + position, tokenIndex = position88, tokenIndex88 } - goto l88 - l87: - position, tokenIndex = position87, tokenIndex87 + goto l86 + l85: + position, tokenIndex = position85, tokenIndex85 } - l88: + l86: } - l83: - add(rulePegText, position82) + l81: + add(rulePegText, position80) } - add(rulenumber, position81) + if !_rules[ruleAction10]() { + goto l78 + } + add(rulenumber, position79) } return true - l80: - position, tokenIndex = position80, tokenIndex80 + l78: + position, tokenIndex = position78, tokenIndex78 return false }, - /* 5 digit <- <[0-9]> */ + /* 8 digit <- <[0-9]> */ func() bool { - position91, tokenIndex91 := position, tokenIndex + position89, tokenIndex89 := position, tokenIndex { - position92 := position + position90 := position if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l91 + goto l89 } position++ - add(ruledigit, position92) + add(ruledigit, position90) } return true - l91: - position, tokenIndex = position91, tokenIndex91 + l89: + position, tokenIndex = position89, tokenIndex89 return false }, - /* 6 time <- <(('t' / 'T') ('i' / 'I') ('m' / 'M') ('e' / 'E') ' ' <(year '-' month '-' day 'T' digit digit ':' digit digit ':' digit digit ((('-' / '+') digit digit ':' digit digit) / 'Z'))>)> */ + /* 9 time <- <(('t' / 'T') ('i' / 'I') ('m' / 'M') ('e' / 'E') ' ' <(year '-' month '-' day 'T' digit digit ':' digit digit ':' digit digit ((('-' / '+') digit digit ':' digit digit) / 'Z'))> Action11)> */ func() bool { - position93, tokenIndex93 := position, tokenIndex + position91, tokenIndex91 := position, tokenIndex { - position94 := position + position92 := position { - position95, tokenIndex95 := position, tokenIndex + position93, tokenIndex93 := position, tokenIndex if 
buffer[position] != rune('t') { + goto l94 + } + position++ + goto l93 + l94: + position, tokenIndex = position93, tokenIndex93 + if buffer[position] != rune('T') { + goto l91 + } + position++ + } + l93: + { + position95, tokenIndex95 := position, tokenIndex + if buffer[position] != rune('i') { goto l96 } position++ goto l95 l96: position, tokenIndex = position95, tokenIndex95 - if buffer[position] != rune('T') { - goto l93 + if buffer[position] != rune('I') { + goto l91 } position++ } l95: { position97, tokenIndex97 := position, tokenIndex - if buffer[position] != rune('i') { + if buffer[position] != rune('m') { goto l98 } position++ goto l97 l98: position, tokenIndex = position97, tokenIndex97 - if buffer[position] != rune('I') { - goto l93 + if buffer[position] != rune('M') { + goto l91 } position++ } l97: { position99, tokenIndex99 := position, tokenIndex - if buffer[position] != rune('m') { + if buffer[position] != rune('e') { goto l100 } position++ goto l99 l100: position, tokenIndex = position99, tokenIndex99 - if buffer[position] != rune('M') { - goto l93 - } - position++ - } - l99: - { - position101, tokenIndex101 := position, tokenIndex - if buffer[position] != rune('e') { - goto l102 - } - position++ - goto l101 - l102: - position, tokenIndex = position101, tokenIndex101 if buffer[position] != rune('E') { - goto l93 + goto l91 } position++ } - l101: + l99: if buffer[position] != rune(' ') { - goto l93 + goto l91 } position++ { - position103 := position + position101 := position if !_rules[ruleyear]() { - goto l93 + goto l91 } if buffer[position] != rune('-') { - goto l93 + goto l91 } position++ if !_rules[rulemonth]() { - goto l93 + goto l91 } if buffer[position] != rune('-') { - goto l93 + goto l91 } position++ if !_rules[ruleday]() { - goto l93 + goto l91 } if buffer[position] != rune('T') { - goto l93 + goto l91 } position++ if !_rules[ruledigit]() { - goto l93 + goto l91 } if !_rules[ruledigit]() { - goto l93 + goto l91 } if buffer[position] != rune(':') { - goto l93 + goto l91 } position++ if !_rules[ruledigit]() { - goto l93 + goto l91 } if !_rules[ruledigit]() { - goto l93 + goto l91 } if buffer[position] != rune(':') { - goto l93 + goto l91 } position++ if !_rules[ruledigit]() { - goto l93 + goto l91 } if !_rules[ruledigit]() { - goto l93 + goto l91 } { - position104, tokenIndex104 := position, tokenIndex + position102, tokenIndex102 := position, tokenIndex { - position106, tokenIndex106 := position, tokenIndex + position104, tokenIndex104 := position, tokenIndex if buffer[position] != rune('-') { - goto l107 + goto l105 } position++ - goto l106 - l107: - position, tokenIndex = position106, tokenIndex106 + goto l104 + l105: + position, tokenIndex = position104, tokenIndex104 if buffer[position] != rune('+') { - goto l105 + goto l103 } position++ } - l106: + l104: if !_rules[ruledigit]() { - goto l105 + goto l103 } if !_rules[ruledigit]() { - goto l105 + goto l103 } if buffer[position] != rune(':') { - goto l105 + goto l103 } position++ if !_rules[ruledigit]() { - goto l105 + goto l103 } if !_rules[ruledigit]() { - goto l105 + goto l103 } - goto l104 - l105: - position, tokenIndex = position104, tokenIndex104 + goto l102 + l103: + position, tokenIndex = position102, tokenIndex102 if buffer[position] != rune('Z') { - goto l93 + goto l91 } position++ } - l104: - add(rulePegText, position103) + l102: + add(rulePegText, position101) + } + if !_rules[ruleAction11]() { + goto l91 } - add(ruletime, position94) + add(ruletime, position92) } return true - l93: - position, 
tokenIndex = position93, tokenIndex93 + l91: + position, tokenIndex = position91, tokenIndex91 return false }, - /* 7 date <- <(('d' / 'D') ('a' / 'A') ('t' / 'T') ('e' / 'E') ' ' <(year '-' month '-' day)>)> */ + /* 10 date <- <(('d' / 'D') ('a' / 'A') ('t' / 'T') ('e' / 'E') ' ' <(year '-' month '-' day)> Action12)> */ func() bool { - position108, tokenIndex108 := position, tokenIndex + position106, tokenIndex106 := position, tokenIndex { - position109 := position + position107 := position { - position110, tokenIndex110 := position, tokenIndex + position108, tokenIndex108 := position, tokenIndex if buffer[position] != rune('d') { + goto l109 + } + position++ + goto l108 + l109: + position, tokenIndex = position108, tokenIndex108 + if buffer[position] != rune('D') { + goto l106 + } + position++ + } + l108: + { + position110, tokenIndex110 := position, tokenIndex + if buffer[position] != rune('a') { goto l111 } position++ goto l110 l111: position, tokenIndex = position110, tokenIndex110 - if buffer[position] != rune('D') { - goto l108 + if buffer[position] != rune('A') { + goto l106 } position++ } l110: { position112, tokenIndex112 := position, tokenIndex - if buffer[position] != rune('a') { + if buffer[position] != rune('t') { goto l113 } position++ goto l112 l113: position, tokenIndex = position112, tokenIndex112 - if buffer[position] != rune('A') { - goto l108 + if buffer[position] != rune('T') { + goto l106 } position++ } l112: { position114, tokenIndex114 := position, tokenIndex - if buffer[position] != rune('t') { + if buffer[position] != rune('e') { goto l115 } position++ goto l114 l115: position, tokenIndex = position114, tokenIndex114 - if buffer[position] != rune('T') { - goto l108 - } - position++ - } - l114: - { - position116, tokenIndex116 := position, tokenIndex - if buffer[position] != rune('e') { - goto l117 - } - position++ - goto l116 - l117: - position, tokenIndex = position116, tokenIndex116 if buffer[position] != rune('E') { - goto l108 + goto l106 } position++ } - l116: + l114: if buffer[position] != rune(' ') { - goto l108 + goto l106 } position++ { - position118 := position + position116 := position if !_rules[ruleyear]() { - goto l108 + goto l106 } if buffer[position] != rune('-') { - goto l108 + goto l106 } position++ if !_rules[rulemonth]() { - goto l108 + goto l106 } if buffer[position] != rune('-') { - goto l108 + goto l106 } position++ if !_rules[ruleday]() { - goto l108 + goto l106 } - add(rulePegText, position118) + add(rulePegText, position116) } - add(ruledate, position109) + if !_rules[ruleAction12]() { + goto l106 + } + add(ruledate, position107) } return true - l108: - position, tokenIndex = position108, tokenIndex108 + l106: + position, tokenIndex = position106, tokenIndex106 return false }, - /* 8 year <- <(('1' / '2') digit digit digit)> */ + /* 11 year <- <(('1' / '2') digit digit digit)> */ func() bool { - position119, tokenIndex119 := position, tokenIndex + position117, tokenIndex117 := position, tokenIndex { - position120 := position + position118 := position { - position121, tokenIndex121 := position, tokenIndex + position119, tokenIndex119 := position, tokenIndex if buffer[position] != rune('1') { - goto l122 + goto l120 } position++ - goto l121 - l122: - position, tokenIndex = position121, tokenIndex121 + goto l119 + l120: + position, tokenIndex = position119, tokenIndex119 if buffer[position] != rune('2') { - goto l119 + goto l117 } position++ } - l121: + l119: if !_rules[ruledigit]() { - goto l119 + goto l117 } if !_rules[ruledigit]() { - 
goto l119 + goto l117 } if !_rules[ruledigit]() { - goto l119 + goto l117 } - add(ruleyear, position120) + add(ruleyear, position118) } return true - l119: - position, tokenIndex = position119, tokenIndex119 + l117: + position, tokenIndex = position117, tokenIndex117 return false }, - /* 9 month <- <(('0' / '1') digit)> */ + /* 12 month <- <(('0' / '1') digit)> */ func() bool { - position123, tokenIndex123 := position, tokenIndex + position121, tokenIndex121 := position, tokenIndex { - position124 := position + position122 := position { - position125, tokenIndex125 := position, tokenIndex + position123, tokenIndex123 := position, tokenIndex if buffer[position] != rune('0') { - goto l126 + goto l124 } position++ - goto l125 - l126: - position, tokenIndex = position125, tokenIndex125 + goto l123 + l124: + position, tokenIndex = position123, tokenIndex123 if buffer[position] != rune('1') { - goto l123 + goto l121 } position++ } - l125: + l123: if !_rules[ruledigit]() { - goto l123 + goto l121 } - add(rulemonth, position124) + add(rulemonth, position122) } return true - l123: - position, tokenIndex = position123, tokenIndex123 + l121: + position, tokenIndex = position121, tokenIndex121 return false }, - /* 10 day <- <(((&('3') '3') | (&('2') '2') | (&('1') '1') | (&('0') '0')) digit)> */ + /* 13 day <- <(('0' / '1' / '2' / '3') digit)> */ func() bool { - position127, tokenIndex127 := position, tokenIndex + position125, tokenIndex125 := position, tokenIndex { - position128 := position + position126 := position { - switch buffer[position] { - case '3': - if buffer[position] != rune('3') { - goto l127 - } - position++ - break - case '2': - if buffer[position] != rune('2') { - goto l127 - } - position++ - break - case '1': - if buffer[position] != rune('1') { - goto l127 + position127, tokenIndex127 := position, tokenIndex + if buffer[position] != rune('0') { + goto l128 + } + position++ + goto l127 + l128: + position, tokenIndex = position127, tokenIndex127 + if buffer[position] != rune('1') { + goto l129 + } + position++ + goto l127 + l129: + position, tokenIndex = position127, tokenIndex127 + if buffer[position] != rune('2') { + goto l130 + } + position++ + goto l127 + l130: + position, tokenIndex = position127, tokenIndex127 + if buffer[position] != rune('3') { + goto l125 + } + position++ + } + l127: + if !_rules[ruledigit]() { + goto l125 + } + add(ruleday, position126) + } + return true + l125: + position, tokenIndex = position125, tokenIndex125 + return false + }, + /* 14 and <- <(('a' / 'A') ('n' / 'N') ('d' / 'D') sp)> */ + func() bool { + position131, tokenIndex131 := position, tokenIndex + { + position132 := position + { + position133, tokenIndex133 := position, tokenIndex + if buffer[position] != rune('a') { + goto l134 + } + position++ + goto l133 + l134: + position, tokenIndex = position133, tokenIndex133 + if buffer[position] != rune('A') { + goto l131 + } + position++ + } + l133: + { + position135, tokenIndex135 := position, tokenIndex + if buffer[position] != rune('n') { + goto l136 + } + position++ + goto l135 + l136: + position, tokenIndex = position135, tokenIndex135 + if buffer[position] != rune('N') { + goto l131 + } + position++ + } + l135: + { + position137, tokenIndex137 := position, tokenIndex + if buffer[position] != rune('d') { + goto l138 + } + position++ + goto l137 + l138: + position, tokenIndex = position137, tokenIndex137 + if buffer[position] != rune('D') { + goto l131 + } + position++ + } + l137: + if !_rules[rulesp]() { + goto l131 + } + add(ruleand, 
position132) + } + return true + l131: + position, tokenIndex = position131, tokenIndex131 + return false + }, + /* 15 or <- <(('o' / 'O') ('r' / 'R') sp)> */ + func() bool { + position139, tokenIndex139 := position, tokenIndex + { + position140 := position + { + position141, tokenIndex141 := position, tokenIndex + if buffer[position] != rune('o') { + goto l142 + } + position++ + goto l141 + l142: + position, tokenIndex = position141, tokenIndex141 + if buffer[position] != rune('O') { + goto l139 + } + position++ + } + l141: + { + position143, tokenIndex143 := position, tokenIndex + if buffer[position] != rune('r') { + goto l144 + } + position++ + goto l143 + l144: + position, tokenIndex = position143, tokenIndex143 + if buffer[position] != rune('R') { + goto l139 + } + position++ + } + l143: + if !_rules[rulesp]() { + goto l139 + } + add(ruleor, position140) + } + return true + l139: + position, tokenIndex = position139, tokenIndex139 + return false + }, + /* 16 equal <- <('=' sp)> */ + func() bool { + position145, tokenIndex145 := position, tokenIndex + { + position146 := position + if buffer[position] != rune('=') { + goto l145 + } + position++ + if !_rules[rulesp]() { + goto l145 + } + add(ruleequal, position146) + } + return true + l145: + position, tokenIndex = position145, tokenIndex145 + return false + }, + /* 17 contains <- <(('c' / 'C') ('o' / 'O') ('n' / 'N') ('t' / 'T') ('a' / 'A') ('i' / 'I') ('n' / 'N') ('s' / 'S') sp)> */ + func() bool { + position147, tokenIndex147 := position, tokenIndex + { + position148 := position + { + position149, tokenIndex149 := position, tokenIndex + if buffer[position] != rune('c') { + goto l150 + } + position++ + goto l149 + l150: + position, tokenIndex = position149, tokenIndex149 + if buffer[position] != rune('C') { + goto l147 + } + position++ + } + l149: + { + position151, tokenIndex151 := position, tokenIndex + if buffer[position] != rune('o') { + goto l152 + } + position++ + goto l151 + l152: + position, tokenIndex = position151, tokenIndex151 + if buffer[position] != rune('O') { + goto l147 + } + position++ + } + l151: + { + position153, tokenIndex153 := position, tokenIndex + if buffer[position] != rune('n') { + goto l154 + } + position++ + goto l153 + l154: + position, tokenIndex = position153, tokenIndex153 + if buffer[position] != rune('N') { + goto l147 + } + position++ + } + l153: + { + position155, tokenIndex155 := position, tokenIndex + if buffer[position] != rune('t') { + goto l156 + } + position++ + goto l155 + l156: + position, tokenIndex = position155, tokenIndex155 + if buffer[position] != rune('T') { + goto l147 + } + position++ + } + l155: + { + position157, tokenIndex157 := position, tokenIndex + if buffer[position] != rune('a') { + goto l158 + } + position++ + goto l157 + l158: + position, tokenIndex = position157, tokenIndex157 + if buffer[position] != rune('A') { + goto l147 + } + position++ + } + l157: + { + position159, tokenIndex159 := position, tokenIndex + if buffer[position] != rune('i') { + goto l160 + } + position++ + goto l159 + l160: + position, tokenIndex = position159, tokenIndex159 + if buffer[position] != rune('I') { + goto l147 + } + position++ + } + l159: + { + position161, tokenIndex161 := position, tokenIndex + if buffer[position] != rune('n') { + goto l162 + } + position++ + goto l161 + l162: + position, tokenIndex = position161, tokenIndex161 + if buffer[position] != rune('N') { + goto l147 + } + position++ + } + l161: + { + position163, tokenIndex163 := position, tokenIndex + if buffer[position] != 
rune('s') { + goto l164 + } + position++ + goto l163 + l164: + position, tokenIndex = position163, tokenIndex163 + if buffer[position] != rune('S') { + goto l147 + } + position++ + } + l163: + if !_rules[rulesp]() { + goto l147 + } + add(rulecontains, position148) + } + return true + l147: + position, tokenIndex = position147, tokenIndex147 + return false + }, + /* 18 le <- <('<' '=' sp)> */ + func() bool { + position165, tokenIndex165 := position, tokenIndex + { + position166 := position + if buffer[position] != rune('<') { + goto l165 + } + position++ + if buffer[position] != rune('=') { + goto l165 + } + position++ + if !_rules[rulesp]() { + goto l165 + } + add(rulele, position166) + } + return true + l165: + position, tokenIndex = position165, tokenIndex165 + return false + }, + /* 19 ge <- <('>' '=' sp)> */ + func() bool { + position167, tokenIndex167 := position, tokenIndex + { + position168 := position + if buffer[position] != rune('>') { + goto l167 + } + position++ + if buffer[position] != rune('=') { + goto l167 + } + position++ + if !_rules[rulesp]() { + goto l167 + } + add(rulege, position168) + } + return true + l167: + position, tokenIndex = position167, tokenIndex167 + return false + }, + /* 20 l <- <('<' sp)> */ + func() bool { + position169, tokenIndex169 := position, tokenIndex + { + position170 := position + if buffer[position] != rune('<') { + goto l169 + } + position++ + if !_rules[rulesp]() { + goto l169 + } + add(rulel, position170) + } + return true + l169: + position, tokenIndex = position169, tokenIndex169 + return false + }, + /* 21 g <- <('>' sp)> */ + func() bool { + position171, tokenIndex171 := position, tokenIndex + { + position172 := position + if buffer[position] != rune('>') { + goto l171 + } + position++ + if !_rules[rulesp]() { + goto l171 + } + add(ruleg, position172) + } + return true + l171: + position, tokenIndex = position171, tokenIndex171 + return false + }, + /* 22 open <- <('(' sp)> */ + func() bool { + position173, tokenIndex173 := position, tokenIndex + { + position174 := position + if buffer[position] != rune('(') { + goto l173 + } + position++ + if !_rules[rulesp]() { + goto l173 + } + add(ruleopen, position174) + } + return true + l173: + position, tokenIndex = position173, tokenIndex173 + return false + }, + /* 23 close <- <(')' sp)> */ + func() bool { + position175, tokenIndex175 := position, tokenIndex + { + position176 := position + if buffer[position] != rune(')') { + goto l175 + } + position++ + if !_rules[rulesp]() { + goto l175 + } + add(ruleclose, position176) + } + return true + l175: + position, tokenIndex = position175, tokenIndex175 + return false + }, + /* 24 sp <- <(' ' / '\t')*> */ + func() bool { + { + position178 := position + l179: + { + position180, tokenIndex180 := position, tokenIndex + { + position181, tokenIndex181 := position, tokenIndex + if buffer[position] != rune(' ') { + goto l182 } position++ - break - default: - if buffer[position] != rune('0') { - goto l127 + goto l181 + l182: + position, tokenIndex = position181, tokenIndex181 + if buffer[position] != rune('\t') { + goto l180 } position++ - break } + l181: + goto l179 + l180: + position, tokenIndex = position180, tokenIndex180 } - - if !_rules[ruledigit]() { - goto l127 - } - add(ruleday, position128) + add(rulesp, position178) + } + return true + }, + /* 26 Action0 <- <{ p.Operator(OpOr) }> */ + func() bool { + { + add(ruleAction0, position) + } + return true + }, + /* 27 Action1 <- <{ p.Operator(OpAnd) }> */ + func() bool { + { + add(ruleAction1, 
position) + } + return true + }, + /* 28 Action2 <- <{ p.Operator(OpLessEqual) }> */ + func() bool { + { + add(ruleAction2, position) + } + return true + }, + /* 29 Action3 <- <{ p.Operator(OpGreaterEqual) }> */ + func() bool { + { + add(ruleAction3, position) + } + return true + }, + /* 30 Action4 <- <{ p.Operator(OpLess) }> */ + func() bool { + { + add(ruleAction4, position) + } + return true + }, + /* 31 Action5 <- <{ p.Operator(OpGreater) }> */ + func() bool { + { + add(ruleAction5, position) + } + return true + }, + /* 32 Action6 <- <{ p.Operator(OpEqual) }> */ + func() bool { + { + add(ruleAction6, position) + } + return true + }, + /* 33 Action7 <- <{ p.Operator(OpContains) }> */ + func() bool { + { + add(ruleAction7, position) } return true - l127: - position, tokenIndex = position127, tokenIndex127 - return false }, - /* 11 and <- <(('a' / 'A') ('n' / 'N') ('d' / 'D'))> */ - nil, - /* 12 equal <- <'='> */ - nil, - /* 13 contains <- <(('c' / 'C') ('o' / 'O') ('n' / 'N') ('t' / 'T') ('a' / 'A') ('i' / 'I') ('n' / 'N') ('s' / 'S'))> */ - nil, - /* 14 le <- <('<' '=')> */ - nil, - /* 15 ge <- <('>' '=')> */ - nil, - /* 16 l <- <'<'> */ - nil, - /* 17 g <- <'>'> */ - nil, nil, + /* 35 Action8 <- <{ p.Tag(buffer[begin:end]) }> */ + func() bool { + { + add(ruleAction8, position) + } + return true + }, + /* 36 Action9 <- <{ p.Value(buffer[begin:end]) }> */ + func() bool { + { + add(ruleAction9, position) + } + return true + }, + /* 37 Action10 <- <{ p.Number(buffer[begin:end]) }> */ + func() bool { + { + add(ruleAction10, position) + } + return true + }, + /* 38 Action11 <- <{ p.Time(buffer[begin:end]) }> */ + func() bool { + { + add(ruleAction11, position) + } + return true + }, + /* 39 Action12 <- <{ p.Date(buffer[begin:end]) }> */ + func() bool { + { + add(ruleAction12, position) + } + return true + }, } p.rules = _rules + return nil } diff --git a/event/query/query_test.go b/event/query/query_test.go index 0951bf434..0347a461c 100644 --- a/event/query/query_test.go +++ b/event/query/query_test.go @@ -15,14 +15,29 @@ func TestMatches(t *testing.T) { txTime = "2018-05-03T14:45:00Z" ) + now := time.Now().Format(TimeLayout) + testCases := []struct { s string tags map[string]interface{} err bool matches bool }{ + {"foo > 10", map[string]interface{}{"foo": 11}, false, true}, + {"foo >= 10", map[string]interface{}{"foo": uint64(11)}, false, true}, + {"foo >= 10", map[string]interface{}{"foo": uint32(11)}, false, true}, + {"foo >= 10", map[string]interface{}{"foo": uint(11)}, false, true}, + {fmt.Sprintf("(foo >= 10 OR foo CONTAINS 'frogs') AND badger < TIME %s", now), + map[string]interface{}{"foo": "Ilikefrogs", "badger": time.Unix(343, 0)}, false, true}, + {fmt.Sprintf("foo >= 10 OR foo CONTAINS 'frogs' AND badger < TIME %s", now), + map[string]interface{}{"foo": "Ilikefrogs", "badger": time.Unix(343, 0)}, false, true}, + {fmt.Sprintf("foo CONTAINS 'frosgs' OR (foo >= 10 AND badger < TIME %s)", now), + map[string]interface{}{"foo": "Ilikefrogs", "badger": time.Unix(343, 0)}, false, false}, + {fmt.Sprintf("foo CONTAINS 'mute' AND foo >= 10 OR badger < TIME %s", now), + map[string]interface{}{"foo": "Ilikefrogs", "badger": time.Unix(343, 0)}, false, true}, + {fmt.Sprintf("foo CONTAINS 'mute' AND (foo >= 10 OR badger < TIME %s)", now), + map[string]interface{}{"foo": "Ilikefrogs", "badger": time.Unix(343, 0)}, false, false}, {"tm.events.type='NewBlock'", map[string]interface{}{"tm.events.type": "NewBlock"}, false, true}, - {"tx.gas > 7", map[string]interface{}{"tx.gas": "8"}, false, true}, 
{"tx.gas > 7 AND tx.gas < 9", map[string]interface{}{"tx.gas": "8"}, false, true}, {"body.weight >= 3.5", map[string]interface{}{"body.weight": "3.5"}, false, true}, @@ -45,14 +60,22 @@ func TestMatches(t *testing.T) { for _, tc := range testCases { q, err := New(tc.s) + require.NoError(t, err) if !tc.err { require.Nil(t, err) } + q.ExplainTo(func(format string, args ...interface{}) { + fmt.Printf(format, args...) + }) + + matches := q.Matches(TagMap(tc.tags)) + err = q.MatchError() + require.NoError(t, err) if tc.matches { - assert.True(t, q.Matches(TagMap(tc.tags)), "Query '%s' should match %v", tc.s, tc.tags) + assert.True(t, matches, "Query '%s' should match %v", tc.s, tc.tags) } else { - assert.False(t, q.Matches(TagMap(tc.tags)), "Query '%s' should not match %v", tc.s, tc.tags) + assert.False(t, matches, "Query '%s' should not match %v", tc.s, tc.tags) } } } @@ -61,24 +84,3 @@ func TestMustParse(t *testing.T) { assert.Panics(t, func() { MustParse("=") }) assert.NotPanics(t, func() { MustParse("tm.events.type='NewBlock'") }) } - -func TestConditions(t *testing.T) { - txTime, err := time.Parse(time.RFC3339, "2013-05-03T14:45:00Z") - require.NoError(t, err) - - testCases := []struct { - s string - conditions []Condition - }{ - {s: "tm.events.type='NewBlock'", conditions: []Condition{{Tag: "tm.events.type", Op: OpEqual, Operand: "NewBlock"}}}, - {s: "tx.gas > 7 AND tx.gas < 9", conditions: []Condition{{Tag: "tx.gas", Op: OpGreater, Operand: int64(7)}, {Tag: "tx.gas", Op: OpLess, Operand: int64(9)}}}, - {s: "tx.time >= TIME 2013-05-03T14:45:00Z", conditions: []Condition{{Tag: "tx.time", Op: OpGreaterEqual, Operand: txTime}}}, - } - - for _, tc := range testCases { - q, err := New(tc.s) - require.Nil(t, err) - - assert.Equal(t, tc.conditions, q.Conditions()) - } -} diff --git a/event/query/reflect_tagged.go b/event/query/reflect_tagged.go index 4d0466783..834bf0f1b 100644 --- a/event/query/reflect_tagged.go +++ b/event/query/reflect_tagged.go @@ -96,9 +96,9 @@ func (rt *ReflectTagged) Keys() []string { return rt.keys } -func (rt *ReflectTagged) Get(key string) (value string, ok bool) { +func (rt *ReflectTagged) Get(key string) (value interface{}, ok bool) { if _, ok := rt.ks[key]; ok { - return StringFromValue(rt.rv.Elem().FieldByName(key).Interface()), true + return rt.rv.Elem().FieldByName(key).Interface(), true } return "", false } diff --git a/event/query/reflect_tagged_test.go b/event/query/reflect_tagged_test.go index 3e2392ec8..486788261 100644 --- a/event/query/reflect_tagged_test.go +++ b/event/query/reflect_tagged_test.go @@ -44,15 +44,15 @@ func TestReflectTagged_Get(t *testing.T) { value, ok = rt.Get("Baz") assert.True(t, ok) - assert.Equal(t, "FFFFFF", value) + assert.Equal(t, binary.HexBytes{0xFF, 0xFF, 0xFF}, value) value, ok = rt.Get("Indices") assert.True(t, ok) - assert.Equal(t, "5;7;9", value) + assert.Equal(t, []int{5, 7, 9}, value) value, ok = rt.Get("Address") assert.True(t, ok) - assert.Equal(t, "0102030000000000000000000000000000000000", value) + assert.Equal(t, crypto.MustAddressFromHexString("0102030000000000000000000000000000000000"), value) // Make sure we see updates through pointer tt.Foo = "Plums" @@ -83,7 +83,7 @@ func TestExplicitFields(t *testing.T) { value, ok = rt.Get("Address") assert.True(t, ok) - assert.Equal(t, "0102030000000000000000000000000000000000", value) + assert.Equal(t, crypto.MustAddressFromHexString("0102030000000000000000000000000000000000"), value) _, ok = rt.Get("Bar") assert.False(t, ok) diff --git a/event/query/tags.go 
b/event/query/tags.go index f1ace8e45..31ccbacf0 100644 --- a/event/query/tags.go +++ b/event/query/tags.go @@ -2,12 +2,11 @@ package query import ( "sort" - "strings" ) type Tagged interface { Keys() []string - Get(key string) (value string, ok bool) + Get(key string) (value interface{}, ok bool) // Len returns the number of tags. Len() int } @@ -25,13 +24,13 @@ func MapFromTagged(tagged Tagged) map[string]interface{} { return tags } -func (ts TagMap) Get(key string) (value string, ok bool) { +func (ts TagMap) Get(key string) (value interface{}, ok bool) { var vint interface{} vint, ok = ts[key] if !ok { return "", false } - return StringFromValue(vint), true + return vint, true } func (ts TagMap) Len() int { @@ -91,12 +90,12 @@ func (ct *CombinedTags) AddTags(concat bool, tagsList ...Tagged) { } } -func (ct *CombinedTags) Get(key string) (string, bool) { +func (ct *CombinedTags) Get(key string) (interface{}, bool) { ts := ct.ks[key] if len(ts) == 0 { return "", false } - values := make([]string, 0, len(ts)) + values := make([]interface{}, 0, len(ts)) for _, t := range ts { value, ok := t.Get(key) if ok { @@ -104,9 +103,12 @@ func (ct *CombinedTags) Get(key string) (string, bool) { } } if len(values) == 0 { - return "", false + return nil, false + } + if len(values) == 1 { + return values[0], true } - return strings.Join(values, MultipleValueTagSeparator), true + return values, true } func (ct *CombinedTags) Len() (length int) { diff --git a/execution/exec/event_test.go b/execution/exec/event_test.go index 3daddb60e..d60c2d425 100644 --- a/execution/exec/event_test.go +++ b/execution/exec/event_test.go @@ -36,36 +36,43 @@ func TestEventTagQueries(t *testing.T) { qry, err := qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndContains(event.EventIDKey, "bar") qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndEquals(event.TxHashKey, hex.EncodeUpperToString(tev.Header.TxHash)) qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndGreaterThanOrEqual(event.HeightKey, tev.Header.Height) qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndStrictlyLessThan(event.IndexKey, tev.Header.Index+1) qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndEquals(event.AddressKey, addressHex) qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) qb = qb.AndEquals(LogNTextKey(0), "marmot") qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) + require.NoError(t, qry.MatchError()) t.Logf("Query: %v", qry) t.Logf("Keys: %v", tev.Keys()) diff --git a/execution/exec/log_event.go b/execution/exec/log_event.go index 672b1e542..6ab61a03f 100644 --- a/execution/exec/log_event.go +++ b/execution/exec/log_event.go @@ -15,14 +15,12 @@ package exec import ( - "strings" - "fmt" + "strings" . 
"github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/event" - "github.com/hyperledger/burrow/event/query" - hex "github.com/tmthrgd/go-hex" + "github.com/tmthrgd/go-hex" ) const logNTextTopicCutset = "\x00" @@ -52,7 +50,7 @@ func init() { logTagKeys = append(logTagKeys, event.AddressKey) } -func (log *LogEvent) Get(key string) (string, bool) { +func (log *LogEvent) Get(key string) (interface{}, bool) { if log == nil { return "", false } @@ -69,7 +67,7 @@ func (log *LogEvent) Get(key string) (string, bool) { } return "", false } - return query.StringFromValue(value), true + return value, true } func (log *LogEvent) GetTopic(i int) Word256 { diff --git a/txs/tx_test.go b/txs/tx_test.go index 6e19c1e3e..20d444a38 100644 --- a/txs/tx_test.go +++ b/txs/tx_test.go @@ -19,11 +19,8 @@ import ( "runtime/debug" "testing" - "fmt" - "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/event/query" "github.com/hyperledger/burrow/permission" "github.com/hyperledger/burrow/txs/payload" "github.com/stretchr/testify/assert" @@ -71,8 +68,7 @@ func TestSendTx(t *testing.T) { tx := Enclose("Foo", sendTx).Tx value, ok := tx.Tagged().Get("Inputs") require.True(t, ok) - assert.Equal(t, fmt.Sprintf("%v%s%v", sendTx.Inputs[0], query.MultipleValueTagSeparator, sendTx.Inputs[1]), - value) + assert.Equal(t, sendTx.Inputs, value) value, ok = tx.Tagged().Get("ChainID") require.True(t, ok) @@ -168,7 +164,7 @@ func TestTxWrapper_MarshalJSON(t *testing.T) { tx := Enclose("Foo", callTx).Tx value, ok := tx.Tagged().Get("Input") require.True(t, ok) - assert.Equal(t, callTx.Input.String(), value) + assert.Equal(t, callTx.Input, value) } func TestNewPermissionsTxWithSequence(t *testing.T) { From d22fec70d25fa5a39ede8c5a89d69b12ab2f5219 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Fri, 9 Aug 2019 17:49:23 +0100 Subject: [PATCH 65/70] Add query expression benchmark Signed-off-by: Silas Davis --- event/query/query.go | 4 ++- execution/exec/event_test.go | 56 ++++++++++++++++++++++++------------ 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/event/query/query.go b/event/query/query.go index a44ce974f..b245a40ee 100644 --- a/event/query/query.go +++ b/event/query/query.go @@ -50,7 +50,9 @@ type Condition struct { // New parses the given string and returns a query or error if the string is // invalid. 
func New(s string) (*PegQuery, error) { - p := &QueryParser{Buffer: s} + p := &QueryParser{ + Buffer: s, + } err := p.Init() if err != nil { return nil, err diff --git a/execution/exec/event_test.go b/execution/exec/event_test.go index d60c2d425..faee34834 100644 --- a/execution/exec/event_test.go +++ b/execution/exec/event_test.go @@ -9,26 +9,11 @@ import ( "github.com/hyperledger/burrow/event/query" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - hex "github.com/tmthrgd/go-hex" + "github.com/tmthrgd/go-hex" ) func TestEventTagQueries(t *testing.T) { - addressHex := "DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF" - address, err := crypto.AddressFromHexString(addressHex) - require.NoError(t, err) - ev := &Event{ - Header: &Header{ - EventType: TypeLog, - EventID: "foo/bar", - TxHash: []byte{2, 3, 4}, - Height: 34, - Index: 2, - }, - Log: &LogEvent{ - Address: address, - Topics: []binary.Word256{binary.RightPadWord256([]byte("marmot"))}, - }, - } + ev := logEvent() tev := ev.Tagged() @@ -62,7 +47,7 @@ func TestEventTagQueries(t *testing.T) { assert.True(t, qry.Matches(tev)) require.NoError(t, qry.MatchError()) - qb = qb.AndEquals(event.AddressKey, addressHex) + qb = qb.AndEquals(event.AddressKey, ev.Log.Address) qry, err = qb.Query() require.NoError(t, err) assert.True(t, qry.Matches(tev)) @@ -77,3 +62,38 @@ func TestEventTagQueries(t *testing.T) { t.Logf("Query: %v", qry) t.Logf("Keys: %v", tev.Keys()) } + +func BenchmarkMatching(b *testing.B) { + b.StopTimer() + ev := logEvent() + qb := query.NewBuilder().AndEquals(event.EventTypeKey, TypeLog.String()). + AndContains(event.EventIDKey, "bar"). + AndEquals(event.TxHashKey, hex.EncodeUpperToString(ev.Header.TxHash)). + AndGreaterThanOrEqual(event.HeightKey, ev.Header.Height). + AndStrictlyLessThan(event.IndexKey, ev.Header.Index+1). + AndEquals(event.AddressKey, ev.Log.Address). + AndEquals(LogNTextKey(0), "marmot") + qry, err := qb.Query() + require.NoError(b, err) + tev := ev.Tagged() + b.StartTimer() + for i := 0; i < b.N; i++ { + qry.Matches(tev) + } +} + +func logEvent() *Event { + return &Event{ + Header: &Header{ + EventType: TypeLog, + EventID: "foo/bar", + TxHash: []byte{2, 3, 4}, + Height: 34, + Index: 2, + }, + Log: &LogEvent{ + Address: crypto.MustAddressFromHexString("DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF"), + Topics: []binary.Word256{binary.RightPadWord256([]byte("marmot"))}, + }, + } +} From 3678efbc285d78fa009c90d5637d928d74d2cd87 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Mon, 12 Aug 2019 15:37:53 +0100 Subject: [PATCH 66/70] Refactor ABI and support packing events I wanted to be able to integration test vent and noticed there was no `PackEvent`. When I went to add this I saw that ABI had become a bit overgrown and hard to follow so I factored it out into a few different files. Outside the reorganisation: - Implemented `PackEvent` - Added tests for event packing - Reduced public API service by making `Pack*` and `Unpack*` work with variadic args or a struct combing the functionality of `PackIntoStruct` (I didn't want us maintain `PackEventStruct` `UnpackEventStruct` and all the other variants. - Supported packing of uint8, int8, and big.Int (we were unpacking big ints, but didn't support packing them) - Made `pack` elide indexed fields (i.e. 
fields that go into Log topics) in the same way that `unpack` does Signed-off-by: Silas Davis --- execution/evm/abi/abi.go | 908 ++---------------- execution/evm/abi/{ab_test.go => abi_test.go} | 2 +- execution/evm/abi/event_spec.go | 91 ++ execution/evm/abi/function_spec.go | 69 ++ execution/evm/abi/packing.go | 374 ++++++++ execution/evm/abi/packing_test.go | 99 ++ execution/evm/abi/primitives.go | 157 ++- execution/evm/abi/primitives_test.go | 22 + execution/evm/abi/spec.go | 309 ++++++ execution/evm/snative.go | 4 +- execution/evm/snative_test.go | 2 +- 11 files changed, 1161 insertions(+), 876 deletions(-) rename execution/evm/abi/{ab_test.go => abi_test.go} (99%) create mode 100644 execution/evm/abi/event_spec.go create mode 100644 execution/evm/abi/function_spec.go create mode 100644 execution/evm/abi/packing.go create mode 100644 execution/evm/abi/packing_test.go create mode 100644 execution/evm/abi/primitives_test.go create mode 100644 execution/evm/abi/spec.go diff --git a/execution/evm/abi/abi.go b/execution/evm/abi/abi.go index 62a44807e..7dddb9b6d 100644 --- a/execution/evm/abi/abi.go +++ b/execution/evm/abi/abi.go @@ -1,27 +1,16 @@ package abi import ( - "encoding/json" "fmt" "io/ioutil" "math/big" - "reflect" - "regexp" - "strconv" - "strings" - - hex "github.com/tmthrgd/go-hex" - - burrow_binary "github.com/hyperledger/burrow/binary" - "github.com/hyperledger/burrow/crypto" - "github.com/hyperledger/burrow/crypto/sha3" - "os" "path" "path/filepath" + "reflect" + "github.com/hyperledger/burrow/crypto" "github.com/hyperledger/burrow/deploy/compile" - "github.com/hyperledger/burrow/execution/errors" "github.com/hyperledger/burrow/logging" ) @@ -32,17 +21,36 @@ type Variable struct { Value string } -func init() { - var err error - revertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) - if err != nil { - panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) +// LoadPath loads one abi file or finds all files in a directory +func LoadPath(abiFileOrDirs ...string) (*Spec, error) { + if len(abiFileOrDirs) == 0 { + return &Spec{}, fmt.Errorf("no ABI file or directory provided") } -} -// revertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). -// If a function exits this way, the this hardcoded ABI will be used. -var revertAbi *Spec + specs := make([]*Spec, 0) + + for _, dir := range abiFileOrDirs { + err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil { + return fmt.Errorf("error returned while walking abiDir '%s': %v", dir, err) + } + ext := filepath.Ext(path) + if fi.IsDir() || !(ext == ".bin" || ext == ".abi") { + return nil + } + abiSpc, err := ReadSpecFile(path) + if err != nil { + return fmt.Errorf("error parsing abi file at %s: %v", path, err) + } + specs = append(specs, abiSpc) + return nil + }) + if err != nil { + return &Spec{}, err + } + } + return MergeSpec(specs), nil +} // EncodeFunctionCallFromFile ABI encodes a function call based on ABI in file, and the // arguments specified as strings. 
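// A minimal sketch (not part of this file) of the event packing round trip this
// patch adds, mirroring the TestPackEvent fixture in packing_test.go below; error
// handling is elided and the ABI JSON is assumed to contain the event:
//
//	spec, err := ReadSpec(abiJSON)
//	eventSpec := spec.EventsByName["TestEvent"]
//
//	// Pack from a single struct with one exported field per event input...
//	topics, data, err := PackEvent(&eventSpec, &args{Direction: "foo", Trueism: true, NewDepth: 5, Hash: "bar"})
//
//	// ...or from variadic values in input order; indexed inputs become topics,
//	// the remaining inputs are ABI-encoded into data.
//	topics, data, err = PackEvent(&eventSpec, "foo", true, int64(5), "bar")
//
//	// Unpacking takes pointers (or a pointer to such a struct):
//	out := new(args)
//	err = UnpackEvent(&eventSpec, topics, data, out)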
@@ -157,303 +165,7 @@ func DecodeFunctionReturn(abiData, name string, data []byte) ([]*Variable, error return vars, nil } -func readAbi(root, contract string, logger *logging.Logger) (string, error) { - p := path.Join(root, stripHex(contract)) - if _, err := os.Stat(p); err != nil { - logger.TraceMsg("abifile not found", "tried", p) - p = path.Join(root, stripHex(contract)+".bin") - if _, err = os.Stat(p); err != nil { - logger.TraceMsg("abifile not found", "tried", p) - return "", fmt.Errorf("abi doesn't exist for =>\t%s", p) - } - } - logger.TraceMsg("Found ABI file", "path", p) - sol, err := compile.LoadSolidityContract(p) - if err != nil { - return "", err - } - return string(sol.Abi), nil -} - -// LoadPath loads one abi file or finds all files in a directory -func LoadPath(abiFileOrDirs ...string) (*Spec, error) { - if len(abiFileOrDirs) == 0 { - return &Spec{}, fmt.Errorf("no ABI file or directory provided") - } - - specs := make([]*Spec, 0) - - for _, dir := range abiFileOrDirs { - err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil { - return fmt.Errorf("error returned while walking abiDir '%s': %v", dir, err) - } - ext := filepath.Ext(path) - if fi.IsDir() || !(ext == ".bin" || ext == ".abi") { - return nil - } - if err == nil { - abiSpc, err := ReadSpecFile(path) - if err != nil { - return errors.Wrap(err, "Error parsing abi file "+path) - } - specs = append(specs, abiSpc) - } - return nil - }) - if err != nil { - return &Spec{}, err - } - } - return MergeSpec(specs), nil -} - -func stripHex(s string) string { - if len(s) > 1 { - if s[:2] == "0x" { - s = s[2:] - if len(s)%2 != 0 { - s = "0" + s - } - return s - } - } - return s -} - -// Argument is a decoded function parameter, return or event field -type Argument struct { - Name string - EVM EVMType - IsArray bool - Indexed bool - Hashed bool - ArrayLength uint64 -} - -// FunctionIDSize is the length of the function selector -const FunctionIDSize = 4 - -type FunctionID [FunctionIDSize]byte - -// EventIDSize is the length of the event selector -const EventIDSize = 32 - -type EventID [EventIDSize]byte - -func (e EventID) String() string { - return hex.EncodeUpperToString(e[:]) -} - -type FunctionSpec struct { - FunctionID FunctionID - Constant bool - Inputs []Argument - Outputs []Argument -} - -type EventSpec struct { - EventID EventID - Inputs []Argument - Name string - Anonymous bool -} - -// Spec is the ABI for contract decoded. 
-type Spec struct { - Constructor FunctionSpec - Fallback FunctionSpec - Functions map[string]FunctionSpec - EventsByName map[string]EventSpec - EventsByID map[EventID]EventSpec -} - -type argumentJSON struct { - Name string - Type string - Components []argumentJSON - Indexed bool -} - -type specJSON struct { - Name string - Type string - Inputs []argumentJSON - Outputs []argumentJSON - Constant bool - Payable bool - StateMutability string - Anonymous bool -} - -func readArgSpec(argsJ []argumentJSON) ([]Argument, error) { - args := make([]Argument, len(argsJ)) - var err error - - for i, a := range argsJ { - args[i].Name = a.Name - args[i].Indexed = a.Indexed - - baseType := a.Type - isArray := regexp.MustCompile(`(.*)\[([0-9]+)\]`) - m := isArray.FindStringSubmatch(a.Type) - if m != nil { - args[i].IsArray = true - args[i].ArrayLength, err = strconv.ParseUint(m[2], 10, 32) - if err != nil { - return nil, err - } - baseType = m[1] - } else if strings.HasSuffix(a.Type, "[]") { - args[i].IsArray = true - baseType = strings.TrimSuffix(a.Type, "[]") - } - - isM := regexp.MustCompile("(bytes|uint|int)([0-9]+)") - m = isM.FindStringSubmatch(baseType) - if m != nil { - M, err := strconv.ParseUint(m[2], 10, 32) - if err != nil { - return nil, err - } - switch m[1] { - case "bytes": - if M < 1 || M > 32 { - return nil, fmt.Errorf("bytes%d is not valid type", M) - } - args[i].EVM = EVMBytes{M} - case "uint": - if M < 8 || M > 256 || (M%8) != 0 { - return nil, fmt.Errorf("uint%d is not valid type", M) - } - args[i].EVM = EVMUint{M} - case "int": - if M < 8 || M > 256 || (M%8) != 0 { - return nil, fmt.Errorf("uint%d is not valid type", M) - } - args[i].EVM = EVMInt{M} - } - continue - } - - isMxN := regexp.MustCompile("(fixed|ufixed)([0-9]+)x([0-9]+)") - m = isMxN.FindStringSubmatch(baseType) - if m != nil { - M, err := strconv.ParseUint(m[2], 10, 32) - if err != nil { - return nil, err - } - N, err := strconv.ParseUint(m[3], 10, 32) - if err != nil { - return nil, err - } - if M < 8 || M > 256 || (M%8) != 0 { - return nil, fmt.Errorf("%s is not valid type", baseType) - } - if N == 0 || N > 80 { - return nil, fmt.Errorf("%s is not valid type", baseType) - } - if m[1] == "fixed" { - args[i].EVM = EVMFixed{N: N, M: M, signed: true} - } else if m[1] == "ufixed" { - args[i].EVM = EVMFixed{N: N, M: M, signed: false} - } else { - panic(m[1]) - } - continue - } - switch baseType { - case "uint": - args[i].EVM = EVMUint{M: 256} - case "int": - args[i].EVM = EVMInt{M: 256} - case "address": - args[i].EVM = EVMAddress{} - case "bool": - args[i].EVM = EVMBool{} - case "fixed": - args[i].EVM = EVMFixed{M: 128, N: 8, signed: true} - case "ufixed": - args[i].EVM = EVMFixed{M: 128, N: 8, signed: false} - case "bytes": - args[i].EVM = EVMBytes{M: 0} - case "string": - args[i].EVM = EVMString{} - default: - // Assume it is a type of Contract - args[i].EVM = EVMAddress{} - } - } - - return args, nil -} - -// ReadSpec takes an ABI and decodes it for futher use -func ReadSpec(specBytes []byte) (*Spec, error) { - var specJ []specJSON - err := json.Unmarshal(specBytes, &specJ) - if err != nil { - // The abi spec file might a bin file, with the Abi under the Abi field in json - var binFile struct { - Abi []specJSON - } - err = json.Unmarshal(specBytes, &binFile) - if err != nil { - return nil, err - } - specJ = binFile.Abi - } - - abiSpec := Spec{ - EventsByName: make(map[string]EventSpec), - EventsByID: make(map[EventID]EventSpec), - Functions: make(map[string]FunctionSpec), - } - - for _, s := range specJ { - switch s.Type 
{ - case "constructor": - abiSpec.Constructor.Inputs, err = readArgSpec(s.Inputs) - if err != nil { - return nil, err - } - case "fallback": - abiSpec.Fallback.Inputs = make([]Argument, 0) - abiSpec.Fallback.Outputs = make([]Argument, 0) - case "event": - inputs, err := readArgSpec(s.Inputs) - if err != nil { - return nil, err - } - // Get signature before we deal with hashed types - sig := Signature(s.Name, inputs) - for i := range inputs { - if inputs[i].Indexed && inputs[i].EVM.Dynamic() { - // For Dynamic types, the hash is stored in stead - inputs[i].EVM = EVMBytes{M: 32} - inputs[i].Hashed = true - } - } - ev := EventSpec{Name: s.Name, EventID: GetEventID(sig), Inputs: inputs, Anonymous: s.Anonymous} - abiSpec.EventsByName[ev.Name] = ev - abiSpec.EventsByID[ev.EventID] = ev - case "function": - inputs, err := readArgSpec(s.Inputs) - if err != nil { - return nil, err - } - outputs, err := readArgSpec(s.Outputs) - if err != nil { - return nil, err - } - fs := FunctionSpec{Inputs: inputs, Outputs: outputs, Constant: s.Constant} - fs.SetFunctionID(s.Name) - abiSpec.Functions[s.Name] = fs - } - } - - return &abiSpec, nil -} +// Spec // ReadSpecFile reads an ABI file from a file func ReadSpecFile(filename string) (*Spec, error) { @@ -465,65 +177,7 @@ func ReadSpecFile(filename string) (*Spec, error) { return ReadSpec(specBytes) } -// MergeSpec takes multiple Specs and merges them into once structure. Note that -// the same function name or event name can occur in different abis, so there might be -// some information loss. -func MergeSpec(abiSpec []*Spec) *Spec { - newSpec := Spec{ - EventsByName: make(map[string]EventSpec), - EventsByID: make(map[EventID]EventSpec), - Functions: make(map[string]FunctionSpec), - } - - for _, s := range abiSpec { - for n, f := range s.Functions { - newSpec.Functions[n] = f - } - - // Different Abis can have the Event name, but with a different signature - // Loop over the signatures, as these are less likely to have collisions - for _, e := range s.EventsByID { - newSpec.EventsByName[e.Name] = e - newSpec.EventsByID[e.EventID] = e - } - } - - return &newSpec -} - -func typeFromReflect(v reflect.Type) Argument { - arg := Argument{Name: v.Name()} - - if v == reflect.TypeOf(crypto.Address{}) { - arg.EVM = EVMAddress{} - } else if v == reflect.TypeOf(big.Int{}) { - arg.EVM = EVMInt{M: 256} - } else { - if v.Kind() == reflect.Array { - arg.IsArray = true - arg.ArrayLength = uint64(v.Len()) - v = v.Elem() - } else if v.Kind() == reflect.Slice { - arg.IsArray = true - v = v.Elem() - } - - switch v.Kind() { - case reflect.Bool: - arg.EVM = EVMBool{} - case reflect.String: - arg.EVM = EVMString{} - case reflect.Uint64: - arg.EVM = EVMUint{M: 64} - case reflect.Int64: - arg.EVM = EVMInt{M: 64} - default: - panic(fmt.Sprintf("no mapping for type %v", v.Kind())) - } - } - - return arg -} +// Struct reflection // SpecFromStructReflect generates a FunctionSpec where the arguments and return values are // described a struct. 
Both args and rets should be set to the return value of reflect.TypeOf() @@ -573,312 +227,6 @@ func SpecFromFunctionReflect(fname string, v reflect.Value, skipIn, skipOut int) return &s } -func argsToSignature(args []Argument, addIndexedName bool) (str string) { - str = "(" - for i, a := range args { - if i > 0 { - str += "," - } - str += a.EVM.GetSignature() - if addIndexedName && a.Indexed { - str += " indexed" - } - if a.IsArray { - if a.ArrayLength > 0 { - str += fmt.Sprintf("[%d]", a.ArrayLength) - } else { - str += "[]" - } - } - if addIndexedName && a.Name != "" { - str += " " + a.Name - } - } - str += ")" - return -} - -func Signature(name string, args []Argument) string { - return name + argsToSignature(args, false) -} - -func (functionSpec *FunctionSpec) SetFunctionID(functionName string) { - sig := Signature(functionName, functionSpec.Inputs) - functionSpec.FunctionID = GetFunctionID(sig) -} - -func (f *FunctionSpec) String(name string) string { - return name + argsToSignature(f.Inputs, true) + - " returns " + argsToSignature(f.Outputs, true) -} - -func (e *EventSpec) String() string { - str := e.Name + argsToSignature(e.Inputs, true) - if e.Anonymous { - str += " anonymous" - } - - return str -} - -func (fs FunctionID) Bytes() []byte { - return fs[:] -} - -func GetFunctionID(signature string) (id FunctionID) { - hash := sha3.NewKeccak256() - hash.Write([]byte(signature)) - copy(id[:], hash.Sum(nil)[:4]) - return -} - -func (fs EventID) Bytes() []byte { - return fs[:] -} - -func GetEventID(signature string) (id EventID) { - hash := sha3.NewKeccak256() - hash.Write([]byte(signature)) - copy(id[:], hash.Sum(nil)) - return -} - -// UnpackRevert decodes the revert reason if a contract called revert. If no -// reason was given, message will be nil else it will point to the string -func UnpackRevert(data []byte) (message *string, err error) { - if len(data) > 0 { - var msg string - err = revertAbi.UnpackWithID(data, &msg) - message = &msg - } - return -} - -// UnpackEvent decodes all the fields in an event (indexed topic fields or not) -func UnpackEvent(eventSpec *EventSpec, topics []burrow_binary.Word256, data []byte, args ...interface{}) error { - // First unpack the topic fields - topicIndex := 0 - if !eventSpec.Anonymous { - topicIndex++ - } - - for i, a := range eventSpec.Inputs { - if a.Indexed { - _, err := a.EVM.unpack(topics[topicIndex].Bytes(), 0, args[i]) - if err != nil { - return err - } - topicIndex++ - } - } - - // Now unpack the other fields. 
unpack will step over any indexed fields - return unpack(eventSpec.Inputs, data, func(i int) interface{} { - return args[i] - }) -} - -// Unpack decodes the return values from a function call -func (abiSpec *Spec) Unpack(data []byte, fname string, args ...interface{}) error { - var funcSpec FunctionSpec - var argSpec []Argument - if fname != "" { - if _, ok := abiSpec.Functions[fname]; ok { - funcSpec = abiSpec.Functions[fname] - } else { - funcSpec = abiSpec.Fallback - } - } else { - funcSpec = abiSpec.Constructor - } - - argSpec = funcSpec.Outputs - - if argSpec == nil { - return fmt.Errorf("Unknown function %s", fname) - } - - return unpack(argSpec, data, func(i int) interface{} { - return args[i] - }) -} - -func (abiSpec *Spec) UnpackWithID(data []byte, args ...interface{}) error { - var argSpec []Argument - - var id FunctionID - copy(id[:], data) - for _, fspec := range abiSpec.Functions { - if id == fspec.FunctionID { - argSpec = fspec.Outputs - } - } - - if argSpec == nil { - return fmt.Errorf("Unknown function %x", id) - } - - return unpack(argSpec, data[4:], func(i int) interface{} { - return args[i] - }) -} - -// Pack ABI encodes a function call. The fname specifies which function should called, if -// it doesn't exist exist the fallback function will be called. If fname is the empty -// string, the constructor is called. The arguments must be specified in args. The count -// must match the function being called. -// Returns the ABI encoded function call, whether the function is constant according -// to the ABI (which means it does not modified contract state) -func (abiSpec *Spec) Pack(fname string, args ...interface{}) ([]byte, *FunctionSpec, error) { - var funcSpec FunctionSpec - var argSpec []Argument - if fname != "" { - if _, ok := abiSpec.Functions[fname]; ok { - funcSpec = abiSpec.Functions[fname] - } else { - return nil, nil, fmt.Errorf("Unknown function %s", fname) - } - } else { - if abiSpec.Constructor.Inputs != nil { - funcSpec = abiSpec.Constructor - } else { - return nil, nil, fmt.Errorf("Contract does not have a constructor") - } - } - - argSpec = funcSpec.Inputs - - packed := make([]byte, 0) - - if fname != "" { - packed = funcSpec.FunctionID[:] - } - - packedArgs, err := Pack(argSpec, args...) - if err != nil { - return nil, nil, err - } - - return append(packed, packedArgs...), &funcSpec, nil -} - -func PackIntoStruct(argSpec []Argument, st interface{}) ([]byte, error) { - v := reflect.ValueOf(st) - - fields := v.NumField() - if fields != len(argSpec) { - return nil, fmt.Errorf("%d arguments expected, %d received", len(argSpec), fields) - } - - return pack(argSpec, func(i int) interface{} { - return v.Field(i).Interface() - }) -} - -func Pack(argSpec []Argument, args ...interface{}) ([]byte, error) { - if len(args) != len(argSpec) { - return nil, fmt.Errorf("%d arguments expected, %d received", len(argSpec), len(args)) - } - - return pack(argSpec, func(i int) interface{} { - return args[i] - }) -} - -func pack(argSpec []Argument, getArg func(int) interface{}) ([]byte, error) { - packed := make([]byte, 0) - packedDynamic := []byte{} - fixedSize := 0 - // Anything dynamic is stored after the "fixed" block. For the dynamic types, the fixed - // block contains byte offsets to the data. 
We need to know the length of the fixed - // block, so we can calcute the offsets - for _, a := range argSpec { - if a.IsArray { - if a.ArrayLength > 0 { - fixedSize += ElementSize * int(a.ArrayLength) - } else { - fixedSize += ElementSize - } - } else { - fixedSize += ElementSize - } - } - - addArg := func(v interface{}, a Argument) error { - var b []byte - var err error - if a.EVM.Dynamic() { - offset := EVMUint{M: 256} - b, _ = offset.pack(fixedSize) - d, err := a.EVM.pack(v) - if err != nil { - return err - } - fixedSize += len(d) - packedDynamic = append(packedDynamic, d...) - } else { - b, err = a.EVM.pack(v) - if err != nil { - return err - } - } - packed = append(packed, b...) - return nil - } - - for i, as := range argSpec { - a := getArg(i) - if as.IsArray { - s, ok := a.(string) - if ok && s[0:1] == "[" && s[len(s)-1:] == "]" { - a = strings.Split(s[1:len(s)-1], ",") - } - - val := reflect.ValueOf(a) - if val.Kind() != reflect.Slice && val.Kind() != reflect.Array { - return nil, fmt.Errorf("argument %d should be array or slice, not %s", i, val.Kind().String()) - } - - if as.ArrayLength > 0 { - if as.ArrayLength != uint64(val.Len()) { - return nil, fmt.Errorf("argumment %d should be array of %d, not %d", i, as.ArrayLength, val.Len()) - } - - for n := 0; n < val.Len(); n++ { - err := addArg(val.Index(n).Interface(), as) - if err != nil { - return nil, err - } - } - } else { - // dynamic array - offset := EVMUint{M: 256} - b, _ := offset.pack(fixedSize) - packed = append(packed, b...) - fixedSize += len(b) - - // store length - b, _ = offset.pack(val.Len()) - packedDynamic = append(packedDynamic, b...) - for n := 0; n < val.Len(); n++ { - d, err := as.EVM.pack(val.Index(n).Interface()) - if err != nil { - return nil, err - } - packedDynamic = append(packedDynamic, d...) 
- } - } - } else { - err := addArg(a, as) - if err != nil { - return nil, err - } - } - } - - return append(packed, packedDynamic...), nil -} - func GetPackingTypes(args []Argument) []interface{} { res := make([]interface{}, len(args)) @@ -894,155 +242,67 @@ func GetPackingTypes(args []Argument) []interface{} { return res } -func UnpackIntoStruct(argSpec []Argument, data []byte, st interface{}) error { - v := reflect.ValueOf(st).Elem() - return unpack(argSpec, data, func(i int) interface{} { - return v.Field(i).Addr().Interface() - }) -} - -func Unpack(argSpec []Argument, data []byte, args ...interface{}) error { - return unpack(argSpec, data, func(i int) interface{} { - return args[i] - }) -} - -func unpack(argSpec []Argument, data []byte, getArg func(int) interface{}) error { - offset := 0 - offType := EVMInt{M: 64} +func typeFromReflect(v reflect.Type) Argument { + arg := Argument{Name: v.Name()} - getPrimitive := func(e interface{}, a Argument) error { - if a.EVM.Dynamic() { - var o int64 - l, err := offType.unpack(data, offset, &o) - if err != nil { - return err - } - offset += l - _, err = a.EVM.unpack(data, int(o), e) - if err != nil { - return err - } - } else { - l, err := a.EVM.unpack(data, offset, e) - if err != nil { - return err - } - offset += l + if v == reflect.TypeOf(crypto.Address{}) { + arg.EVM = EVMAddress{} + } else if v == reflect.TypeOf(big.Int{}) { + arg.EVM = EVMInt{M: 256} + } else { + if v.Kind() == reflect.Array { + arg.IsArray = true + arg.ArrayLength = uint64(v.Len()) + v = v.Elem() + } else if v.Kind() == reflect.Slice { + arg.IsArray = true + v = v.Elem() } - return nil - } - - for i, a := range argSpec { - if a.Indexed { - continue + switch v.Kind() { + case reflect.Bool: + arg.EVM = EVMBool{} + case reflect.String: + arg.EVM = EVMString{} + case reflect.Uint64: + arg.EVM = EVMUint{M: 64} + case reflect.Int64: + arg.EVM = EVMInt{M: 64} + default: + panic(fmt.Sprintf("no mapping for type %v", v.Kind())) } + } - arg := getArg(i) - if a.IsArray { - var array *[]interface{} - - array, ok := arg.(*[]interface{}) - if !ok { - if _, ok := arg.(*string); ok { - // We have been asked to return the value as a string; make intermediate - // array of strings; we will concatenate after - intermediate := make([]interface{}, a.ArrayLength) - for i := range intermediate { - intermediate[i] = new(string) - } - array = &intermediate - } else { - return fmt.Errorf("argument %d should be array, slice or string", i) - } - } - - if a.ArrayLength > 0 { - if int(a.ArrayLength) != len(*array) { - return fmt.Errorf("argument %d should be array or slice of %d elements", i, a.ArrayLength) - } - - for n := 0; n < len(*array); n++ { - err := getPrimitive((*array)[n], a) - if err != nil { - return err - } - } - } else { - var o int64 - var length int64 - - l, err := offType.unpack(data, offset, &o) - if err != nil { - return err - } - - offset += l - s, err := offType.unpack(data, int(o), &length) - if err != nil { - return err - } - o += int64(s) - - intermediate := make([]interface{}, length) - - if _, ok := arg.(*string); ok { - // We have been asked to return the value as a string; make intermediate - // array of strings; we will concatenate after - for i := range intermediate { - intermediate[i] = new(string) - } - } else { - for i := range intermediate { - intermediate[i] = a.EVM.getGoType() - } - } - - for i := 0; i < int(length); i++ { - l, err = a.EVM.unpack(data, int(o), intermediate[i]) - if err != nil { - return err - } - o += int64(l) - } - - array = &intermediate - } + 
return arg +} - // If we were supposed to return a string, convert it back - if ret, ok := arg.(*string); ok { - s := "[" - for i, e := range *array { - if i > 0 { - s += "," - } - s += *(e.(*string)) - } - s += "]" - *ret = s - } - } else { - err := getPrimitive(arg, a) - if err != nil { - return err - } +func readAbi(root, contract string, logger *logging.Logger) (string, error) { + p := path.Join(root, stripHex(contract)) + if _, err := os.Stat(p); err != nil { + logger.TraceMsg("abifile not found", "tried", p) + p = path.Join(root, stripHex(contract)+".bin") + if _, err = os.Stat(p); err != nil { + logger.TraceMsg("abifile not found", "tried", p) + return "", fmt.Errorf("abi doesn't exist for =>\t%s", p) } } - - return nil + logger.TraceMsg("Found ABI file", "path", p) + sol, err := compile.LoadSolidityContract(p) + if err != nil { + return "", err + } + return string(sol.Abi), nil } -// quick helper padding -func pad(input []byte, size int, left bool) []byte { - if len(input) >= size { - return input[:size] - } - padded := make([]byte, size) - if left { - copy(padded[size-len(input):], input) - } else { - copy(padded, input) +func stripHex(s string) string { + if len(s) > 1 { + if s[:2] == "0x" { + s = s[2:] + if len(s)%2 != 0 { + s = "0" + s + } + return s + } } - return padded + return s } diff --git a/execution/evm/abi/ab_test.go b/execution/evm/abi/abi_test.go similarity index 99% rename from execution/evm/abi/ab_test.go rename to execution/evm/abi/abi_test.go index a904a21bf..5c84aa495 100644 --- a/execution/evm/abi/ab_test.go +++ b/execution/evm/abi/abi_test.go @@ -6,7 +6,7 @@ import ( "strings" "testing" - hex "github.com/tmthrgd/go-hex" + "github.com/tmthrgd/go-hex" "github.com/hyperledger/burrow/logging" "github.com/stretchr/testify/assert" diff --git a/execution/evm/abi/event_spec.go b/execution/evm/abi/event_spec.go new file mode 100644 index 000000000..b111c0dd6 --- /dev/null +++ b/execution/evm/abi/event_spec.go @@ -0,0 +1,91 @@ +package abi + +import ( + "encoding/json" + + "github.com/hyperledger/burrow/crypto/sha3" + "github.com/tmthrgd/go-hex" +) + +// Argument is a decoded function parameter, return or event field +type Argument struct { + Name string + EVM EVMType + IsArray bool + Indexed bool + Hashed bool + ArrayLength uint64 +} + +type argumentJSON struct { + Name string + Type string + Components []argumentJSON + Indexed bool +} + +// EventIDSize is the length of the event selector +const EventIDSize = 32 + +type EventSpec struct { + EventID EventID + Inputs []Argument + Name string + Anonymous bool +} + +func (e *EventSpec) UnmarshalJSON(data []byte) error { + s := new(specJSON) + err := json.Unmarshal(data, s) + if err != nil { + return err + } + return e.unmarshalSpec(s) +} + +func (e *EventSpec) unmarshalSpec(s *specJSON) error { + inputs, err := readArgSpec(s.Inputs) + if err != nil { + return err + } + // Get signature before we deal with hashed types + sig := Signature(s.Name, inputs) + for i := range inputs { + if inputs[i].Indexed && inputs[i].EVM.Dynamic() { + // For Dynamic types, the hash is stored in stead + inputs[i].EVM = EVMBytes{M: 32} + inputs[i].Hashed = true + } + } + e.Name = s.Name + e.EventID = GetEventID(sig) + e.Inputs = inputs + e.Anonymous = s.Anonymous + return nil +} + +type EventID [EventIDSize]byte + +func GetEventID(signature string) (id EventID) { + hash := sha3.NewKeccak256() + hash.Write([]byte(signature)) + copy(id[:], hash.Sum(nil)) + return +} + +func (e *EventSpec) String() string { + str := e.Name + 
argsToSignature(e.Inputs, true) + if e.Anonymous { + str += " anonymous" + } + + return str +} + +func (id EventID) String() string { + return hex.EncodeUpperToString(id[:]) +} + +func (id EventID) Bytes() []byte { + return id[:] +} diff --git a/execution/evm/abi/function_spec.go b/execution/evm/abi/function_spec.go new file mode 100644 index 000000000..ccfca6f79 --- /dev/null +++ b/execution/evm/abi/function_spec.go @@ -0,0 +1,69 @@ +package abi + +import ( + "fmt" + + "github.com/hyperledger/burrow/crypto/sha3" +) + +// FunctionIDSize is the length of the function selector +const FunctionIDSize = 4 + +type FunctionSpec struct { + FunctionID FunctionID + Constant bool + Inputs []Argument + Outputs []Argument +} + +type FunctionID [FunctionIDSize]byte + +func GetFunctionID(signature string) (id FunctionID) { + hash := sha3.NewKeccak256() + hash.Write([]byte(signature)) + copy(id[:], hash.Sum(nil)[:4]) + return +} + +func Signature(name string, args []Argument) string { + return name + argsToSignature(args, false) +} + +func (f *FunctionSpec) String(name string) string { + return name + argsToSignature(f.Inputs, true) + + " returns " + argsToSignature(f.Outputs, true) +} + +func (f *FunctionSpec) SetFunctionID(functionName string) { + sig := Signature(functionName, f.Inputs) + f.FunctionID = GetFunctionID(sig) +} + +func (fs FunctionID) Bytes() []byte { + return fs[:] +} + +func argsToSignature(args []Argument, addIndexedName bool) (str string) { + str = "(" + for i, a := range args { + if i > 0 { + str += "," + } + str += a.EVM.GetSignature() + if addIndexedName && a.Indexed { + str += " indexed" + } + if a.IsArray { + if a.ArrayLength > 0 { + str += fmt.Sprintf("[%d]", a.ArrayLength) + } else { + str += "[]" + } + } + if addIndexedName && a.Name != "" { + str += " " + a.Name + } + } + str += ")" + return +} diff --git a/execution/evm/abi/packing.go b/execution/evm/abi/packing.go new file mode 100644 index 000000000..7e6a575d6 --- /dev/null +++ b/execution/evm/abi/packing.go @@ -0,0 +1,374 @@ +package abi + +import ( + "fmt" + "reflect" + "strings" + + "github.com/hyperledger/burrow/binary" +) + +func Pack(argSpec []Argument, args ...interface{}) ([]byte, error) { + getArg, err := argGetter(argSpec, args, false) + if err != nil { + return nil, err + } + return pack(argSpec, getArg) +} + +func Unpack(argSpec []Argument, data []byte, args ...interface{}) error { + getArg, err := argGetter(argSpec, args, true) + if err != nil { + return err + } + return unpack(argSpec, data, getArg) +} + +func PackEvent(eventSpec *EventSpec, args ...interface{}) ([]binary.Word256, []byte, error) { + getArg, err := argGetter(eventSpec.Inputs, args, false) + if err != nil { + return nil, nil, err + } + data, err := pack(eventSpec.Inputs, getArg) + if err != nil { + return nil, nil, err + } + topics, err := packTopics(eventSpec, getArg) + return topics, data, err +} + +func UnpackEvent(eventSpec *EventSpec, topics []binary.Word256, data []byte, args ...interface{}) error { + getArg, err := argGetter(eventSpec.Inputs, args, true) + if err != nil { + return err + } + err = unpack(eventSpec.Inputs, data, getArg) + if err != nil { + return err + } + return unpackTopics(eventSpec, topics, getArg) +} + +// UnpackRevert decodes the revert reason if a contract called revert. 
If no +// reason was given, message will be nil else it will point to the string +func UnpackRevert(data []byte) (message *string, err error) { + if len(data) > 0 { + var msg string + err = revertAbi.UnpackWithID(data, &msg) + message = &msg + } + return +} + +// revertAbi exists to decode reverts. Any contract function call fail using revert(), assert() or require(). +// If a function exits this way, the this hardcoded ABI will be used. +var revertAbi *Spec + +func init() { + var err error + revertAbi, err = ReadSpec([]byte(`[{"name":"Error","type":"function","outputs":[{"type":"string"}],"inputs":[{"type":"string"}]}]`)) + if err != nil { + panic(fmt.Sprintf("internal error: failed to build revert abi: %v", err)) + } +} + +func argGetter(argSpec []Argument, args []interface{}, ptr bool) (func(int) interface{}, error) { + if len(args) == 1 { + rv := reflect.ValueOf(args[0]) + if rv.Kind() == reflect.Ptr { + rv = rv.Elem() + } else if ptr { + return nil, fmt.Errorf("struct pointer required in order to set values, but got %v", rv.Kind()) + } + if rv.Kind() != reflect.Struct { + if len(args) == 1 { + // Treat s single arg + return func(i int) interface{} { return args[i] }, nil + } + return nil, fmt.Errorf("expected single argument to be struct but got %v", rv.Kind()) + } + fields := rv.NumField() + if fields != len(argSpec) { + return nil, fmt.Errorf("%d arguments in struct expected, %d received", len(argSpec), fields) + } + if ptr { + return func(i int) interface{} { + return rv.Field(i).Addr().Interface() + }, nil + } + return func(i int) interface{} { + return rv.Field(i).Interface() + }, nil + } + if len(args) == len(argSpec) { + return func(i int) interface{} { + return args[i] + }, nil + } + return nil, fmt.Errorf("%d arguments expected, %d received", len(argSpec), len(args)) +} + +func packTopics(eventSpec *EventSpec, getArg func(int) interface{}) ([]binary.Word256, error) { + topics := make([]binary.Word256, 0, 5) + if !eventSpec.Anonymous { + topics = append(topics, binary.Word256(eventSpec.EventID)) + } + for i, a := range eventSpec.Inputs { + if a.Indexed { + data, err := a.EVM.pack(getArg(i)) + if err != nil { + return nil, err + } + var topic binary.Word256 + copy(topic[:], data) + topics = append(topics, topic) + } + } + return topics, nil +} + +// Unpack event topics +func unpackTopics(eventSpec *EventSpec, topics []binary.Word256, getArg func(int) interface{}) error { + // First unpack the topic fields + topicIndex := 0 + if !eventSpec.Anonymous { + topicIndex++ + } + + for i, a := range eventSpec.Inputs { + if a.Indexed { + _, err := a.EVM.unpack(topics[topicIndex][:], 0, getArg(i)) + if err != nil { + return err + } + topicIndex++ + } + } + return nil +} + +func pack(argSpec []Argument, getArg func(int) interface{}) ([]byte, error) { + packed := make([]byte, 0) + var packedDynamic []byte + fixedSize := 0 + // Anything dynamic is stored after the "fixed" block. For the dynamic types, the fixed + // block contains byte offsets to the data. 
+	// block, so we can calculate the offsets
+	for _, as := range argSpec {
+		if as.Indexed {
+			continue
+		}
+		if as.IsArray {
+			if as.ArrayLength > 0 {
+				fixedSize += ElementSize * int(as.ArrayLength)
+			} else {
+				fixedSize += ElementSize
+			}
+		} else {
+			fixedSize += ElementSize
+		}
+	}
+
+	addArg := func(v interface{}, a Argument) error {
+		var b []byte
+		var err error
+		if a.EVM.Dynamic() {
+			offset := EVMUint{M: 256}
+			b, _ = offset.pack(fixedSize)
+			d, err := a.EVM.pack(v)
+			if err != nil {
+				return err
+			}
+			fixedSize += len(d)
+			packedDynamic = append(packedDynamic, d...)
+		} else {
+			b, err = a.EVM.pack(v)
+			if err != nil {
+				return err
+			}
+		}
+		packed = append(packed, b...)
+		return nil
+	}
+
+	for i, as := range argSpec {
+		if as.Indexed {
+			continue
+		}
+		a := getArg(i)
+		if as.IsArray {
+			s, ok := a.(string)
+			if ok && s[0:1] == "[" && s[len(s)-1:] == "]" {
+				a = strings.Split(s[1:len(s)-1], ",")
+			}
+
+			val := reflect.ValueOf(a)
+			if val.Kind() != reflect.Slice && val.Kind() != reflect.Array {
+				return nil, fmt.Errorf("argument %d should be array or slice, not %s", i, val.Kind().String())
+			}

+			if as.ArrayLength > 0 {
+				if as.ArrayLength != uint64(val.Len()) {
+					return nil, fmt.Errorf("argument %d should be array of %d, not %d", i, as.ArrayLength, val.Len())
+				}
+
+				for n := 0; n < val.Len(); n++ {
+					err := addArg(val.Index(n).Interface(), as)
+					if err != nil {
+						return nil, err
+					}
+				}
+			} else {
+				// dynamic array
+				offset := EVMUint{M: 256}
+				b, _ := offset.pack(fixedSize)
+				packed = append(packed, b...)
+				fixedSize += len(b)
+
+				// store length
+				b, _ = offset.pack(val.Len())
+				packedDynamic = append(packedDynamic, b...)
+				for n := 0; n < val.Len(); n++ {
+					d, err := as.EVM.pack(val.Index(n).Interface())
+					if err != nil {
+						return nil, err
+					}
+					packedDynamic = append(packedDynamic, d...)
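+					// element data accumulates in the dynamic tail; the head slot written above holds the offset to this length-prefixed block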
+ } + } + } else { + err := addArg(a, as) + if err != nil { + return nil, err + } + } + } + + return append(packed, packedDynamic...), nil +} + +func unpack(argSpec []Argument, data []byte, getArg func(int) interface{}) error { + offset := 0 + offType := EVMInt{M: 64} + + getPrimitive := func(e interface{}, a Argument) error { + if a.EVM.Dynamic() { + var o int64 + l, err := offType.unpack(data, offset, &o) + if err != nil { + return err + } + offset += l + _, err = a.EVM.unpack(data, int(o), e) + if err != nil { + return err + } + } else { + l, err := a.EVM.unpack(data, offset, e) + if err != nil { + return err + } + offset += l + } + + return nil + } + + for i, as := range argSpec { + if as.Indexed { + continue + } + + arg := getArg(i) + if as.IsArray { + var array *[]interface{} + + array, ok := arg.(*[]interface{}) + if !ok { + if _, ok := arg.(*string); ok { + // We have been asked to return the value as a string; make intermediate + // array of strings; we will concatenate after + intermediate := make([]interface{}, as.ArrayLength) + for i := range intermediate { + intermediate[i] = new(string) + } + array = &intermediate + } else { + return fmt.Errorf("argument %d should be array, slice or string", i) + } + } + + if as.ArrayLength > 0 { + if int(as.ArrayLength) != len(*array) { + return fmt.Errorf("argument %d should be array or slice of %d elements", i, as.ArrayLength) + } + + for n := 0; n < len(*array); n++ { + err := getPrimitive((*array)[n], as) + if err != nil { + return err + } + } + } else { + var o int64 + var length int64 + + l, err := offType.unpack(data, offset, &o) + if err != nil { + return err + } + + offset += l + s, err := offType.unpack(data, int(o), &length) + if err != nil { + return err + } + o += int64(s) + + intermediate := make([]interface{}, length) + + if _, ok := arg.(*string); ok { + // We have been asked to return the value as a string; make intermediate + // array of strings; we will concatenate after + for i := range intermediate { + intermediate[i] = new(string) + } + } else { + for i := range intermediate { + intermediate[i] = as.EVM.getGoType() + } + } + + for i := 0; i < int(length); i++ { + l, err = as.EVM.unpack(data, int(o), intermediate[i]) + if err != nil { + return err + } + o += int64(l) + } + + array = &intermediate + } + + // If we were supposed to return a string, convert it back + if ret, ok := arg.(*string); ok { + s := "[" + for i, e := range *array { + if i > 0 { + s += "," + } + s += *(e.(*string)) + } + s += "]" + *ret = s + } + } else { + err := getPrimitive(arg, as) + if err != nil { + return err + } + } + } + + return nil +} diff --git a/execution/evm/abi/packing_test.go b/execution/evm/abi/packing_test.go new file mode 100644 index 000000000..c7bf4d6ec --- /dev/null +++ b/execution/evm/abi/packing_test.go @@ -0,0 +1,99 @@ +package abi + +import ( + "encoding/json" + "math/big" + "reflect" + "testing" + + "github.com/hyperledger/burrow/execution/solidity" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPackEvent(t *testing.T) { + t.Run("simple event", func(t *testing.T) { + eventAbi := `{"anonymous":false,"name":"TestEvent","type":"event","inputs":[{"indexed":true,"name":"direction","type":"bytes32"},{"indexed":false,"name":"trueism","type":"bool"},{"indexed":true,"name":"newDepth","type":"int128"},{"indexed":true,"name":"hash","type":"string"}]}` + + type args struct { + Direction string + Trueism bool + NewDepth int64 + Hash string + } + in := &args{ + Direction: "foo", + Trueism: 
true, + NewDepth: 232, + Hash: "DEADBEEFCAFEBADE01234567DEADBEEF", + } + eventSpec := new(EventSpec) + + err := json.Unmarshal([]byte(eventAbi), eventSpec) + require.NoError(t, err) + + topics, data, err := PackEvent(eventSpec, in) + require.NoError(t, err) + + out := new(args) + err = UnpackEvent(eventSpec, topics, data, out) + require.NoError(t, err) + assert.Equal(t, in, out) + }) + + t.Run("EventEmitter", func(t *testing.T) { + type args struct { + Direction []byte + Trueism bool + German string + NewDepth *big.Int + Bignum int8 + Hash string + } + spec, err := ReadSpec(solidity.Abi_EventEmitter) + require.NoError(t, err) + + eventSpec := spec.EventsByName["ManyTypes"] + + dir := make([]byte, 32) + copy(dir, "frogs") + bignum := big.NewInt(1000) + in := args{ + Direction: dir, + Trueism: false, + German: "foo", + NewDepth: bignum, + Bignum: 100, + Hash: "ba", + } + topics, data, err := PackEvent(&eventSpec, in) + require.NoError(t, err) + + out := new(args) + err = UnpackEvent(&eventSpec, topics, data, out) + require.NoError(t, err) + }) + +} + +func splatPtr(v interface{}) []interface{} { + rv := reflect.ValueOf(v).Elem() + + vals := make([]interface{}, rv.NumField()) + for i := 0; i < rv.NumField(); i++ { + vals[i] = rv.Field(i).Addr().Interface() + } + + return vals +} + +func splat(v interface{}) []interface{} { + rv := reflect.ValueOf(v).Elem() + + vals := make([]interface{}, rv.NumField()) + for i := 0; i < rv.NumField(); i++ { + vals[i] = rv.Field(i).Interface() + } + + return vals +} diff --git a/execution/evm/abi/primitives.go b/execution/evm/abi/primitives.go index 5051a92e0..5f89715c6 100644 --- a/execution/evm/abi/primitives.go +++ b/execution/evm/abi/primitives.go @@ -29,6 +29,10 @@ var _ EVMType = (*EVMBool)(nil) type EVMBool struct { } +func (e EVMBool) String() string { + return "EVMBool" +} + func (e EVMBool) GetSignature() string { return "bool" } @@ -63,7 +67,7 @@ func (e EVMBool) pack(v interface{}) ([]byte, error) { func (e EVMBool) unpack(data []byte, offset int, v interface{}) (int, error) { if len(data)-offset < 32 { - return 0, fmt.Errorf("not enough data") + return 0, fmt.Errorf("%v: not enough data", e) } data = data[offset:] switch v := v.(type) { @@ -191,7 +195,7 @@ func (e EVMUint) pack(v interface{}) ([]byte, error) { func (e EVMUint) unpack(data []byte, offset int, v interface{}) (int, error) { if len(data)-offset < ElementSize { - return 0, fmt.Errorf("not enough data") + return 0, fmt.Errorf("%v: not enough data", e) } data = data[offset:] @@ -271,12 +275,20 @@ func (e EVMUint) Dynamic() bool { return false } +func (e EVMUint) String() string { + return fmt.Sprintf("EVMUInt{%v}", e.M) +} + var _ EVMType = (*EVMInt)(nil) type EVMInt struct { M uint64 } +func (e EVMInt) String() string { + return fmt.Sprintf("EVMInt{%v}", e.M) +} + func (e EVMInt) getGoType() interface{} { switch e.M { case 8: @@ -304,39 +316,40 @@ func (e EVMInt) GetSignature() string { func (e EVMInt) pack(v interface{}) ([]byte, error) { n := new(big.Int) - arg := reflect.ValueOf(v) - switch arg.Kind() { - case reflect.String: - _, ok := n.SetString(arg.String(), 0) + switch arg := v.(type) { + case *big.Int: + n.Set(arg) + case string: + _, ok := n.SetString(arg, 0) if !ok { - return nil, fmt.Errorf("Failed to parse `%s", arg.String()) - } - case reflect.Uint8: - fallthrough - case reflect.Uint16: - fallthrough - case reflect.Uint32: - fallthrough - case reflect.Uint64: - fallthrough - case reflect.Uint: - n.SetUint64(arg.Uint()) - case reflect.Int8: - fallthrough - case 
reflect.Int16: - fallthrough - case reflect.Int32: - fallthrough - case reflect.Int64: - fallthrough - case reflect.Int: - n.SetInt64(arg.Int()) + return nil, fmt.Errorf("failed to parse `%s", arg) + } + case uint: + n.SetUint64(uint64(arg)) + case uint8: + n.SetUint64(uint64(arg)) + case uint16: + n.SetUint64(uint64(arg)) + case uint32: + n.SetUint64(uint64(arg)) + case uint64: + n.SetUint64(arg) + case int: + n.SetInt64(int64(arg)) + case int8: + n.SetInt64(int64(arg)) + case int16: + n.SetInt64(int64(arg)) + case int32: + n.SetInt64(int64(arg)) + case int64: + n.SetInt64(arg) default: t := reflect.TypeOf(new(int64)) if reflect.TypeOf(v).ConvertibleTo(t) { n.SetInt64(reflect.ValueOf(v).Convert(t).Int()) } else { - return nil, fmt.Errorf("cannot convert type %s to int%d", arg.Kind().String(), e.M) + return nil, fmt.Errorf("cannot convert type %v to int%d", v, e.M) } } @@ -362,7 +375,7 @@ func (e EVMInt) pack(v interface{}) ([]byte, error) { func (e EVMInt) unpack(data []byte, offset int, v interface{}) (int, error) { if len(data)-offset < ElementSize { - return 0, fmt.Errorf("not enough data") + return 0, fmt.Errorf("%v: not enough data", e) } data = data[offset:] @@ -384,9 +397,15 @@ func (e EVMInt) unpack(data []byte, offset int, v interface{}) (int, error) { inv[i] = data[i] } } - toType := reflect.ValueOf(v).Kind().String() switch v := v.(type) { + case **big.Int: + b := new(big.Int).SetBytes(inv[empty:ElementSize]) + if sign { + *v = b.Sub(big.NewInt(-1), b) + } else { + *v = b + } case *string: b := new(big.Int) b.SetBytes(inv[empty:ElementSize]) @@ -405,51 +424,64 @@ func (e EVMInt) unpack(data []byte, offset int, v interface{}) (int, error) { } case *uint64: if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + return 0, fmt.Errorf("cannot convert negative EVM int to %T", *v) } maxLen := int(unsafe.Sizeof(*v)) if length > maxLen { - return 0, fmt.Errorf("value to large for uint64") + return 0, fmt.Errorf("value to large for %T", *v) } *v = binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize]) case *uint32: if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + return 0, fmt.Errorf("cannot convert negative EVM int to %T", *v) } maxLen := int(unsafe.Sizeof(*v)) if length > maxLen { - return 0, fmt.Errorf("value to large for int32") + return 0, fmt.Errorf("value to large for %T", *v) } *v = binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize]) case *uint16: if sign { - return 0, fmt.Errorf("cannot convert negative EVM int to %s", toType) + return 0, fmt.Errorf("cannot convert negative EVM int to %T", *v) } maxLen := int(unsafe.Sizeof(*v)) if length > maxLen { - return 0, fmt.Errorf("value to large for uint16") + return 0, fmt.Errorf("value to large for %T", *v) } *v = binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize]) + case *uint8: + if sign { + return 0, fmt.Errorf("cannot convert negative EVM int to %T", *v) + } + if length > 1 { + return 0, fmt.Errorf("value to large for %T", *v) + } + *v = data[ElementSize-1] case *int64: maxLen := int(unsafe.Sizeof(*v)) if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for int64") + return 0, fmt.Errorf("value to large for %T", *v) } *v = int64(binary.BigEndian.Uint64(data[ElementSize-maxLen : ElementSize])) case *int32: maxLen := int(unsafe.Sizeof(*v)) if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for uint64") + return 0, fmt.Errorf("value to large for 
%T", *v) } *v = int32(binary.BigEndian.Uint32(data[ElementSize-maxLen : ElementSize])) case *int16: maxLen := int(unsafe.Sizeof(*v)) if length > maxLen || (inv[ElementSize-maxLen]&0x80) != 0 { - return 0, fmt.Errorf("value to large for uint16") + return 0, fmt.Errorf("value to large for %T", *v) } *v = int16(binary.BigEndian.Uint16(data[ElementSize-maxLen : ElementSize])) + case *int8: + if length > 1 || (inv[ElementSize-1]&0x80) != 0 { + return 0, fmt.Errorf("value to large for %T", *v) + } + *v = int8(data[ElementSize-1]) default: - return 0, fmt.Errorf("unable to convert %s to %s", e.GetSignature(), toType) + return 0, fmt.Errorf("unable to convert %s to %T", e.GetSignature(), v) } return ElementSize, nil @@ -469,6 +501,10 @@ var _ EVMType = (*EVMAddress)(nil) type EVMAddress struct { } +func (e EVMAddress) String() string { + return "EVMAddress" +} + func (e EVMAddress) getGoType() interface{} { return new(crypto.Address) } @@ -536,6 +572,13 @@ type EVMBytes struct { M uint64 } +func (e EVMBytes) String() string { + if e.M == 0 { + return "EVMBytes" + } + return fmt.Sprintf("EVMBytes[%v]", e.M) +} + func (e EVMBytes) getGoType() interface{} { v := make([]byte, e.M) return &v @@ -548,7 +591,7 @@ func (e EVMBytes) pack(v interface{}) ([]byte, error) { if ok { b = []byte(s) } else { - return nil, fmt.Errorf("cannot map to %s to EVM bytes", reflect.ValueOf(v).Kind().String()) + return nil, fmt.Errorf("cannot map from %s to EVM bytes", reflect.ValueOf(v).Kind().String()) } } @@ -600,7 +643,7 @@ func (e EVMBytes) unpack(data []byte, offset int, v interface{}) (int, error) { case reflect.Slice: v2.SetBytes(data[offset : offset+int(e.M)]) default: - return 0, fmt.Errorf("cannot map EVM %s to %s", e.GetSignature(), reflect.ValueOf(v).Kind().String()) + return 0, fmt.Errorf("cannot map EVM %s to %v", e.GetSignature(), reflect.ValueOf(v).Kind()) } return ElementSize, nil @@ -627,6 +670,10 @@ var _ EVMType = (*EVMString)(nil) type EVMString struct { } +func (e EVMString) String() string { + return "EVMString" +} + func (e EVMString) GetSignature() string { return "string" } @@ -643,18 +690,18 @@ func (e EVMString) pack(v interface{}) ([]byte, error) { func (e EVMString) unpack(data []byte, offset int, v interface{}) (int, error) { lenType := EVMInt{M: 64} - var len int64 - l, err := lenType.unpack(data, offset, &len) + var length int64 + l, err := lenType.unpack(data, offset, &length) if err != nil { - return 0, err + return 0, fmt.Errorf("could not unpack string length prefix: %v", err) } offset += l switch v := v.(type) { case *string: - *v = string(data[offset : offset+int(len)]) + *v = string(data[offset : offset+int(length)]) case *[]byte: - *v = data[offset : offset+int(len)] + *v = data[offset : offset+int(length)] default: return 0, fmt.Errorf("cannot map EVM string to %s", reflect.ValueOf(v).Kind().String()) } @@ -709,3 +756,17 @@ func (e EVMFixed) Dynamic() bool { func (e EVMFixed) ImplicitCast(o EVMType) bool { return false } + +// quick helper padding +func pad(input []byte, size int, left bool) []byte { + if len(input) >= size { + return input[:size] + } + padded := make([]byte, size) + if left { + copy(padded[size-len(input):], input) + } else { + copy(padded, input) + } + return padded +} diff --git a/execution/evm/abi/primitives_test.go b/execution/evm/abi/primitives_test.go new file mode 100644 index 000000000..a52c4ee9e --- /dev/null +++ b/execution/evm/abi/primitives_test.go @@ -0,0 +1,22 @@ +package abi + +import ( + "math/big" + "testing" + + 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEVMInt(t *testing.T) { + t.Run("pack big.Int", func(t *testing.T) { + e := EVMInt{256} + b := big.NewInt(-23423) + data, err := e.pack(b) + require.NoError(t, err) + bOut := new(big.Int) + _, err = e.unpack(data, 0, &bOut) + require.NoError(t, err) + assert.Equal(t, bOut, b) + }) +} diff --git a/execution/evm/abi/spec.go b/execution/evm/abi/spec.go new file mode 100644 index 000000000..ddb49a6c6 --- /dev/null +++ b/execution/evm/abi/spec.go @@ -0,0 +1,309 @@ +package abi + +import ( + "encoding/json" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/hyperledger/burrow/crypto" +) + +// Spec is the ABI for contract decoded. +type Spec struct { + Constructor FunctionSpec + Fallback FunctionSpec + Functions map[string]FunctionSpec + EventsByName map[string]EventSpec + EventsByID map[EventID]EventSpec +} + +type specJSON struct { + Name string + Type string + Inputs []argumentJSON + Outputs []argumentJSON + Constant bool + Payable bool + StateMutability string + Anonymous bool +} + +// ReadSpec takes an ABI and decodes it for futher use +func ReadSpec(specBytes []byte) (*Spec, error) { + var specJ []specJSON + err := json.Unmarshal(specBytes, &specJ) + if err != nil { + // The abi spec file might a bin file, with the Abi under the Abi field in json + var binFile struct { + Abi []specJSON + } + err = json.Unmarshal(specBytes, &binFile) + if err != nil { + return nil, err + } + specJ = binFile.Abi + } + + abiSpec := Spec{ + EventsByName: make(map[string]EventSpec), + EventsByID: make(map[EventID]EventSpec), + Functions: make(map[string]FunctionSpec), + } + + for _, s := range specJ { + switch s.Type { + case "constructor": + abiSpec.Constructor.Inputs, err = readArgSpec(s.Inputs) + if err != nil { + return nil, err + } + case "fallback": + abiSpec.Fallback.Inputs = make([]Argument, 0) + abiSpec.Fallback.Outputs = make([]Argument, 0) + case "event": + var ev EventSpec + err = ev.unmarshalSpec(&s) + if err != nil { + return nil, err + } + abiSpec.EventsByName[ev.Name] = ev + abiSpec.EventsByID[ev.EventID] = ev + case "function": + inputs, err := readArgSpec(s.Inputs) + if err != nil { + return nil, err + } + outputs, err := readArgSpec(s.Outputs) + if err != nil { + return nil, err + } + fs := FunctionSpec{Inputs: inputs, Outputs: outputs, Constant: s.Constant} + fs.SetFunctionID(s.Name) + abiSpec.Functions[s.Name] = fs + } + } + + return &abiSpec, nil +} + +// MergeSpec takes multiple Specs and merges them into once structure. Note that +// the same function name or event name can occur in different abis, so there might be +// some information loss. 
+func MergeSpec(abiSpec []*Spec) *Spec {
+	newSpec := Spec{
+		EventsByName: make(map[string]EventSpec),
+		EventsByID:   make(map[EventID]EventSpec),
+		Functions:    make(map[string]FunctionSpec),
+	}
+
+	for _, s := range abiSpec {
+		for n, f := range s.Functions {
+			newSpec.Functions[n] = f
+		}
+
+		// Different ABIs can have the same Event name, but with a different signature
+		// Loop over the signatures, as these are less likely to have collisions
+		for _, e := range s.EventsByID {
+			newSpec.EventsByName[e.Name] = e
+			newSpec.EventsByID[e.EventID] = e
+		}
+	}
+
+	return &newSpec
+}
+
+func (spec *Spec) GetEventAbi(id EventID, addresses crypto.Address) (*EventSpec, error) {
+	eventSpec, ok := spec.EventsByID[id]
+	if !ok {
+		return nil, fmt.Errorf("could not find ABI for event with ID %v", id)
+	}
+	return &eventSpec, nil
+}
+
+// Pack ABI encodes a function call. The fname specifies which function should be called; if
+// it doesn't exist the fallback function will be called. If fname is the empty
+// string, the constructor is called. The arguments must be specified in args. The count
+// must match the function being called.
+// Returns the ABI encoded function call and whether the function is constant according
+// to the ABI (which means it does not modify contract state)
+func (spec *Spec) Pack(fname string, args ...interface{}) ([]byte, *FunctionSpec, error) {
+	var funcSpec FunctionSpec
+	var argSpec []Argument
+	if fname != "" {
+		if _, ok := spec.Functions[fname]; ok {
+			funcSpec = spec.Functions[fname]
+		} else {
+			return nil, nil, fmt.Errorf("Unknown function %s", fname)
+		}
+	} else {
+		if spec.Constructor.Inputs != nil {
+			funcSpec = spec.Constructor
+		} else {
+			return nil, nil, fmt.Errorf("Contract does not have a constructor")
+		}
+	}
+
+	argSpec = funcSpec.Inputs
+
+	packed := make([]byte, 0)
+
+	if fname != "" {
+		packed = funcSpec.FunctionID[:]
+	}
+
+	packedArgs, err := Pack(argSpec, args...)
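+	// the encoded arguments follow the 4-byte function selector (a constructor call has no selector prefix)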
+ if err != nil { + return nil, nil, err + } + + return append(packed, packedArgs...), &funcSpec, nil +} + +// Unpack decodes the return values from a function call +func (spec *Spec) Unpack(data []byte, fname string, args ...interface{}) error { + var funcSpec FunctionSpec + var argSpec []Argument + if fname != "" { + if _, ok := spec.Functions[fname]; ok { + funcSpec = spec.Functions[fname] + } else { + funcSpec = spec.Fallback + } + } else { + funcSpec = spec.Constructor + } + + argSpec = funcSpec.Outputs + + if argSpec == nil { + return fmt.Errorf("Unknown function %s", fname) + } + + return unpack(argSpec, data, func(i int) interface{} { + return args[i] + }) +} + +func (spec *Spec) UnpackWithID(data []byte, args ...interface{}) error { + var argSpec []Argument + + var id FunctionID + copy(id[:], data) + for _, fspec := range spec.Functions { + if id == fspec.FunctionID { + argSpec = fspec.Outputs + } + } + + if argSpec == nil { + return fmt.Errorf("Unknown function %x", id) + } + + return unpack(argSpec, data[4:], func(i int) interface{} { + return args[i] + }) +} + +func readArgSpec(argsJ []argumentJSON) ([]Argument, error) { + args := make([]Argument, len(argsJ)) + var err error + + for i, a := range argsJ { + args[i].Name = a.Name + args[i].Indexed = a.Indexed + + baseType := a.Type + isArray := regexp.MustCompile(`(.*)\[([0-9]+)\]`) + m := isArray.FindStringSubmatch(a.Type) + if m != nil { + args[i].IsArray = true + args[i].ArrayLength, err = strconv.ParseUint(m[2], 10, 32) + if err != nil { + return nil, err + } + baseType = m[1] + } else if strings.HasSuffix(a.Type, "[]") { + args[i].IsArray = true + baseType = strings.TrimSuffix(a.Type, "[]") + } + + isM := regexp.MustCompile("(bytes|uint|int)([0-9]+)") + m = isM.FindStringSubmatch(baseType) + if m != nil { + M, err := strconv.ParseUint(m[2], 10, 32) + if err != nil { + return nil, err + } + switch m[1] { + case "bytes": + if M < 1 || M > 32 { + return nil, fmt.Errorf("bytes%d is not valid type", M) + } + args[i].EVM = EVMBytes{M} + case "uint": + if M < 8 || M > 256 || (M%8) != 0 { + return nil, fmt.Errorf("uint%d is not valid type", M) + } + args[i].EVM = EVMUint{M} + case "int": + if M < 8 || M > 256 || (M%8) != 0 { + return nil, fmt.Errorf("uint%d is not valid type", M) + } + args[i].EVM = EVMInt{M} + } + continue + } + + isMxN := regexp.MustCompile("(fixed|ufixed)([0-9]+)x([0-9]+)") + m = isMxN.FindStringSubmatch(baseType) + if m != nil { + M, err := strconv.ParseUint(m[2], 10, 32) + if err != nil { + return nil, err + } + N, err := strconv.ParseUint(m[3], 10, 32) + if err != nil { + return nil, err + } + if M < 8 || M > 256 || (M%8) != 0 { + return nil, fmt.Errorf("%s is not valid type", baseType) + } + if N == 0 || N > 80 { + return nil, fmt.Errorf("%s is not valid type", baseType) + } + if m[1] == "fixed" { + args[i].EVM = EVMFixed{N: N, M: M, signed: true} + } else if m[1] == "ufixed" { + args[i].EVM = EVMFixed{N: N, M: M, signed: false} + } else { + panic(m[1]) + } + continue + } + switch baseType { + case "uint": + args[i].EVM = EVMUint{M: 256} + case "int": + args[i].EVM = EVMInt{M: 256} + case "address": + args[i].EVM = EVMAddress{} + case "bool": + args[i].EVM = EVMBool{} + case "fixed": + args[i].EVM = EVMFixed{M: 128, N: 8, signed: true} + case "ufixed": + args[i].EVM = EVMFixed{M: 128, N: 8, signed: false} + case "bytes": + args[i].EVM = EVMBytes{M: 0} + case "string": + args[i].EVM = EVMString{} + default: + // Assume it is a type of Contract + args[i].EVM = EVMAddress{} + } + } + + return args, nil +} diff 
--git a/execution/evm/snative.go b/execution/evm/snative.go index 94b283adf..04bd9c8d6 100644 --- a/execution/evm/snative.go +++ b/execution/evm/snative.go @@ -242,7 +242,7 @@ func (contract *SNativeContractDescription) Dispatch(st Interface, caller crypto } nativeArgs := reflect.New(function.Arguments).Interface() - err = abi.UnpackIntoStruct(function.Abi.Inputs, remainingArgs, nativeArgs) + err = abi.Unpack(function.Abi.Inputs, remainingArgs, nativeArgs) if err != nil { return nil, err } @@ -256,7 +256,7 @@ func (contract *SNativeContractDescription) Dispatch(st Interface, caller crypto return nil, fmt.Errorf("state error in %v: %v", function, err) } - return abi.PackIntoStruct(function.Abi.Outputs, nativeRets) + return abi.Pack(function.Abi.Outputs, nativeRets) } // We define the address of an SNative contact as the last 20 bytes of the sha3 diff --git a/execution/evm/snative_test.go b/execution/evm/snative_test.go index a42ffa231..afcb317f0 100644 --- a/execution/evm/snative_test.go +++ b/execution/evm/snative_test.go @@ -98,7 +98,7 @@ func TestSNativeContractDescription_Dispatch(t *testing.T) { require.NoError(t, cache.Error()) retValue, err := contract.Dispatch(cache, caller.Address, bc.MustSplice(funcID[:], grantee.Address.Word256(), permFlagToWord256(permission.CreateAccount)), &gas, logger) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, retValue, LeftPadBytes([]byte{1}, 32)) } From 69ecf4d4e3bee0ee650263b99b1f0685ca31d538 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Mon, 12 Aug 2019 15:40:27 +0100 Subject: [PATCH 67/70] Pointlessly mess with protobuf Signed-off-by: Silas Davis --- acm/acm.pb.go | 69 ++++--- acm/balance/balance.pb.go | 15 +- acm/validator/validator.pb.go | 13 +- bcm/bcm.pb.go | 65 +++---- consensus/tendermint/tendermint.pb.go | 13 +- crypto/crypto.pb.go | 21 +- dump/dump.pb.go | 71 ++++--- encoding/encoding.pb.go | 15 +- execution/errors/errors.pb.go | 15 +- execution/exec/exec.pb.go | 263 +++++++++++++------------- execution/names/names.pb.go | 21 +- genesis/spec/spec.pb.go | 33 ++-- keys/keys.pb.go | 53 +++++- permission/permission.pb.go | 27 ++- rpc/rpc.pb.go | 13 +- rpc/rpcdump/rpcdump.pb.go | 23 ++- rpc/rpcevents/rpcevents.pb.go | 55 +++--- rpc/rpcquery/rpcquery.pb.go | 59 +++++- rpc/rpctransact/rpctransact.pb.go | 90 ++++++--- storage/storage.pb.go | 15 +- txs/payload/payload.pb.go | 189 +++++++++--------- txs/txs.pb.go | 51 +++-- 22 files changed, 617 insertions(+), 572 deletions(-) diff --git a/acm/acm.pb.go b/acm/acm.pb.go index 5084917ee..152ecb592 100644 --- a/acm/acm.pb.go +++ b/acm/acm.pb.go @@ -5,6 +5,10 @@ package acm import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -12,8 +16,6 @@ import ( crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" permission "github.com/hyperledger/burrow/permission" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -225,17 +227,17 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintAcm(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Address.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintAcm(dAtA, i, uint64(m.PublicKey.Size())) - n2, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 if m.Sequence != 0 { @@ -251,33 +253,33 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintAcm(dAtA, i, uint64(m.EVMCode.Size())) - n3, err := m.EVMCode.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.EVMCode.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 dAtA[i] = 0x32 i++ i = encodeVarintAcm(dAtA, i, uint64(m.Permissions.Size())) - n4, err := m.Permissions.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Permissions.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 dAtA[i] = 0x3a i++ i = encodeVarintAcm(dAtA, i, uint64(m.WASMCode.Size())) - n5, err := m.WASMCode.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.WASMCode.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 dAtA[i] = 0x42 i++ i = encodeVarintAcm(dAtA, i, uint64(m.CodeHash.Size())) - n6, err := m.CodeHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.CodeHash.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 if len(m.ContractMeta) > 0 { @@ -296,9 +298,9 @@ func (m *Account) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x52 i++ i = encodeVarintAcm(dAtA, i, uint64(m.Forebear.Size())) - n7, err := m.Forebear.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.Forebear.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -326,17 +328,17 @@ func (m *ContractMeta) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintAcm(dAtA, i, uint64(m.CodeHash.Size())) - n8, err := m.CodeHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.CodeHash.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 dAtA[i] = 0x12 i++ i = encodeVarintAcm(dAtA, i, uint64(m.MetadataHash.Size())) - n9, err := m.MetadataHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.MetadataHash.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 if len(m.Metadata) > 0 { @@ -421,14 +423,7 @@ func (m *ContractMeta) Size() (n int) { } func sovAcm(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozAcm(x uint64) (n int) { return sovAcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/balance/balance.pb.go b/acm/balance/balance.pb.go index 30fe3253a..163d4dfa2 100644 --- a/acm/balance/balance.pb.go +++ b/acm/balance/balance.pb.go @@ -5,11 +5,13 @@ package balance import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -159,14 +161,7 @@ func (m *Balance) Size() (n int) { } func sovBalance(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozBalance(x uint64) (n int) { return sovBalance(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/acm/validator/validator.pb.go b/acm/validator/validator.pb.go index 4ccac95a6..2caeb7c8a 100644 --- a/acm/validator/validator.pb.go +++ b/acm/validator/validator.pb.go @@ -5,12 +5,14 @@ package validator import ( fmt "fmt" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -123,14 +125,7 @@ func (m *Validator) Size() (n int) { } func sovValidator(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozValidator(x uint64) (n int) { return sovValidator(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/bcm/bcm.pb.go b/bcm/bcm.pb.go index 9964a9c63..4afb005b5 100644 --- a/bcm/bcm.pb.go +++ b/bcm/bcm.pb.go @@ -5,6 +5,11 @@ package bcm import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + time "time" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -12,9 +17,6 @@ import ( _ "github.com/golang/protobuf/ptypes/duration" _ "github.com/golang/protobuf/ptypes/timestamp" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" - io "io" - math "math" - time "time" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -227,41 +229,41 @@ func (m *SyncInfo) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestBlockHash.Size())) - n1, err := m.LatestBlockHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.LatestBlockHash.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x1a i++ i = encodeVarintBcm(dAtA, i, uint64(m.LatestAppHash.Size())) - n2, err := m.LatestAppHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.LatestAppHash.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 dAtA[i] = 0x22 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockTime))) - n3, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockTime, dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 dAtA[i] = 0x2a i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LatestBlockSeenTime))) - n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LatestBlockSeenTime, dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 dAtA[i] = 0x32 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.LatestBlockDuration))) - n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.LatestBlockDuration, dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.XXX_unrecognized != nil { @@ -288,17 +290,17 @@ func (m *PersistedState) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintBcm(dAtA, i, uint64(m.AppHashAfterLastBlock.Size())) - n6, err := m.AppHashAfterLastBlock.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.AppHashAfterLastBlock.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 dAtA[i] = 0x12 i++ i = encodeVarintBcm(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.LastBlockTime))) - n7, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastBlockTime, dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastBlockTime, dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 if m.LastBlockHeight != 0 { @@ -309,9 +311,9 @@ func (m *PersistedState) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintBcm(dAtA, i, uint64(m.GenesisHash.Size())) - n8, err := m.GenesisHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.GenesisHash.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 if m.XXX_unrecognized != nil { @@ -376,14 +378,7 @@ func (m *PersistedState) Size() (n int) { } func sovBcm(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozBcm(x uint64) (n int) { return sovBcm(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/consensus/tendermint/tendermint.pb.go b/consensus/tendermint/tendermint.pb.go index e69ccf726..7031992bb 100644 --- a/consensus/tendermint/tendermint.pb.go +++ b/consensus/tendermint/tendermint.pb.go @@ -5,12 +5,14 @@ package tendermint import ( fmt "fmt" + math "math" + math_bits 
"math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -182,14 +184,7 @@ func (m *NodeInfo) Size() (n int) { } func sovTendermint(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozTendermint(x uint64) (n int) { return sovTendermint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/crypto/crypto.pb.go b/crypto/crypto.pb.go index aa7490042..161cb490d 100644 --- a/crypto/crypto.pb.go +++ b/crypto/crypto.pb.go @@ -5,12 +5,14 @@ package crypto import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -224,9 +226,9 @@ func (m *PublicKey) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintCrypto(dAtA, i, uint64(m.PublicKey.Size())) - n1, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.PublicKey.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if m.XXX_unrecognized != nil { @@ -374,14 +376,7 @@ func (m *Signature) Size() (n int) { } func sovCrypto(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozCrypto(x uint64) (n int) { return sovCrypto(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/dump/dump.pb.go b/dump/dump.pb.go index ad3cfb437..ef4d88c28 100644 --- a/dump/dump.pb.go +++ b/dump/dump.pb.go @@ -5,6 +5,11 @@ package dump import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + time "time" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -15,9 +20,6 @@ import ( github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" exec "github.com/hyperledger/burrow/execution/exec" names "github.com/hyperledger/burrow/execution/names" - io "io" - math "math" - time "time" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -342,17 +344,17 @@ func (m *Storage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Key.Size())) - n1, err := m.Key.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Key.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Value.Size())) - n2, err := m.Value.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Value.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 if m.XXX_unrecognized != nil { @@ -379,9 +381,9 @@ func (m *AccountStorage) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintDump(dAtA, i, uint64(m.Address.Size())) - n3, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Address.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 if len(m.Storage) > 0 { @@ -426,18 +428,18 @@ func (m *EVMEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n4, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 if m.Event != nil { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.Event.Size())) - n5, err := m.Event.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.Event.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -476,9 +478,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintDump(dAtA, i, uint64(m.Account.Size())) - n6, err := m.Account.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.Account.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -486,9 +488,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintDump(dAtA, i, uint64(m.AccountStorage.Size())) - n7, err := m.AccountStorage.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.AccountStorage.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -496,9 +498,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintDump(dAtA, i, uint64(m.EVMEvent.Size())) - n8, err := m.EVMEvent.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.EVMEvent.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -506,9 +508,9 @@ func (m *Dump) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintDump(dAtA, i, uint64(m.Name.Size())) - n9, err := m.Name.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.Name.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -620,14 +622,7 @@ func (m *Dump) Size() (n int) { } func sovDump(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozDump(x uint64) (n int) { return sovDump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/encoding/encoding.pb.go b/encoding/encoding.pb.go index 19b1f18ab..11209f200 100644 --- a/encoding/encoding.pb.go +++ b/encoding/encoding.pb.go @@ -5,11 +5,13 @@ package encoding import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto 
"github.com/golang/protobuf/proto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -161,14 +163,7 @@ func (m *TestMessage) Size() (n int) { } func sovEncoding(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozEncoding(x uint64) (n int) { return sovEncoding(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/errors/errors.pb.go b/execution/errors/errors.pb.go index 5eafb4e87..f88cc6abe 100644 --- a/execution/errors/errors.pb.go +++ b/execution/errors/errors.pb.go @@ -5,11 +5,13 @@ package errors import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -163,14 +165,7 @@ func (m *Exception) Size() (n int) { } func sovErrors(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozErrors(x uint64) (n int) { return sovErrors(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/exec/exec.pb.go b/execution/exec/exec.pb.go index c0e6e4d92..30c9a7708 100644 --- a/execution/exec/exec.pb.go +++ b/execution/exec/exec.pb.go @@ -5,6 +5,11 @@ package exec import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + time "time" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -20,9 +25,6 @@ import ( txs "github.com/hyperledger/burrow/txs" github_com_hyperledger_burrow_txs_payload "github.com/hyperledger/burrow/txs/payload" types "github.com/tendermint/tendermint/abci/types" - io "io" - math "math" - time "time" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -1464,9 +1466,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginBlock.Size())) - n1, err := m.BeginBlock.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.BeginBlock.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -1474,9 +1476,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.BeginTx.Size())) - n2, err := m.BeginTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.BeginTx.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1484,9 +1486,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n3, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1494,9 +1496,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Event.Size())) - n4, err := m.Event.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Event.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1504,9 +1506,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.EndTx.Size())) - n5, err := m.EndTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.EndTx.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -1514,9 +1516,9 @@ func (m *StreamEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.EndBlock.Size())) - n6, err := m.EndBlock.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.EndBlock.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -1550,9 +1552,9 @@ func (m *BeginBlock) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n7, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.Header.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -1607,9 +1609,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n8, err := m.TxHeader.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.TxHeader.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -1617,9 +1619,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n9, err := m.Result.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.Result.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -1627,9 +1629,9 @@ func (m *BeginTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n10, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n10, err10 := m.Exception.MarshalTo(dAtA[i:]) + if err10 != nil { + return 0, err10 } i += n10 } @@ -1657,9 +1659,9 @@ func (m *EndTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n11, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n11, err11 := m.TxHash.MarshalTo(dAtA[i:]) + if err11 != nil { + 
return 0, err11 } i += n11 if m.XXX_unrecognized != nil { @@ -1691,9 +1693,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n12, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n12, err12 := m.TxHash.MarshalTo(dAtA[i:]) + if err12 != nil { + return 0, err12 } i += n12 if m.Height != 0 { @@ -1710,9 +1712,9 @@ func (m *TxHeader) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n13, err := m.Origin.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n13, err13 := m.Origin.MarshalTo(dAtA[i:]) + if err13 != nil { + return 0, err13 } i += n13 } @@ -1746,9 +1748,9 @@ func (m *BlockExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n14, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n14, err14 := m.Header.MarshalTo(dAtA[i:]) + if err14 != nil { + return 0, err14 } i += n14 } @@ -1820,9 +1822,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHeader.Size())) - n15, err := m.TxHeader.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n15, err15 := m.TxHeader.MarshalTo(dAtA[i:]) + if err15 != nil { + return 0, err15 } i += n15 } @@ -1830,9 +1832,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.Envelope.Size())) - n16, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n16, err16 := m.Envelope.MarshalTo(dAtA[i:]) + if err16 != nil { + return 0, err16 } i += n16 } @@ -1852,9 +1854,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintExec(dAtA, i, uint64(m.Result.Size())) - n17, err := m.Result.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n17, err17 := m.Result.MarshalTo(dAtA[i:]) + if err17 != nil { + return 0, err17 } i += n17 } @@ -1862,9 +1864,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintExec(dAtA, i, uint64(m.Receipt.Size())) - n18, err := m.Receipt.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n18, err18 := m.Receipt.MarshalTo(dAtA[i:]) + if err18 != nil { + return 0, err18 } i += n18 } @@ -1872,9 +1874,9 @@ func (m *TxExecution) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x52 i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n19, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n19, err19 := m.Exception.MarshalTo(dAtA[i:]) + if err19 != nil { + return 0, err19 } i += n19 } @@ -1930,9 +1932,9 @@ func (m *Origin) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdTime(m.Time))) - n20, err := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) - if err != nil { - return 0, err + n20, err20 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Time, dAtA[i:]) + if err20 != nil { + return 0, err20 } i += n20 if m.XXX_unrecognized != nil { @@ -1964,9 +1966,9 @@ func (m *Header) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.TxHash.Size())) - n21, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n21, err21 := m.TxHash.MarshalTo(dAtA[i:]) + if err21 != nil { + return 0, err21 } i += n21 if m.EventType != 0 { @@ -1994,9 +1996,9 
@@ func (m *Header) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintExec(dAtA, i, uint64(m.Exception.Size())) - n22, err := m.Exception.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n22, err22 := m.Exception.MarshalTo(dAtA[i:]) + if err22 != nil { + return 0, err22 } i += n22 } @@ -2025,9 +2027,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Header.Size())) - n23, err := m.Header.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n23, err23 := m.Header.MarshalTo(dAtA[i:]) + if err23 != nil { + return 0, err23 } i += n23 } @@ -2035,9 +2037,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Input.Size())) - n24, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n24, err24 := m.Input.MarshalTo(dAtA[i:]) + if err24 != nil { + return 0, err24 } i += n24 } @@ -2045,9 +2047,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Output.Size())) - n25, err := m.Output.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n25, err25 := m.Output.MarshalTo(dAtA[i:]) + if err25 != nil { + return 0, err25 } i += n25 } @@ -2055,9 +2057,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Call.Size())) - n26, err := m.Call.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n26, err26 := m.Call.MarshalTo(dAtA[i:]) + if err26 != nil { + return 0, err26 } i += n26 } @@ -2065,9 +2067,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintExec(dAtA, i, uint64(m.Log.Size())) - n27, err := m.Log.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n27, err27 := m.Log.MarshalTo(dAtA[i:]) + if err27 != nil { + return 0, err27 } i += n27 } @@ -2075,9 +2077,9 @@ func (m *Event) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintExec(dAtA, i, uint64(m.GovernAccount.Size())) - n28, err := m.GovernAccount.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n28, err28 := m.GovernAccount.MarshalTo(dAtA[i:]) + if err28 != nil { + return 0, err28 } i += n28 } @@ -2117,9 +2119,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.NameEntry.Size())) - n29, err := m.NameEntry.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n29, err29 := m.NameEntry.MarshalTo(dAtA[i:]) + if err29 != nil { + return 0, err29 } i += n29 } @@ -2127,9 +2129,9 @@ func (m *Result) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.PermArgs.Size())) - n30, err := m.PermArgs.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n30, err30 := m.PermArgs.MarshalTo(dAtA[i:]) + if err30 != nil { + return 0, err30 } i += n30 } @@ -2157,17 +2159,17 @@ func (m *LogEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n31, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n31, err31 := m.Address.MarshalTo(dAtA[i:]) + if err31 != nil { + return 0, err31 } i += n31 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n32, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n32, err32 := m.Data.MarshalTo(dAtA[i:]) + if err32 != nil { + return 0, err32 } i += n32 if len(m.Topics) > 0 { @@ -2207,18 +2209,18 @@ func (m *CallEvent) 
MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.CallData.Size())) - n33, err := m.CallData.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n33, err33 := m.CallData.MarshalTo(dAtA[i:]) + if err33 != nil { + return 0, err33 } i += n33 } dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Origin.Size())) - n34, err := m.Origin.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n34, err34 := m.Origin.MarshalTo(dAtA[i:]) + if err34 != nil { + return 0, err34 } i += n34 if m.StackDepth != 0 { @@ -2229,9 +2231,9 @@ func (m *CallEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintExec(dAtA, i, uint64(m.Return.Size())) - n35, err := m.Return.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n35, err35 := m.Return.MarshalTo(dAtA[i:]) + if err35 != nil { + return 0, err35 } i += n35 if m.CallType != 0 { @@ -2264,9 +2266,9 @@ func (m *GovernAccountEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.AccountUpdate.Size())) - n36, err := m.AccountUpdate.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n36, err36 := m.AccountUpdate.MarshalTo(dAtA[i:]) + if err36 != nil { + return 0, err36 } i += n36 } @@ -2294,9 +2296,9 @@ func (m *InputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n37, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n37, err37 := m.Address.MarshalTo(dAtA[i:]) + if err37 != nil { + return 0, err37 } i += n37 if m.XXX_unrecognized != nil { @@ -2323,9 +2325,9 @@ func (m *OutputEvent) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Address.Size())) - n38, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n38, err38 := m.Address.MarshalTo(dAtA[i:]) + if err38 != nil { + return 0, err38 } i += n38 if m.XXX_unrecognized != nil { @@ -2352,25 +2354,25 @@ func (m *CallData) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintExec(dAtA, i, uint64(m.Caller.Size())) - n39, err := m.Caller.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n39, err39 := m.Caller.MarshalTo(dAtA[i:]) + if err39 != nil { + return 0, err39 } i += n39 dAtA[i] = 0x12 i++ i = encodeVarintExec(dAtA, i, uint64(m.Callee.Size())) - n40, err := m.Callee.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n40, err40 := m.Callee.MarshalTo(dAtA[i:]) + if err40 != nil { + return 0, err40 } i += n40 dAtA[i] = 0x1a i++ i = encodeVarintExec(dAtA, i, uint64(m.Data.Size())) - n41, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n41, err41 := m.Data.MarshalTo(dAtA[i:]) + if err41 != nil { + return 0, err41 } i += n41 if m.Value != 0 { @@ -2876,14 +2878,7 @@ func (m *CallData) Size() (n int) { } func sovExec(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozExec(x uint64) (n int) { return sovExec(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/execution/names/names.pb.go b/execution/names/names.pb.go index 32b5f872e..5a34f0ce9 100644 --- a/execution/names/names.pb.go +++ b/execution/names/names.pb.go @@ -5,12 +5,14 @@ package names import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_crypto 
"github.com/hyperledger/burrow/crypto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -145,9 +147,9 @@ func (m *Entry) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintNames(dAtA, i, uint64(m.Owner.Size())) - n1, err := m.Owner.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Owner.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Data) > 0 { @@ -202,14 +204,7 @@ func (m *Entry) Size() (n int) { } func sovNames(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozNames(x uint64) (n int) { return sovNames(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/genesis/spec/spec.pb.go b/genesis/spec/spec.pb.go index 313653bb6..886e49e0f 100644 --- a/genesis/spec/spec.pb.go +++ b/genesis/spec/spec.pb.go @@ -5,6 +5,10 @@ package spec import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -12,8 +16,6 @@ import ( balance "github.com/hyperledger/burrow/acm/balance" crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -169,9 +171,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintSpec(dAtA, i, uint64(m.Address.Size())) - n1, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Address.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -179,9 +181,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintSpec(dAtA, i, uint64(m.PublicKey.Size())) - n2, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.PublicKey.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -231,9 +233,9 @@ func (m *TemplateAccount) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintSpec(dAtA, i, uint64(m.Code.Size())) - n3, err := m.Code.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Code.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -299,14 +301,7 @@ func (m *TemplateAccount) Size() (n int) { } func sovSpec(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozSpec(x uint64) (n int) { return sovSpec(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/keys/keys.pb.go b/keys/keys.pb.go index 3bc1c9df1..7458fbb32 100644 --- a/keys/keys.pb.go +++ b/keys/keys.pb.go @@ -6,12 +6,16 @@ package keys import ( context "context" fmt "fmt" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" crypto "github.com/hyperledger/burrow/crypto" grpc "google.golang.org/grpc" - math "math" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -1342,6 +1346,44 @@ type KeysServer interface { AddName(context.Context, *AddNameRequest) (*AddNameResponse, error) } +// UnimplementedKeysServer can be embedded to have forward compatible implementations. 
+type UnimplementedKeysServer struct { +} + +func (*UnimplementedKeysServer) GenerateKey(ctx context.Context, req *GenRequest) (*GenResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GenerateKey not implemented") +} +func (*UnimplementedKeysServer) PublicKey(ctx context.Context, req *PubRequest) (*PubResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PublicKey not implemented") +} +func (*UnimplementedKeysServer) Sign(ctx context.Context, req *SignRequest) (*SignResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") +} +func (*UnimplementedKeysServer) Verify(ctx context.Context, req *VerifyRequest) (*VerifyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") +} +func (*UnimplementedKeysServer) Import(ctx context.Context, req *ImportRequest) (*ImportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Import not implemented") +} +func (*UnimplementedKeysServer) ImportJSON(ctx context.Context, req *ImportJSONRequest) (*ImportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ImportJSON not implemented") +} +func (*UnimplementedKeysServer) Export(ctx context.Context, req *ExportRequest) (*ExportResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Export not implemented") +} +func (*UnimplementedKeysServer) Hash(ctx context.Context, req *HashRequest) (*HashResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Hash not implemented") +} +func (*UnimplementedKeysServer) RemoveName(ctx context.Context, req *RemoveNameRequest) (*RemoveNameResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemoveName not implemented") +} +func (*UnimplementedKeysServer) List(ctx context.Context, req *ListRequest) (*ListResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method List not implemented") +} +func (*UnimplementedKeysServer) AddName(ctx context.Context, req *AddNameRequest) (*AddNameResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddName not implemented") +} + func RegisterKeysServer(s *grpc.Server, srv KeysServer) { s.RegisterService(&_Keys_serviceDesc, srv) } @@ -2026,14 +2068,7 @@ func (m *AddNameRequest) Size() (n int) { } func sovKeys(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozKeys(x uint64) (n int) { return sovKeys(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/permission/permission.pb.go b/permission/permission.pb.go index bb07ce156..41abb94c9 100644 --- a/permission/permission.pb.go +++ b/permission/permission.pb.go @@ -5,12 +5,14 @@ package permission import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
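A minimal sketch of the embedding pattern the generated `UnimplementedKeysServer` comment above describes — `keysService` and its `Hash` body are illustrative only, while `KeysServer`, `UnimplementedKeysServer`, `HashRequest` and `HashResponse` are the generated types from keys/keys.pb.go. A handler that embeds the unimplemented server keeps satisfying `KeysServer` when new RPCs are added to the service later, with any method it does not override returning a codes.Unimplemented error:

package example

import (
	"context"

	"github.com/hyperledger/burrow/keys"
)

// keysService overrides only Hash; every other Keys RPC is promoted from the
// embedded UnimplementedKeysServer and answers with codes.Unimplemented.
type keysService struct {
	keys.UnimplementedKeysServer
}

func (s *keysService) Hash(ctx context.Context, req *keys.HashRequest) (*keys.HashResponse, error) {
	// Illustrative only: a real handler would hash the request payload and fill in the response.
	return &keys.HashResponse{}, nil
}

// Compile-time check that the partial implementation still satisfies the full interface.
var _ keys.KeysServer = (*keysService)(nil)

Such a handler can then be registered as usual with RegisterKeysServer.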
@@ -271,9 +273,9 @@ func (m *AccountPermissions) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPermission(dAtA, i, uint64(m.Base.Size())) - n1, err := m.Base.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Base.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Roles) > 0 { @@ -346,9 +348,9 @@ func (m *PermArgs) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPermission(dAtA, i, uint64(m.Target.Size())) - n2, err := m.Target.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Target.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -444,14 +446,7 @@ func (m *PermArgs) Size() (n int) { } func sovPermission(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozPermission(x uint64) (n int) { return sovPermission(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpc.pb.go b/rpc/rpc.pb.go index f8a6ab389..4e387f401 100644 --- a/rpc/rpc.pb.go +++ b/rpc/rpc.pb.go @@ -5,6 +5,9 @@ package rpc import ( fmt "fmt" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -12,7 +15,6 @@ import ( bcm "github.com/hyperledger/burrow/bcm" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" tendermint "github.com/hyperledger/burrow/consensus/tendermint" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -195,14 +197,7 @@ func (m *ResultStatus) Size() (n int) { } func sovRpc(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpc(x uint64) (n int) { return sovRpc(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcdump/rpcdump.pb.go b/rpc/rpcdump/rpcdump.pb.go index 11f635b75..34bb4554b 100644 --- a/rpc/rpcdump/rpcdump.pb.go +++ b/rpc/rpcdump/rpcdump.pb.go @@ -6,12 +6,16 @@ package rpcdump import ( context "context" fmt "fmt" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" dump "github.com/hyperledger/burrow/dump" grpc "google.golang.org/grpc" - math "math" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -152,6 +156,14 @@ type DumpServer interface { GetDump(*GetDumpParam, Dump_GetDumpServer) error } +// UnimplementedDumpServer can be embedded to have forward compatible implementations. 
+type UnimplementedDumpServer struct { +} + +func (*UnimplementedDumpServer) GetDump(req *GetDumpParam, srv Dump_GetDumpServer) error { + return status.Errorf(codes.Unimplemented, "method GetDump not implemented") +} + func RegisterDumpServer(s *grpc.Server, srv DumpServer) { s.RegisterService(&_Dump_serviceDesc, srv) } @@ -207,14 +219,7 @@ func (m *GetDumpParam) Size() (n int) { } func sovRpcdump(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcdump(x uint64) (n int) { return sovRpcdump(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcevents/rpcevents.pb.go b/rpc/rpcevents/rpcevents.pb.go index db293aaea..ba120cae2 100644 --- a/rpc/rpcevents/rpcevents.pb.go +++ b/rpc/rpcevents/rpcevents.pb.go @@ -6,14 +6,18 @@ package rpcevents import ( context "context" fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" github_com_hyperledger_burrow_binary "github.com/hyperledger/burrow/binary" exec "github.com/hyperledger/burrow/execution/exec" grpc "google.golang.org/grpc" - io "io" - math "math" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -762,6 +766,20 @@ type ExecutionEventsServer interface { Events(*BlocksRequest, ExecutionEvents_EventsServer) error } +// UnimplementedExecutionEventsServer can be embedded to have forward compatible implementations. +type UnimplementedExecutionEventsServer struct { +} + +func (*UnimplementedExecutionEventsServer) Stream(req *BlocksRequest, srv ExecutionEvents_StreamServer) error { + return status.Errorf(codes.Unimplemented, "method Stream not implemented") +} +func (*UnimplementedExecutionEventsServer) Tx(ctx context.Context, req *TxRequest) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method Tx not implemented") +} +func (*UnimplementedExecutionEventsServer) Events(req *BlocksRequest, srv ExecutionEvents_EventsServer) error { + return status.Errorf(codes.Unimplemented, "method Events not implemented") +} + func RegisterExecutionEventsServer(s *grpc.Server, srv ExecutionEventsServer) { s.RegisterService(&_ExecutionEvents_serviceDesc, srv) } @@ -904,9 +922,9 @@ func (m *TxRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.TxHash.Size())) - n1, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.TxHash.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if m.Wait { @@ -944,9 +962,9 @@ func (m *BlocksRequest) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.BlockRange.Size())) - n2, err := m.BlockRange.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.BlockRange.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1125,9 +1143,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.Start.Size())) - n3, err := m.Start.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.Start.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1135,9 +1153,9 @@ func (m *BlockRange) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpcevents(dAtA, i, uint64(m.End.Size())) - n4, err := 
m.End.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.End.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1314,14 +1332,7 @@ func (m *BlockRange) Size() (n int) { } func sovRpcevents(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcevents(x uint64) (n int) { return sovRpcevents(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpcquery/rpcquery.pb.go b/rpc/rpcquery/rpcquery.pb.go index ee44c1ed5..a2111212d 100644 --- a/rpc/rpcquery/rpcquery.pb.go +++ b/rpc/rpcquery/rpcquery.pb.go @@ -6,6 +6,9 @@ package rpcquery import ( context "context" fmt "fmt" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -18,7 +21,8 @@ import ( payload "github.com/hyperledger/burrow/txs/payload" types "github.com/tendermint/tendermint/abci/types" grpc "google.golang.org/grpc" - math "math" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -1167,6 +1171,50 @@ type QueryServer interface { GetBlockHeader(context.Context, *GetBlockParam) (*types.Header, error) } +// UnimplementedQueryServer can be embedded to have forward compatible implementations. +type UnimplementedQueryServer struct { +} + +func (*UnimplementedQueryServer) Status(ctx context.Context, req *StatusParam) (*rpc.ResultStatus, error) { + return nil, status.Errorf(codes.Unimplemented, "method Status not implemented") +} +func (*UnimplementedQueryServer) GetAccount(ctx context.Context, req *GetAccountParam) (*acm.Account, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetAccount not implemented") +} +func (*UnimplementedQueryServer) GetMetadata(ctx context.Context, req *GetMetadataParam) (*MetadataResult, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetMetadata not implemented") +} +func (*UnimplementedQueryServer) GetStorage(ctx context.Context, req *GetStorageParam) (*StorageValue, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStorage not implemented") +} +func (*UnimplementedQueryServer) ListAccounts(req *ListAccountsParam, srv Query_ListAccountsServer) error { + return status.Errorf(codes.Unimplemented, "method ListAccounts not implemented") +} +func (*UnimplementedQueryServer) GetName(ctx context.Context, req *GetNameParam) (*names.Entry, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetName not implemented") +} +func (*UnimplementedQueryServer) ListNames(req *ListNamesParam, srv Query_ListNamesServer) error { + return status.Errorf(codes.Unimplemented, "method ListNames not implemented") +} +func (*UnimplementedQueryServer) GetValidatorSet(ctx context.Context, req *GetValidatorSetParam) (*ValidatorSet, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSet not implemented") +} +func (*UnimplementedQueryServer) GetValidatorSetHistory(ctx context.Context, req *GetValidatorSetHistoryParam) (*ValidatorSetHistory, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetValidatorSetHistory not implemented") +} +func (*UnimplementedQueryServer) GetProposal(ctx context.Context, req *GetProposalParam) (*payload.Ballot, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetProposal not implemented") +} +func (*UnimplementedQueryServer) ListProposals(req 
*ListProposalsParam, srv Query_ListProposalsServer) error { + return status.Errorf(codes.Unimplemented, "method ListProposals not implemented") +} +func (*UnimplementedQueryServer) GetStats(ctx context.Context, req *GetStatsParam) (*Stats, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStats not implemented") +} +func (*UnimplementedQueryServer) GetBlockHeader(ctx context.Context, req *GetBlockParam) (*types.Header, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetBlockHeader not implemented") +} + func RegisterQueryServer(s *grpc.Server, srv QueryServer) { s.RegisterService(&_Query_serviceDesc, srv) } @@ -1790,14 +1838,7 @@ func (m *GetBlockParam) Size() (n int) { } func sovRpcquery(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpcquery(x uint64) (n int) { return sovRpcquery(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/rpc/rpctransact/rpctransact.pb.go b/rpc/rpctransact/rpctransact.pb.go index 915e0c193..a11f42d3a 100644 --- a/rpc/rpctransact/rpctransact.pb.go +++ b/rpc/rpctransact/rpctransact.pb.go @@ -6,6 +6,11 @@ package rpctransact import ( context "context" fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + time "time" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" @@ -17,9 +22,8 @@ import ( txs "github.com/hyperledger/burrow/txs" payload "github.com/hyperledger/burrow/txs/payload" grpc "google.golang.org/grpc" - io "io" - math "math" - time "time" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. @@ -440,6 +444,47 @@ type TransactServer interface { NameTxAsync(context.Context, *payload.NameTx) (*txs.Receipt, error) } +// UnimplementedTransactServer can be embedded to have forward compatible implementations. 
+type UnimplementedTransactServer struct { +} + +func (*UnimplementedTransactServer) BroadcastTxSync(ctx context.Context, req *TxEnvelopeParam) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxSync not implemented") +} +func (*UnimplementedTransactServer) BroadcastTxAsync(ctx context.Context, req *TxEnvelopeParam) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTxAsync not implemented") +} +func (*UnimplementedTransactServer) SignTx(ctx context.Context, req *TxEnvelopeParam) (*TxEnvelope, error) { + return nil, status.Errorf(codes.Unimplemented, "method SignTx not implemented") +} +func (*UnimplementedTransactServer) FormulateTx(ctx context.Context, req *payload.Any) (*TxEnvelope, error) { + return nil, status.Errorf(codes.Unimplemented, "method FormulateTx not implemented") +} +func (*UnimplementedTransactServer) CallTxSync(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxSync not implemented") +} +func (*UnimplementedTransactServer) CallTxAsync(ctx context.Context, req *payload.CallTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxAsync not implemented") +} +func (*UnimplementedTransactServer) CallTxSim(ctx context.Context, req *payload.CallTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallTxSim not implemented") +} +func (*UnimplementedTransactServer) CallCodeSim(ctx context.Context, req *CallCodeParam) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method CallCodeSim not implemented") +} +func (*UnimplementedTransactServer) SendTxSync(ctx context.Context, req *payload.SendTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method SendTxSync not implemented") +} +func (*UnimplementedTransactServer) SendTxAsync(ctx context.Context, req *payload.SendTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method SendTxAsync not implemented") +} +func (*UnimplementedTransactServer) NameTxSync(ctx context.Context, req *payload.NameTx) (*exec.TxExecution, error) { + return nil, status.Errorf(codes.Unimplemented, "method NameTxSync not implemented") +} +func (*UnimplementedTransactServer) NameTxAsync(ctx context.Context, req *payload.NameTx) (*txs.Receipt, error) { + return nil, status.Errorf(codes.Unimplemented, "method NameTxAsync not implemented") +} + func RegisterTransactServer(s *grpc.Server, srv TransactServer) { s.RegisterService(&_Transact_serviceDesc, srv) } @@ -735,9 +780,9 @@ func (m *CallCodeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.FromAddress.Size())) - n1, err := m.FromAddress.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.FromAddress.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 if len(m.Code) > 0 { @@ -777,9 +822,9 @@ func (m *TxEnvelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Envelope.Size())) - n2, err := m.Envelope.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Envelope.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -808,9 +853,9 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Envelope.Size())) - n3, err := m.Envelope.MarshalTo(dAtA[i:]) 
- if err != nil { - return 0, err + n3, err3 := m.Envelope.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -818,18 +863,18 @@ func (m *TxEnvelopeParam) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintRpctransact(dAtA, i, uint64(m.Payload.Size())) - n4, err := m.Payload.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Payload.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } dAtA[i] = 0x1a i++ i = encodeVarintRpctransact(dAtA, i, uint64(github_com_gogo_protobuf_types.SizeOfStdDuration(m.Timeout))) - n5, err := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.XXX_unrecognized != nil { @@ -908,14 +953,7 @@ func (m *TxEnvelopeParam) Size() (n int) { } func sovRpctransact(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozRpctransact(x uint64) (n int) { return sovRpctransact(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/storage/storage.pb.go b/storage/storage.pb.go index dc3eb476b..b29a673a8 100644 --- a/storage/storage.pb.go +++ b/storage/storage.pb.go @@ -5,11 +5,13 @@ package storage import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -164,14 +166,7 @@ func (m *CommitID) Size() (n int) { } func sovStorage(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozStorage(x uint64) (n int) { return sovStorage(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/txs/payload/payload.pb.go b/txs/payload/payload.pb.go index 0e2c9706a..1d84f41a1 100644 --- a/txs/payload/payload.pb.go +++ b/txs/payload/payload.pb.go @@ -5,6 +5,10 @@ package payload import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -12,8 +16,6 @@ import ( github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" spec "github.com/hyperledger/burrow/genesis/spec" permission "github.com/hyperledger/burrow/permission" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
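The regenerated `sov*` helpers in these files all swap the shift loop for the same closed form. As a standalone sanity check (not part of the patch), both versions compute the protobuf varint length of x — the number of 7-bit groups needed, never fewer than one byte and at most ten:

package main

import (
	"fmt"
	"math/bits"
)

// Loop form, as previously generated: counts 7-bit groups by shifting until zero.
func sovLoop(x uint64) (n int) {
	for {
		n++
		x >>= 7
		if x == 0 {
			break
		}
	}
	return n
}

// Closed form, as now generated: bits.Len64(x|1) is the bit length of x
// (treating 0 as 1 so the result is never zero), and (len+6)/7 rounds up
// to whole 7-bit groups, giving 1..10 bytes.
func sovClosed(x uint64) int {
	return (bits.Len64(x|1) + 6) / 7
}

func main() {
	for _, x := range []uint64{0, 1, 127, 128, 16383, 16384, 1 << 63} {
		fmt.Println(x, sovLoop(x), sovClosed(x), sovLoop(x) == sovClosed(x))
	}
}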
@@ -1121,9 +1123,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.CallTx.Size())) - n1, err := m.CallTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.CallTx.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -1131,9 +1133,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.SendTx.Size())) - n2, err := m.SendTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.SendTx.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -1141,9 +1143,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.NameTx.Size())) - n3, err := m.NameTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.NameTx.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -1151,9 +1153,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermsTx.Size())) - n4, err := m.PermsTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.PermsTx.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -1161,9 +1163,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.GovTx.Size())) - n5, err := m.GovTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.GovTx.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 } @@ -1171,9 +1173,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BondTx.Size())) - n6, err := m.BondTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.BondTx.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 } @@ -1181,9 +1183,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x3a i++ i = encodeVarintPayload(dAtA, i, uint64(m.UnbondTx.Size())) - n7, err := m.UnbondTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n7, err7 := m.UnbondTx.MarshalTo(dAtA[i:]) + if err7 != nil { + return 0, err7 } i += n7 } @@ -1191,9 +1193,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x42 i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n8, err := m.BatchTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n8, err8 := m.BatchTx.MarshalTo(dAtA[i:]) + if err8 != nil { + return 0, err8 } i += n8 } @@ -1201,9 +1203,9 @@ func (m *Any) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x4a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalTx.Size())) - n9, err := m.ProposalTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n9, err9 := m.ProposalTx.MarshalTo(dAtA[i:]) + if err9 != nil { + return 0, err9 } i += n9 } @@ -1231,9 +1233,9 @@ func (m *TxInput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n10, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n10, err10 := m.Address.MarshalTo(dAtA[i:]) + if err10 != nil { + return 0, err10 } i += n10 if m.Amount != 0 { @@ -1270,9 +1272,9 @@ func (m *TxOutput) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n11, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n11, err11 := m.Address.MarshalTo(dAtA[i:]) + if err11 != nil { + return 0, err11 } i += 
n11 if m.Amount != 0 { @@ -1305,9 +1307,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n12, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n12, err12 := m.Input.MarshalTo(dAtA[i:]) + if err12 != nil { + return 0, err12 } i += n12 } @@ -1315,9 +1317,9 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n13, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n13, err13 := m.Address.MarshalTo(dAtA[i:]) + if err13 != nil { + return 0, err13 } i += n13 } @@ -1334,17 +1336,17 @@ func (m *CallTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x2a i++ i = encodeVarintPayload(dAtA, i, uint64(m.Data.Size())) - n14, err := m.Data.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n14, err14 := m.Data.MarshalTo(dAtA[i:]) + if err14 != nil { + return 0, err14 } i += n14 dAtA[i] = 0x32 i++ i = encodeVarintPayload(dAtA, i, uint64(m.WASM.Size())) - n15, err := m.WASM.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n15, err15 := m.WASM.MarshalTo(dAtA[i:]) + if err15 != nil { + return 0, err15 } i += n15 if len(m.ContractMeta) > 0 { @@ -1383,9 +1385,9 @@ func (m *ContractMeta) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.CodeHash.Size())) - n16, err := m.CodeHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n16, err16 := m.CodeHash.MarshalTo(dAtA[i:]) + if err16 != nil { + return 0, err16 } i += n16 if len(m.Meta) > 0 { @@ -1464,18 +1466,18 @@ func (m *PermsTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n17, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n17, err17 := m.Input.MarshalTo(dAtA[i:]) + if err17 != nil { + return 0, err17 } i += n17 } dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.PermArgs.Size())) - n18, err := m.PermArgs.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n18, err18 := m.PermArgs.MarshalTo(dAtA[i:]) + if err18 != nil { + return 0, err18 } i += n18 if m.XXX_unrecognized != nil { @@ -1503,9 +1505,9 @@ func (m *NameTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n19, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n19, err19 := m.Input.MarshalTo(dAtA[i:]) + if err19 != nil { + return 0, err19 } i += n19 } @@ -1551,9 +1553,9 @@ func (m *BondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n20, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n20, err20 := m.Input.MarshalTo(dAtA[i:]) + if err20 != nil { + return 0, err20 } i += n20 } @@ -1582,9 +1584,9 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n21, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n21, err21 := m.Input.MarshalTo(dAtA[i:]) + if err21 != nil { + return 0, err21 } i += n21 } @@ -1592,9 +1594,9 @@ func (m *UnbondTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Output.Size())) - n22, err := m.Output.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n22, err22 := m.Output.MarshalTo(dAtA[i:]) + if err22 != nil { + return 0, err22 } i += n22 } @@ -1668,9 +1670,9 
@@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Input.Size())) - n23, err := m.Input.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n23, err23 := m.Input.MarshalTo(dAtA[i:]) + if err23 != nil { + return 0, err23 } i += n23 } @@ -1683,9 +1685,9 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.ProposalHash.Size())) - n24, err := m.ProposalHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n24, err24 := m.ProposalHash.MarshalTo(dAtA[i:]) + if err24 != nil { + return 0, err24 } i += n24 } @@ -1693,9 +1695,9 @@ func (m *ProposalTx) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n25, err := m.Proposal.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n25, err25 := m.Proposal.MarshalTo(dAtA[i:]) + if err25 != nil { + return 0, err25 } i += n25 } @@ -1768,9 +1770,9 @@ func (m *Vote) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Address.Size())) - n26, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n26, err26 := m.Address.MarshalTo(dAtA[i:]) + if err26 != nil { + return 0, err26 } i += n26 if m.VotingWeight != 0 { @@ -1815,9 +1817,9 @@ func (m *Proposal) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x1a i++ i = encodeVarintPayload(dAtA, i, uint64(m.BatchTx.Size())) - n27, err := m.BatchTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n27, err27 := m.BatchTx.MarshalTo(dAtA[i:]) + if err27 != nil { + return 0, err27 } i += n27 } @@ -1846,9 +1848,9 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintPayload(dAtA, i, uint64(m.Proposal.Size())) - n28, err := m.Proposal.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n28, err28 := m.Proposal.MarshalTo(dAtA[i:]) + if err28 != nil { + return 0, err28 } i += n28 } @@ -1856,9 +1858,9 @@ func (m *Ballot) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintPayload(dAtA, i, uint64(m.FinalizingTx.Size())) - n29, err := m.FinalizingTx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n29, err29 := m.FinalizingTx.MarshalTo(dAtA[i:]) + if err29 != nil { + return 0, err29 } i += n29 } @@ -2284,14 +2286,7 @@ func (m *Ballot) Size() (n int) { } func sovPayload(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozPayload(x uint64) (n int) { return sovPayload(uint64((x << 1) ^ uint64((int64(x) >> 63)))) diff --git a/txs/txs.pb.go b/txs/txs.pb.go index e95880013..bebf3056a 100644 --- a/txs/txs.pb.go +++ b/txs/txs.pb.go @@ -5,6 +5,10 @@ package txs import ( fmt "fmt" + io "io" + math "math" + math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" golang_proto "github.com/golang/protobuf/proto" @@ -12,8 +16,6 @@ import ( crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_crypto "github.com/hyperledger/burrow/crypto" github_com_hyperledger_burrow_txs_payload "github.com/hyperledger/burrow/txs/payload" - io "io" - math "math" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -281,9 +283,9 @@ func (m *Envelope) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Tx.Size())) - n1, err := m.Tx.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n1, err1 := m.Tx.MarshalTo(dAtA[i:]) + if err1 != nil { + return 0, err1 } i += n1 } @@ -312,9 +314,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0xa i++ i = encodeVarintTxs(dAtA, i, uint64(m.Address.Size())) - n2, err := m.Address.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n2, err2 := m.Address.MarshalTo(dAtA[i:]) + if err2 != nil { + return 0, err2 } i += n2 } @@ -322,9 +324,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.PublicKey.Size())) - n3, err := m.PublicKey.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n3, err3 := m.PublicKey.MarshalTo(dAtA[i:]) + if err3 != nil { + return 0, err3 } i += n3 } @@ -332,9 +334,9 @@ func (m *Signatory) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.Signature.Size())) - n4, err := m.Signature.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n4, err4 := m.Signature.MarshalTo(dAtA[i:]) + if err4 != nil { + return 0, err4 } i += n4 } @@ -367,9 +369,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x12 i++ i = encodeVarintTxs(dAtA, i, uint64(m.TxHash.Size())) - n5, err := m.TxHash.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n5, err5 := m.TxHash.MarshalTo(dAtA[i:]) + if err5 != nil { + return 0, err5 } i += n5 if m.CreatesContract { @@ -385,9 +387,9 @@ func (m *Receipt) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x22 i++ i = encodeVarintTxs(dAtA, i, uint64(m.ContractAddress.Size())) - n6, err := m.ContractAddress.MarshalTo(dAtA[i:]) - if err != nil { - return 0, err + n6, err6 := m.ContractAddress.MarshalTo(dAtA[i:]) + if err6 != nil { + return 0, err6 } i += n6 if m.XXX_unrecognized != nil { @@ -474,14 +476,7 @@ func (m *Receipt) Size() (n int) { } func sovTxs(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + return (math_bits.Len64(x|1) + 6) / 7 } func sozTxs(x uint64) (n int) { return sovTxs(uint64((x << 1) ^ uint64((int64(x) >> 63)))) From dddbead4ed036fd87555f6baa679958c314b9d85 Mon Sep 17 00:00:00 2001 From: Silas Davis Date: Tue, 13 Aug 2019 14:06:42 +0100 Subject: [PATCH 68/70] Improve vents handling of filters - The Vent log now supports unbounded length filter column for long filters - Changed interface for tags to simplify away ReflectTagged - Optimised short-circuit path for tag filters (less reflection for early results) - Added errors to TxStack and BlockAccumulator where filters may give invalid event stream WRT to tx formation - Supported nested tag filters separated by '.' - e.g. 'Input.Address' - Filter non-Log events from vent early - Pull EventSpec and expose its fields to the query so now you can query event name with `EventName = 'SolidityName'` since we have the ABIs. 
Other fields in EventSpec are also exposed with the 'Event' prefix - Add unit test for BlockConsumer function Signed-off-by: Silas Davis --- acm/account.go | 22 +- acm/account_test.go | 10 +- cmd/burrow/commands/vent.go | 2 +- event/query/expression.go | 2 +- event/query/query.go | 10 +- event/query/query_test.go | 2 + event/query/reflect_tagged.go | 140 ----- event/query/reflect_tagged_test.go | 110 ---- event/query/tags.go | 133 ++--- event/query/tags_test.go | 121 +++++ execution/evm/abi/event_spec.go | 10 +- execution/evm/abi/event_spec_test.go | 23 + execution/evm/abi/packing.go | 2 +- execution/evm/abi/spec.go | 4 +- execution/exec/block_execution.go | 69 +-- execution/exec/block_execution_test.go | 16 + execution/exec/event.go | 54 +- execution/exec/event_test.go | 26 +- execution/exec/log_event.go | 11 +- execution/exec/stream_event.go | 101 +++- execution/exec/stream_event_test.go | 9 +- execution/exec/tx_execution.go | 42 +- execution/execution.go | 4 +- execution/names/names.go | 13 +- execution/state/events.go | 13 +- execution/transactor_test.go | 2 +- forensics/revert_test.go | 5 +- go.mod | 1 + .../rpcevents/execution_events_server_test.go | 21 +- integration/rpctransact/call_test.go | 6 +- rpc/rpcevents/execution_events_server.go | 10 +- rpc/rpcquery/query_server.go | 4 +- txs/envelope.go | 14 +- txs/tx.go | 9 +- txs/tx_test.go | 31 +- vent/service/{abis.go => abi_provider.go} | 20 +- vent/service/block_consumer.go | 138 +++++ vent/service/block_consumer_test.go | 94 ++++ vent/service/consumer.go | 181 ++----- ...r_test.go => consumer_integration_test.go} | 0 vent/service/rowbuilder.go | 17 +- vent/sqldb/sqldb.go | 366 ++++++++++++- vent/sqldb/sqldb_integration_test.go | 335 ++++++++++++ vent/sqldb/sqldb_test.go | 334 +----------- vent/sqldb/system_tables.go | 134 +++++ vent/sqldb/utils.go | 510 ------------------ vent/sqldb/utils_test.go | 17 - vent/sqlsol/generate_test.go | 12 +- vent/sqlsol/projection.go | 40 +- vent/sqlsol/projection_test.go | 40 +- vent/sqlsol/spec_loader.go | 8 +- vent/test/sqlsol_view.json | 2 +- vent/types/event_class.go | 8 +- vent/types/event_class_test.go | 2 +- vent/types/sql_table.go | 3 +- 55 files changed, 1682 insertions(+), 1631 deletions(-) delete mode 100644 event/query/reflect_tagged.go delete mode 100644 event/query/reflect_tagged_test.go create mode 100644 event/query/tags_test.go create mode 100644 execution/evm/abi/event_spec_test.go rename vent/service/{abis.go => abi_provider.go} (65%) create mode 100644 vent/service/block_consumer.go create mode 100644 vent/service/block_consumer_test.go rename vent/service/{consumer_test.go => consumer_integration_test.go} (100%) create mode 100644 vent/sqldb/sqldb_integration_test.go create mode 100644 vent/sqldb/system_tables.go delete mode 100644 vent/sqldb/utils.go delete mode 100644 vent/sqldb/utils_test.go diff --git a/acm/account.go b/acm/account.go index f63e4e645..3e0c281bf 100644 --- a/acm/account.go +++ b/acm/account.go @@ -17,6 +17,7 @@ package acm import ( "bytes" "fmt" + "reflect" "github.com/gogo/protobuf/proto" "github.com/hyperledger/burrow/binary" @@ -112,18 +113,13 @@ func (acc Account) String() string { acc.Address, acc.Sequence, acc.PublicKey, acc.Balance, len(acc.EVMCode), acc.Permissions) } -func (acc *Account) Tagged() query.Tagged { - return &TaggedAccount{ - Account: acc, - Tagged: query.MergeTags(query.MustReflectTags(acc, "Address", "Balance", "Sequence", "EVMCode"), - query.TagMap{ - "Permissions": acc.Permissions.Base.ResultantPerms(), - "Roles": 
acc.Permissions.Roles, - }), +func (acc *Account) Get(key string) (interface{}, bool) { + switch key { + case "Permissions": + return acc.Permissions.Base.ResultantPerms(), true + case "Roles": + return acc.Permissions.Roles, true + default: + return query.GetReflect(reflect.ValueOf(acc), key) } } - -type TaggedAccount struct { - *Account - query.Tagged -} diff --git a/acm/account_test.go b/acm/account_test.go index 639eb3551..accc7367b 100644 --- a/acm/account_test.go +++ b/acm/account_test.go @@ -76,15 +76,13 @@ func TestAccountTags(t *testing.T) { Permissions: perms, EVMCode: solidity.Bytecode_StrangeLoop, } - tagged := acc.Tagged() - assert.Equal(t, []string{"Address", "Balance", "Sequence", "EVMCode", "Permissions", "Roles"}, tagged.Keys()) - flag, _ := tagged.Get("Permissions") + flag, _ := acc.Get("Permissions") permString := permission.String(flag.(permission.PermFlag)) assert.Equal(t, "send | call | createContract | createAccount | bond | name | proposal | input | batch | hasBase | hasRole", permString) - roles, _ := tagged.Get("Roles") + roles, _ := acc.Get("Roles") assert.Equal(t, []string{"frogs", "dogs"}, roles) - tagged.Get("EVMCode") + acc.Get("EVMCode") qry, err := query.New("EVMCode CONTAINS '0116002556001600360006101000A815'") require.NoError(t, err) - assert.True(t, qry.Matches(tagged)) + assert.True(t, qry.Matches(acc)) } diff --git a/cmd/burrow/commands/vent.go b/cmd/burrow/commands/vent.go index 2df40e90b..9e1ca819b 100644 --- a/cmd/burrow/commands/vent.go +++ b/cmd/burrow/commands/vent.go @@ -126,7 +126,7 @@ func Vent(output Output) func(cmd *cli.Cmd) { cmd.Command("schema", "Print JSONSchema for spec file format to validate table specs", func(cmd *cli.Cmd) { cmd.Action = func() { - output.Printf(source.JSONString(types.EventSpecSchema())) + output.Printf(source.JSONString(types.ProjectionSpecSchema())) } }) diff --git a/event/query/expression.go b/event/query/expression.go index 445b652f7..d19222261 100644 --- a/event/query/expression.go +++ b/event/query/expression.go @@ -82,7 +82,7 @@ type Expression struct { // This is our 'bytecode' code []*instruction errors errors.MultipleErrors - explainer func(fmt string, args ...interface{}) + explainer func(format string, args ...interface{}) } // Evaluate expects an Execute() to have filled the code of the Expression so it can be run in the little stack machine diff --git a/event/query/query.go b/event/query/query.go index b245a40ee..997eded3f 100644 --- a/event/query/query.go +++ b/event/query/query.go @@ -10,7 +10,6 @@ package query import ( "fmt" - "strings" ) type Query interface { @@ -35,8 +34,7 @@ type MatchError struct { } func (m *MatchError) Error() string { - keys := strings.Join(m.Tagged.Keys(), ", ") - return fmt.Sprintf("matching error occurred with tagged with keys [%s]: %v", keys, m.Cause) + return fmt.Sprintf("matching error occurred with tagged: %v", m.Cause) } // Condition represents a single condition within a query and consists of tag @@ -53,6 +51,9 @@ func New(s string) (*PegQuery, error) { p := &QueryParser{ Buffer: s, } + //p.Expression.explainer = func(format string, args ...interface{}) { + // fmt.Printf(format, args...) + //} err := p.Init() if err != nil { return nil, err @@ -89,9 +90,6 @@ func (q *PegQuery) Query() (Query, error) { // For example, query "name=John" matches tags = {"name": "John"}. More // examples could be found in parser_test.go and query_test.go. 
func (q *PegQuery) Matches(tags Tagged) bool { - if tags.Len() == 0 { - return false - } match, err := q.parser.Evaluate(tags.Get) if err != nil { q.error = &MatchError{Cause: err, Tagged: tags} diff --git a/event/query/query_test.go b/event/query/query_test.go index 0347a461c..b91344e52 100644 --- a/event/query/query_test.go +++ b/event/query/query_test.go @@ -23,6 +23,8 @@ func TestMatches(t *testing.T) { err bool matches bool }{ + {"Height CONTAINS '2'", map[string]interface{}{"Height": uint64(12)}, false, true}, + {"Height CONTAINS '2'", map[string]interface{}{"Height": uint64(11)}, false, false}, {"foo > 10", map[string]interface{}{"foo": 11}, false, true}, {"foo >= 10", map[string]interface{}{"foo": uint64(11)}, false, true}, {"foo >= 10", map[string]interface{}{"foo": uint32(11)}, false, true}, diff --git a/event/query/reflect_tagged.go b/event/query/reflect_tagged.go deleted file mode 100644 index 834bf0f1b..000000000 --- a/event/query/reflect_tagged.go +++ /dev/null @@ -1,140 +0,0 @@ -package query - -import ( - "fmt" - "reflect" - "strings" - "sync" -) - -type ReflectTagged struct { - rv reflect.Value - keys []string - ks map[string]struct{} -} - -var _ Tagged = &ReflectTagged{} - -func MustReflectTags(value interface{}, fieldNames ...string) *ReflectTagged { - rt, err := ReflectTags(value, fieldNames...) - if err != nil { - panic(err) - } - return rt -} - -// ReflectTags provides a query.Tagged on a structs exported fields using query.StringFromValue to derive the string -// values associated with each field. If passed explicit field names will only only provide those fields as tags, -// otherwise all exported fields are provided. -func ReflectTags(value interface{}, fieldNames ...string) (*ReflectTagged, error) { - rv := reflect.ValueOf(value) - if rv.IsNil() { - return &ReflectTagged{}, nil - } - if rv.Kind() != reflect.Ptr { - return nil, fmt.Errorf("ReflectStructTags needs a pointer to a struct but %v is not a pointer", - rv.Interface()) - } - if rv.Elem().Kind() != reflect.Struct { - return nil, fmt.Errorf("ReflectStructTags needs a pointer to a struct but %v does not point to a struct", - rv.Interface()) - } - ty := rv.Elem().Type() - // Try our global cache on types - if rt, ok := cache.get(ty, fieldNames); ok { - rt.rv = rv - return rt, nil - } - - numField := ty.NumField() - if len(fieldNames) > 0 { - if len(fieldNames) > numField { - return nil, fmt.Errorf("ReflectTags asked to tag %v fields but %v only has %v fields", - len(fieldNames), rv.Interface(), numField) - } - numField = len(fieldNames) - } - rt := &ReflectTagged{ - rv: rv, - ks: make(map[string]struct{}, numField), - keys: make([]string, 0, numField), - } - if len(fieldNames) > 0 { - for _, fieldName := range fieldNames { - field, ok := ty.FieldByName(fieldName) - if !ok { - return nil, fmt.Errorf("ReflectTags asked to tag field named %s by no such field on %v", - fieldName, rv.Interface()) - } - ok = rt.registerField(field) - if !ok { - return nil, fmt.Errorf("field %s of %v is not exported so cannot act as tag", fieldName, - rv.Interface()) - } - } - } else { - for i := 0; i < numField; i++ { - rt.registerField(ty.Field(i)) - } - } - // Cache the registration - cache.put(ty, rt, fieldNames) - return rt, nil -} - -func (rt *ReflectTagged) registerField(field reflect.StructField) (ok bool) { - // Empty iff struct field is exported - if field.PkgPath == "" { - rt.keys = append(rt.keys, field.Name) - rt.ks[field.Name] = struct{}{} - return true - } - return false -} - -func (rt *ReflectTagged) Keys() []string { 
- return rt.keys -} - -func (rt *ReflectTagged) Get(key string) (value interface{}, ok bool) { - if _, ok := rt.ks[key]; ok { - return rt.rv.Elem().FieldByName(key).Interface(), true - } - return "", false -} - -func (rt *ReflectTagged) Len() int { - return len(rt.keys) -} - -type reflectTaggedCache struct { - sync.Mutex - rts map[reflect.Type]map[string]ReflectTagged -} - -// Avoid the need to iterate over reflected type each time we need a reflect tagged -var cache = &reflectTaggedCache{ - rts: make(map[reflect.Type]map[string]ReflectTagged), -} - -func (c *reflectTaggedCache) get(ty reflect.Type, keys []string) (*ReflectTagged, bool) { - c.Lock() - defer c.Unlock() - if _, ok := c.rts[ty]; ok { - key := strings.Join(keys, ",") - if rt, ok := c.rts[ty][key]; ok { - return &rt, true - } - } - return nil, false -} - -func (c *reflectTaggedCache) put(ty reflect.Type, rt *ReflectTagged, fieldNames []string) { - c.Lock() - defer c.Unlock() - if _, ok := c.rts[ty]; !ok { - c.rts[ty] = make(map[string]ReflectTagged) - } - key := strings.Join(fieldNames, ",") - c.rts[ty][key] = *rt -} diff --git a/event/query/reflect_tagged_test.go b/event/query/reflect_tagged_test.go deleted file mode 100644 index 486788261..000000000 --- a/event/query/reflect_tagged_test.go +++ /dev/null @@ -1,110 +0,0 @@ -package query - -import ( - "testing" - - "github.com/hyperledger/burrow/binary" - "github.com/hyperledger/burrow/crypto" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -type testTaggable struct { - Foo string - Bar string - Baz binary.HexBytes - Address crypto.Address - Indices []int -} - -func TestReflectTagged_Keys(t *testing.T) { - rt, err := ReflectTags(&testTaggable{}) - require.NoError(t, err) - assert.Equal(t, []string{"Foo", "Bar", "Baz", "Address", "Indices"}, rt.Keys()) -} - -func TestReflectTagged_Get(t *testing.T) { - tt := testTaggable{ - Foo: "Thumbs", - Bar: "Numbed", - Baz: []byte{255, 255, 255}, - Address: crypto.Address{1, 2, 3}, - Indices: []int{5, 7, 9}, - } - rt, err := ReflectTags(&tt) - require.NoError(t, err) - - value, ok := rt.Get("Foo") - assert.True(t, ok) - assert.Equal(t, tt.Foo, value) - - value, ok = rt.Get("Bar") - assert.True(t, ok) - assert.Equal(t, tt.Bar, value) - - value, ok = rt.Get("Baz") - assert.True(t, ok) - assert.Equal(t, binary.HexBytes{0xFF, 0xFF, 0xFF}, value) - - value, ok = rt.Get("Indices") - assert.True(t, ok) - assert.Equal(t, []int{5, 7, 9}, value) - - value, ok = rt.Get("Address") - assert.True(t, ok) - assert.Equal(t, crypto.MustAddressFromHexString("0102030000000000000000000000000000000000"), value) - - // Make sure we see updates through pointer - tt.Foo = "Plums" - value, ok = rt.Get("Foo") - assert.True(t, ok) - assert.Equal(t, tt.Foo, value) -} - -func TestReflectTagged_Len(t *testing.T) { - rt, err := ReflectTags(&testTaggable{}) - require.NoError(t, err) - assert.Equal(t, 5, rt.Len()) -} - -func TestExplicitFields(t *testing.T) { - tt := testTaggable{ - Foo: "Thumbs", - Bar: "Numbed", - Baz: []byte{255, 255, 255}, - Address: crypto.Address{1, 2, 3}, - } - rt, err := ReflectTags(&tt, "Foo", "Address") - require.NoError(t, err) - - value, ok := rt.Get("Foo") - assert.True(t, ok) - assert.Equal(t, tt.Foo, value) - - value, ok = rt.Get("Address") - assert.True(t, ok) - assert.Equal(t, crypto.MustAddressFromHexString("0102030000000000000000000000000000000000"), value) - - _, ok = rt.Get("Bar") - assert.False(t, ok) - - _, ok = rt.Get("Barsss") - assert.False(t, ok) - - _, err = ReflectTags(&tt, "Foo", "Address", 
"Balloons") - require.Error(t, err) -} - -func TestReflectTagged_nil(t *testing.T) { - type testStruct struct { - Foo string - } - - var ts *testStruct - - rf, err := ReflectTags(ts) - require.NoError(t, err) - value, ok := rf.Get("Foo") - assert.False(t, ok) - assert.Equal(t, "", value) -} diff --git a/event/query/tags.go b/event/query/tags.go index 31ccbacf0..e5747074f 100644 --- a/event/query/tags.go +++ b/event/query/tags.go @@ -1,29 +1,18 @@ package query import ( - "sort" + "reflect" + "strings" ) +const defaultMaxTagDepth = 10 + type Tagged interface { - Keys() []string Get(key string) (value interface{}, ok bool) - // Len returns the number of tags. - Len() int } type TagMap map[string]interface{} -func MapFromTagged(tagged Tagged) map[string]interface{} { - tags := make(map[string]interface{}) - for _, k := range tagged.Keys() { - v, ok := tagged.Get(k) - if ok { - tags[k] = v - } - } - return tags -} - func (ts TagMap) Get(key string) (value interface{}, ok bool) { var vint interface{} vint, ok = ts[key] @@ -33,88 +22,72 @@ func (ts TagMap) Get(key string) (value interface{}, ok bool) { return vint, true } -func (ts TagMap) Len() int { - return len(ts) -} +type CombinedTags []interface{} -func (ts TagMap) Map() map[string]interface{} { - return ts +func TagsFor(vs ...interface{}) CombinedTags { + return vs } -func (ts TagMap) Keys() []string { - keys := make([]string, 0, len(ts)) - for k := range ts { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -type CombinedTags struct { - keys []string - ks map[string][]Tagged -} - -func NewCombinedTags() *CombinedTags { - return &CombinedTags{ - ks: make(map[string][]Tagged), +func (ct CombinedTags) Get(key string) (interface{}, bool) { + for _, t := range ct { + tagged, ok := t.(Tagged) + if ok { + v, ok := tagged.Get(key) + if ok { + return v, true + } + } + v, ok := GetReflect(reflect.ValueOf(t), key) + if ok { + return v, true + } } + return nil, false } -func MergeTags(tags ...Tagged) *CombinedTags { - ct := NewCombinedTags() - ct.AddTags(false, tags...) - return ct -} - -func ConcatTags(tags ...Tagged) *CombinedTags { - ct := NewCombinedTags() - ct.AddTags(true, tags...) 
- return ct +func GetReflect(rv reflect.Value, key string) (interface{}, bool) { + return GetReflectDepth(rv, key, defaultMaxTagDepth) } -// Adds each of tagsList to CombinedTags - choosing whether values for the same key should -// be concatenated or whether the first should value should stick -func (ct *CombinedTags) AddTags(concat bool, tagsList ...Tagged) { - for _, t := range tagsList { - for _, k := range t.Keys() { - if len(ct.ks[k]) == 0 { - ct.keys = append(ct.keys, k) - // Store reference to key-holder amongst Taggeds - ct.ks[k] = append(ct.ks[k], t) - } else if concat { - // Store additional tag reference only if concat, otherwise first key-holder wins - ct.ks[k] = append(ct.ks[k], t) - } - } - } -} +var zeroValue = reflect.Value{} -func (ct *CombinedTags) Get(key string) (interface{}, bool) { - ts := ct.ks[key] - if len(ts) == 0 { - return "", false +// Pull out values in a nested struct by following path +func GetReflectDepth(rv reflect.Value, key string, maxDepth int) (interface{}, bool) { + if maxDepth < 0 { + return nil, false } - values := make([]interface{}, 0, len(ts)) - for _, t := range ts { - value, ok := t.Get(key) - if ok { - values = append(values, value) + if rv.Kind() == reflect.Ptr { + if rv.IsNil() { + return nil, false } + rv = rv.Elem() } - if len(values) == 0 { + keys := strings.SplitN(key, ".", 2) + field := rv.FieldByName(keys[0]) + if field == zeroValue { return nil, false } - if len(values) == 1 { - return values[0], true + // If there there are unconsumed segments in the keys then descend + if len(keys) > 1 && (field.Kind() == reflect.Struct || + field.Kind() == reflect.Ptr && field.Elem().Kind() == reflect.Struct) { + return GetReflectDepth(field, keys[1], maxDepth-1) + } - return values, true + return field.Interface(), true } -func (ct *CombinedTags) Len() (length int) { - return len(ct.keys) +type taggedPrefix struct { + tagged Tagged + prefix string +} + +func TaggedPrefix(prefix string, tagged Tagged) *taggedPrefix { + return &taggedPrefix{ + prefix: prefix, + tagged: tagged, + } } -func (ct *CombinedTags) Keys() []string { - return ct.keys +func (t *taggedPrefix) Get(key string) (value interface{}, ok bool) { + return t.tagged.Get(strings.TrimPrefix(key, t.prefix)) } diff --git a/event/query/tags_test.go b/event/query/tags_test.go new file mode 100644 index 000000000..dd5c21369 --- /dev/null +++ b/event/query/tags_test.go @@ -0,0 +1,121 @@ +package query + +import ( + "reflect" + "testing" + + "github.com/hyperledger/burrow/binary" + "github.com/hyperledger/burrow/crypto" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetReflect(t *testing.T) { + type testTaggable struct { + Foo string + Bar string + Baz binary.HexBytes + Address crypto.Address + Indices []int + } + + t.Run("Basic", func(t *testing.T) { + tt := &testTaggable{ + Foo: "Thumbs", + Bar: "Numbed", + Baz: []byte{255, 255, 255}, + Address: crypto.Address{1, 2, 3}, + Indices: []int{5, 7, 9}, + } + + rv := reflect.ValueOf(tt) + value, ok := GetReflect(rv, "Foo") + assert.True(t, ok) + assert.Equal(t, tt.Foo, value) + + value, ok = GetReflect(rv, "Bar") + assert.True(t, ok) + assert.Equal(t, tt.Bar, value) + + value, ok = GetReflect(rv, "Baz") + assert.True(t, ok) + assert.Equal(t, binary.HexBytes{0xFF, 0xFF, 0xFF}, value) + + value, ok = GetReflect(rv, "Indices") + assert.True(t, ok) + assert.Equal(t, []int{5, 7, 9}, value) + + value, ok = GetReflect(rv, "Address") + assert.True(t, ok) + assert.Equal(t, 
crypto.MustAddressFromHexString("0102030000000000000000000000000000000000"), value) + + // Make sure we see updates through pointer + tt.Foo = "Plums" + value, ok = GetReflect(rv, "Foo") + assert.True(t, ok) + assert.Equal(t, tt.Foo, value) + }) + + type recursiveTaggable struct { + Tags1 testTaggable + Tags2 *testTaggable + Self *recursiveTaggable + } + t.Run("Recursive", func(t *testing.T) { + tt := &recursiveTaggable{ + Tags1: testTaggable{ + Foo: "Thumbs", + Bar: "Numbed", + Baz: []byte{255, 255, 255}, + Address: crypto.Address{1, 2, 3}, + Indices: []int{5, 7, 9}, + }, + Tags2: &testTaggable{ + Foo: "Thumbs2", + Bar: "Numbed2", + Baz: []byte{255, 255, 255}, + Address: crypto.Address{1, 2, 3}, + Indices: []int{4, 3, 2}, + }, + Self: &recursiveTaggable{ + Tags1: testTaggable{ + Foo: "DeepFoo", + }, + Self: &recursiveTaggable{ + Tags2: &testTaggable{ + Bar: "ReallyDeepBar", + }, + }, + }, + } + rv := reflect.ValueOf(tt) + v, ok := GetReflect(rv, "Tags1.Foo") + require.True(t, ok) + require.Equal(t, "Thumbs", v) + + // Shouldn't get this deep by default + v, ok = GetReflectDepth(rv, "Self.Tags1.Foo", 1) + require.False(t, ok) + + v, ok = GetReflectDepth(rv, "Self.Tags1.Foo", 2) + require.True(t, ok) + require.Equal(t, "DeepFoo", v) + + v, ok = GetReflectDepth(rv, "Self.Self.Tags2.Bar", 3) + require.True(t, ok) + require.Equal(t, "ReallyDeepBar", v) + }) + +} + +func TestReflectTagged_nil(t *testing.T) { + type testStruct struct { + Foo string + } + + var ts *testStruct + + value, ok := GetReflect(reflect.ValueOf(ts), "Foo") + assert.False(t, ok) + assert.Nil(t, value) +} diff --git a/execution/evm/abi/event_spec.go b/execution/evm/abi/event_spec.go index b111c0dd6..6061aa822 100644 --- a/execution/evm/abi/event_spec.go +++ b/execution/evm/abi/event_spec.go @@ -2,8 +2,10 @@ package abi import ( "encoding/json" + "reflect" "github.com/hyperledger/burrow/crypto/sha3" + "github.com/hyperledger/burrow/event/query" "github.com/tmthrgd/go-hex" ) @@ -28,12 +30,16 @@ type argumentJSON struct { const EventIDSize = 32 type EventSpec struct { - EventID EventID + ID EventID Inputs []Argument Name string Anonymous bool } +func (e *EventSpec) Get(key string) (interface{}, bool) { + return query.GetReflect(reflect.ValueOf(e), key) +} + func (e *EventSpec) UnmarshalJSON(data []byte) error { s := new(specJSON) err := json.Unmarshal(data, s) @@ -58,7 +64,7 @@ func (e *EventSpec) unmarshalSpec(s *specJSON) error { } } e.Name = s.Name - e.EventID = GetEventID(sig) + e.ID = GetEventID(sig) e.Inputs = inputs e.Anonymous = s.Anonymous return nil diff --git a/execution/evm/abi/event_spec_test.go b/execution/evm/abi/event_spec_test.go new file mode 100644 index 000000000..b19d6b431 --- /dev/null +++ b/execution/evm/abi/event_spec_test.go @@ -0,0 +1,23 @@ +package abi + +import ( + "testing" + + "github.com/hyperledger/burrow/execution/solidity" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEventSpec_Get(t *testing.T) { + spec, err := ReadSpec(solidity.Abi_EventEmitter) + require.NoError(t, err) + eventSpec := spec.EventsByName["ManyTypes2"] + + v, ok := eventSpec.Get("Name") + require.True(t, ok) + assert.Equal(t, "ManyTypes2", v) + + v, ok = eventSpec.Get("Inputs") + require.True(t, ok) + assert.Equal(t, eventSpec.Inputs, v) +} diff --git a/execution/evm/abi/packing.go b/execution/evm/abi/packing.go index 7e6a575d6..e7d04eb39 100644 --- a/execution/evm/abi/packing.go +++ b/execution/evm/abi/packing.go @@ -111,7 +111,7 @@ func argGetter(argSpec []Argument, args 
[]interface{}, ptr bool) (func(int) inte func packTopics(eventSpec *EventSpec, getArg func(int) interface{}) ([]binary.Word256, error) { topics := make([]binary.Word256, 0, 5) if !eventSpec.Anonymous { - topics = append(topics, binary.Word256(eventSpec.EventID)) + topics = append(topics, binary.Word256(eventSpec.ID)) } for i, a := range eventSpec.Inputs { if a.Indexed { diff --git a/execution/evm/abi/spec.go b/execution/evm/abi/spec.go index ddb49a6c6..54e4ace27 100644 --- a/execution/evm/abi/spec.go +++ b/execution/evm/abi/spec.go @@ -69,7 +69,7 @@ func ReadSpec(specBytes []byte) (*Spec, error) { return nil, err } abiSpec.EventsByName[ev.Name] = ev - abiSpec.EventsByID[ev.EventID] = ev + abiSpec.EventsByID[ev.ID] = ev case "function": inputs, err := readArgSpec(s.Inputs) if err != nil { @@ -107,7 +107,7 @@ func MergeSpec(abiSpec []*Spec) *Spec { // Loop over the signatures, as these are less likely to have collisions for _, e := range s.EventsByID { newSpec.EventsByName[e.Name] = e - newSpec.EventsByID[e.EventID] = e + newSpec.EventsByID[e.ID] = e } } diff --git a/execution/exec/block_execution.go b/execution/exec/block_execution.go index 3cf328d69..ad664ba36 100644 --- a/execution/exec/block_execution.go +++ b/execution/exec/block_execution.go @@ -2,6 +2,7 @@ package exec import ( "fmt" + "reflect" "github.com/hyperledger/burrow/event" "github.com/hyperledger/burrow/event/query" @@ -48,23 +49,19 @@ func (be *BlockExecution) AppendTxs(tail ...*TxExecution) { } // Tags -type TaggedBlockExecution struct { - query.Tagged - *BlockExecution -} -func (be *BlockExecution) Tagged() *TaggedBlockExecution { - return &TaggedBlockExecution{ - Tagged: query.MergeTags( - query.TagMap{ - event.EventIDKey: EventStringBlockExecution(be.Height), - event.EventTypeKey: be.EventType(), - }, - query.MustReflectTags(be), - query.MustReflectTags(be.Header), - ), - BlockExecution: be, +func (be *BlockExecution) Get(key string) (interface{}, bool) { + switch key { + case event.EventIDKey: + return EventStringBlockExecution(be.Height), true + case event.EventTypeKey: + return be.EventType(), true + } + v, ok := query.GetReflect(reflect.ValueOf(be.Header), key) + if ok { + return v, true } + return query.GetReflect(reflect.ValueOf(be), key) } func QueryForBlockExecutionFromHeight(height uint64) *query.Builder { @@ -74,45 +71,3 @@ func QueryForBlockExecutionFromHeight(height uint64) *query.Builder { func QueryForBlockExecution() *query.Builder { return query.NewBuilder().AndEquals(event.EventTypeKey, TypeBlockExecution) } - -type TaggedBlockEvent struct { - query.Tagged - *StreamEvent -} - -func (ev *StreamEvent) EventType() EventType { - switch { - case ev.BeginBlock != nil: - return TypeBeginBlock - case ev.BeginTx != nil: - return TypeBeginTx - case ev.Envelope != nil: - return TypeEnvelope - case ev.Event != nil: - return ev.Event.EventType() - case ev.EndTx != nil: - return TypeEndTx - case ev.EndBlock != nil: - return TypeEndBlock - } - return TypeUnknown -} - -func (ev *StreamEvent) Tagged() *TaggedBlockEvent { - return &TaggedBlockEvent{ - Tagged: query.MergeTags( - query.TagMap{ - event.EventTypeKey: ev.EventType(), - }, - query.MustReflectTags(ev.BeginBlock, "Height"), - query.MustReflectTags(ev.BeginBlock.GetHeader()), - query.MustReflectTags(ev.BeginTx), - query.MustReflectTags(ev.BeginTx.GetTxHeader()), - ev.Envelope.Tagged(), - ev.Event.Tagged(), - query.MustReflectTags(ev.EndTx), - query.MustReflectTags(ev.EndBlock, "Height"), - ), - StreamEvent: ev, - } -} diff --git 
a/execution/exec/block_execution_test.go b/execution/exec/block_execution_test.go index d1380b963..009efa303 100644 --- a/execution/exec/block_execution_test.go +++ b/execution/exec/block_execution_test.go @@ -3,6 +3,7 @@ package exec import ( "testing" + "github.com/hyperledger/burrow/event/query" "github.com/stretchr/testify/require" "github.com/tendermint/tendermint/abci/types" ) @@ -20,3 +21,18 @@ func TestBlockExecution_Marshal(t *testing.T) { beOut := new(BlockExecution) require.NoError(t, beOut.Unmarshal(bs)) } + +func TestBlockExecution_StreamEvents(t *testing.T) { + be := &BlockExecution{ + Header: &types.Header{ + Height: 2, + AppHash: []byte{2}, + ProposerAddress: []byte{1, 2, 33}, + }, + } + + qry, err := query.NewBuilder().AndContains("Height", "2").Query() + require.NoError(t, err) + match := qry.Matches(be) + require.True(t, match) +} diff --git a/execution/exec/event.go b/execution/exec/event.go index af11615f0..ffa600fcc 100644 --- a/execution/exec/event.go +++ b/execution/exec/event.go @@ -1,15 +1,14 @@ package exec import ( - "reflect" - "fmt" + "reflect" "github.com/hyperledger/burrow/event" "github.com/hyperledger/burrow/event/query" ) -var eventMessageTag = query.TagMap{event.MessageTypeKey: reflect.TypeOf(&Event{}).String()} +var eventMessageType = reflect.TypeOf(&Event{}).String() type EventType uint32 @@ -99,34 +98,10 @@ func (ev *Event) Body() string { } // Tags -type TaggedEvent struct { - query.Tagged - *Event -} - -type TaggedEvents []*TaggedEvent - -func (ev *Event) Tagged() *TaggedEvent { - if ev == nil { - return &TaggedEvent{ - Tagged: query.TagMap{}, - } - } - return &TaggedEvent{ - Tagged: query.MergeTags( - query.MustReflectTags(ev.Header), - eventMessageTag, - query.MustReflectTags(ev.Input), - query.MustReflectTags(ev.Output), - query.MustReflectTags(ev.Call), - ev.Log, - ), - Event: ev, - } -} +type Events []*Event -func (tevs TaggedEvents) Filter(qry query.Query) TaggedEvents { - var filtered TaggedEvents +func (tevs Events) Filter(qry query.Query) Events { + var filtered Events for _, tev := range tevs { if qry.Matches(tev) { filtered = append(filtered, tev) @@ -134,3 +109,22 @@ func (tevs TaggedEvents) Filter(qry query.Query) TaggedEvents { } return filtered } + +func (ev *Event) Get(key string) (value interface{}, ok bool) { + switch key { + case event.MessageTypeKey: + return eventMessageType, true + } + if ev == nil { + return nil, false + } + v, ok := ev.Log.Get(key) + if ok { + return v, true + } + v, ok = query.GetReflect(reflect.ValueOf(ev.Header), key) + if ok { + return v, true + } + return query.GetReflect(reflect.ValueOf(ev), key) +} diff --git a/execution/exec/event_test.go b/execution/exec/event_test.go index faee34834..f57b2ed88 100644 --- a/execution/exec/event_test.go +++ b/execution/exec/event_test.go @@ -15,52 +15,49 @@ import ( func TestEventTagQueries(t *testing.T) { ev := logEvent() - tev := ev.Tagged() - qb := query.NewBuilder().AndEquals(event.EventTypeKey, TypeLog.String()) qry, err := qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) qb = qb.AndContains(event.EventIDKey, "bar") qry, err = qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) - qb = qb.AndEquals(event.TxHashKey, hex.EncodeUpperToString(tev.Header.TxHash)) + qb = qb.AndEquals(event.TxHashKey, hex.EncodeUpperToString(ev.Header.TxHash)) qry, err = qb.Query() require.NoError(t, err) - 
assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) - qb = qb.AndGreaterThanOrEqual(event.HeightKey, tev.Header.Height) + qb = qb.AndGreaterThanOrEqual(event.HeightKey, ev.Header.Height) qry, err = qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) - qb = qb.AndStrictlyLessThan(event.IndexKey, tev.Header.Index+1) + qb = qb.AndStrictlyLessThan(event.IndexKey, ev.Header.Index+1) qry, err = qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) qb = qb.AndEquals(event.AddressKey, ev.Log.Address) qry, err = qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) qb = qb.AndEquals(LogNTextKey(0), "marmot") qry, err = qb.Query() require.NoError(t, err) - assert.True(t, qry.Matches(tev)) + assert.True(t, qry.Matches(ev)) require.NoError(t, qry.MatchError()) t.Logf("Query: %v", qry) - t.Logf("Keys: %v", tev.Keys()) } func BenchmarkMatching(b *testing.B) { @@ -75,10 +72,9 @@ func BenchmarkMatching(b *testing.B) { AndEquals(LogNTextKey(0), "marmot") qry, err := qb.Query() require.NoError(b, err) - tev := ev.Tagged() b.StartTimer() for i := 0; i < b.N; i++ { - qry.Matches(tev) + qry.Matches(ev) } } diff --git a/execution/exec/log_event.go b/execution/exec/log_event.go index 6ab61a03f..9d245963c 100644 --- a/execution/exec/log_event.go +++ b/execution/exec/log_event.go @@ -20,6 +20,7 @@ import ( . "github.com/hyperledger/burrow/binary" "github.com/hyperledger/burrow/event" + "github.com/hyperledger/burrow/execution/evm/abi" "github.com/tmthrgd/go-hex" ) @@ -77,10 +78,8 @@ func (log *LogEvent) GetTopic(i int) Word256 { return Word256{} } -func (log *LogEvent) Len() int { - return len(logTagKeys) -} - -func (log *LogEvent) Keys() []string { - return logTagKeys +func (log *LogEvent) SolidityEventID() abi.EventID { + var eventID abi.EventID + copy(eventID[:], log.Topics[0].Bytes()) + return eventID } diff --git a/execution/exec/stream_event.go b/execution/exec/stream_event.go index 14e6bb254..4db6b04b2 100644 --- a/execution/exec/stream_event.go +++ b/execution/exec/stream_event.go @@ -1,7 +1,11 @@ package exec import ( + "fmt" "io" + + "github.com/hyperledger/burrow/event" + "github.com/hyperledger/burrow/event/query" ) type EventStream interface { @@ -17,11 +21,49 @@ func (ses *StreamEvents) Recv() (*StreamEvent, error) { return ev, nil } +func (ev *StreamEvent) EventType() EventType { + switch { + case ev.BeginBlock != nil: + return TypeBeginBlock + case ev.BeginTx != nil: + return TypeBeginTx + case ev.Envelope != nil: + return TypeEnvelope + case ev.Event != nil: + return ev.Event.EventType() + case ev.EndTx != nil: + return TypeEndTx + case ev.EndBlock != nil: + return TypeEndBlock + } + return TypeUnknown +} + +func (ev *StreamEvent) Get(key string) (interface{}, bool) { + switch key { + case event.EventTypeKey: + return ev.EventType(), true + } + // Flatten this sum type + return query.TagsFor( + ev.GetBeginBlock().GetHeader(), + ev.BeginBlock, + ev.GetBeginTx().GetTxHeader(), + ev.BeginTx, + ev.Envelope, + ev.Event, + ev.EndTx, + ev.EndBlock).Get(key) +} + func ConsumeBlockExecution(stream EventStream) (block *BlockExecution, err error) { var ev *StreamEvent accum := new(BlockAccumulator) for ev, err = stream.Recv(); err == nil; ev, err = stream.Recv() { - block = accum.Consume(ev) + 
block, err = accum.Consume(ev) + if err != nil { + return nil, err + } if block != nil { return block, nil } @@ -37,7 +79,7 @@ type BlockAccumulator struct { // Consume will add the StreamEvent passed to the block accumulator and if the block complete is complete return the // BlockExecution, otherwise will return nil -func (ba *BlockAccumulator) Consume(ev *StreamEvent) *BlockExecution { +func (ba *BlockAccumulator) Consume(ev *StreamEvent) (*BlockExecution, error) { switch { case ev.BeginBlock != nil: ba.block = &BlockExecution{ @@ -45,59 +87,80 @@ func (ba *BlockAccumulator) Consume(ev *StreamEvent) *BlockExecution { Header: ev.BeginBlock.Header, } case ev.BeginTx != nil, ev.Envelope != nil, ev.Event != nil, ev.EndTx != nil: - txe := ba.stack.Consume(ev) + txe, err := ba.stack.Consume(ev) + if err != nil { + return nil, err + } if txe != nil { ba.block.TxExecutions = append(ba.block.TxExecutions, txe) } case ev.EndBlock != nil: - return ba.block + return ba.block, nil } - return nil + return nil, nil } // TxStack is able to consume potentially nested txs type TxStack []*TxExecution -func (stack TxStack) Peek() *TxExecution { - return stack[len(stack)-1] -} - func (stack *TxStack) Push(txe *TxExecution) { // Put this txe in the parent position *stack = append(*stack, txe) } -func (stack *TxStack) Pop() *TxExecution { +func (stack TxStack) Peek() (*TxExecution, error) { + if len(stack) < 1 { + return nil, fmt.Errorf("tried to peek from an empty TxStack - might be missing essential StreamEvents") + } + return stack[len(stack)-1], nil +} + +func (stack *TxStack) Pop() (*TxExecution, error) { s := *stack - txc := s.Peek() + txc, err := s.Peek() + if err != nil { + return nil, err + } *stack = s[:len(s)-1] - return txc + return txc, nil } // Consume will add the StreamEvent to the transaction stack and if that completes a single outermost transaction // returns the TxExecution otherwise will return nil -func (stack *TxStack) Consume(ev *StreamEvent) *TxExecution { +func (stack *TxStack) Consume(ev *StreamEvent) (*TxExecution, error) { switch { case ev.BeginTx != nil: stack.Push(initTx(ev.BeginTx)) case ev.Envelope != nil: - txe := stack.Peek() + txe, err := stack.Peek() + if err != nil { + return nil, err + } txe.Envelope = ev.Envelope txe.Receipt = txe.Envelope.Tx.GenerateReceipt() case ev.Event != nil: - txe := stack.Peek() + txe, err := stack.Peek() + if err != nil { + return nil, err + } txe.Events = append(txe.Events, ev.Event) case ev.EndTx != nil: - txe := stack.Pop() + txe, err := stack.Pop() + if err != nil { + return nil, err + } if len(*stack) == 0 { // This terminates the outermost transaction - return txe + return txe, nil } // If there is a parent tx on the stack add this tx as child - parent := stack.Peek() + parent, err := stack.Peek() + if err != nil { + return nil, err + } parent.TxExecutions = append(parent.TxExecutions, txe) } - return nil + return nil, nil } func initTx(beginTx *BeginTx) *TxExecution { diff --git a/execution/exec/stream_event_test.go b/execution/exec/stream_event_test.go index 1a935c9ac..001b56979 100644 --- a/execution/exec/stream_event_test.go +++ b/execution/exec/stream_event_test.go @@ -20,9 +20,11 @@ func TestTxExecution(t *testing.T) { stack := new(TxStack) var txeOut *TxExecution + var err error for _, ev := range txe.StreamEvents() { - txeOut = stack.Consume(ev) + txeOut, err = stack.Consume(ev) + require.NoError(t, err) if txeOut != nil { require.Equal(t, txe, txeOut) } @@ -51,9 +53,10 @@ func TestConsumeBlockExecution(t *testing.T) { stack := 
new(BlockAccumulator) var beOut *BlockExecution - + var err error for _, ev := range be.StreamEvents() { - beOut = stack.Consume(ev) + beOut, err = stack.Consume(ev) + require.NoError(t, err) if beOut != nil { require.Equal(t, be, beOut) } diff --git a/execution/exec/tx_execution.go b/execution/exec/tx_execution.go index 8d7ffacc3..effe87acb 100644 --- a/execution/exec/tx_execution.go +++ b/execution/exec/tx_execution.go @@ -2,6 +2,7 @@ package exec import ( "fmt" + "reflect" "strings" "github.com/hyperledger/burrow/binary" @@ -186,6 +187,10 @@ func (txe *TxExecution) CallError() *errors.CallError { } } +func (txe *TxExecution) TaggedEvents() Events { + return txe.Events +} + // Set result func (txe *TxExecution) Return(returnValue []byte, gasUsed uint64) { if txe.Result == nil { @@ -220,35 +225,14 @@ func (txe *TxExecution) Append(tail ...*Event) { } // Tags -type TaggedTxExecution struct { - query.Tagged - *TxExecution -} - -func (txe *TxExecution) Tagged() *TaggedTxExecution { - var tagged query.Tagged = query.TagMap{} - if txe != nil { - tagged = query.MergeTags( - query.TagMap{ - event.EventIDKey: EventStringTxExecution(txe.TxHash), - event.EventTypeKey: txe.EventType()}, - query.MustReflectTags(txe), - query.MustReflectTags(txe.TxHeader), - txe.Envelope.Tagged(), - ) - } - return &TaggedTxExecution{ - Tagged: tagged, - TxExecution: txe, - } -} - -func (txe *TxExecution) TaggedEvents() TaggedEvents { - tevs := make(TaggedEvents, len(txe.Events)) - for i, ev := range txe.Events { - tevs[i] = ev.Tagged() - } - return tevs +func (txe *TxExecution) Get(key string) (interface{}, bool) { + switch key { + case event.EventIDKey: + return EventStringTxExecution(txe.TxHash), true + case event.EventTypeKey: + return txe.EventType(), true + } + return query.GetReflect(reflect.ValueOf(txe), key) } func QueryForTxExecution(txHash []byte) query.Queryable { diff --git a/execution/execution.go b/execution/execution.go index 97c2909e6..755bed5a9 100644 --- a/execution/execution.go +++ b/execution/execution.go @@ -463,7 +463,7 @@ func (exe *executor) updateSequenceNumbers(txEnv *txs.Envelope) error { func (exe *executor) publishBlock(blockExecution *exec.BlockExecution) { for _, txe := range blockExecution.TxExecutions { - publishErr := exe.emitter.Publish(context.Background(), txe, txe.Tagged()) + publishErr := exe.emitter.Publish(context.Background(), txe, txe) if publishErr != nil { exe.logger.InfoMsg("Error publishing TxExecution", "height", blockExecution.Height, @@ -471,7 +471,7 @@ func (exe *executor) publishBlock(blockExecution *exec.BlockExecution) { structure.ErrorKey, publishErr) } } - publishErr := exe.emitter.Publish(context.Background(), blockExecution, blockExecution.Tagged()) + publishErr := exe.emitter.Publish(context.Background(), blockExecution, blockExecution) if publishErr != nil { exe.logger.InfoMsg("Error publishing BlockExecution", "height", blockExecution.Height, diff --git a/execution/names/names.go b/execution/names/names.go index ffef04ed2..fd04f44fd 100644 --- a/execution/names/names.go +++ b/execution/names/names.go @@ -16,6 +16,7 @@ package names import ( "fmt" + "reflect" "github.com/hyperledger/burrow/event/query" ) @@ -40,16 +41,8 @@ func (e *Entry) String() string { return fmt.Sprintf("NameEntry{%v -> %v; Expires: %v, Owner: %v}", e.Name, e.Data, e.Expires, e.Owner) } -type TaggedEntry struct { - *Entry - query.Tagged -} - -func (e *Entry) Tagged() *TaggedEntry { - return &TaggedEntry{ - Entry: e, - Tagged: query.MustReflectTags(e), - } +func (e *Entry) Get(key string) 
(value interface{}, ok bool) { + return query.GetReflect(reflect.ValueOf(e), key) } type Reader interface { diff --git a/execution/state/events.go b/execution/state/events.go index 265499dd7..aa422f17c 100644 --- a/execution/state/events.go +++ b/execution/state/events.go @@ -82,19 +82,23 @@ func (s *ReadState) IterateStreamEvents(startHeight, endHeight *uint64, consumer } func (s *ReadState) TxsAtHeight(height uint64) ([]*exec.TxExecution, error) { + const errHeader = "TxAtHeight():" var stack exec.TxStack var txExecutions []*exec.TxExecution err := s.IterateStreamEvents(&height, &height, func(ev *exec.StreamEvent) error { // Keep trying to consume TxExecutions at from events at this height - txe := stack.Consume(ev) + txe, err := stack.Consume(ev) + if err != nil { + return fmt.Errorf("%s %v", errHeader, err) + } if txe != nil { txExecutions = append(txExecutions, txe) } return nil }) if err != nil && err != io.EOF { - return nil, err + return nil, fmt.Errorf("%s %v", errHeader, err) } return txExecutions, nil } @@ -133,7 +137,10 @@ func (s *ReadState) TxByHash(txHash []byte) (*exec.TxExecution, error) { return nil, err } - txe := stack.Consume(ev) + txe, err := stack.Consume(ev) + if err != nil { + return nil, fmt.Errorf("%s: %v", errHeader, err) + } if txe != nil { return txe, nil } diff --git a/execution/transactor_test.go b/execution/transactor_test.go index 85e18e8b5..f796ba203 100644 --- a/execution/transactor_test.go +++ b/execution/transactor_test.go @@ -56,7 +56,7 @@ func TestTransactor_BroadcastTxSync(t *testing.T) { func(tx tmTypes.Tx, cb func(*abciTypes.Response)) error { txe := exec.NewTxExecution(txEnv) txe.Height = height - err := evc.Publish(context.Background(), txe, txe.Tagged()) + err := evc.Publish(context.Background(), txe, txe) if err != nil { return err } diff --git a/forensics/revert_test.go b/forensics/revert_test.go index c0d687d54..478be23b9 100644 --- a/forensics/revert_test.go +++ b/forensics/revert_test.go @@ -30,7 +30,10 @@ func testLoadStudio(t *testing.T, i int) { accum := new(exec.BlockAccumulator) buf := new(bytes.Buffer) err = st.IterateStreamEvents(nil, nil, func(ev *exec.StreamEvent) error { - be := accum.Consume(ev) + be, err := accum.Consume(ev) + if err != nil { + return err + } if be != nil { buf.WriteString(fmt.Sprintf("Block %d: %X\n\n", be.Height, be.Header.AppHash)) for _, txe := range be.TxExecutions { diff --git a/go.mod b/go.mod index 84f8efa0b..1c4e30d4d 100644 --- a/go.mod +++ b/go.mod @@ -37,6 +37,7 @@ require ( github.com/jmoiron/sqlx v1.2.0 github.com/kr/pretty v0.1.0 // indirect github.com/lib/pq v1.1.1 + github.com/magiconair/properties v1.8.0 github.com/mattn/go-colorable v0.1.2 // indirect github.com/mattn/go-sqlite3 v1.10.0 github.com/monax/relic v2.0.0+incompatible diff --git a/integration/rpcevents/execution_events_server_test.go b/integration/rpcevents/execution_events_server_test.go index 5bb9daea4..cb6289011 100644 --- a/integration/rpcevents/execution_events_server_test.go +++ b/integration/rpcevents/execution_events_server_test.go @@ -131,6 +131,7 @@ func TestExecutionEventsTest(t *testing.T) { assert.Contains(t, strconv.FormatUint(be.Height, 10), "2") return nil }) + require.Equal(t, io.EOF, err) require.Len(t, blocks, 2, "should record blocks 2 and 12") assert.Equal(t, uint64(2), blocks[0].Height) assert.Equal(t, uint64(12), blocks[1].Height) @@ -141,7 +142,8 @@ func TestExecutionEventsTest(t *testing.T) { t.Run("GetEventsSend", func(t *testing.T) { numSends := 1100 request := &rpcevents.BlocksRequest{BlockRange: 
doSends(t, numSends, tcli, kern, inputAddress0, 2004)} - responses := getEvents(t, request, ecli) + responses, err := getEvents(t, request, ecli) + require.NoError(t, err) assert.Equal(t, numSends*2, countEventsAndCheckConsecutive(t, responses), "should receive 1 input, 1 output per send") }) @@ -152,7 +154,8 @@ func TestExecutionEventsTest(t *testing.T) { BlockRange: doSends(t, numSends, tcli, kern, inputAddress1, 2004), Query: "TxHash CONTAINS 'AA'", } - responses := getEvents(t, request, ecli) + responses, err := getEvents(t, request, ecli) + require.NoError(t, err) for _, response := range responses { for _, ev := range response.Events { require.Contains(t, ev.Header.TxHash.String(), "AA") @@ -164,10 +167,11 @@ func TestExecutionEventsTest(t *testing.T) { numSends := 500 request := &rpcevents.BlocksRequest{ BlockRange: doSends(t, numSends, tcli, kern, inputAddress0, 999), - Query: query.NewBuilder().AndEquals("Address", inputAddress0.String()). + Query: query.NewBuilder().AndEquals("Input.Address", inputAddress0.String()). AndEquals(event.EventTypeKey, exec.TypeAccountInput.String()).String(), } - responses := getEvents(t, request, ecli) + responses, err := getEvents(t, request, ecli) + require.NoError(t, err) assert.Equal(t, numSends, countEventsAndCheckConsecutive(t, responses), "should receive every single input event per send") }) @@ -189,14 +193,15 @@ func TestExecutionEventsTest(t *testing.T) { Query: query.Must(query.NewBuilder().AndEquals(event.EventIDKey, exec.EventStringLogEvent(contractAddress)). AndEquals(event.TxHashKey, txe.TxHash).Query()).String(), } - evs := getEvents(t, request, ecli) + evs, err := getEvents(t, request, ecli) + require.NoError(t, err) n := countEventsAndCheckConsecutive(t, evs) assert.Equal(t, 0, n, "should not see reverted events") }) }) } -func getEvents(t *testing.T, request *rpcevents.BlocksRequest, ecli rpcevents.ExecutionEventsClient) []*rpcevents.EventsResponse { +func getEvents(t *testing.T, request *rpcevents.BlocksRequest, ecli rpcevents.ExecutionEventsClient) ([]*rpcevents.EventsResponse, error) { evs, err := ecli.Events(context.Background(), request) require.NoError(t, err) var responses []*rpcevents.EventsResponse @@ -206,11 +211,11 @@ func getEvents(t *testing.T, request *rpcevents.BlocksRequest, ecli rpcevents.Ex if err == io.EOF { break } - require.NoError(t, err) + return nil, err } responses = append(responses, resp) } - return responses + return responses, nil } func doSends(t *testing.T, numSends int, cli rpctransact.TransactClient, kern *core.Kernel, inputAddress crypto.Address, diff --git a/integration/rpctransact/call_test.go b/integration/rpctransact/call_test.go index 6f98b4d28..a040f87d9 100644 --- a/integration/rpctransact/call_test.go +++ b/integration/rpctransact/call_test.go @@ -359,7 +359,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) evAbi := spec.EventsByName["ChangeLevel"] err = abi.UnpackEvent(&evAbi, log.Topics, log.Data, &direction, &depth) require.NoError(t, err) - assert.Equal(t, evAbi.EventID.Bytes(), log.Topics[0].Bytes()) + assert.Equal(t, evAbi.ID.Bytes(), log.Topics[0].Bytes()) assert.Equal(t, int64(18), depth) assert.Equal(t, "Upsie!", direction) return @@ -381,7 +381,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) evAbi := spec.EventsByName["ManyTypes"] data := abi.GetPackingTypes(evAbi.Inputs) // Check signature - assert.Equal(t, evAbi.EventID.Bytes(), log.Topics[0].Bytes()) + assert.Equal(t, evAbi.ID.Bytes(), 
log.Topics[0].Bytes()) err = abi.UnpackEvent(&evAbi, log.Topics, log.Data.Bytes(), data...) require.NoError(t, err) @@ -390,7 +390,7 @@ func testCallTx(t *testing.T, kern *core.Kernel, cli rpctransact.TransactClient) expectedHash := h.Sum(nil) // "Downsie!", true, "Donaudampfschifffahrtselektrizitätenhauptbetriebswerkbauunterbeamtengesellschaft", 102, 42, 'hash') b := *data[0].(*[]byte) - assert.Equal(t, evAbi.EventID.Bytes(), log.Topics[0].Bytes()) + assert.Equal(t, evAbi.ID.Bytes(), log.Topics[0].Bytes()) assert.Equal(t, "Downsie!", string(bytes.Trim(b, "\x00"))) assert.Equal(t, true, *data[1].(*bool)) assert.Equal(t, "Donaudampfschifffahrtselektrizitätenhauptbetriebswerkbauunterbeamtengesellschaft", *data[2].(*string)) diff --git a/rpc/rpcevents/execution_events_server.go b/rpc/rpcevents/execution_events_server.go index f4b468db5..e25ce8273 100644 --- a/rpc/rpcevents/execution_events_server.go +++ b/rpc/rpcevents/execution_events_server.go @@ -73,7 +73,7 @@ func (ees *executionEventsServer) Stream(request *BlocksRequest, stream Executio return fmt.Errorf("could not parse TxExecution query: %v", err) } return ees.streamEvents(stream.Context(), request.BlockRange, func(ev *exec.StreamEvent) error { - if qry.Matches(ev.Tagged()) { + if qry.Matches(ev) { return stream.Send(ev) } return nil @@ -81,6 +81,7 @@ func (ees *executionEventsServer) Stream(request *BlocksRequest, stream Executio } func (ees *executionEventsServer) Events(request *BlocksRequest, stream ExecutionEvents_EventsServer) error { + const errHeader = "Events()" qry, err := query.NewOrEmpty(request.Query) if err != nil { return fmt.Errorf("could not parse Event query: %v", err) @@ -99,10 +100,13 @@ func (ees *executionEventsServer) Events(request *BlocksRequest, stream Executio default: // We need to consume transaction to exclude events belong to an exceptional transaction - txe := stack.Consume(sev) + txe, err := stack.Consume(sev) + if err != nil { + return fmt.Errorf("%s: %v", errHeader, err) + } if txe != nil && txe.Exception == nil { for _, ev := range txe.Events { - if qry.Matches(ev.Tagged()) { + if qry.Matches(ev) { response.Events = append(response.Events, ev) } } diff --git a/rpc/rpcquery/query_server.go b/rpc/rpcquery/query_server.go index e20274c8c..9cc4a8036 100644 --- a/rpc/rpcquery/query_server.go +++ b/rpc/rpcquery/query_server.go @@ -119,7 +119,7 @@ func (qs *queryServer) ListAccounts(param *ListAccountsParam, stream Query_ListA } var streamErr error err = qs.accounts.IterateAccounts(func(acc *acm.Account) error { - if qry.Matches(acc.Tagged()) { + if qry.Matches(acc) { return stream.Send(acc) } else { return nil @@ -148,7 +148,7 @@ func (qs *queryServer) ListNames(param *ListNamesParam, stream Query_ListNamesSe } var streamErr error err = qs.nameReg.IterateNames(func(entry *names.Entry) error { - if qry.Matches(entry.Tagged()) { + if qry.Matches(entry) { return stream.Send(entry) } else { return nil diff --git a/txs/envelope.go b/txs/envelope.go index 776f23be2..d54318461 100644 --- a/txs/envelope.go +++ b/txs/envelope.go @@ -2,6 +2,7 @@ package txs import ( "fmt" + "reflect" "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/acm/acmstate" @@ -160,10 +161,13 @@ func (txEnv *Envelope) Sign(signingAccounts ...acm.AddressableSigner) error { return nil } -func (txEnv *Envelope) Tagged() query.Tagged { - if txEnv != nil { - return query.MergeTags(query.MustReflectTags(txEnv, "Signatories"), txEnv.Tx.Tagged()) - } else { - return query.TagMap{} +func (txEnv *Envelope) Get(key string) (interface{}, 
bool) { + if txEnv == nil { + return nil, false } + v, ok := query.GetReflect(reflect.ValueOf(txEnv), key) + if ok { + return v, true + } + return txEnv.Tx.Get(key) } diff --git a/txs/tx.go b/txs/tx.go index 3a7c56b36..6f99e668a 100644 --- a/txs/tx.go +++ b/txs/tx.go @@ -18,6 +18,7 @@ import ( "crypto/sha256" "encoding/json" "fmt" + "reflect" "github.com/hyperledger/burrow/acm" "github.com/hyperledger/burrow/binary" @@ -179,8 +180,12 @@ func (tx *Tx) Rehash() []byte { return tx.txHash } -func (tx *Tx) Tagged() query.Tagged { - return query.MergeTags(query.MustReflectTags(tx), query.MustReflectTags(tx.Payload)) +func (tx *Tx) Get(key string) (interface{}, bool) { + v, ok := query.GetReflect(reflect.ValueOf(tx), key) + if ok { + return v, true + } + return query.GetReflect(reflect.ValueOf(tx.Payload), key) } // Generate a transaction Receipt containing the Tx hash and other information if the Tx is call. diff --git a/txs/tx_test.go b/txs/tx_test.go index 20d444a38..a2d272901 100644 --- a/txs/tx_test.go +++ b/txs/tx_test.go @@ -66,15 +66,40 @@ func TestSendTx(t *testing.T) { testTxSignVerify(t, sendTx) tx := Enclose("Foo", sendTx).Tx - value, ok := tx.Tagged().Get("Inputs") + value, ok := tx.Get("Inputs") require.True(t, ok) assert.Equal(t, sendTx.Inputs, value) - value, ok = tx.Tagged().Get("ChainID") + value, ok = tx.Get("ChainID") require.True(t, ok) assert.Equal(t, "Foo", value) } +func TestCallTx(t *testing.T) { + + toAddress := makePrivateAccount("contract1").GetAddress() + callTx := &payload.CallTx{ + Input: &payload.TxInput{ + Address: makePrivateAccount("input1").GetAddress(), + Amount: 12345, + Sequence: 67890, + }, + Address: &toAddress, + GasLimit: 111, + Fee: 222, + Data: []byte("data1"), + } + txEnv := Enclose("Chain1", callTx) + + v, ok := txEnv.Get("Input.Amount") + require.True(t, ok) + assert.Equal(t, callTx.Input.Amount, v) + + v, ok = txEnv.Get("Address") + require.True(t, ok) + assert.Equal(t, callTx.Address, v) +} + func TestCallTxSignable(t *testing.T) { toAddress := makePrivateAccount("contract1").GetAddress() callTx := &payload.CallTx{ @@ -162,7 +187,7 @@ func TestTxWrapper_MarshalJSON(t *testing.T) { testTxSignVerify(t, callTx) tx := Enclose("Foo", callTx).Tx - value, ok := tx.Tagged().Get("Input") + value, ok := tx.Get("Input") require.True(t, ok) assert.Equal(t, callTx.Input, value) } diff --git a/vent/service/abis.go b/vent/service/abi_provider.go similarity index 65% rename from vent/service/abis.go rename to vent/service/abi_provider.go index b63f77703..c339663aa 100644 --- a/vent/service/abis.go +++ b/vent/service/abi_provider.go @@ -10,15 +10,18 @@ import ( "github.com/hyperledger/burrow/rpc/rpcquery" ) +type EventSpecGetter func(abi.EventID, crypto.Address) (*abi.EventSpec, error) + // AbiProvider provides a method for loading ABIs from disk, and retrieving them from burrow on-demand type AbiProvider struct { abiSpec *abi.Spec cli rpcquery.QueryClient + logger *logging.Logger } // NewAbiProvider loads ABIs from the filesystem. A set of zero or more files or directories can be passed in the path // argument. If an event is encountered for which no ABI is known, it is retrieved from burrow -func NewAbiProvider(paths []string, cli rpcquery.QueryClient) (provider *AbiProvider, err error) { +func NewAbiProvider(paths []string, cli rpcquery.QueryClient, logger *logging.Logger) (provider *AbiProvider, err error) { abiSpec := &abi.Spec{} if len(paths) > 0 { abiSpec, err = abi.LoadPath(paths...) 
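
Note on the lookup mechanism: the Get implementations introduced in this patch (EventSpec.Get, Envelope.Get, Tx.Get and the others above) all funnel through query.GetReflect, while query.TagsFor and query.TaggedPrefix combine several tag sources under a single lookup. The following is a minimal sketch of that flow, assuming the event/query package at this revision; the input, callTx and eventSpec types are illustrative stand-ins, not the burrow payload or ABI types.

package main

import (
	"fmt"
	"reflect"

	"github.com/hyperledger/burrow/event/query"
)

// Illustrative stand-ins; any struct or pointer-to-struct works with GetReflect.
type input struct {
	Address string
	Amount  uint64
}

type callTx struct {
	Input *input
	Data  string
}

// Stand-in for abi.EventSpec, exposing its fields via GetReflect as EventSpec.Get does above.
type eventSpec struct {
	Name string
}

func (e *eventSpec) Get(key string) (interface{}, bool) {
	return query.GetReflect(reflect.ValueOf(e), key)
}

func main() {
	tx := &callTx{Input: &input{Address: "ABCDEF", Amount: 12345}, Data: "payload"}

	// Dotted keys descend into nested structs, as in txEnv.Get("Input.Amount") in the test above.
	if v, ok := query.GetReflect(reflect.ValueOf(tx), "Input.Amount"); ok {
		fmt.Println("amount:", v) // amount: 12345
	}

	// TagsFor tries each source in turn; TaggedPrefix exposes the spec under an "Event" prefix,
	// which is how filters like "EventName = 'ManyTypes'" can be matched alongside event fields.
	tags := query.TagsFor(tx, query.TaggedPrefix("Event", &eventSpec{Name: "ManyTypes"}))

	qry, err := query.NewBuilder().
		AndEquals("EventName", "ManyTypes").
		AndEquals("Input.Address", "ABCDEF").
		Query()
	if err != nil {
		panic(err)
	}
	fmt.Println(qry.Matches(tags)) // true
}

The Vent block consumer further down in this patch relies on exactly this combination when matching projection filters against both the raw log event and its ABI event specification.
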
@@ -28,34 +31,35 @@ func NewAbiProvider(paths []string, cli rpcquery.QueryClient) (provider *AbiProv } provider = &AbiProvider{ - abiSpec, - cli, + abiSpec: abiSpec, + cli: cli, + logger: logger.WithScope("NewAbiProvider"), } return } // GetEventAbi get the ABI for a particular eventID. If it is not known, it is retrieved from the burrow node via // the address for the contract -func (p *AbiProvider) GetEventAbi(eventID abi.EventID, address crypto.Address, l *logging.Logger) (*abi.EventSpec, error) { +func (p *AbiProvider) GetEventAbi(eventID abi.EventID, address crypto.Address) (*abi.EventSpec, error) { evAbi, ok := p.abiSpec.EventsByID[eventID] if !ok { resp, err := p.cli.GetMetadata(context.Background(), &rpcquery.GetMetadataParam{Address: &address}) if err != nil { - l.InfoMsg("Error retrieving abi for event", "address", address.String(), "eventid", eventID.String(), "error", err) + p.logger.InfoMsg("Error retrieving abi for event", "address", address.String(), "eventid", eventID.String(), "error", err) return nil, err } if resp == nil || resp.Metadata == "" { - l.InfoMsg("ABI not found for contract", "address", address.String(), "eventid", eventID.String()) + p.logger.InfoMsg("ABI not found for contract", "address", address.String(), "eventid", eventID.String()) return nil, fmt.Errorf("No ABI present for contract at address %v", address) } a, err := abi.ReadSpec([]byte(resp.Metadata)) if err != nil { - l.InfoMsg("Failed to parse abi", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) + p.logger.InfoMsg("Failed to parse abi", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) return nil, err } evAbi, ok = a.EventsByID[eventID] if !ok { - l.InfoMsg("Event missing from ABI spec for contract", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) + p.logger.InfoMsg("Event missing from ABI spec for contract", "address", address.String(), "eventid", eventID.String(), "abi", resp.Metadata) return nil, fmt.Errorf("Event missing from ABI spec for contract") } diff --git a/vent/service/block_consumer.go b/vent/service/block_consumer.go new file mode 100644 index 000000000..d16a18e7b --- /dev/null +++ b/vent/service/block_consumer.go @@ -0,0 +1,138 @@ +package service + +import ( + "io" + "reflect" + + "github.com/hyperledger/burrow/event/query" + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/vent/sqlsol" + "github.com/hyperledger/burrow/vent/types" + "github.com/pkg/errors" +) + +func NewBlockConsumer(projection *sqlsol.Projection, opt sqlsol.SpecOpt, getEventSpec EventSpecGetter, + eventCh chan<- types.EventData, doneCh chan struct{}, + logger *logging.Logger) func(blockExecution *exec.BlockExecution) error { + + logger = logger.WithScope("makeBlockConsumer") + + return func(blockExecution *exec.BlockExecution) error { + if finished(doneCh) { + return io.EOF + } + + // set new block number + fromBlock := blockExecution.Height + + logger.TraceMsg("Block received", + "height", blockExecution.Height, + "num_txs", len(blockExecution.TxExecutions)) + + // create a fresh new structure to store block data at this height + blockData := sqlsol.NewBlockData(fromBlock) + + if opt.Enabled(sqlsol.Block) { + blkRawData, err := buildBlkData(projection.Tables, blockExecution) + if err != nil { + return errors.Wrapf(err, "Error building block raw data") + } + // set row in structure + 
blockData.AddRow(tables.Block, blkRawData) + } + + // get transactions for a given block + for _, txe := range blockExecution.TxExecutions { + logger.TraceMsg("Getting transaction", "TxHash", txe.TxHash, "num_events", len(txe.Events)) + + if opt.Enabled(sqlsol.Tx) { + txRawData, err := buildTxData(txe) + if err != nil { + return errors.Wrapf(err, "Error building tx raw data") + } + // set row in structure + blockData.AddRow(tables.Tx, txRawData) + } + + // reverted transactions don't have to update event data tables + // so check that condition to filter them + if txe.Exception == nil { + txOrigin := txe.Origin + if txOrigin == nil { + // This is an original transaction from the current chain so we build its origin from context + txOrigin = &exec.Origin{ + Time: blockExecution.GetHeader().GetTime(), + ChainID: blockExecution.GetHeader().GetChainID(), + Height: txe.GetHeight(), + Index: txe.GetIndex(), + } + } + + // get events for a given transaction + for _, event := range txe.Events { + if event.Log == nil { + // Only EVM events are of interest + continue + } + + eventID := event.Log.SolidityEventID() + eventSpec, err := getEventSpec(eventID, event.Log.Address) + if err != nil { + return errors.Wrapf(err, "could not get ABI for solidity event with id %v at address %v", + eventID, event.Log.Address) + } + tagged := query.TagsFor(event, query.TaggedPrefix("Event", eventSpec)) + + // see which spec filter matches with the one in event data + for _, eventClass := range projection.Spec { + qry, err := eventClass.Query() + + if err != nil { + return errors.Wrapf(err, "Error parsing query from filter string") + } + + // there's a matching filter, add data to the rows + if qry.Matches(tagged) { + + logger.InfoMsg("Matched event", "header", event.Header, + "filter", eventClass.Filter) + + // unpack, decode & build event data + eventData, err := buildEventData(projection, eventClass, event, txOrigin, eventSpec, logger) + if err != nil { + return errors.Wrapf(err, "Error building event data") + } + + // set row in structure + blockData.AddRow(eventClass.TableName, eventData) + } + } + } + } + } + + // upsert rows in specific SQL event tables and update block number + // store block data in SQL tables (if any) + if blockData.PendingRows(fromBlock) { + // gets block data to upsert + blk := blockData.Data + + for name, rows := range blk.Tables { + logger.InfoMsg("Upserting rows in SQL table", "height", fromBlock, "table", name, "action", "UPSERT", "rows", rows) + } + + eventCh <- blk + } + return nil + } +} + +type eventSpecTagged struct { + Event abi.EventSpec +} + +func (e *eventSpecTagged) Get(key string) (value interface{}, ok bool) { + return query.GetReflect(reflect.ValueOf(e), key) +} diff --git a/vent/service/block_consumer_test.go b/vent/service/block_consumer_test.go new file mode 100644 index 000000000..5359c55fd --- /dev/null +++ b/vent/service/block_consumer_test.go @@ -0,0 +1,94 @@ +package service + +import ( + "math/big" + "testing" + "time" + + "github.com/hyperledger/burrow/crypto" + "github.com/hyperledger/burrow/execution/evm/abi" + "github.com/hyperledger/burrow/execution/exec" + "github.com/hyperledger/burrow/execution/solidity" + "github.com/hyperledger/burrow/logging" + "github.com/hyperledger/burrow/vent/sqlsol" + "github.com/hyperledger/burrow/vent/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + tmTypes "github.com/tendermint/tendermint/abci/types" +) + +func TestBlockConsumer(t *testing.T) { + doneCh := make(chan struct{}) + eventCh := 
make(chan types.EventData, 100) + longFilter := "(Log1Text = 'a' OR Log1Text = 'b' OR Log1Text = 'frogs') AND EventName = 'ManyTypes'" + tableName := "Events" + projection, err := sqlsol.NewProjection(types.ProjectionSpec{ + { + TableName: tableName, + Filter: longFilter, + FieldMappings: []*types.EventFieldMapping{ + { + Field: "direction", + Type: types.EventFieldTypeString, + ColumnName: "direction", + BytesToString: true, + }, + }, + }, + }) + require.NoError(t, err) + + spec, err := abi.ReadSpec(solidity.Abi_EventEmitter) + require.NoError(t, err) + + blockConsumer := NewBlockConsumer(projection, sqlsol.None, spec.GetEventAbi, eventCh, doneCh, logging.NewNoopLogger()) + + type args struct { + Direction []byte + Trueism bool + German string + NewDepth *big.Int + Bignum int8 + Hash string + } + eventSpec := spec.EventsByName["ManyTypes"] + + bignum := big.NewInt(1000) + in := args{ + Direction: make([]byte, 32), + Trueism: false, + German: "foo", + NewDepth: bignum, + Bignum: 100, + Hash: "ba", + } + direction := "frogs" + copy(in.Direction, direction) + topics, data, err := abi.PackEvent(&eventSpec, in) + require.NoError(t, err) + + txe := &exec.TxExecution{ + TxHeader: &exec.TxHeader{}, + } + err = txe.Log(&exec.LogEvent{ + Address: crypto.Address{}, + Data: data, + Topics: topics, + }) + require.NoError(t, err) + + block := &exec.BlockExecution{ + Header: &tmTypes.Header{}, + } + block.AppendTxs(txe) + err = blockConsumer(block) + require.NoError(t, err) + select { + case <-time.After(time.Second * 5): + t.Fatalf("timed out waiting for consumer to emit block event") + case ed := <-eventCh: + rows := ed.Tables[tableName] + assert.Len(t, rows, 1) + assert.Equal(t, direction, rows[0].RowData["direction"]) + } +} diff --git a/vent/service/consumer.go b/vent/service/consumer.go index 7687729a7..611ab6a1f 100644 --- a/vent/service/consumer.go +++ b/vent/service/consumer.go @@ -4,11 +4,11 @@ import ( "context" "fmt" "io" + "sync" "time" "github.com/hyperledger/burrow/rpc" - "github.com/hyperledger/burrow/execution/exec" "github.com/hyperledger/burrow/logging" "github.com/hyperledger/burrow/rpc/rpcevents" "github.com/hyperledger/burrow/rpc/rpcquery" @@ -24,12 +24,13 @@ import ( // Consumer contains basic configuration for consumer to run type Consumer struct { Config *config.VentConfig - Log *logging.Logger - Closing bool + Logger *logging.Logger DB *sqldb.SQLDB GRPCConnection *grpc.ClientConn // external events channel used for when vent is leveraged as a library EventsChannel chan types.EventData + Done chan struct{} + shutdownOnce sync.Once Status } @@ -45,9 +46,9 @@ type Status struct { func NewConsumer(cfg *config.VentConfig, log *logging.Logger, eventChannel chan types.EventData) *Consumer { return &Consumer{ Config: cfg, - Log: log, - Closing: false, + Logger: log, EventsChannel: eventChannel, + Done: make(chan struct{}), } } @@ -57,7 +58,7 @@ func NewConsumer(cfg *config.VentConfig, log *logging.Logger, eventChannel chan func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { var err error - c.Log.InfoMsg("Connecting to Burrow gRPC server") + c.Logger.InfoMsg("Connecting to Burrow gRPC server") c.GRPCConnection, err = grpc.Dial(c.Config.GRPCAddr, grpc.WithInsecure()) if err != nil { @@ -73,23 +74,23 @@ func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { return errors.Wrapf(err, "Error getting chain status") } - abiProvider, err := NewAbiProvider(c.Config.AbiFileOrDirs, rpcquery.NewQueryClient(c.GRPCConnection)) + abiProvider, err := 
NewAbiProvider(c.Config.AbiFileOrDirs, rpcquery.NewQueryClient(c.GRPCConnection), c.Logger) if err != nil { return errors.Wrapf(err, "Error loading ABIs") } - if len(projection.EventSpec) == 0 { - c.Log.InfoMsg("No events specifications found") + if len(projection.Spec) == 0 { + c.Logger.InfoMsg("No events specifications found") return nil } - c.Log.InfoMsg("Connecting to SQL database") + c.Logger.InfoMsg("Connecting to SQL database") connection := types.SQLConnection{ DBAdapter: c.Config.DBAdapter, DBURL: c.Config.DBURL, DBSchema: c.Config.DBSchema, - Log: c.Log, + Log: c.Logger, } c.DB, err = sqldb.NewSQLDB(connection) @@ -103,7 +104,7 @@ func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { return fmt.Errorf("could not clean tables after ChainID change: %v", err) } - c.Log.InfoMsg("Synchronizing config and database projection structures") + c.Logger.InfoMsg("Synchronizing config and database projection structures") err = c.DB.SynchronizeDB(c.Burrow.ChainID, projection.Tables) if err != nil { @@ -112,17 +113,16 @@ func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { // doneCh is used for sending a "done" signal from each goroutine to the main thread // eventCh is used for sending received events to the main thread to be stored in the db - doneCh := make(chan struct{}) errCh := make(chan error, 1) eventCh := make(chan types.EventData) go func() { defer func() { - close(doneCh) + c.Shutdown() }() - go c.announceEvery(doneCh) + go c.announceEvery(c.Done) - c.Log.InfoMsg("Getting last processed block number from SQL log table") + c.Logger.InfoMsg("Getting last processed block number from SQL log table") // NOTE [Silas]: I am preserving the comment below that dates from the early days of Vent. I have looked at the // bosmarmot git history and I cannot see why the original author thought that it was the case that there was @@ -169,16 +169,17 @@ func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { // get blocks - c.Log.TraceMsg("Waiting for blocks...") + c.Logger.TraceMsg("Waiting for blocks...") - err = rpcevents.ConsumeBlockExecutions(stream, c.makeBlockConsumer(projection, abiProvider, eventCh)) + err = rpcevents.ConsumeBlockExecutions(stream, + NewBlockConsumer(projection, c.Config.SpecOpt, abiProvider.GetEventAbi, eventCh, c.Done, c.Logger)) if err != nil { if err == io.EOF { - c.Log.InfoMsg("EOF stream received...") + c.Logger.InfoMsg("EOF stream received...") } else { - if c.Closing { - c.Log.TraceMsg("GRPC connection closed") + if finished(c.Done) { + c.Logger.TraceMsg("GRPC connection closed") } else { errCh <- errors.Wrapf(err, "Error receiving blocks") return @@ -191,136 +192,31 @@ func (c *Consumer) Run(projection *sqlsol.Projection, stream bool) error { select { // Process block events case blk := <-eventCh: + c.Status.LastProcessedHeight = blk.BlockHeight err := c.commitBlock(projection, blk) if err != nil { - c.Log.InfoMsg("error committing block", "err", err) + c.Logger.InfoMsg("error committing block", "err", err) return err } // Await completion - case <-doneCh: + case <-c.Done: select { // Select possible error case err := <-errCh: - c.Log.InfoMsg("finished with error", "err", err) + c.Logger.InfoMsg("finished with error", "err", err) return err // Or fallback to success default: - c.Log.InfoMsg("finished successfully") + c.Logger.InfoMsg("finished successfully") return nil } } } } -func (c *Consumer) makeBlockConsumer(projection *sqlsol.Projection, abiProvider *AbiProvider, - eventCh chan<- types.EventData) 
func(blockExecution *exec.BlockExecution) error { - - return func(blockExecution *exec.BlockExecution) error { - if c.Closing { - return io.EOF - } - - // set new block number - fromBlock := blockExecution.Height - - defer func() { - c.Status.LastProcessedHeight = fromBlock - }() - - c.Log.TraceMsg("Block received", "height", blockExecution.Height, "num_txs", len(blockExecution.TxExecutions)) - - // create a fresh new structure to store block data at this height - blockData := sqlsol.NewBlockData(fromBlock) - - if c.Config.SpecOpt&sqlsol.Block > 0 { - blkRawData, err := buildBlkData(projection.Tables, blockExecution) - if err != nil { - return errors.Wrapf(err, "Error building block raw data") - } - // set row in structure - blockData.AddRow(tables.Block, blkRawData) - } - - // get transactions for a given block - for _, txe := range blockExecution.TxExecutions { - c.Log.TraceMsg("Getting transaction", "TxHash", txe.TxHash, "num_events", len(txe.Events)) - - if c.Config.SpecOpt&sqlsol.Tx > 0 { - txRawData, err := buildTxData(txe) - if err != nil { - return errors.Wrapf(err, "Error building tx raw data") - } - // set row in structure - blockData.AddRow(tables.Tx, txRawData) - } - - // reverted transactions don't have to update event data tables - // so check that condition to filter them - if txe.Exception == nil { - - txOrigin := txe.Origin - if txOrigin == nil { - // This is an original transaction from the current chain so we build its origin from context - txOrigin = &exec.Origin{ - Time: blockExecution.GetHeader().GetTime(), - ChainID: c.Burrow.ChainID, - Height: txe.GetHeight(), - Index: txe.GetIndex(), - } - } - - // get events for a given transaction - for _, event := range txe.Events { - - taggedEvent := event.Tagged() - - // see which spec filter matches with the one in event data - for _, eventClass := range projection.EventSpec { - qry, err := eventClass.Query() - - if err != nil { - return errors.Wrapf(err, "Error parsing query from filter string") - } - - // there's a matching filter, add data to the rows - if qry.Matches(taggedEvent) { - - c.Log.InfoMsg("Matched event", "header", event.Header, - "filter", eventClass.Filter) - - // unpack, decode & build event data - eventData, err := buildEventData(projection, eventClass, event, txOrigin, abiProvider, c.Log) - if err != nil { - return errors.Wrapf(err, "Error building event data") - } - - // set row in structure - blockData.AddRow(eventClass.TableName, eventData) - } - } - } - } - } - - // upsert rows in specific SQL event tables and update block number - // store block data in SQL tables (if any) - if blockData.PendingRows(fromBlock) { - // gets block data to upsert - blk := blockData.Data - - for name, rows := range blk.Tables { - c.Log.InfoMsg("Upserting rows in SQL table", "height", fromBlock, "table", name, "action", "UPSERT", "rows", rows) - } - - eventCh <- blk - } - return nil - } -} - func (c *Consumer) commitBlock(projection *sqlsol.Projection, blockEvents types.EventData) error { // upsert rows in specific SQL event tables and update block number if err := c.DB.SetBlock(c.Burrow.ChainID, projection.Tables, blockEvents); err != nil { @@ -337,7 +233,7 @@ func (c *Consumer) commitBlock(projection *sqlsol.Projection, blockEvents types. 
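
For context: the mutable Closing flag removed above is replaced throughout this file by a Done channel that is closed exactly once (guarded by sync.Once in Shutdown, below) and polled with a non-blocking select in finished. A self-contained sketch of that pattern, using placeholder names rather than the Consumer type itself:

package main

import (
	"fmt"
	"sync"
)

// worker mirrors the shape of the change: a Done channel plus a sync.Once guarding
// close(), instead of a mutable boolean flag read from multiple goroutines.
type worker struct {
	done         chan struct{}
	shutdownOnce sync.Once
}

func (w *worker) shutdown() {
	w.shutdownOnce.Do(func() { close(w.done) })
}

// finished reports whether shutdown has been requested, without blocking.
func finished(done chan struct{}) bool {
	select {
	case <-done:
		return true
	default:
		return false
	}
}

func main() {
	w := &worker{done: make(chan struct{})}
	fmt.Println(finished(w.done)) // false
	w.shutdown()
	w.shutdown() // idempotent: sync.Once makes repeated shutdown calls safe
	fmt.Println(finished(w.done)) // true
}
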
// Health returns the health status for the consumer func (c *Consumer) Health() error { - if c.Closing { + if finished(c.Done) { return errors.New("closing service") } @@ -364,15 +260,17 @@ func (c *Consumer) Health() error { // Shutdown gracefully shuts down the events consumer func (c *Consumer) Shutdown() { - c.Log.InfoMsg("Shutting down vent consumer...") - c.Closing = true - c.GRPCConnection.Close() + c.shutdownOnce.Do(func() { + c.Logger.InfoMsg("Shutting down vent consumer...") + close(c.Done) + c.GRPCConnection.Close() + }) } func (c *Consumer) updateStatus(qcli rpcquery.QueryClient) { stat, err := qcli.Status(context.Background(), &rpcquery.StatusParam{}) if err != nil { - c.Log.InfoMsg("could not get blockchain status", "err", err) + c.Logger.InfoMsg("could not get blockchain status", "err", err) return } c.Status.Burrow = stat @@ -406,7 +304,7 @@ func (c *Consumer) announceEvery(doneCh <-chan struct{}) { select { case <-ticker.C: c.updateStatus(qcli) - c.Log.InfoMsg("Announcement", c.statusMessage()...) + c.Logger.InfoMsg("Announcement", c.statusMessage()...) case <-doneCh: ticker.Stop() return @@ -414,3 +312,12 @@ func (c *Consumer) announceEvery(doneCh <-chan struct{}) { } } } + +func finished(doneCh chan struct{}) bool { + select { + case <-doneCh: + return true + default: + return false + } +} diff --git a/vent/service/consumer_test.go b/vent/service/consumer_integration_test.go similarity index 100% rename from vent/service/consumer_test.go rename to vent/service/consumer_integration_test.go diff --git a/vent/service/rowbuilder.go b/vent/service/rowbuilder.go index 6a7774315..5e946beba 100644 --- a/vent/service/rowbuilder.go +++ b/vent/service/rowbuilder.go @@ -17,7 +17,7 @@ import ( // buildEventData builds event data from transactions func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, event *exec.Event, - txOrigin *exec.Origin, abiProvider *AbiProvider, l *logging.Logger) (types.EventDataRow, error) { + txOrigin *exec.Origin, evAbi *abi.EventSpec, logger *logging.Logger) (types.EventDataRow, error) { // a fresh new row to store column/value data row := make(map[string]interface{}) @@ -26,22 +26,13 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, eventHeader := event.GetHeader() eventLog := event.GetLog() - // Find event spec for this event - var eventID abi.EventID - copy(eventID[:], eventLog.Topics[0].Bytes()) - - evAbi, err := abiProvider.GetEventAbi(eventID, eventLog.Address, l) - if err != nil { - return types.EventDataRow{}, err - } - // decode event data using the provided abi specification decodedData, err := decodeEvent(eventHeader, eventLog, txOrigin, evAbi) if err != nil { return types.EventDataRow{}, errors.Wrapf(err, "Error decoding event (filter: %s)", eventClass.Filter) } - l.InfoMsg("Decoded event", decodedData) + logger.InfoMsg("Decoded event", decodedData) rowAction := types.ActionUpsert @@ -61,14 +52,14 @@ func buildEventData(projection *sqlsol.Projection, eventClass *types.EventClass, if err == nil { if fieldMapping.BytesToString { if bs, ok := value.(*[]byte); ok { - str := sanitiseBytesForString(*bs, l) + str := sanitiseBytesForString(*bs, logger) row[column.Name] = interface{}(str) continue } } row[column.Name] = value } else { - l.TraceMsg("could not get column", "err", err) + logger.TraceMsg("could not get column", "err", err) } } diff --git a/vent/sqldb/sqldb.go b/vent/sqldb/sqldb.go index 07bac8953..b59ca1322 100644 --- a/vent/sqldb/sqldb.go +++ b/vent/sqldb/sqldb.go @@ -2,8 +2,11 
@@ package sqldb import ( "database/sql" + "encoding/json" "errors" "fmt" + "math" + "strconv" "strings" "time" @@ -13,6 +16,11 @@ import ( "github.com/jmoiron/sqlx" ) +const maxUint64 uint64 = (1 << 64) - 1 + +var tables = types.DefaultSQLTableNames +var columns = types.DefaultSQLColumnNames + // SQLDB implements the access to a sql database type SQLDB struct { DB *sqlx.DB @@ -63,7 +71,7 @@ func (db *SQLDB) Init(chainID, burrowVersion string) error { db.Log.InfoMsg("Initializing DB") // Create dictionary and log tables - sysTables := db.getSysTablesDefinition() + sysTables := db.systemTablesDefinition() // IMPORTANT: DO NOT CHANGE TABLE CREATION ORDER (1) if err := db.createTable(chainID, sysTables[db.Tables.Dictionary], true); err != nil { @@ -666,3 +674,359 @@ func (db *SQLDB) prepare(perr *error, query string) *sqlx.NamedStmt { } return stmt } + +// findTable checks if a table exists in the default schema +func (db *SQLDB) findTable(tableName string) (bool, error) { + + found := 0 + safeTable := safe(tableName) + query := db.DBAdapter.FindTableQuery() + + db.Log.InfoMsg("FIND TABLE", "query", query, "value", safeTable) + if err := db.DB.QueryRow(query, tableName).Scan(&found); err != nil { + db.Log.InfoMsg("Error finding table", "err", err) + return false, err + } + + if found == 0 { + db.Log.InfoMsg("Table not found", "value", safeTable) + return false, nil + } + + return true, nil +} + +// getTableDef returns the structure of a given SQL table +func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { + table := &types.SQLTable{ + Name: safe(tableName), + } + found, err := db.findTable(table.Name) + if err != nil { + return nil, err + } + + if !found { + db.Log.InfoMsg("Error table not found", "value", table.Name) + return nil, errors.New("Error table not found " + table.Name) + } + + query := db.DBAdapter.TableDefinitionQuery() + + db.Log.InfoMsg("QUERY STRUCTURE", "query", query, "value", table.Name) + rows, err := db.DB.Query(query, safe(tableName)) + if err != nil { + db.Log.InfoMsg("Error querying table structure", "err", err) + return nil, err + } + defer rows.Close() + + var columns []*types.SQLTableColumn + + for rows.Next() { + var columnName string + var columnSQLType types.SQLColumnType + var columnIsPK int + var columnLength int + + if err = rows.Scan(&columnName, &columnSQLType, &columnLength, &columnIsPK); err != nil { + db.Log.InfoMsg("Error scanning table structure", "err", err) + return nil, err + } + + if _, err = db.DBAdapter.TypeMapping(columnSQLType); err != nil { + return nil, err + } + + columns = append(columns, &types.SQLTableColumn{ + Name: columnName, + Type: columnSQLType, + Length: columnLength, + Primary: columnIsPK == 1, + }) + } + + if err = rows.Err(); err != nil { + db.Log.InfoMsg("Error during rows iteration", "err", err) + return nil, err + } + + table.Columns = columns + return table, nil +} + +// alterTable alters the structure of a SQL table & add info to the dictionary +func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { + db.Log.InfoMsg("Altering table", "value", table.Name) + + // prepare log query + logQuery := db.DBAdapter.InsertLogQuery() + + // current table structure + safeTable := safe(table.Name) + currentTable, err := db.getTableDef(safeTable) + if err != nil { + return err + } + + sqlValues, _ := getJSON(nil) + + // for each column in the new table structure + for order, newColumn := range table.Columns { + found := false + + // check if exists in the current table structure + for _, 
currentColumn := range currentTable.Columns { + // if column exists + if currentColumn.Name == newColumn.Name { + found = true + break + } + } + + if !found { + safeCol := safe(newColumn.Name) + query, dictionary := db.DBAdapter.AlterColumnQuery(safeTable, safeCol, newColumn.Type, newColumn.Length, order) + + //alter column + db.Log.InfoMsg("ALTER TABLE", "query", safe(query)) + _, err = db.DB.Exec(safe(query)) + + if err != nil { + if db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeDuplicatedColumn) { + db.Log.InfoMsg("Duplicate column", "value", safeCol) + } else { + db.Log.InfoMsg("Error altering table", "err", err) + return err + } + } else { + //store dictionary + db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) + _, err = db.DB.Exec(dictionary) + if err != nil { + db.Log.InfoMsg("Error storing dictionary", "err", err) + return err + } + + // Marshal the table into a JSON string. + var jsonData []byte + jsonData, err = getJSON(newColumn) + if err != nil { + db.Log.InfoMsg("error marshaling column", "err", err, "value", fmt.Sprintf("%v", newColumn)) + return err + } + //insert log + _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionAlterTable, jsonData, query, sqlValues) + if err != nil { + db.Log.InfoMsg("Error inserting log", "err", err) + return err + } + } + } + } + + // Ensure triggers are defined + err = db.createTableTriggers(table) + if err != nil { + db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) + return fmt.Errorf("could not create table notification triggers: %v", err) + } + return nil +} + +// createTable creates a new table +func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise bool) error { + db.Log.InfoMsg("Creating Table", "value", table.Name) + + // prepare log query + logQuery := db.DBAdapter.InsertLogQuery() + + //get create table query + safeTable := safe(table.Name) + query, dictionary := db.DBAdapter.CreateTableQuery(safeTable, table.Columns) + if query == "" { + db.Log.InfoMsg("empty CREATE TABLE query") + return errors.New("empty CREATE TABLE query") + } + + // create table + db.Log.InfoMsg("CREATE TABLE", "query", query) + _, err := db.DB.Exec(query) + if err != nil { + return err + } + + //store dictionary + db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) + _, err = db.DB.Exec(dictionary) + if err != nil { + db.Log.InfoMsg("Error storing dictionary", "err", err) + return err + } + + err = db.createTableTriggers(table) + if err != nil { + db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) + return fmt.Errorf("could not create table notification triggers: %v", err) + } + + //insert log (if action is not database initialization) + if !isInitialise { + // Marshal the table into a JSON string. 
+ var jsonData []byte + jsonData, err = getJSON(table) + if err != nil { + db.Log.InfoMsg("error marshaling table", "err", err, "value", fmt.Sprintf("%v", table)) + return err + } + sqlValues, _ := getJSON(nil) + + //insert log + _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionCreateTable, jsonData, query, sqlValues) + if err != nil { + db.Log.InfoMsg("Error inserting log", "err", err) + return err + } + } + return nil +} + +// Creates (or updates) table notification triggers and functions +func (db *SQLDB) createTableTriggers(table *types.SQLTable) error { + // If the adapter supports notification triggers + dbNotify, ok := db.DBAdapter.(adapters.DBNotifyTriggerAdapter) + if ok { + for channel, columns := range table.NotifyChannels { + function := fmt.Sprintf("%s_%s_notify_function", table.Name, channel) + + query := dbNotify.CreateNotifyFunctionQuery(function, channel, columns...) + db.Log.InfoMsg("CREATE NOTIFICATION FUNCTION", "query", query) + _, err := db.DB.Exec(query) + if err != nil { + return fmt.Errorf("could not create notification function: %v", err) + } + + trigger := fmt.Sprintf("%s_%s_notify_trigger", table.Name, channel) + query = dbNotify.CreateTriggerQuery(trigger, table.Name, function) + db.Log.InfoMsg("CREATE NOTIFICATION TRIGGER", "query", query) + _, err = db.DB.Exec(query) + if err != nil { + return fmt.Errorf("could not create notification trigger: %v", err) + } + } + } + return nil +} + +// getSelectQuery builds a select query for a specific SQL table and a given block +func (db *SQLDB) getSelectQuery(table *types.SQLTable, height uint64) (string, error) { + + fields := "" + + for _, tableColumn := range table.Columns { + if fields != "" { + fields += ", " + } + fields += db.DBAdapter.SecureName(tableColumn.Name) + } + + if fields == "" { + return "", errors.New("error table does not contain any fields") + } + + query := db.DBAdapter.SelectRowQuery(table.Name, fields, strconv.FormatUint(height, 10)) + return query, nil +} + +// getBlockTables return all SQL tables that have been involved +// in a given batch transaction for a specific block +func (db *SQLDB) getBlockTables(chainid string, height uint64) (types.EventTables, error) { + tables := make(types.EventTables) + + query := db.DBAdapter.SelectLogQuery() + db.Log.InfoMsg("QUERY LOG", "query", query, "height", height, "chainid", chainid) + + rows, err := db.DB.Query(query, height, chainid) + if err != nil { + db.Log.InfoMsg("Error querying log", "err", err) + return tables, err + } + + defer rows.Close() + + for rows.Next() { + var eventName, tableName string + var table *types.SQLTable + + err = rows.Scan(&tableName, &eventName) + if err != nil { + db.Log.InfoMsg("Error scanning table structure", "err", err) + return tables, err + } + + err = rows.Err() + if err != nil { + db.Log.InfoMsg("Error scanning table structure", "err", err) + return tables, err + } + + table, err = db.getTableDef(tableName) + if err != nil { + return tables, err + } + + tables[tableName] = table + } + + return tables, nil +} + +// safe sanitizes a parameter +func safe(parameter string) string { + replacer := strings.NewReplacer(";", "", ",", "") + return replacer.Replace(parameter) +} + +//getJSON returns marshaled json from JSON single column +func getJSON(JSON interface{}) ([]byte, error) { + if JSON != nil { + return json.Marshal(JSON) + } + return json.Marshal("") +} + +//getJSONFromValues returns marshaled json from query values +func getJSONFromValues(values []interface{}) ([]byte, error) { + if 
values != nil { + return json.Marshal(values) + } + return json.Marshal("") +} + +//getValuesFromJSON returns query values from unmarshaled JSON column +func getValuesFromJSON(JSON string) ([]interface{}, error) { + pointers := make([]interface{}, 0) + bytes := []byte(JSON) + err := json.Unmarshal(bytes, &pointers) + if err != nil { + return nil, err + } + for i, ptr := range pointers { + switch v := ptr.(type) { + // Normalise integral floats + case float64: + i64 := int64(v) + if float64(i64) == v { + pointers[i] = i64 + } + } + } + return pointers, nil +} + +func digits(x uint64) int { + if x == 0 { + return 1 + } + return int(math.Log10(float64(x))) + 1 +} diff --git a/vent/sqldb/sqldb_integration_test.go b/vent/sqldb/sqldb_integration_test.go new file mode 100644 index 000000000..11bf5fa8c --- /dev/null +++ b/vent/sqldb/sqldb_integration_test.go @@ -0,0 +1,335 @@ +// +build integration + +package sqldb_test + +import ( + "database/sql" + "fmt" + "testing" + "time" + + "github.com/hyperledger/burrow/vent/config" + "github.com/hyperledger/burrow/vent/sqldb" + "github.com/hyperledger/burrow/vent/sqldb/adapters" + "github.com/hyperledger/burrow/vent/sqlsol" + "github.com/hyperledger/burrow/vent/test" + "github.com/hyperledger/burrow/vent/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testSynchronizeDB(t *testing.T, cfg *config.VentConfig) { + t.Run(fmt.Sprintf("%s: successfully creates database tables and synchronizes db", cfg.DBAdapter), + func(t *testing.T) { + goodJSON := test.GoodJSONConfFile(t) + + byteValue := []byte(goodJSON) + tableStructure, err := sqlsol.NewProjectionFromBytes(byteValue) + require.NoError(t, err) + + db, cleanUpDB := test.NewTestDB(t, cfg) + defer cleanUpDB() + + err = db.Ping() + require.NoError(t, err) + + err = db.SynchronizeDB(test.ChainID, tableStructure.Tables) + require.NoError(t, err) + }) +} + +func testCleanDB(t *testing.T, cfg *config.VentConfig) { + t.Run(fmt.Sprintf("%s: successfully creates tables, updates test.ChainID and drops all tables", cfg.DBAdapter), + func(t *testing.T) { + byteValue := []byte(test.GoodJSONConfFile(t)) + tableStructure, err := sqlsol.NewProjectionFromBytes(byteValue) + require.NoError(t, err) + + db, cleanUpDB := test.NewTestDB(t, cfg) + defer cleanUpDB() + + err = db.Ping() + require.NoError(t, err) + + err = db.SynchronizeDB(test.ChainID, tableStructure.Tables) + require.NoError(t, err) + + err = db.CleanTables(test.ChainID, test.BurrowVersion) + require.NoError(t, err) + }) +} + +func testRestore(t *testing.T, cfg *config.VentConfig) { + t.Run(fmt.Sprintf("%s: can restore from vent logging", cfg.DBAdapter), + func(t *testing.T) { + db, closeDB := test.NewTestDB(t, cfg) + defer closeDB() + + errp := db.Ping() + require.NoError(t, errp) + + // new + str, dat := getBlock() + err := db.SetBlock(test.ChainID, str, dat) + require.NoError(t, err) + + // restore to new table + prefix := "RESTORED" + err = db.RestoreDB(time.Time{}, prefix) + require.NoError(t, err) + + for table := range dat.Tables { + assertTablesEqual(t, db, table, fmt.Sprintf("%s_%s", prefix, table)) + } + + for table := range dat.Tables { + dropQuery := db.DBAdapter.DropTableQuery(table) + _, err = db.DB.Exec(dropQuery) + require.NoError(t, err) + } + + // restore in-place over original tables + err = db.RestoreDB(time.Time{}, "") + require.NoError(t, err) + + for table := range dat.Tables { + assertTablesEqual(t, db, table, fmt.Sprintf("%s_%s", prefix, table)) + } + }) +} + +func testSetBlock(t *testing.T, 
cfg *config.VentConfig) { + t.Run(fmt.Sprintf("%s: successfully inserts a block", cfg.DBAdapter), + func(t *testing.T) { + db, closeDB := test.NewTestDB(t, cfg) + defer closeDB() + + err := db.Ping() + require.NoError(t, err) + + // new + str, dat := getBlock() + err = db.SetBlock(test.ChainID, str, dat) + require.NoError(t, err) + + // read + _, err = db.LastBlockHeight(test.ChainID) + require.NoError(t, err) + + _, err = db.GetBlock(test.ChainID, dat.BlockHeight) + require.NoError(t, err) + + // alter + str, dat = getAlterBlock() + err = db.SetBlock(test.ChainID, str, dat) + require.NoError(t, err) + + //restore + err = db.RestoreDB(time.Time{}, "RESTORED") + require.NoError(t, err) + + }) + + t.Run(fmt.Sprintf("%s: successfully creates an empty table", cfg.DBAdapter), func(t *testing.T) { + db, closeDB := test.NewTestDB(t, cfg) + defer closeDB() + + errp := db.Ping() + require.NoError(t, errp) + + //table 1 + tables := map[string]*types.SQLTable{ + "AllDataTypesTable": { + Name: "AllDataTypesTable", + Columns: []*types.SQLTableColumn{ + {Name: "test_id", Type: types.SQLColumnTypeSerial, Primary: true}, + {Name: "col1", Type: types.SQLColumnTypeBool, Primary: false}, + {Name: "col2", Type: types.SQLColumnTypeByteA, Primary: false}, + {Name: "col3", Type: types.SQLColumnTypeInt, Primary: false}, + {Name: "col4", Type: types.SQLColumnTypeText, Primary: false}, + {Name: "col5", Type: types.SQLColumnTypeTimeStamp, Primary: false}, + {Name: "col6", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + }, + }, + } + + err := db.SynchronizeDB(test.ChainID, tables) + require.NoError(t, err) + }) +} + +func getBlock() (types.EventTables, types.EventData) { + longtext := "qwertyuiopasdfghjklzxcvbnm1234567890QWERTYUIOPASDFGHJKLZXCVBNM" + longtext = fmt.Sprintf("%s %s %s %s %s", longtext, longtext, longtext, longtext, longtext) + + //table 1 + table1 := &types.SQLTable{ + Name: "test_table1", + Columns: []*types.SQLTableColumn{ + {Name: "test_id", Type: types.SQLColumnTypeInt, Primary: true}, + {Name: "col1", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + {Name: "col2", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + {Name: "col4", Type: types.SQLColumnTypeText, Primary: false}, + {Name: "colV", Type: types.SQLColumnTypeVarchar, Length: 400, Primary: false}, + {Name: "colT", Type: types.SQLColumnTypeText, Length: 0, Primary: false}, + }, + } + + //table 2 + table2 := &types.SQLTable{ + Name: "test_table2", + Columns: []*types.SQLTableColumn{ + {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, + {Name: "sid_id", Type: types.SQLColumnTypeInt, Primary: true}, + {Name: "field_1", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + {Name: "field_2", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + }, + } + + //table 3 + table3 := &types.SQLTable{ + Name: "test_table3", + Columns: []*types.SQLTableColumn{ + {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, + {Name: "val", Type: types.SQLColumnTypeInt, Primary: false}, + }, + } + + //table 4 + table4 := &types.SQLTable{ + Name: "test_table4", + Columns: []*types.SQLTableColumn{ + {Name: "index", Type: types.SQLColumnTypeInt, Primary: true}, + {Name: "time", Type: types.SQLColumnTypeTimeStamp, Primary: false}, + {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, + }, + } + + str := make(types.EventTables) 
+ str["1"] = table1 + str["2"] = table2 + str["3"] = table3 + str["4"] = table4 + + //---------------------------------------data------------------------------------- + var dat types.EventData + dat.BlockHeight = 2134234 + dat.Tables = make(map[string]types.EventDataTable) + + var rows1 []types.EventDataRow + rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "1", "col1": "text11", "col2": "text12", "_height": dat.BlockHeight, "col4": "14", "colV": longtext, "colT": longtext}}) + rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "2", "col1": "text21", "col2": "text22", "_height": dat.BlockHeight, "col4": "24", "colV": longtext, "colT": longtext}}) + rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "3", "col1": "text31", "col2": "text32", "_height": dat.BlockHeight, "col4": "34", "colV": longtext, "colT": longtext}}) + rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "4", "col1": "text41", "col3": "text43", "_height": dat.BlockHeight, "colV": longtext, "colT": longtext}}) + rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "1", "col1": "upd", "col2": "upd", "_height": dat.BlockHeight, "col4": "upd", "colV": longtext, "colT": longtext}}) + dat.Tables["test_table1"] = rows1 + + var rows2 []types.EventDataRow + rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_1": "A", "field_2": "B"}}) + rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "2", "field_1": "C", "field_2": ""}}) + rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "3", "field_1": "D", "field_2": "E"}}) + rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_1": "F"}}) + rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_2": "U"}}) + dat.Tables["test_table2"] = rows2 + + var rows3 []types.EventDataRow + rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEF1", "val": "1"}}) + rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEF2", "val": "2"}}) + rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEFX", "val": "-1"}}) + rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight}}) + dat.Tables["test_table3"] = rows3 + + var rows4 []types.EventDataRow + rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-01 15:04:05", "index": "1"}}) + rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-02 15:04:05", "index": "2"}}) + rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, 
RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "3"}}) + rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "4"}}) + rows4 = append(rows4, types.EventDataRow{Action: types.ActionDelete, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "3"}}) + dat.Tables["test_table4"] = rows4 + + return str, dat +} + +func getAlterBlock() (types.EventTables, types.EventData) { + //table 3 + table3 := &types.SQLTable{ + Name: "test_table3", + Columns: []*types.SQLTableColumn{ + {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, + {Name: "val", Type: types.SQLColumnTypeInt, Primary: false}, + {Name: "val_alter", Type: types.SQLColumnTypeInt, Primary: false}, + }, + } + + str := make(types.EventTables) + str["3"] = table3 + + //---------------------------------------data------------------------------------- + var dat types.EventData + dat.BlockHeight = 23423423 + dat.Tables = make(map[string]types.EventDataTable) + + var rows5 []types.EventDataRow + rows5 = append(rows5, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "val": "1", "val_alter": "1"}}) + dat.Tables["test_table3"] = rows5 + + return str, dat +} + +func assertTablesEqual(t *testing.T, db *sqldb.SQLDB, table1, table2 string) { + cols1, rows1 := selectAll(t, db, table1) + cols2, rows2 := selectAll(t, db, table2) + + assert.Equal(t, cols1, cols2, "columns should be equal") + for i, r1 := range rows1 { + r2 := rows2[i] + assert.Equal(t, r1, r2, "each row should be equal") + } +} + +func selectAll(t *testing.T, db *sqldb.SQLDB, tablename string) (columns []string, rows []map[string]interface{}) { + // language=SQL + selectQuery := adapters.Cleanf("SELECT * FROM %s", db.DBAdapter.SchemaName(tablename)) + sqlRows, err := db.DB.Query(selectQuery) + require.NoError(t, err) + defer sqlRows.Close() + + cols, err := sqlRows.Columns() + require.NoError(t, err) + for sqlRows.Next() { + row := rowMap(t, cols, sqlRows) + rows = append(rows, row) + } + return cols, rows +} + +func rowMap(t *testing.T, cols []string, rows *sql.Rows) map[string]interface{} { + vals := scanValues(len(cols)) + err := rows.Scan(vals...) 
+ require.NoError(t, err) + mp := make(map[string]interface{}, len(cols)) + + for i, v := range vals { + iface := v.(*interface{}) + if iface != nil { + // truly go at its most beautiful + switch iv := (*iface).(type) { + case []byte: + str := string(iv) + mp[cols[i]] = str + default: + mp[cols[i]] = iv + } + } + } + return mp +} + +func scanValues(n int) []interface{} { + vals := make([]interface{}, n) + for i := 0; i < n; i++ { + vals[i] = new(interface{}) + } + return vals +} diff --git a/vent/sqldb/sqldb_test.go b/vent/sqldb/sqldb_test.go index 11bf5fa8c..c2f95ee4f 100644 --- a/vent/sqldb/sqldb_test.go +++ b/vent/sqldb/sqldb_test.go @@ -1,335 +1,17 @@ -// +build integration - -package sqldb_test +package sqldb import ( - "database/sql" "fmt" "testing" - "time" - "github.com/hyperledger/burrow/vent/config" - "github.com/hyperledger/burrow/vent/sqldb" - "github.com/hyperledger/burrow/vent/sqldb/adapters" - "github.com/hyperledger/burrow/vent/sqlsol" - "github.com/hyperledger/burrow/vent/test" - "github.com/hyperledger/burrow/vent/types" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) -func testSynchronizeDB(t *testing.T, cfg *config.VentConfig) { - t.Run(fmt.Sprintf("%s: successfully creates database tables and synchronizes db", cfg.DBAdapter), - func(t *testing.T) { - goodJSON := test.GoodJSONConfFile(t) - - byteValue := []byte(goodJSON) - tableStructure, err := sqlsol.NewProjectionFromBytes(byteValue) - require.NoError(t, err) - - db, cleanUpDB := test.NewTestDB(t, cfg) - defer cleanUpDB() - - err = db.Ping() - require.NoError(t, err) - - err = db.SynchronizeDB(test.ChainID, tableStructure.Tables) - require.NoError(t, err) - }) -} - -func testCleanDB(t *testing.T, cfg *config.VentConfig) { - t.Run(fmt.Sprintf("%s: successfully creates tables, updates test.ChainID and drops all tables", cfg.DBAdapter), - func(t *testing.T) { - byteValue := []byte(test.GoodJSONConfFile(t)) - tableStructure, err := sqlsol.NewProjectionFromBytes(byteValue) - require.NoError(t, err) - - db, cleanUpDB := test.NewTestDB(t, cfg) - defer cleanUpDB() - - err = db.Ping() - require.NoError(t, err) - - err = db.SynchronizeDB(test.ChainID, tableStructure.Tables) - require.NoError(t, err) - - err = db.CleanTables(test.ChainID, test.BurrowVersion) - require.NoError(t, err) - }) -} - -func testRestore(t *testing.T, cfg *config.VentConfig) { - t.Run(fmt.Sprintf("%s: can restore from vent logging", cfg.DBAdapter), - func(t *testing.T) { - db, closeDB := test.NewTestDB(t, cfg) - defer closeDB() - - errp := db.Ping() - require.NoError(t, errp) - - // new - str, dat := getBlock() - err := db.SetBlock(test.ChainID, str, dat) - require.NoError(t, err) - - // restore to new table - prefix := "RESTORED" - err = db.RestoreDB(time.Time{}, prefix) - require.NoError(t, err) - - for table := range dat.Tables { - assertTablesEqual(t, db, table, fmt.Sprintf("%s_%s", prefix, table)) - } - - for table := range dat.Tables { - dropQuery := db.DBAdapter.DropTableQuery(table) - _, err = db.DB.Exec(dropQuery) - require.NoError(t, err) - } - - // restore in-place over original tables - err = db.RestoreDB(time.Time{}, "") - require.NoError(t, err) - - for table := range dat.Tables { - assertTablesEqual(t, db, table, fmt.Sprintf("%s_%s", prefix, table)) - } - }) -} - -func testSetBlock(t *testing.T, cfg *config.VentConfig) { - t.Run(fmt.Sprintf("%s: successfully inserts a block", cfg.DBAdapter), - func(t *testing.T) { - db, closeDB := test.NewTestDB(t, cfg) - defer closeDB() - - err := db.Ping() - 
require.NoError(t, err) - - // new - str, dat := getBlock() - err = db.SetBlock(test.ChainID, str, dat) - require.NoError(t, err) - - // read - _, err = db.LastBlockHeight(test.ChainID) - require.NoError(t, err) - - _, err = db.GetBlock(test.ChainID, dat.BlockHeight) - require.NoError(t, err) - - // alter - str, dat = getAlterBlock() - err = db.SetBlock(test.ChainID, str, dat) - require.NoError(t, err) - - //restore - err = db.RestoreDB(time.Time{}, "RESTORED") - require.NoError(t, err) - - }) - - t.Run(fmt.Sprintf("%s: successfully creates an empty table", cfg.DBAdapter), func(t *testing.T) { - db, closeDB := test.NewTestDB(t, cfg) - defer closeDB() - - errp := db.Ping() - require.NoError(t, errp) - - //table 1 - tables := map[string]*types.SQLTable{ - "AllDataTypesTable": { - Name: "AllDataTypesTable", - Columns: []*types.SQLTableColumn{ - {Name: "test_id", Type: types.SQLColumnTypeSerial, Primary: true}, - {Name: "col1", Type: types.SQLColumnTypeBool, Primary: false}, - {Name: "col2", Type: types.SQLColumnTypeByteA, Primary: false}, - {Name: "col3", Type: types.SQLColumnTypeInt, Primary: false}, - {Name: "col4", Type: types.SQLColumnTypeText, Primary: false}, - {Name: "col5", Type: types.SQLColumnTypeTimeStamp, Primary: false}, - {Name: "col6", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - }, - }, - } - - err := db.SynchronizeDB(test.ChainID, tables) - require.NoError(t, err) - }) -} - -func getBlock() (types.EventTables, types.EventData) { - longtext := "qwertyuiopasdfghjklzxcvbnm1234567890QWERTYUIOPASDFGHJKLZXCVBNM" - longtext = fmt.Sprintf("%s %s %s %s %s", longtext, longtext, longtext, longtext, longtext) - - //table 1 - table1 := &types.SQLTable{ - Name: "test_table1", - Columns: []*types.SQLTableColumn{ - {Name: "test_id", Type: types.SQLColumnTypeInt, Primary: true}, - {Name: "col1", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - {Name: "col2", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - {Name: "col4", Type: types.SQLColumnTypeText, Primary: false}, - {Name: "colV", Type: types.SQLColumnTypeVarchar, Length: 400, Primary: false}, - {Name: "colT", Type: types.SQLColumnTypeText, Length: 0, Primary: false}, - }, - } - - //table 2 - table2 := &types.SQLTable{ - Name: "test_table2", - Columns: []*types.SQLTableColumn{ - {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, - {Name: "sid_id", Type: types.SQLColumnTypeInt, Primary: true}, - {Name: "field_1", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - {Name: "field_2", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - }, - } - - //table 3 - table3 := &types.SQLTable{ - Name: "test_table3", - Columns: []*types.SQLTableColumn{ - {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, - {Name: "val", Type: types.SQLColumnTypeInt, Primary: false}, - }, - } - - //table 4 - table4 := &types.SQLTable{ - Name: "test_table4", - Columns: []*types.SQLTableColumn{ - {Name: "index", Type: types.SQLColumnTypeInt, Primary: true}, - {Name: "time", Type: types.SQLColumnTypeTimeStamp, Primary: false}, - {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: false}, - }, - } - - str := make(types.EventTables) - str["1"] = table1 - str["2"] = table2 - str["3"] = table3 - str["4"] = table4 - - //---------------------------------------data------------------------------------- - var dat types.EventData - 
dat.BlockHeight = 2134234 - dat.Tables = make(map[string]types.EventDataTable) - - var rows1 []types.EventDataRow - rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "1", "col1": "text11", "col2": "text12", "_height": dat.BlockHeight, "col4": "14", "colV": longtext, "colT": longtext}}) - rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "2", "col1": "text21", "col2": "text22", "_height": dat.BlockHeight, "col4": "24", "colV": longtext, "colT": longtext}}) - rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "3", "col1": "text31", "col2": "text32", "_height": dat.BlockHeight, "col4": "34", "colV": longtext, "colT": longtext}}) - rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "4", "col1": "text41", "col3": "text43", "_height": dat.BlockHeight, "colV": longtext, "colT": longtext}}) - rows1 = append(rows1, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"test_id": "1", "col1": "upd", "col2": "upd", "_height": dat.BlockHeight, "col4": "upd", "colV": longtext, "colT": longtext}}) - dat.Tables["test_table1"] = rows1 - - var rows2 []types.EventDataRow - rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_1": "A", "field_2": "B"}}) - rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "2", "field_1": "C", "field_2": ""}}) - rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "3", "field_1": "D", "field_2": "E"}}) - rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_1": "F"}}) - rows2 = append(rows2, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "sid_id": "1", "field_2": "U"}}) - dat.Tables["test_table2"] = rows2 - - var rows3 []types.EventDataRow - rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEF1", "val": "1"}}) - rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEF2", "val": "2"}}) - rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": "0123456789ABCDEFX", "val": "-1"}}) - rows3 = append(rows3, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight}}) - dat.Tables["test_table3"] = rows3 - - var rows4 []types.EventDataRow - rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-01 15:04:05", "index": "1"}}) - rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-02 15:04:05", "index": "2"}}) - rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "3"}}) - rows4 = append(rows4, types.EventDataRow{Action: types.ActionUpsert, RowData: 
map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "4"}}) - rows4 = append(rows4, types.EventDataRow{Action: types.ActionDelete, RowData: map[string]interface{}{"_height": dat.BlockHeight, "time": "2006-01-03 15:04:05", "index": "3"}}) - dat.Tables["test_table4"] = rows4 - - return str, dat -} - -func getAlterBlock() (types.EventTables, types.EventData) { - //table 3 - table3 := &types.SQLTable{ - Name: "test_table3", - Columns: []*types.SQLTableColumn{ - {Name: "_height", Type: types.SQLColumnTypeVarchar, Length: 100, Primary: true}, - {Name: "val", Type: types.SQLColumnTypeInt, Primary: false}, - {Name: "val_alter", Type: types.SQLColumnTypeInt, Primary: false}, - }, - } - - str := make(types.EventTables) - str["3"] = table3 - - //---------------------------------------data------------------------------------- - var dat types.EventData - dat.BlockHeight = 23423423 - dat.Tables = make(map[string]types.EventDataTable) - - var rows5 []types.EventDataRow - rows5 = append(rows5, types.EventDataRow{Action: types.ActionUpsert, RowData: map[string]interface{}{"_height": dat.BlockHeight, "val": "1", "val_alter": "1"}}) - dat.Tables["test_table3"] = rows5 - - return str, dat -} - -func assertTablesEqual(t *testing.T, db *sqldb.SQLDB, table1, table2 string) { - cols1, rows1 := selectAll(t, db, table1) - cols2, rows2 := selectAll(t, db, table2) - - assert.Equal(t, cols1, cols2, "columns should be equal") - for i, r1 := range rows1 { - r2 := rows2[i] - assert.Equal(t, r1, r2, "each row should be equal") - } -} - -func selectAll(t *testing.T, db *sqldb.SQLDB, tablename string) (columns []string, rows []map[string]interface{}) { - // language=SQL - selectQuery := adapters.Cleanf("SELECT * FROM %s", db.DBAdapter.SchemaName(tablename)) - sqlRows, err := db.DB.Query(selectQuery) - require.NoError(t, err) - defer sqlRows.Close() - - cols, err := sqlRows.Columns() - require.NoError(t, err) - for sqlRows.Next() { - row := rowMap(t, cols, sqlRows) - rows = append(rows, row) - } - return cols, rows -} - -func rowMap(t *testing.T, cols []string, rows *sql.Rows) map[string]interface{} { - vals := scanValues(len(cols)) - err := rows.Scan(vals...) 
- require.NoError(t, err) - mp := make(map[string]interface{}, len(cols)) - - for i, v := range vals { - iface := v.(*interface{}) - if iface != nil { - // truly go at its most beautiful - switch iv := (*iface).(type) { - case []byte: - str := string(iv) - mp[cols[i]] = str - default: - mp[cols[i]] = iv - } - } - } - return mp -} - -func scanValues(n int) []interface{} { - vals := make([]interface{}, n) - for i := 0; i < n; i++ { - vals[i] = new(interface{}) - } - return vals +func TestDigits(t *testing.T) { + s := fmt.Sprintf("%v", maxUint64) + assert.Len(t, s, digits(maxUint64)) + assert.Equal(t, 1, digits(1)) + assert.Equal(t, 1, digits(1)) + assert.Equal(t, 1, digits(2)) + assert.Equal(t, 2, digits(10)) } diff --git a/vent/sqldb/system_tables.go b/vent/sqldb/system_tables.go new file mode 100644 index 000000000..295d056b6 --- /dev/null +++ b/vent/sqldb/system_tables.go @@ -0,0 +1,134 @@ +package sqldb + +import ( + "github.com/hyperledger/burrow/txs" + "github.com/hyperledger/burrow/vent/types" +) + +// getSysTablesDefinition returns log, chain info & dictionary structures +func (db *SQLDB) systemTablesDefinition() types.EventTables { + return types.EventTables{ + tables.Log: { + Name: tables.Log, + Columns: []*types.SQLTableColumn{ + { + Name: columns.Id, + Type: types.SQLColumnTypeSerial, + Primary: true, + }, + { + Name: columns.ChainID, + Type: types.SQLColumnTypeVarchar, + Length: 100, + }, + { + Name: columns.TimeStamp, + Type: types.SQLColumnTypeTimeStamp, + }, + { + Name: columns.TableName, + Type: types.SQLColumnTypeVarchar, + Length: 100, + }, + { + Name: columns.EventName, + Type: types.SQLColumnTypeVarchar, + Length: 100, + }, + { + Name: columns.EventFilter, + Type: types.SQLColumnTypeText, + }, + // We use varchar for height - there is no uint64 type though numeric could have been used. 
We obtain the + // maximum height by maxing over the serial ID type + { + Name: columns.Height, + Type: types.SQLColumnTypeVarchar, + Length: 100, + }, + { + Name: columns.TxHash, + Type: types.SQLColumnTypeVarchar, + Length: txs.HashLengthHex, + }, + { + Name: columns.Action, + Type: types.SQLColumnTypeVarchar, + Length: 20, + }, + { + Name: columns.DataRow, + Type: types.SQLColumnTypeJSON, + }, + { + Name: columns.SqlStmt, + Type: types.SQLColumnTypeText, + }, + { + Name: columns.SqlValues, + Type: types.SQLColumnTypeText, + }, + }, + NotifyChannels: map[string][]string{types.BlockHeightLabel: {columns.Height}}, + }, + tables.Dictionary: { + Name: tables.Dictionary, + Columns: []*types.SQLTableColumn{ + { + Name: columns.TableName, + Type: types.SQLColumnTypeVarchar, + Length: 100, + Primary: true, + }, + { + Name: columns.ColumnName, + Type: types.SQLColumnTypeVarchar, + Length: 100, + Primary: true, + }, + { + Name: columns.ColumnType, + Type: types.SQLColumnTypeInt, + Length: 0, + }, + { + Name: columns.ColumnLength, + Type: types.SQLColumnTypeInt, + Length: 0, + }, + { + Name: columns.PrimaryKey, + Type: types.SQLColumnTypeInt, + Length: 0, + }, + { + Name: columns.ColumnOrder, + Type: types.SQLColumnTypeInt, + Length: 0, + }, + }, + }, + tables.ChainInfo: { + Name: tables.ChainInfo, + Columns: []*types.SQLTableColumn{ + { + Name: columns.ChainID, + Type: types.SQLColumnTypeVarchar, + Length: 100, + Primary: true, + }, + { + Name: columns.BurrowVersion, + Type: types.SQLColumnTypeVarchar, + Length: 100, + }, + { + Name: columns.Height, + Type: types.SQLColumnTypeNumeric, + // Maps to numeric(20, 0) - a 20 digit integer value + Length: digits(maxUint64), + }, + }, + }, + } +} diff --git a/vent/sqldb/utils.go b/vent/sqldb/utils.go deleted file mode 100644 index b24eefb9f..000000000 --- a/vent/sqldb/utils.go +++ /dev/null @@ -1,510 +0,0 @@ -package sqldb - -import ( - "errors" - "fmt" - "math" - "strconv" - "strings" - - "github.com/hyperledger/burrow/vent/sqldb/adapters" - - "github.com/hyperledger/burrow/txs" - - "encoding/json" - - "github.com/hyperledger/burrow/vent/types" -) - -const maxUint64 uint64 = (1 << 64) - 1 - -var tables = types.DefaultSQLTableNames -var columns = types.DefaultSQLColumnNames - -// findTable checks if a table exists in the default schema -func (db *SQLDB) findTable(tableName string) (bool, error) { - - found := 0 - safeTable := safe(tableName) - query := db.DBAdapter.FindTableQuery() - - db.Log.InfoMsg("FIND TABLE", "query", query, "value", safeTable) - if err := db.DB.QueryRow(query, tableName).Scan(&found); err != nil { - db.Log.InfoMsg("Error finding table", "err", err) - return false, err - } - - if found == 0 { - db.Log.InfoMsg("Table not found", "value", safeTable) - return false, nil - } - - return true, nil -} - -// getSysTablesDefinition returns log, chain info & dictionary structures -func (db *SQLDB) getSysTablesDefinition() types.EventTables { - return types.EventTables{ - tables.Log: { - Name: tables.Log, - Columns: []*types.SQLTableColumn{ - { - Name: columns.Id, - Type: types.SQLColumnTypeSerial, - Primary: true, - }, - { - Name: columns.ChainID, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - { - Name: columns.TimeStamp, - Type: types.SQLColumnTypeTimeStamp, - }, - { - Name: columns.TableName, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - { - Name: columns.EventName, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - { - Name: columns.EventFilter, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - // We 
use varchar for height - there is no uint64 type though numeric could have been used. We obtain the - // maximum height by maxing over the serial ID type - { - Name: columns.Height, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - { - Name: columns.TxHash, - Type: types.SQLColumnTypeVarchar, - Length: txs.HashLengthHex, - }, - { - Name: columns.Action, - Type: types.SQLColumnTypeVarchar, - Length: 20, - }, - { - Name: columns.DataRow, - Type: types.SQLColumnTypeJSON, - Length: 0, - }, - { - Name: columns.SqlStmt, - Type: types.SQLColumnTypeText, - Length: 0, - }, - { - Name: columns.SqlValues, - Type: types.SQLColumnTypeText, - Length: 0, - }, - }, - NotifyChannels: map[string][]string{types.BlockHeightLabel: {columns.Height}}, - }, - tables.Dictionary: { - Name: tables.Dictionary, - Columns: []*types.SQLTableColumn{ - { - Name: columns.TableName, - Type: types.SQLColumnTypeVarchar, - Length: 100, - Primary: true, - }, - { - Name: columns.ColumnName, - Type: types.SQLColumnTypeVarchar, - Length: 100, - Primary: true, - }, - { - Name: columns.ColumnType, - Type: types.SQLColumnTypeInt, - Length: 0, - }, - { - Name: columns.ColumnLength, - Type: types.SQLColumnTypeInt, - Length: 0, - }, - { - Name: columns.PrimaryKey, - Type: types.SQLColumnTypeInt, - Length: 0, - }, - { - Name: columns.ColumnOrder, - Type: types.SQLColumnTypeInt, - Length: 0, - }, - }, - }, - tables.ChainInfo: { - Name: tables.ChainInfo, - Columns: []*types.SQLTableColumn{ - { - Name: columns.ChainID, - Type: types.SQLColumnTypeVarchar, - Length: 100, - Primary: true, - }, - { - Name: columns.BurrowVersion, - Type: types.SQLColumnTypeVarchar, - Length: 100, - }, - { - Name: columns.Height, - Type: types.SQLColumnTypeNumeric, - // Maps to numeric(20, 0) - a 20 digit integer value - Length: digits(maxUint64), - }, - }, - }, - } -} - -// getTableDef returns the structure of a given SQL table -func (db *SQLDB) getTableDef(tableName string) (*types.SQLTable, error) { - table := &types.SQLTable{ - Name: safe(tableName), - } - found, err := db.findTable(table.Name) - if err != nil { - return nil, err - } - - if !found { - db.Log.InfoMsg("Error table not found", "value", table.Name) - return nil, errors.New("Error table not found " + table.Name) - } - - query := db.DBAdapter.TableDefinitionQuery() - - db.Log.InfoMsg("QUERY STRUCTURE", "query", query, "value", table.Name) - rows, err := db.DB.Query(query, safe(tableName)) - if err != nil { - db.Log.InfoMsg("Error querying table structure", "err", err) - return nil, err - } - defer rows.Close() - - var columns []*types.SQLTableColumn - - for rows.Next() { - var columnName string - var columnSQLType types.SQLColumnType - var columnIsPK int - var columnLength int - - if err = rows.Scan(&columnName, &columnSQLType, &columnLength, &columnIsPK); err != nil { - db.Log.InfoMsg("Error scanning table structure", "err", err) - return nil, err - } - - if _, err = db.DBAdapter.TypeMapping(columnSQLType); err != nil { - return nil, err - } - - columns = append(columns, &types.SQLTableColumn{ - Name: columnName, - Type: columnSQLType, - Length: columnLength, - Primary: columnIsPK == 1, - }) - } - - if err = rows.Err(); err != nil { - db.Log.InfoMsg("Error during rows iteration", "err", err) - return nil, err - } - - table.Columns = columns - return table, nil -} - -// alterTable alters the structure of a SQL table & add info to the dictionary -func (db *SQLDB) alterTable(chainID string, table *types.SQLTable) error { - db.Log.InfoMsg("Altering table", "value", table.Name) - - // prepare 
log query - logQuery := db.DBAdapter.InsertLogQuery() - - // current table structure - safeTable := safe(table.Name) - currentTable, err := db.getTableDef(safeTable) - if err != nil { - return err - } - - sqlValues, _ := getJSON(nil) - - // for each column in the new table structure - for order, newColumn := range table.Columns { - found := false - - // check if exists in the current table structure - for _, currentColumn := range currentTable.Columns { - // if column exists - if currentColumn.Name == newColumn.Name { - found = true - break - } - } - - if !found { - safeCol := safe(newColumn.Name) - query, dictionary := db.DBAdapter.AlterColumnQuery(safeTable, safeCol, newColumn.Type, newColumn.Length, order) - - //alter column - db.Log.InfoMsg("ALTER TABLE", "query", safe(query)) - _, err = db.DB.Exec(safe(query)) - - if err != nil { - if db.DBAdapter.ErrorEquals(err, types.SQLErrorTypeDuplicatedColumn) { - db.Log.InfoMsg("Duplicate column", "value", safeCol) - } else { - db.Log.InfoMsg("Error altering table", "err", err) - return err - } - } else { - //store dictionary - db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) - _, err = db.DB.Exec(dictionary) - if err != nil { - db.Log.InfoMsg("Error storing dictionary", "err", err) - return err - } - - // Marshal the table into a JSON string. - var jsonData []byte - jsonData, err = getJSON(newColumn) - if err != nil { - db.Log.InfoMsg("error marshaling column", "err", err, "value", fmt.Sprintf("%v", newColumn)) - return err - } - //insert log - _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionAlterTable, jsonData, query, sqlValues) - if err != nil { - db.Log.InfoMsg("Error inserting log", "err", err) - return err - } - } - } - } - - // Ensure triggers are defined - err = db.createTableTriggers(table) - if err != nil { - db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) - return fmt.Errorf("could not create table notification triggers: %v", err) - } - return nil -} - -// createTable creates a new table -func (db *SQLDB) createTable(chainID string, table *types.SQLTable, isInitialise bool) error { - db.Log.InfoMsg("Creating Table", "value", table.Name) - - // prepare log query - logQuery := db.DBAdapter.InsertLogQuery() - - //get create table query - safeTable := safe(table.Name) - query, dictionary := db.DBAdapter.CreateTableQuery(safeTable, table.Columns) - if query == "" { - db.Log.InfoMsg("empty CREATE TABLE query") - return errors.New("empty CREATE TABLE query") - } - - // create table - db.Log.InfoMsg("CREATE TABLE", "query", query) - _, err := db.DB.Exec(query) - if err != nil { - return err - } - - //store dictionary - db.Log.InfoMsg("STORE DICTIONARY", "query", dictionary) - _, err = db.DB.Exec(dictionary) - if err != nil { - db.Log.InfoMsg("Error storing dictionary", "err", err) - return err - } - - err = db.createTableTriggers(table) - if err != nil { - db.Log.InfoMsg("error creating notification triggers", "err", err, "value", fmt.Sprintf("%v", table)) - return fmt.Errorf("could not create table notification triggers: %v", err) - } - - //insert log (if action is not database initialization) - if !isInitialise { - // Marshal the table into a JSON string. 
- var jsonData []byte - jsonData, err = getJSON(table) - if err != nil { - db.Log.InfoMsg("error marshaling table", "err", err, "value", fmt.Sprintf("%v", table)) - return err - } - sqlValues, _ := getJSON(nil) - - //insert log - _, err = db.DB.Exec(logQuery, chainID, table.Name, "", "", nil, nil, types.ActionCreateTable, jsonData, query, sqlValues) - if err != nil { - db.Log.InfoMsg("Error inserting log", "err", err) - return err - } - } - return nil -} - -// Creates (or updates) table notification triggers and functions -func (db *SQLDB) createTableTriggers(table *types.SQLTable) error { - // If the adapter supports notification triggers - dbNotify, ok := db.DBAdapter.(adapters.DBNotifyTriggerAdapter) - if ok { - for channel, columns := range table.NotifyChannels { - function := fmt.Sprintf("%s_%s_notify_function", table.Name, channel) - - query := dbNotify.CreateNotifyFunctionQuery(function, channel, columns...) - db.Log.InfoMsg("CREATE NOTIFICATION FUNCTION", "query", query) - _, err := db.DB.Exec(query) - if err != nil { - return fmt.Errorf("could not create notification function: %v", err) - } - - trigger := fmt.Sprintf("%s_%s_notify_trigger", table.Name, channel) - query = dbNotify.CreateTriggerQuery(trigger, table.Name, function) - db.Log.InfoMsg("CREATE NOTIFICATION TRIGGER", "query", query) - _, err = db.DB.Exec(query) - if err != nil { - return fmt.Errorf("could not create notification trigger: %v", err) - } - } - } - return nil -} - -// getSelectQuery builds a select query for a specific SQL table and a given block -func (db *SQLDB) getSelectQuery(table *types.SQLTable, height uint64) (string, error) { - - fields := "" - - for _, tableColumn := range table.Columns { - if fields != "" { - fields += ", " - } - fields += db.DBAdapter.SecureName(tableColumn.Name) - } - - if fields == "" { - return "", errors.New("error table does not contain any fields") - } - - query := db.DBAdapter.SelectRowQuery(table.Name, fields, strconv.FormatUint(height, 10)) - return query, nil -} - -// getBlockTables return all SQL tables that have been involved -// in a given batch transaction for a specific block -func (db *SQLDB) getBlockTables(chainid string, height uint64) (types.EventTables, error) { - tables := make(types.EventTables) - - query := db.DBAdapter.SelectLogQuery() - db.Log.InfoMsg("QUERY LOG", "query", query, "height", height, "chainid", chainid) - - rows, err := db.DB.Query(query, height, chainid) - if err != nil { - db.Log.InfoMsg("Error querying log", "err", err) - return tables, err - } - - defer rows.Close() - - for rows.Next() { - var eventName, tableName string - var table *types.SQLTable - - err = rows.Scan(&tableName, &eventName) - if err != nil { - db.Log.InfoMsg("Error scanning table structure", "err", err) - return tables, err - } - - err = rows.Err() - if err != nil { - db.Log.InfoMsg("Error scanning table structure", "err", err) - return tables, err - } - - table, err = db.getTableDef(tableName) - if err != nil { - return tables, err - } - - tables[tableName] = table - } - - return tables, nil -} - -// safe sanitizes a parameter -func safe(parameter string) string { - replacer := strings.NewReplacer(";", "", ",", "") - return replacer.Replace(parameter) -} - -//getJSON returns marshaled json from JSON single column -func getJSON(JSON interface{}) ([]byte, error) { - if JSON != nil { - return json.Marshal(JSON) - } - return json.Marshal("") -} - -//getJSONFromValues returns marshaled json from query values -func getJSONFromValues(values []interface{}) ([]byte, error) { - if 
values != nil { - return json.Marshal(values) - } - return json.Marshal("") -} - -//getValuesFromJSON returns query values from unmarshaled JSON column -func getValuesFromJSON(JSON string) ([]interface{}, error) { - pointers := make([]interface{}, 0) - bytes := []byte(JSON) - err := json.Unmarshal(bytes, &pointers) - if err != nil { - return nil, err - } - for i, ptr := range pointers { - switch v := ptr.(type) { - // Normalise integral floats - case float64: - i64 := int64(v) - if float64(i64) == v { - pointers[i] = i64 - } - } - } - return pointers, nil -} - -func digits(x uint64) int { - if x == 0 { - return 1 - } - return int(math.Log10(float64(x))) + 1 -} diff --git a/vent/sqldb/utils_test.go b/vent/sqldb/utils_test.go deleted file mode 100644 index c2f95ee4f..000000000 --- a/vent/sqldb/utils_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package sqldb - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDigits(t *testing.T) { - s := fmt.Sprintf("%v", maxUint64) - assert.Len(t, s, digits(maxUint64)) - assert.Equal(t, 1, digits(1)) - assert.Equal(t, 1, digits(1)) - assert.Equal(t, 1, digits(2)) - assert.Equal(t, 2, digits(10)) -} diff --git a/vent/sqlsol/generate_test.go b/vent/sqlsol/generate_test.go index 7a87561ef..2f1d10c29 100644 --- a/vent/sqlsol/generate_test.go +++ b/vent/sqlsol/generate_test.go @@ -19,32 +19,32 @@ func TestGenerateSpecFromAbis(t *testing.T) { require.ElementsMatch(t, project[0].FieldMappings, []*types.EventFieldMapping{ - &types.EventFieldMapping{ + { Field: "trueism", ColumnName: "trueism", Type: "bool", }, - &types.EventFieldMapping{ + { Field: "german", ColumnName: "german", Type: "string", }, - &types.EventFieldMapping{ + { Field: "newDepth", ColumnName: "newDepth", Type: "int128", }, - &types.EventFieldMapping{ + { Field: "bignum", ColumnName: "bignum", Type: "int256", }, - &types.EventFieldMapping{ + { Field: "hash", ColumnName: "hash", Type: "bytes32", }, - &types.EventFieldMapping{ + { Field: "direction", ColumnName: "direction", Type: "bytes32", diff --git a/vent/sqlsol/projection.go b/vent/sqlsol/projection.go index 2e11b4f65..807eaf0d3 100644 --- a/vent/sqlsol/projection.go +++ b/vent/sqlsol/projection.go @@ -18,30 +18,30 @@ import ( // Projection contains EventTable, Event & Abi specifications type Projection struct { - Tables types.EventTables - EventSpec types.EventSpec + Tables types.EventTables + Spec types.ProjectionSpec } // NewProjectionFromBytes creates a Projection from a stream of bytes func NewProjectionFromBytes(bs []byte) (*Projection, error) { - eventSpec := types.EventSpec{} + spec := types.ProjectionSpec{} - err := ValidateJSONEventSpec(bs) + err := ValidateJSONSpec(bs) if err != nil { return nil, err } - err = json.Unmarshal(bs, &eventSpec) + err = json.Unmarshal(bs, &spec) if err != nil { - return nil, errors.Wrap(err, "Error unmarshalling eventSpec") + return nil, errors.Wrap(err, "Error unmarshalling spec") } - return NewProjectionFromEventSpec(eventSpec) + return NewProjection(spec) } // NewProjectionFromFolder creates a Projection from a folder containing spec files func NewProjectionFromFolder(specFileOrDirs ...string) (*Projection, error) { - eventSpec := types.EventSpec{} + spec := types.ProjectionSpec{} const errHeader = "NewProjectionFromFolder():" @@ -56,18 +56,18 @@ func NewProjectionFromFolder(specFileOrDirs ...string) (*Projection, error) { return fmt.Errorf("error reading spec file '%s': %v", path, err) } - err = ValidateJSONEventSpec(bs) + err = ValidateJSONSpec(bs) if err != nil { return 
fmt.Errorf("could not validate spec file '%s': %v", path, err) } - fileEventSpec := types.EventSpec{} + fileEventSpec := types.ProjectionSpec{} err = json.Unmarshal(bs, &fileEventSpec) if err != nil { return fmt.Errorf("error reading spec file '%s': %v", path, err) } - eventSpec = append(eventSpec, fileEventSpec...) + spec = append(spec, fileEventSpec...) } return nil @@ -77,18 +77,18 @@ func NewProjectionFromFolder(specFileOrDirs ...string) (*Projection, error) { } } - return NewProjectionFromEventSpec(eventSpec) + return NewProjection(spec) } -// NewProjectionFromEventSpec receives a sqlsol event specification +// Takes a sqlsol event specification // and returns a pointer to a filled projection structure // that contains event types mapped to SQL column types // and Event tables structures with table and columns info -func NewProjectionFromEventSpec(eventSpec types.EventSpec) (*Projection, error) { +func NewProjection(spec types.ProjectionSpec) (*Projection, error) { // builds abi information from specification tables := make(types.EventTables) - for _, eventClass := range eventSpec { + for _, eventClass := range spec { // validate json structure if err := eventClass.Validate(); err != nil { return nil, fmt.Errorf("validation error on %v: %v", eventClass, err) @@ -167,8 +167,8 @@ func NewProjectionFromEventSpec(eventSpec types.EventSpec) (*Projection, error) } return &Projection{ - Tables: tables, - EventSpec: eventSpec, + Tables: tables, + Spec: spec, }, nil } @@ -186,8 +186,8 @@ func (p *Projection) GetColumn(tableName, columnName string) (*types.SQLTableCol return nil, fmt.Errorf("GetColumn: table does not exist projection: %s ", tableName) } -func ValidateJSONEventSpec(bs []byte) error { - schemaLoader := gojsonschema.NewGoLoader(types.EventSpecSchema()) +func ValidateJSONSpec(bs []byte) error { + schemaLoader := gojsonschema.NewGoLoader(types.ProjectionSpecSchema()) specLoader := gojsonschema.NewBytesLoader(bs) result, err := gojsonschema.Validate(schemaLoader, specLoader) if err != nil { @@ -199,7 +199,7 @@ func ValidateJSONEventSpec(bs []byte) error { for i, err := range result.Errors() { errs[i] = err.String() } - return fmt.Errorf("EventSpec failed JSONSchema validation:\n%s", strings.Join(errs, "\n")) + return fmt.Errorf("ProjectionSpec failed JSONSchema validation:\n%s", strings.Join(errs, "\n")) } return nil } diff --git a/vent/sqlsol/projection_test.go b/vent/sqlsol/projection_test.go index c740ae9d3..ed9e7a439 100644 --- a/vent/sqlsol/projection_test.go +++ b/vent/sqlsol/projection_test.go @@ -119,26 +119,26 @@ func TestGetTables(t *testing.T) { }) } -func TestGetEventSpec(t *testing.T) { +func TestNewProjectionFromBytes(t *testing.T) { goodJSON := test.GoodJSONConfFile(t) byteValue := []byte(goodJSON) tableStruct, _ := sqlsol.NewProjectionFromBytes(byteValue) t.Run("successfully returns event specification structures", func(t *testing.T) { - eventSpec := tableStruct.EventSpec - require.Equal(t, 2, len(eventSpec)) - require.Equal(t, "LOG0 = 'UserAccounts'", eventSpec[0].Filter) - require.Equal(t, "UserAccounts", eventSpec[0].TableName) + spec := tableStruct.Spec + require.Equal(t, 2, len(spec)) + require.Equal(t, "LOG0 = 'UserAccounts'", spec[0].Filter) + require.Equal(t, "UserAccounts", spec[0].TableName) - require.Equal(t, "Log1Text = 'EVENT_TEST'", eventSpec[1].Filter) - require.Equal(t, "TEST_TABLE", eventSpec[1].TableName) + require.Equal(t, "Log1Text = 'EVENT_TEST'", spec[1].Filter) + require.Equal(t, "TEST_TABLE", spec[1].TableName) }) } -func 
TestNewProjectionFromEventSpec(t *testing.T) { +func TestProjectionSpec(t *testing.T) { tableName := "BurnNotices" - eventSpec := types.EventSpec{ + spec := types.ProjectionSpec{ { TableName: tableName, Filter: "LOG1Text = 'CIA/burn'", @@ -208,29 +208,29 @@ func TestNewProjectionFromEventSpec(t *testing.T) { }, }, } - projection, err := sqlsol.NewProjectionFromEventSpec(eventSpec) + projection, err := sqlsol.NewProjection(spec) require.NoError(t, err, "burn and unreliable field mappings should unify to single column") require.Equal(t, []string{"burnt", "name"}, projection.Tables[tableName].NotifyChannels["burn"]) // Notify sugars on the burn channel - field := eventSpec[1].GetFieldMapping("sugars") + field := spec[1].GetFieldMapping("sugars") field.Notify = append(field.Notify, "burn") - projection, err = sqlsol.NewProjectionFromEventSpec(eventSpec) + projection, err = sqlsol.NewProjection(spec) require.NoError(t, err) require.Equal(t, []string{"burnt", "name", "tea_sugars"}, projection.Tables[tableName].NotifyChannels["burn"]) // Create a column conflict between burn and unreliable fields (both map to burnt so the SQL column def must be identical) - field = eventSpec[1].GetFieldMapping("unreliable") + field = spec[1].GetFieldMapping("unreliable") field.Primary = !field.Primary - _, err = sqlsol.NewProjectionFromEventSpec(eventSpec) + _, err = sqlsol.NewProjection(spec) require.Error(t, err) } func TestWithNoPrimaryKey(t *testing.T) { tableName := "BurnNotices" - eventSpec := types.EventSpec{ + spec := types.ProjectionSpec{ { TableName: tableName, Filter: "LOG1Text = 'CIA/burn'", @@ -264,13 +264,13 @@ func TestWithNoPrimaryKey(t *testing.T) { }, } - _, err := sqlsol.NewProjectionFromEventSpec(eventSpec) + _, err := sqlsol.NewProjection(spec) require.Error(t, err, "no DeleteMarkerField allowed if no primary key on") // Try again and now check that the right fields are primary - eventSpec[0].DeleteMarkerField = "" + spec[0].DeleteMarkerField = "" - projection, err := sqlsol.NewProjectionFromEventSpec(eventSpec) + projection, err := sqlsol.NewProjection(spec) require.NoError(t, err, "projection with no primary key should be allowed") for _, c := range projection.Tables[tableName].Columns { @@ -288,7 +288,7 @@ func TestWithNoPrimaryKey(t *testing.T) { } } - eventSpec = types.EventSpec{ + spec = types.ProjectionSpec{ { TableName: tableName, Filter: "LOG1Text = 'CIA/burn'", @@ -322,7 +322,7 @@ func TestWithNoPrimaryKey(t *testing.T) { }, } - projection, err = sqlsol.NewProjectionFromEventSpec(eventSpec) + projection, err = sqlsol.NewProjection(spec) require.NoError(t, err, "projection with primary key should be allowed") for _, c := range projection.Tables[tableName].Columns { diff --git a/vent/sqlsol/spec_loader.go b/vent/sqlsol/spec_loader.go index f3ea0261e..0fd1319ab 100644 --- a/vent/sqlsol/spec_loader.go +++ b/vent/sqlsol/spec_loader.go @@ -19,6 +19,10 @@ const ( BlockTx = Block | Tx ) +func (so SpecOpt) Enabled(opt SpecOpt) bool { + return so&opt > 0 +} + // SpecLoader loads spec files and parses them func SpecLoader(specFileOrDirs []string, opts SpecOpt) (*Projection, error) { var projection *Projection @@ -34,12 +38,12 @@ func SpecLoader(specFileOrDirs []string, opts SpecOpt) (*Projection, error) { } // add block & tx to tables definition - if Block&opts > 0 { + if opts.Enabled(Block) { for k, v := range blockTables() { projection.Tables[k] = v } } - if Tx&opts > 0 { + if opts.Enabled(Tx) { for k, v := range txTables() { projection.Tables[k] = v } diff --git a/vent/test/sqlsol_view.json 
b/vent/test/sqlsol_view.json index 1c116410a..4f7c94621 100644 --- a/vent/test/sqlsol_view.json +++ b/vent/test/sqlsol_view.json @@ -1,7 +1,7 @@ [ { "TableName": "EventTest", - "Filter": "EventType = 'LogEvent'", + "Filter": "EventType = 'LogEvent' OR EventType = 'MultiSpoon' OR EventType = 'Splodge-Gniver' OR EventType = 'Splodge-Gniver' OR EventType = 'COLY-BLOPS'", "DeleteMarkerField": "__DELETE__", "FieldMappings": [ { diff --git a/vent/types/event_class.go b/vent/types/event_class.go index 2164c5472..ae28c4b9e 100644 --- a/vent/types/event_class.go +++ b/vent/types/event_class.go @@ -6,11 +6,11 @@ import ( "github.com/hyperledger/burrow/event/query" ) -// EventSpec contains all event class specifications -type EventSpec []*EventClass +// ProjectionSpec contains all event class specifications +type ProjectionSpec []*EventClass -func EventSpecSchema() *jsonschema.Schema { - return jsonschema.Reflect(EventSpec{}) +func ProjectionSpecSchema() *jsonschema.Schema { + return jsonschema.Reflect(ProjectionSpec{}) } // EventClass struct (table name where to persist filtered events and it structure) diff --git a/vent/types/event_class_test.go b/vent/types/event_class_test.go index 6bc78cf06..229dc3b30 100644 --- a/vent/types/event_class_test.go +++ b/vent/types/event_class_test.go @@ -8,6 +8,6 @@ import ( ) func TestEventTablesSchema(t *testing.T) { - schema := EventSpecSchema() + schema := ProjectionSpecSchema() fmt.Println(source.JSONString(schema)) } diff --git a/vent/types/sql_table.go b/vent/types/sql_table.go index 4c3670740..d5f2642b0 100644 --- a/vent/types/sql_table.go +++ b/vent/types/sql_table.go @@ -29,7 +29,8 @@ type SQLTableColumn struct { Name string Type SQLColumnType Primary bool - Length int + // Length of variable column type where applicable 0 indicates variable/unbounded length + Length int } func (col *SQLTableColumn) String() string { From 9f99015e55ed64016a7b18eb3819aa3b97503d9a Mon Sep 17 00:00:00 2001 From: Gregory Hill Date: Wed, 14 Aug 2019 15:10:41 +0100 Subject: [PATCH 69/70] render markdown docs via docsify Signed-off-by: Gregory Hill --- README.md | 30 +-------- docs/.nojekyll | 0 docs/INSTALL.md | 6 ++ docs/README.md | 37 +++++------ docs/_sidebar.md | 17 +++++ docs/index.html | 23 +++++++ docs/quickstart/add-validators.md | 75 ----------------------- docs/quickstart/multiple-validators.md | 6 +- docs/quickstart/single-full-node.md | 6 +- docs/{ => reference}/genesis.md | 0 docs/{LOGGING.md => reference/logging.md} | 0 docs/{ => reference}/permissions.md | 0 docs/{architecture => reference}/state.md | 0 13 files changed, 65 insertions(+), 135 deletions(-) create mode 100644 docs/.nojekyll create mode 100644 docs/_sidebar.md create mode 100644 docs/index.html delete mode 100644 docs/quickstart/add-validators.md rename docs/{ => reference}/genesis.md (100%) rename docs/{LOGGING.md => reference/logging.md} (100%) rename docs/{ => reference}/permissions.md (100%) rename docs/{architecture => reference}/state.md (100%) diff --git a/README.md b/README.md index fc5ba7ac5..97a4d3660 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,6 @@ [![LoC](https://tokei.rs/b1/github/hyperledger/burrow?category=lines)](https://github.com/hyperledger/burrow) [![codecov](https://codecov.io/gh/hyperledger/burrow/branch/develop/graph/badge.svg)](https://codecov.io/gh/hyperledger/burrow) - Branch | Linux ----------|------ | Master | [![Circle CI](https://circleci.com/gh/hyperledger/burrow/tree/master.svg?style=svg)](https://circleci.com/gh/hyperledger/burrow/tree/master) | @@ -31,32 +30,9 
@@ Hyperledger Burrow is a permissioned blockchain node that executes smart contrac Project information generally updated on a quarterly basis can be found on the [Hyperledger Burrow Wiki](https://wiki.hyperledger.org/display/burrow). -## Minimum requirements - -Requirement|Notes ----|--- -Go version | Go1.11 or higher - -## Installation - -See the [install instructions](docs/INSTALL.md). - -## Quick Start -1. [Single full node](docs/quickstart/single-full-node.md) - start your first chain -1. [Send transactions](docs/quickstart/send-transactions.md) - how to communicate with your Burrow chain -1. [Deploy contracts](docs/quickstart/deploy-contracts.md) - interact with the Ethereum Virtual Machine -1. [Multiple validators](docs/quickstart/multiple-validators.md) - advanced consensus setup -1. [Add validators](docs/quickstart/add-validators.md) - bonding a new party -1. [Seed nodes](docs/quickstart/seed-nodes.md) - add new node dynamically -1. [Kubernetes](https://github.com/helm/charts/tree/master/stable/burrow) - bootstraps a burrow network on a Kubernetes cluster - -## Project documentation +## Documentation Burrow getting started documentation is available in the [docs](docs/README.md) directory and in [GoDocs](https://godoc.org/github.com/hyperledger/burrow). -### Reference - -- [Genesis](docs/genesis.md) - the 'chain making' or genesis process -- [Permissions](docs/permissions.md) - understand the permissions model at the core of Burrow ## Contribute @@ -67,10 +43,6 @@ You can find us on: - [Hyperledger Mailing List](https://lists.hyperledger.org/mailman/listinfo) - [here on Github](https://github.com/hyperledger/burrow/issues) -## Future work - -For some (slightly outdated) ideas on future work, see the [proposals document](docs/PROPOSALS.md). - ## License [Apache 2.0](LICENSE.md) diff --git a/docs/.nojekyll b/docs/.nojekyll new file mode 100644 index 000000000..e69de29bb diff --git a/docs/INSTALL.md b/docs/INSTALL.md index 1a40d03ee..281627497 100644 --- a/docs/INSTALL.md +++ b/docs/INSTALL.md @@ -1,3 +1,9 @@ +## Minimum requirements + +Requirement|Notes +---|--- +Go version | Go1.11 or higher + ## Installation - [Install go](https://golang.org/doc/install) version 1.11 or above and have `$GOPATH` set diff --git a/docs/README.md b/docs/README.md index 432507d6b..15d0abbbd 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,21 +1,16 @@ -# Hyperledger Burrow Documentation - -Hyperledger Burrow is a permissioned Ethereum smart-contract blockchain node. It executes Ethereum EVM smart contract code (usually written in [Solidity](https://solidity.readthedocs.io)) on a permissioned virtual machine. Burrow provides transaction finality and high transaction throughput on a proof-of-stake [Tendermint](https://tendermint.com) consensus engine. - -![burrow logo](assets/images/burrow.png) - -1. [Installation](INSTALL.md) -2. [Logging](LOGGING.md) -3. [Quickstart](quickstart) - * [Single full node](quickstart/single-full-node.md) - start your first chain - * [Send transactions](quickstart/send-transactions.md) - how to communicate with your Burrow chain - * [Deploy contracts](quickstart/deploy-contracts.md) - interact with the Ethereum Virtual Machine - * [Multiple validators](quickstart/multiple-validators.md) - advanced consensus setup - * [Bonding validators](quickstart/bonding-validators.md) - bonding yourself on - * [Seed nodes](quickstart/seed-nodes.md) - add new nodes dynamically - * [Dump / restore](design/dump-restore.md) - create a new chain with previous state -4. 
[Genesis](design/genesis.md) -5. [Permissions](design/permissions.md) -6. [Architecture](architecture) - * [State](arch/state.md) -7. [Kubernetes](https://github.com/helm/charts/tree/master/stable/burrow) - bootstraps a burrow network on a Kubernetes cluster +# Introduction + +Hyperledger Burrow is a permissioned Ethereum smart-contract blockchain node. It executes Ethereum EVM smart contract code (usually written in [Solidity](https://solidity.readthedocs.io)) on a permissioned virtual machine. Burrow provides transaction finality and high transaction throughput on a proof-of-stake [Tendermint](https://tendermint.com) consensus engine. + +![burrow logo](assets/images/burrow.png) + +## What is Burrow + +Hyperledger Burrow is a permissioned blockchain node that executes smart contract code following the Ethereum specification. Burrow is built for a multi-chain universe with application specific optimization in mind. Burrow as a node is constructed out of three main components: the consensus engine, the permissioned Ethereum virtual machine and the rpc gateway. More specifically Burrow consists of the following: + +- **Consensus Engine:** Transactions are ordered and finalised with the Byzantine fault-tolerant Tendermint protocol. The Tendermint protocol provides high transaction throughput over a set of known validators and prevents the blockchain from forking. +- **Application Blockchain Interface (ABCI):** The smart contract application interfaces with the consensus engine over the [ABCI](https://github.com/tendermint/tendermint/abci). The ABCI allows for the consensus engine to remain agnostic from the smart contract application. +- **Smart Contract Application:** Transactions are validated and applied to the application state in the order that the consensus engine has finalised them. The application state consists of all accounts, the validator set and the name registry. Accounts in Burrow have permissions and either contain smart contract code or correspond to a public-private key pair. A transaction that calls on the smart contract code in a given account will activate the execution of that account’s code in a permissioned virtual machine. +- **Permissioned Ethereum Virtual Machine:** This virtual machine is built to observe the Ethereum operation code specification and additionally asserts the correct permissions have been granted. Permissioning is enforced through secure native functions and underlies all smart contract code. An arbitrary but finite amount of gas is handed out for every execution to ensure a finite execution duration - “You don’t need money to play, when you have permission to play”. +- **Application Binary Interface (ABI):** Transactions need to be formulated in a binary format that can be processed by the blockchain node. Current tooling provides functionality to compile, deploy and link solidity smart contracts and formulate transactions to call smart contracts on the chain. +- **API Gateway:** Burrow exposes REST and JSON-RPC endpoints to interact with the blockchain network and the application state through broadcasting transactions, or querying the current state of the application. Websockets allow subscribing to events, which is particularly valuable as the consensus engine and smart contract application can give unambiguously finalised results to transactions within one blocktime of about one second. 
\ No newline at end of file diff --git a/docs/_sidebar.md b/docs/_sidebar.md new file mode 100644 index 000000000..2598a6a08 --- /dev/null +++ b/docs/_sidebar.md @@ -0,0 +1,17 @@ +- Burrow + - [Introduction](README.md) + - [Installation](INSTALL.md) +- Tutorials + - [Single Node](quickstart/single-full-node.md) + - [Send Transactions](quickstart/send-transactions.md) + - [Deploy Contracts](quickstart/deploy-contracts.md) + - [Multiple Validators](quickstart/multiple-validators.md) + - [Add Validators](quickstart/bonding-validators.md) + - [Seed Nodes](quickstart/seed-nodes.md) + - [Dump-Restore](quickstart/dump-restore.md) + - [Kubernetes](https://github.com/helm/charts/tree/master/stable/burrow) +- Reference + - [Genesis](reference/genesis.md) + - [Logging](reference/logging.md) + - [Permissions](reference/permissions.md) + - [State](reference/state.md) \ No newline at end of file diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 000000000..ae986d73c --- /dev/null +++ b/docs/index.html @@ -0,0 +1,23 @@ + + + + + Document + + + + + + +
+ + + + diff --git a/docs/quickstart/add-validators.md b/docs/quickstart/add-validators.md deleted file mode 100644 index a1b66fa3b..000000000 --- a/docs/quickstart/add-validators.md +++ /dev/null @@ -1,75 +0,0 @@ -# Add Validator - -For this example, make sure you have the latest `burrow` binary and JSON parsing tool `jq` installed. - -First, let's start a network with two running validators: - -```bash -burrow spec -f2 | burrow configure -s- --pool --separate-genesis-doc=genesis.json -burrow start --config=burrow000.toml & -burrow start --config=burrow001.toml & -``` - -Next, fetch a persistent peer and the validator address of the first node: - -```bash -PERSISTENT_PEER=$(cat burrow000.toml | grep PersistentPeers | cut -d \" -f2) -OLD_VALIDATOR=$(cat burrow000.toml | grep ValidatorAddress | cut -d \" -f2) -``` - -Let's generate the config and keys for a new validator account. As this node will be joining an existing network we won't need the GenesisDoc, but we will need to give it the persistent peer address we obtained above. Unless you want to do these steps manually, please use the following commands: - -```bash -burrow spec -v1 | burrow configure -s- --json > burrow-new.json -NEW_VALIDATOR=$(jq -r '.GenesisDoc.Accounts[0].PublicKey.PublicKey' burrow-new.json) -jq 'del(.GenesisDoc)' burrow-new.json | jq ".Tendermint.PersistentPeers=\"$PERSISTENT_PEER\"" | jq '.RPC.Info.Enabled=false' | jq '.RPC.GRPC.Enabled=false' | jq '.Tendermint.ListenPort="25565"' > burrow003.json -``` - -Copy the following script into `deploy.yaml`: - -```yaml -jobs: -- name: InitialTotalPower - query-vals: - field: "Set.TotalPower" - -- name: AddValidator - update-account: - target: NEW_VALIDATOR - power: 232322 - -- name: CheckAdded - query-vals: - field: "Set.${AddValidator.address}.Power" - -- name: AssertPowerNonZero - assert: - key: $CheckAdded - relation: gt - val: 0 - -- name: AssertPowerEqual - assert: - key: $CheckAdded - relation: eq - val: $AddValidator.power -``` - -If you haven't already, swap in the public key for your new validator and run the deployment to give the account stake: - -```bash -sed -i "s/NEW_VALIDATOR/$NEW_VALIDATOR/" deploy.yaml -burrow deploy -c 127.0.0.1:10997 --mempool-signing=true --address=$OLD_VALIDATOR deploy.yaml -``` - -If this returns successfully, you'll be able to see that the new validator is now in the running set: - -```bash -curl -s 127.0.0.1:26758/consensus -``` - -Let's start the new validator and watch it catch up: - -```bash -burrow start --config=burrow003.json --genesis=genesis.json -``` \ No newline at end of file diff --git a/docs/quickstart/multiple-validators.md b/docs/quickstart/multiple-validators.md index 644c92b04..313ee441b 100644 --- a/docs/quickstart/multiple-validators.md +++ b/docs/quickstart/multiple-validators.md @@ -24,8 +24,4 @@ burrow start --config=burrow001.toml If the connection successed, you will see empty blocks automatically created `Sending vote message` and `Finalizing commit of block with 0 txs`, you can see consensus state: ```bash curl -s 127.0.0.1:26758/consensus -``` - -## Send transactions to the blockchain - -You can start to [send transactions](send-transactions.md). +``` \ No newline at end of file diff --git a/docs/quickstart/single-full-node.md b/docs/quickstart/single-full-node.md index 131cb17e4..dedbbb5b3 100644 --- a/docs/quickstart/single-full-node.md +++ b/docs/quickstart/single-full-node.md @@ -36,8 +36,4 @@ burrow start --address=BE584820DC904A55449D7EB0C97607B40224B96E and the logs will start streaming through. 
If you would like to reset your node, you can just delete its working directory with `rm -rf .burrow`. In the context of a
-multi-node chain it will resync with peers, otherwise it will restart from height 0.
-
-## Send transactions to the blockchain
-
-You can start to [send transactions](send-transactions.md).
\ No newline at end of file
+multi-node chain it will resync with peers, otherwise it will restart from height 0.
\ No newline at end of file
diff --git a/docs/genesis.md b/docs/reference/genesis.md
similarity index 100%
rename from docs/genesis.md
rename to docs/reference/genesis.md
diff --git a/docs/LOGGING.md b/docs/reference/logging.md
similarity index 100%
rename from docs/LOGGING.md
rename to docs/reference/logging.md
diff --git a/docs/permissions.md b/docs/reference/permissions.md
similarity index 100%
rename from docs/permissions.md
rename to docs/reference/permissions.md
diff --git a/docs/architecture/state.md b/docs/reference/state.md
similarity index 100%
rename from docs/architecture/state.md
rename to docs/reference/state.md
From 7074a8a8f03b1ccc0de502f4e97447d1c57185cf Mon Sep 17 00:00:00 2001
From: Silas Davis
Date: Wed, 14 Aug 2019 17:05:06 +0100
Subject: [PATCH 70/70] Changelog for 0.28.0

Signed-off-by: Silas Davis
---
 CHANGELOG.md       | 28 ++++++++++++++++++++++++++++
 NOTES.md           | 22 +++++++++++++++++-----
 project/history.go | 21 +++++++++++++++------
 3 files changed, 60 insertions(+), 11 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 078ae7298..6e0a3ed00 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,30 @@
 # [Hyperledger Burrow](https://github.com/hyperledger/burrow) Changelog
+## [Unreleased]
+
+
+## [0.28.0] - 2019-08-14
+### Changed
+- [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) to avoid bug-prone conversion
+- [Dump] Improved structure and API
+- [Dump] Default to JSON output and use protobuf for binary output
+
+### Fixed
+- [Dump] Fix dump missing events emitted at end height provided
+- [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline
+- [RPC/Info] Fix panic in /names and implement properly - now accepts a 'regex' parameter which is a regular expression to match names. Empty for all names.
+- [Configure] burrow configure flags --separate-genesis-doc and --pool now work together
+
+### Added
+- [State] Burrow now remembers contract ABIs (which describe how to pack bits when calling contracts) - burrow deploy and vent will both use chain-hosted ABIs if they are available
+- [State] Bond and unbond transactions are now implemented to allow validators to transfer native token into validator power.
+- [Dump] Better tests, mocks, and benchmarks - suitable for profiling IAVL
+- [Events] Filters now support OR connective
+- [Vent] Projections can now have filters longer than 100 characters.
+- [Vent] Falls back to local ABI
+- [CLI/RPC] Contracts now hold metadata, including contract name, source file, and function names
+
+
+
 ## [0.27.0] - 2019-06-23
 ### Added
 - [WASM] Support for WASM contracts written in Solidity compiled using solang
@@ -515,6 +541,8 @@ This release marks the start of Eris-DB as the full permissioned blockchain node
 - [Blockchain] Fix getBlocks to respect block height cap.
+[Unreleased]: https://github.com/hyperledger/burrow/compare/v0.28.0...HEAD
+[0.28.0]: https://github.com/hyperledger/burrow/compare/v0.27.0...v0.28.0
 [0.27.0]: https://github.com/hyperledger/burrow/compare/v0.26.2...v0.27.0
 [0.26.2]: https://github.com/hyperledger/burrow/compare/v0.26.1...v0.26.2
 [0.26.1]: https://github.com/hyperledger/burrow/compare/v0.26.0...v0.26.1
diff --git a/NOTES.md b/NOTES.md
index 30d091dc2..fc4b0a62a 100644
--- a/NOTES.md
+++ b/NOTES.md
@@ -1,9 +1,21 @@
-### Added
-- [WASM] Support for WASM contracts written in Solidity compiled using solang
+### Changed
+- [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) to avoid bug-prone conversion
+- [Dump] Improved structure and API
+- [Dump] Default to JSON output and use protobuf for binary output
 
 ### Fixed
--[RPC/Transact] CallCodeSim and CallTxSim were run against uncommitted checker state rather than committed state were all other reads are routed. They were also passed through Transactor for no particularly good reason. This changes them to run against committed DB state and removes the code path through Transactor.
+- [Dump] Fix dump missing events emitted at end height provided
+- [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline
+- [RPC/Info] Fix panic in /names and implement properly - now accepts a 'regex' parameter which is a regular expression to match names. Empty for all names.
+- [Configure] burrow configure flags --separate-genesis-doc and --pool now work together
+
+### Added
+- [State] Burrow now remembers contract ABIs (which describe how to pack bits when calling contracts) - burrow deploy and vent will both use chain-hosted ABIs if they are available
+- [State] Bond and unbond transactions are now implemented to allow validators to transfer native token into validator power.
+- [Dump] Better tests, mocks, and benchmarks - suitable for profiling IAVL
+- [Events] Filters now support OR connective
+- [Vent] Projections can now have filters longer than 100 characters.
+- [Vent] Falls back to local ABI
+- [CLI/RPC] Contracts now hold metadata, including contract name, source file, and function names
 
-### Changed
-- [State] TxExecution's Envelope now stored in state so will be reproduced in Vent Tx tables and over RPC, and so matches TxExecutions served from *Sync rpctransact methods
diff --git a/project/history.go b/project/history.go
index 2e491a4f6..88df02b2f 100644
--- a/project/history.go
+++ b/project/history.go
@@ -49,18 +49,27 @@ func FullVersion() string {
 var History relic.ImmutableHistory = relic.NewHistory("Hyperledger Burrow", "https://github.com/hyperledger/burrow").
 	MustDeclareReleases(
 		"",
-		`### Fixed
-- [Dump] Fix dump missing events emitted at end height provided
-- [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline
-- [RPC/Info] Fix panic in /names and implement properly - now accepts a 'regex' parameter which is a regular expression to match names. Empty for all names.
-
-### Changed
+		``,
+		"0.28.0 - 2019-08-14",
+		`### Changed
 - [State] IterateStreamEvents now takes inclusive start and end points (end used to be exclusive) to avoid bug-prone conversion
 - [Dump] Improved structure and API
 - [Dump] Default to JSON output and use protobuf for binary output
 
+### Fixed
+- [Dump] Fix dump missing events emitted at end height provided
+- [Dump] EVM events were not dumped if no height was provided to burrow dump remote commandline
+- [RPC/Info] Fix panic in /names and implement properly - now accepts a 'regex' parameter which is a regular expression to match names. Empty for all names.
+- [Configure] burrow configure flags --separate-genesis-doc and --pool now work together
+
 ### Added
+- [State] Burrow now remembers contract ABIs (which describe how to pack bits when calling contracts) - burrow deploy and vent will both use chain-hosted ABIs if they are available
+- [State] Bond and unbond transactions are now implemented to allow validators to transfer native token into validator power.
 - [Dump] Better tests, mocks, and benchmarks - suitable for profiling IAVL
+- [Events] Filters now support OR connective
+- [Vent] Projections can now have filters longer than 100 characters.
+- [Vent] Falls back to local ABI
+- [CLI/RPC] Contracts now hold metadata, including contract name, source file, and function names
 `,
 		"0.27.0 - 2019-06-23",
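As an aside on the `vent/sqlsol` changes earlier in this series: the sketch below is not part of any patch, it only illustrates how the new `SpecOpt` bit flags and the `SpecLoader` signature introduced in `vent/sqlsol/spec_loader.go` are meant to be combined. `SpecLoader`, `Enabled`, and the `Block`/`Tx`/`BlockTx` constants are taken from the diff above; the spec file name is a placeholder.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hyperledger/burrow/vent/sqlsol"
)

func main() {
	// BlockTx is declared as Block | Tx in spec_loader.go, so both flags are set.
	var opts sqlsol.SpecOpt = sqlsol.BlockTx

	// Enabled checks a flag with a bitwise AND (so&opt > 0), so both of these hold.
	fmt.Println("block tables:", opts.Enabled(sqlsol.Block)) // true
	fmt.Println("tx tables:", opts.Enabled(sqlsol.Tx))       // true

	// Load one or more projection spec files (the path here is a placeholder) and
	// add the built-in block/tx tables according to the options.
	projection, err := sqlsol.SpecLoader([]string{"sqlsol_view.json"}, opts)
	if err != nil {
		log.Fatalf("could not load projection spec: %v", err)
	}
	fmt.Printf("projection defines %d tables\n", len(projection.Tables))
}
```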