Skip to content
This repository has been archived by the owner on Aug 9, 2018. It is now read-only.

Commit

Permalink
Fix encoding tests and implement NodeWriter for memory.Node
Browse files Browse the repository at this point in the history
  • Loading branch information
mildred committed Feb 7, 2016
1 parent 2932e85 commit e3254b3
Show file tree
Hide file tree
Showing 5 changed files with 155 additions and 130 deletions.
51 changes: 21 additions & 30 deletions coding/coding.go
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
package coding

import (
"bytes"
"fmt"
"io"

cbor "github.com/ipfs/go-ipld/coding/cbor"
json "github.com/ipfs/go-ipld/coding/json"
mc "github.com/jbenet/go-multicodec"
mcmux "github.com/jbenet/go-multicodec/mux"
mcproto "github.com/jbenet/go-multicodec/protobuf"

ipld "github.com/ipfs/go-ipld"
Expand All @@ -33,25 +33,8 @@ const (

var StreamCodecs map[string]func(io.Reader) (ipld.NodeReader, error)

// defaultCodec is the default applied if user does not specify a codec.
// Most new objects will never specify a codec. We track the codecs with
// the object so that multiple people using the same object will continue
// to marshal using the same codec. the only reason this is important is
// that the hashes must be the same.
var defaultCodec string

var muxCodec *mcmux.Multicodec

func init() {
Header = mc.Header([]byte(HeaderPath))
// by default, always encode things as cbor
defaultCodec = string(mc.HeaderPath(cbor.Header))

muxCodec = mcmux.MuxMulticodec([]mc.Multicodec{
CborMulticodec(),
JsonMulticodec(),
pb.Multicodec(),
}, selectCodec)

StreamCodecs = map[string]func(io.Reader) (ipld.NodeReader, error){
json.HeaderPath: func(r io.Reader) (ipld.NodeReader, error) {
Expand All @@ -64,17 +47,6 @@ func init() {
}
}

// Multicodec returns a muxing codec that marshals to
// whatever codec makes sense depending on what information
// the IPLD object itself carries
func Multicodec() mc.Multicodec {
return muxCodec
}

func selectCodec(v interface{}, codecs []mc.Multicodec) mc.Multicodec {
return nil // no codec
}

func Decode(r io.Reader) (ipld.NodeReader, error) {
// get multicodec first header, should be mcmux.Header
err := mc.ConsumeHeader(r, Header)
Expand All @@ -97,6 +69,10 @@ func Decode(r io.Reader) (ipld.NodeReader, error) {
return fun(r)
}

// DecodeBytes decodes an IPLD node from an in-memory encoded buffer.
// It dispatches on the embedded multicodec header exactly as Decode
// does for a stream.
func DecodeBytes(data []byte) (ipld.NodeReader, error) {
	reader := bytes.NewReader(data)
	return Decode(reader)
}

func DecodeLegacyProtobuf(r io.Reader) (ipld.NodeReader, error) {
var node memory.Node = memory.Node{}
r = mc.WrapHeaderReader(mcproto.HeaderMsgio, r)
Expand All @@ -105,7 +81,11 @@ func DecodeLegacyProtobuf(r io.Reader) (ipld.NodeReader, error) {
return node, err
}

func Encode(codec Codec, w io.Writer, node memory.Node) error {
// DecodeLegacyProtobufBytes decodes a legacy (pre-IPLD) protobuf node
// from an in-memory buffer; see DecodeLegacyProtobuf for the stream
// variant it delegates to.
func DecodeLegacyProtobufBytes(data []byte) (ipld.NodeReader, error) {
	reader := bytes.NewReader(data)
	return DecodeLegacyProtobuf(reader)
}

func EncodeRaw(codec Codec, w io.Writer, node memory.Node) error {
switch codec {
case CodecCBOR:
return cbor.Encode(w, node)
Expand All @@ -117,3 +97,14 @@ func Encode(codec Codec, w io.Writer, node memory.Node) error {
return fmt.Errorf("Unknown codec %v", codec)
}
}

// Encode writes the multicodec header followed by node encoded with
// the given codec to w.
//
// Fix: the error returned by writing the header was previously
// ignored; a failed or short write would silently produce a stream
// that Decode cannot parse.
func Encode(codec Codec, w io.Writer, node memory.Node) error {
	if _, err := w.Write(Header); err != nil {
		return err
	}
	return EncodeRaw(codec, w, node)
}

// EncodeBytes encodes node with the given codec (multicodec header
// included) and returns the resulting buffer. On error the bytes
// written so far are still returned alongside the error.
func EncodeBytes(codec Codec, node memory.Node) ([]byte, error) {
	buf := new(bytes.Buffer)
	err := Encode(codec, buf, node)
	return buf.Bytes(), err
}
121 changes: 23 additions & 98 deletions coding/coding_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,11 @@ package coding
import (
"bytes"
"io/ioutil"
"reflect"
"testing"

reader "github.com/ipfs/go-ipld"
memory "github.com/ipfs/go-ipld/memory"
readertest "github.com/ipfs/go-ipld/test"

mc "github.com/jbenet/go-multicodec"
mctest "github.com/jbenet/go-multicodec/test"
assrt "github.com/mildred/assrt"
)

Expand Down Expand Up @@ -39,117 +35,46 @@ type TC struct {
ctx interface{}
}

var testCases []TC

func init() {
testCases = append(testCases, TC{
[]byte{},
memory.Node{
"foo": "bar",
"bar": []int{1, 2, 3},
"baz": memory.Node{
"@type": "mlink",
"hash": "QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo",
},
},
map[string]memory.Link{
"baz": {"@type": "mlink", "hash": ("QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo")},
},
"",
nil,
})

testCases = append(testCases, TC{
[]byte{},
memory.Node{
"foo": "bar",
"@type": "commit",
"@context": "/ipfs/QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo/mdag",
"baz": memory.Node{
"@type": "mlink",
"hash": "QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo",
},
"bazz": memory.Node{
"@type": "mlink",
"hash": "QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo",
},
"bar": memory.Node{
"@type": "mlinkoo",
"hash": "QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo",
},
"bar2": memory.Node{
"foo": memory.Node{
"@type": "mlink",
"hash": "QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo",
},
},
},
map[string]memory.Link{
"baz": {"@type": "mlink", "hash": ("QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo")},
"bazz": {"@type": "mlink", "hash": ("QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo")},
"bar2/foo": {"@type": "mlink", "hash": ("QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo")},
},
"",
"/ipfs/QmZku7P7KeeHAnwMr6c4HveYfMzmtVinNXzibkiNbfDbPo/mdag",
})

}

func TestHeaderMC(t *testing.T) {
codec := Multicodec()
for _, tc := range testCases {
mctest.HeaderTest(t, codec, &tc.src)
}
}

func TestRoundtripBasicMC(t *testing.T) {
codec := Multicodec()
for _, tca := range testCases {
var tcb memory.Node
mctest.RoundTripTest(t, codec, &(tca.src), &tcb)
}
}

// Test decoding and encoding a json and cbor file
func TestCodecsDecodeEncode(t *testing.T) {
func TestCodecsEncodeDecode(t *testing.T) {
for _, fname := range []string{"json.testfile", "cbor.testfile"} {
testfile := codedFiles[fname]
var n memory.Node
codec := Multicodec()

if err := mc.Unmarshal(codec, testfile, &n); err != nil {
t.Log(fname)
t.Log(testfile)
r, err := DecodeBytes(testfile)
if err != nil {
t.Error(err)
continue
}

linksExpected := map[string]memory.Link{
"abc": memory.Link{
"mlink": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V",
},
var codec Codec
switch fname {
case "json.testfile":
codec = CodecJSON
case "cbor.testfile":
codec = CodecCBOR
default:
panic("should not arrive here")
}
linksActual := memory.Links(n)
if !reflect.DeepEqual(linksExpected, linksActual) {
t.Logf("Expected: %#v", linksExpected)
t.Logf("Actual: %#v", linksActual)
t.Logf("node: %#v\n", n)
t.Error("Links are not expected in " + fname)

n, err := memory.NewNodeFrom(r)
if err != nil {
t.Error(err)
continue
}

encoded, err := mc.Marshal(codec, &n)
outData, err := EncodeBytes(codec, n)
if err != nil {
t.Error(err)
return
continue
}

if !bytes.Equal(testfile, encoded) {
t.Error("marshalled values not equal in " + fname)
t.Log(string(testfile))
t.Log(string(encoded))
if !bytes.Equal(outData, testfile) {
t.Errorf("%s: encoded is not the same as original", fname)
t.Log(n)
t.Log(testfile)
t.Log(encoded)
t.Log(string(testfile))
t.Log(outData)
t.Log(string(outData))
}
}
}
Expand Down
9 changes: 8 additions & 1 deletion copy.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,14 @@ func Copy(r NodeReader, w NodeWriter) error {
case TokenIndex:
return nil
case TokenValuePart:
return w.WriteValuePart(value)
switch value.(type) {
case string:
return w.WriteValuePart([]byte(value.(string)))
case []byte:
return w.WriteValuePart(value.([]byte))
default:
return fmt.Errorf("Cannot write value part %#v (wrong type)", value)
}
case TokenValue:
return w.WriteValue(value)
case TokenEndNode:
Expand Down
100 changes: 100 additions & 0 deletions memory/writer.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
package memory

import (
"fmt"

ipld "github.com/ipfs/go-ipld"
)

// Writer builds an in-memory Node tree from a stream of NodeWriter
// events (it is the memory-backed implementation of ipld.NodeWriter).
type Writer struct {
	// Node is the root node being built; it is populated as events
	// arrive and complete once the matching WriteEndNode is seen.
	Node Node
	// stack holds the currently open containers, innermost last.
	// Entries are *Node (for nodes) or []interface{} (for arrays).
	stack []interface{}
	// curKey is the pending key set by WriteNodeKey, consumed by the
	// next WriteValue into a node.
	curKey string
	// valPart accumulates chunks from WriteValuePart until WriteValue
	// finalizes the value.
	valPart []byte
}

// NewWriter returns a Writer ready to receive NodeWriter events; the
// resulting tree accumulates in its Node field.
func NewWriter() *Writer {
	w := Writer{Node: Node{}}
	return &w
}

// NewNodeFrom drains the reader r into a fresh in-memory Node by
// streaming its events through a Writer.
func NewNodeFrom(r ipld.NodeReader) (Node, error) {
	writer := NewWriter()
	if err := ipld.Copy(r, writer); err != nil {
		return writer.Node, err
	}
	return writer.Node, nil
}

// WriteValue finalizes the value currently being written. Any chunks
// accumulated through WriteValuePart are prepended to string and
// []byte values. The result is stored in the container on top of the
// stack: under the pending key (set by WriteNodeKey) for a node, or
// appended for an array. It fails when no container is open.
func (w *Writer) WriteValue(val interface{}) error {
	if len(w.stack) == 0 {
		return fmt.Errorf("Cannot write value")
	}

	// Merge pending partial chunks with the final payload.
	switch v := val.(type) {
	case string:
		val = string(append(w.valPart, []byte(v)...))
	case []byte:
		val = append(w.valPart, v...)
	}

	last := len(w.stack) - 1
	switch c := w.stack[last].(type) {
	case *Node:
		(*c)[w.curKey] = val
	case []interface{}:
		// NOTE(review): this updates only the stack slot; the grown
		// slice is never re-attached to a parent container — verify
		// against WriteEndArray.
		w.stack[last] = append(c, val)
	default:
		// Fix: typo in panic message (was "Currupted").
		panic("Corrupted stack")
	}

	w.curKey = ""
	w.valPart = nil
	return nil
}

// WriteValuePart buffers one chunk of a value that is delivered in
// pieces; the next WriteValue call consumes the accumulated bytes.
func (w *Writer) WriteValuePart(val []byte) error {
	for _, b := range val {
		w.valPart = append(w.valPart, b)
	}
	return nil
}

// WriteBeginNode opens a new node (associative array). n_elems is the
// announced element count and is not used by this implementation. The
// first call binds the writer's own Node as the root; nested calls
// attach a fresh *Node to the current container before pushing it.
//
// Idiom fix: removed the else-after-return so the happy path stays
// left-aligned; behavior is unchanged.
func (w *Writer) WriteBeginNode(n_elems int) error {
	if len(w.stack) == 0 {
		// Root: the Writer's Node field is the destination.
		w.stack = append(w.stack, &w.Node)
		return nil
	}
	n := &Node{}
	// Attach the child to the current container first, then make it
	// the active container.
	err := w.WriteValue(n)
	w.stack = append(w.stack, n)
	return err
}

// WriteNodeKey records the key under which the next WriteValue will
// store its value in the currently open node.
func (w *Writer) WriteNodeKey(key string) error {
	w.curKey = key
	return nil
}

// WriteEndNode closes the node most recently opened with
// WriteBeginNode by popping it off the container stack. It fails when
// no container is open.
func (w *Writer) WriteEndNode() error {
	depth := len(w.stack)
	if depth == 0 {
		return fmt.Errorf("Cannot end node")
	}
	w.stack = w.stack[:depth-1]
	return nil
}

// WriteBeginArray opens a new array of n_elems elements (the count is
// not used by this implementation). Arrays cannot appear at the root:
// a container must already be open.
//
// NOTE(review): unlike WriteBeginNode, the new slice is pushed on the
// stack but never attached to the parent container, and curKey is not
// consumed here; combined with WriteEndArray simply popping the stack,
// the finished array appears to be dropped — confirm intended behavior.
func (w *Writer) WriteBeginArray(n_elems int) error {
	if len(w.stack) == 0 {
		return fmt.Errorf("Cannot start array")
	}

	w.stack = append(w.stack, []interface{}{})
	return nil
}

// WriteEndArray closes the array most recently opened with
// WriteBeginArray by popping it off the stack. It fails when only the
// root node (or nothing) is on the stack.
//
// NOTE(review): the popped slice is discarded without being written
// into the parent container — see the matching note on
// WriteBeginArray; verify arrays are not silently lost.
func (w *Writer) WriteEndArray() error {
	if len(w.stack) <= 1 {
		return fmt.Errorf("Cannot end array")
	}

	w.stack = w.stack[:len(w.stack)-1]
	return nil
}
4 changes: 3 additions & 1 deletion writer.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,13 @@ type NodeWriter interface {
// - float32
// - float64
// - *"math/big".Int
// - string
// - []byte
WriteValue(val interface{}) error

// Write a value part. When writing values in multiple chunks, one must call
// WriteValuePart any number of times and end with WriteValue
WriteValuePart(val interface{}) error
WriteValuePart(val []byte) error

// Write the prolog for a node / associative array. n_elems is the number of
// elements in the node, -1 if unknown.
Expand Down

0 comments on commit e3254b3

Please sign in to comment.