
unixfs: fix dagTruncate to preserve node type

Extract the original `FSNode` passed inside the `ipld.Node` argument and modify
its `Blocksizes` (removing all of them and re-adding the ones that were not
truncated). In contrast, the replaced code created a new `FSNode` that did not
preserve some attributes of the original one, notably its node type.

Change `TRUNC_HASH` values in `sharness` that were created with the bug to the
correct values.

License: MIT
Signed-off-by: Lucas Molas <schomatis@gmail.com>
Author: Lucas Molas
Date:   2018-07-11 12:24:37 -03:00
Parent: 454a170032
Commit: 65a18ed669

3 changed files with 19 additions and 7 deletions
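
For context, a minimal sketch (not part of the commit) of why the old construction lost information: `ft.NewFSNode(ft.TRaw)` builds a fresh node whose type is forced to `TRaw`, while `ft.FSNodeFromBytes(nd.Data())` round-trips whatever the original node carried. The import path is the in-repo one used by go-ipfs at the time, and the `Type` accessor is assumed here for illustration.

package main

import (
	"fmt"

	// Path as used inside go-ipfs at the time of this commit; the package
	// was later extracted to github.com/ipfs/go-unixfs.
	ft "github.com/ipfs/go-ipfs/unixfs"
)

func main() {
	orig := ft.NewFSNode(ft.TFile) // the node being truncated is a regular file
	b, err := orig.GetBytes()
	if err != nil {
		panic(err)
	}

	fresh := ft.NewFSNode(ft.TRaw)     // old code path: type is always TRaw
	kept, err := ft.FSNodeFromBytes(b) // new code path: original fields survive
	if err != nil {
		panic(err)
	}

	// Assumed Type accessor; prints the two node types, e.g. "Raw File".
	fmt.Println(fresh.Type(), kept.Type())
}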

@@ -613,7 +613,7 @@ tests_for_files_api() {
 ROOT_HASH=QmcwKfTMCT7AaeiD92hWjnZn9b6eh9NxnhfSzN5x2vnDpt
 CATS_HASH=Qma88m8ErTGkZHbBWGqy1C7VmEmX8wwNDWNpGyCaNmEgwC
 FILE_HASH=QmQdQt9qooenjeaNhiKHF3hBvmNteB4MQBtgu3jxgf9c7i
-TRUNC_HASH=QmdaQZbLwK5ykweGdCVovNnvBom7QhikovDUVqTPHQG4L8
+TRUNC_HASH=QmPVnT9gocPbqzN4G6SMp8vAPyzcjDbUJrNdKgzQquuDg4
 test_files_api "($EXTRA)"

 test_expect_success "can create some files for testing with raw-leaves ($EXTRA)" '
@@ -629,13 +629,13 @@ tests_for_files_api() {
 ROOT_HASH=QmW3dMSU6VNd1mEdpk9S3ZYRuR1YwwoXjGaZhkyK6ru9YU
 CATS_HASH=QmPqWDEg7NoWRX8Y4vvYjZtmdg5umbfsTQ9zwNr12JoLmt
 FILE_HASH=QmRCgHeoKxCqK2Es6M6nPUDVWz19yNQPnsXGsXeuTkSKpN
-TRUNC_HASH=QmRFJEKWF5A5FyFYZgNhusLw2UziW9zBKYr4huyHjzcB6o
+TRUNC_HASH=QmckstrVxJuecVD1FHUiURJiU9aPURZWJieeBVHJPACj8L
 test_files_api "($EXTRA, raw-leaves)" '' --raw-leaves

 ROOT_HASH=QmageRWxC7wWjPv5p36NeAgBAiFdBHaNfxAehBSwzNech2
 CATS_HASH=zdj7WkEzPLNAr5TYJSQC8CFcBjLvWFfGdx6kaBrJXnBguwWeX
 FILE_HASH=zdj7WYHvf5sBRgSBjYnq64QFr449CCbgupXfBvoYL3aHC1DzJ
-TRUNC_HASH=zdj7WYLYbka6Ydg8gZUJRLKnFBVehCADhQKBsFbNiMxZSB5Gj
+TRUNC_HASH=zdj7Wjr8GHZonPFVCWvz2SLLo9H6MmqBxyeB34ArHfyCbmdJG
 if [ "$EXTRA" = "offline" ]; then
 test_files_api "($EXTRA, cidv1)" --cid-version=1
 fi
@@ -660,7 +660,7 @@ tests_for_files_api() {
 ROOT_HASH=zDMZof1kxEsAwSgCZsGQRVcHCMtHLjkUQoiZUbZ87erpPQJGUeW8
 CATS_HASH=zDMZof1kuAhr3zBkxq48V7o9HJZCTVyu1Wd9wnZtVcPJLW8xnGft
 FILE_HASH=zDMZof1kxbB9CvxgRioBzESbGnZUxtSCsZ18H1EUkxDdWt1DYEkK
-TRUNC_HASH=zDMZof1kxXqKdVsVo231qVdN3hCTF5a34UuQZpzmm5K7CbRJ4u2S
+TRUNC_HASH=zDMZof1kpH1vxK3k2TeYc8w59atCbzMzrhZonsztMWSptVro2zQa
 test_files_api "($EXTRA, blake2b-256 root)"
 fi

@@ -529,7 +529,13 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
 	var cur uint64
 	end := 0
 	var modified ipld.Node
-	ndata := ft.NewFSNode(ft.TRaw)
+	ndata, err := ft.FSNodeFromBytes(nd.Data())
+	if err != nil {
+		return nil, err
+	}
+	// Reset the block sizes of the node to adjust them
+	// with the new values of the truncated children.
+	ndata.RemoveAllBlockSizes()
 	for i, lnk := range nd.Links() {
 		child, err := lnk.GetNode(ctx, ds)
 		if err != nil {
@@ -558,7 +564,7 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
 		ndata.AddBlockSize(childsize)
 	}

-	err := ds.Add(ctx, modified)
+	err = ds.Add(ctx, modified)
 	if err != nil {
 		return nil, err
 	}
@@ -573,7 +579,7 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
 	if err != nil {
 		return nil, err
 	}
-
+	// Save the new block sizes to the original node.
 	nd.SetData(d)

 	// invalidate cache and recompute serialized data
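
Condensed, the metadata bookkeeping that `dagTruncate` now performs looks like the sketch below. `truncateBlockSizes` is a hypothetical helper written for illustration; it relies only on `FSNodeFromBytes`, `RemoveAllBlockSizes`, `AddBlockSize` and `GetBytes` as seen in the hunks above, and the import path is the in-repo one at the time of this commit.

package sketch

import (
	ft "github.com/ipfs/go-ipfs/unixfs" // later extracted to github.com/ipfs/go-unixfs
)

// truncateBlockSizes mirrors the bookkeeping dagTruncate now does on the
// node's metadata: start from the original FSNode so its type and other
// fields survive, drop every recorded child block size, and re-add only
// the sizes of the children kept after truncation. The returned bytes are
// what the caller stores back with nd.SetData, as in the hunk above.
func truncateBlockSizes(original []byte, keptSizes []uint64) ([]byte, error) {
	ndata, err := ft.FSNodeFromBytes(original)
	if err != nil {
		return nil, err
	}
	ndata.RemoveAllBlockSizes()
	for _, size := range keptSizes {
		ndata.AddBlockSize(size)
	}
	return ndata.GetBytes()
}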

@@ -201,6 +201,12 @@ func (n *FSNode) BlockSize(i int) uint64 {
 	return n.format.Blocksizes[i]
 }

+// RemoveAllBlockSizes removes all the child block sizes of this node.
+func (n *FSNode) RemoveAllBlockSizes() {
+	n.format.Blocksizes = []uint64{}
+	n.format.Filesize = proto.Uint64(uint64(len(n.Data())))
+}
+
 // GetBytes marshals this node as a protobuf message.
 func (n *FSNode) GetBytes() ([]byte, error) {
 	return proto.Marshal(&n.format)
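
A small usage sketch of the new method (hypothetical example, assuming the existing `NumChildren` and `FileSize` accessors on `FSNode`, and that `AddBlockSize` also grows `Filesize`): after the call no child sizes remain, and `Filesize` falls back to the length of the node's own data.

package main

import (
	"fmt"

	ft "github.com/ipfs/go-ipfs/unixfs" // later extracted to github.com/ipfs/go-unixfs
)

func main() {
	n := ft.NewFSNode(ft.TFile)
	n.AddBlockSize(262144) // pretend the file points at two child blocks
	n.AddBlockSize(131072)
	fmt.Println(n.NumChildren(), n.FileSize()) // expected: 2 393216

	// Drop every recorded child size; Filesize is reset to len(n.Data()),
	// which is zero here since the node carries no inline data.
	n.RemoveAllBlockSizes()
	fmt.Println(n.NumChildren(), n.FileSize()) // expected: 0 0
}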