diff --git a/test/sharness/t0250-files-api.sh b/test/sharness/t0250-files-api.sh
index be993b9ee..70e3bf71f 100755
--- a/test/sharness/t0250-files-api.sh
+++ b/test/sharness/t0250-files-api.sh
@@ -46,11 +46,15 @@ verify_dir_contents() {
 }
 
 test_sharding() {
-  test_expect_success "make a directory" '
-    ipfs files mkdir /foo
+  local EXTRA ARGS
+  EXTRA=$1
+  ARGS=$2 # only applied to the initial directory
+
+  test_expect_success "make a directory $EXTRA" '
+    ipfs files $ARGS mkdir /foo
   '
 
-  test_expect_success "can make 100 files in a directory" '
+  test_expect_success "can make 100 files in a directory $EXTRA" '
     printf "" > list_exp_raw
     for i in `seq 100`
     do
@@ -59,33 +63,37 @@ test_sharding() {
     done
   '
 
-  test_expect_success "listing works" '
+  test_expect_success "listing works $EXTRA" '
     ipfs files ls /foo |sort > list_out &&
     sort list_exp_raw > list_exp &&
     test_cmp list_exp list_out
   '
 
-  test_expect_success "can read a file from sharded directory" '
+  test_expect_success "can read a file from sharded directory $EXTRA" '
     ipfs files read /foo/file65 > file_out &&
     echo "65" > file_exp &&
     test_cmp file_out file_exp
   '
 
-  test_expect_success "can pin a file from sharded directory" '
+  test_expect_success "can pin a file from sharded directory $EXTRA" '
     ipfs files stat --hash /foo/file42 > pin_file_hash &&
     ipfs pin add < pin_file_hash > pin_hash
   '
 
-  test_expect_success "can unpin a file from sharded directory" '
+  test_expect_success "can unpin a file from sharded directory $EXTRA" '
     read -r _ HASH _ < pin_hash &&
     ipfs pin rm $HASH
   '
 
-  test_expect_success "output object was really sharded" '
+  test_expect_success "output object was really sharded and has correct hash $EXTRA" '
     ipfs files stat --hash /foo > expected_foo_hash &&
-    echo QmPkwLJTYZRGPJ8Lazr9qPdrLmswPtUjaDbEpmR9jEh1se > actual_foo_hash &&
+    echo $SHARD_HASH > actual_foo_hash &&
     test_cmp expected_foo_hash actual_foo_hash
   '
+
+  test_expect_success "clean up $EXTRA" '
+    ipfs files rm -r /foo
+  '
 }
 
 test_files_api() {
@@ -429,6 +437,13 @@ test_files_api() {
     test_cmp root_hash_exp root_hash
   '
 
+  test_expect_success "/cats hash looks good $EXTRA" '
+    export EXP_CATS_HASH="$CATS_HASH" &&
+    echo $EXP_CATS_HASH > cats_hash_exp &&
+    ipfs files stat --hash /cats > cats_hash
+    test_cmp cats_hash_exp cats_hash
+  '
+
   test_expect_success "flush root succeeds $EXTRA" '
     ipfs files $ARGS flush /
   '
@@ -466,6 +481,12 @@ test_files_api() {
     test_cmp file_out file_exp
   '
 
+  test_expect_success "file hash correct $EXTRA" '
+    echo $TRUNC_HASH > filehash_expected &&
+    ipfs files $ARGS stat --hash /cats > filehash &&
+    test_cmp filehash_expected filehash
+  '
+
   test_expect_success "cleanup $EXTRA" '
     ipfs files $ARGS rm /cats
   '
@@ -555,21 +576,65 @@ tests_for_files_api() {
     create_files
   '
   ROOT_HASH=QmcwKfTMCT7AaeiD92hWjnZn9b6eh9NxnhfSzN5x2vnDpt
+  CATS_HASH=Qma88m8ErTGkZHbBWGqy1C7VmEmX8wwNDWNpGyCaNmEgwC
   FILE_HASH=QmQdQt9qooenjeaNhiKHF3hBvmNteB4MQBtgu3jxgf9c7i
+  TRUNC_HASH=QmdaQZbLwK5ykweGdCVovNnvBom7QhikovDUVqTPHQG4L8
   test_files_api "($EXTRA)"
 
   test_expect_success "can create some files for testing with raw-leaves ($extra)" '
     create_files --raw-leaves
   '
 
-  ROOT_HASH=QmTpKiKcAj4sbeesN6vrs5w3QeVmd4QmGpxRL81hHut4dZ
-  test_files_api "($EXTRA, partial raw-leaves)"
-  test_expect_success "can create some files for testing with raw-leaves ($extra)" '
-    create_files --raw-leaves
-  '
+  if [ "$EXTRA" = "offline" ]; then
+    ROOT_HASH=QmTpKiKcAj4sbeesN6vrs5w3QeVmd4QmGpxRL81hHut4dZ
+    CATS_HASH=QmPhPkmtUGGi8ySPHoPu1qbfryLJKKq1GYxpgLyyCruvGe
+    test_files_api "($EXTRA, partial raw-leaves)"
+  fi
 
+  ROOT_HASH=QmW3dMSU6VNd1mEdpk9S3ZYRuR1YwwoXjGaZhkyK6ru9YU
+  CATS_HASH=QmPqWDEg7NoWRX8Y4vvYjZtmdg5umbfsTQ9zwNr12JoLmt
   FILE_HASH=QmRCgHeoKxCqK2Es6M6nPUDVWz19yNQPnsXGsXeuTkSKpN
+  TRUNC_HASH=QmRFJEKWF5A5FyFYZgNhusLw2UziW9zBKYr4huyHjzcB6o
   test_files_api "($EXTRA, raw-leaves)" --raw-leaves
+
+  ROOT_HASH=QmageRWxC7wWjPv5p36NeAgBAiFdBHaNfxAehBSwzNech2
+  CATS_HASH=zdj7WkEzPLNAr5TYJSQC8CFcBjLvWFfGdx6kaBrJXnBguwWeX
+  FILE_HASH=zdj7WYHvf5sBRgSBjYnq64QFr449CCbgupXfBvoYL3aHC1DzJ
+  TRUNC_HASH=zdj7WYLYbka6Ydg8gZUJRLKnFBVehCADhQKBsFbNiMxZSB5Gj
+  if [ "$EXTRA" = "offline" ]; then
+    test_files_api "($EXTRA, cidv1)" --cid-version=1
+  fi
+
+  test_expect_success "can update root hash to cidv1" '
+    ipfs files --cid-version=1 update / &&
+    echo zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi > hash_expect &&
+    ipfs files stat --hash / > hash_actual &&
+    test_cmp hash_expect hash_actual
+  '
+
+  ROOT_HASH=zdj7Whmtnx23bR7c7E1Yn3zWYWjnvT4tpzWYGaBMyqcopDWrx
+  test_files_api "($EXTRA, cidv1 root)"
+
+  if [ "$EXTRA" = "offline" ]; then
+    test_expect_success "can update root hash to blake2b-256" '
+      ipfs files --hash-fun=blake2b-256 update / &&
+      echo zDMZof1kvswQMT8txrmnb3JGBuna6qXCTry6hSifrkZEd6VmHbBm > hash_expect &&
+      ipfs files stat --hash / > hash_actual &&
+      test_cmp hash_expect hash_actual
+    '
+    ROOT_HASH=zDMZof1kxEsAwSgCZsGQRVcHCMtHLjkUQoiZUbZ87erpPQJGUeW8
+    CATS_HASH=zDMZof1kuAhr3zBkxq48V7o9HJZCTVyu1Wd9wnZtVcPJLW8xnGft
+    FILE_HASH=zDMZof1kxbB9CvxgRioBzESbGnZUxtSCsZ18H1EUkxDdWt1DYEkK
+    TRUNC_HASH=zDMZof1kxXqKdVsVo231qVdN3hCTF5a34UuQZpzmm5K7CbRJ4u2S
+    test_files_api "($EXTRA, blake2b-256 root)"
+  fi
+
+  test_expect_success "can update root hash back to cidv0" '
+    ipfs files --cid-version=0 update / &&
+    echo QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn > hash_expect &&
+    ipfs files stat --hash / > hash_actual &&
+    test_cmp hash_expect hash_actual
+  '
 }
 
 tests_for_files_api "online"
@@ -587,7 +652,13 @@ test_expect_success "enable sharding in config" '
 '
 
 test_launch_ipfs_daemon --offline
-test_sharding
+
+SHARD_HASH=QmPkwLJTYZRGPJ8Lazr9qPdrLmswPtUjaDbEpmR9jEh1se
+test_sharding "(cidv0)"
+
+SHARD_HASH=zdj7WZXr6vG2Ne7ZLHGEKrGyF3pHBfAViEnmH9CoyvjrFQM8E
+test_sharding "(cidv1 root)" "--cid-version=1"
+
 test_kill_ipfs_daemon
 
 test_done