Mirror of https://github.com/ipfs/kubo.git (synced 2025-06-28 17:03:58 +08:00)
@@ -15,8 +15,9 @@ type ErrorType uint

// ErrorTypes convey what category of error occurred
const (
    ErrNormal         ErrorType = iota // general errors
    ErrClient                          // error was caused by the client, (e.g. invalid CLI usage)
    ErrImplementation                  // programmer error in the server
    // TODO: add more types of errors for better error-specific handling
)

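The hunk above only introduces the new ErrImplementation category. As a minimal, self-contained sketch (not the real go-ipfs commands package), the enum and the intended split between the three categories can be pictured like this; the describe helper is purely illustrative:

package main

import "fmt"

// ErrorType mirrors the constants from the hunk above; an illustrative
// stand-in, not the actual commands package.
type ErrorType uint

const (
    ErrNormal         ErrorType = iota // general errors
    ErrClient                          // error was caused by the client
    ErrImplementation                  // programmer error in the server
)

// describe is a hypothetical helper showing how a handler might pick a
// category: bad user input maps to ErrClient, impossible internal states
// to ErrImplementation, everything else to ErrNormal.
func describe(t ErrorType) string {
    switch t {
    case ErrClient:
        return "client error (e.g. invalid CLI usage)"
    case ErrImplementation:
        return "implementation error (bug in the server)"
    default:
        return "general error"
    }
}

func main() {
    fmt.Println(describe(ErrImplementation))
}

The ls.go file added later in this commit uses ErrImplementation in exactly this spirit, for the "unrecognized type" branch that should never be reachable.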
@@ -5,6 +5,7 @@ import (
    "strings"

    cmds "github.com/ipfs/go-ipfs/commands"
    unixfs "github.com/ipfs/go-ipfs/core/commands/unixfs"
    evlog "github.com/ipfs/go-ipfs/thirdparty/eventlog"
)

@@ -35,6 +36,7 @@ DATA STRUCTURE COMMANDS

    block       Interact with raw blocks in the datastore
    object      Interact with raw dag nodes
    file        Interact with Unix filesystem objects

ADVANCED COMMANDS

@@ -102,6 +104,7 @@ var rootSubcommands = map[string]*cmds.Command{
    "stats":   StatsCmd,
    "swarm":   SwarmCmd,
    "tour":    tourCmd,
    "file":    unixfs.UnixFSCmd,
    "update":  UpdateCmd,
    "version": VersionCmd,
    "bitswap": BitswapCmd,
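For context on how the "file": unixfs.UnixFSCmd registration above gets used, here is a minimal, hypothetical sketch (not the real go-ipfs commands machinery) of how nested Subcommands maps let the CLI resolve the argument path "ipfs file ls" down to a handler:

package main

import (
    "fmt"
    "strings"
)

// Command is a stripped-down stand-in for cmds.Command: just a name and a
// map of subcommands, which is all the lookup below needs.
type Command struct {
    Name        string
    Subcommands map[string]*Command
}

// resolve walks the Subcommands maps, following one path segment at a time.
func resolve(root *Command, path []string) (*Command, error) {
    cur := root
    for _, seg := range path {
        next, ok := cur.Subcommands[seg]
        if !ok {
            return nil, fmt.Errorf("unknown command: %s", strings.Join(path, " "))
        }
        cur = next
    }
    return cur, nil
}

func main() {
    ls := &Command{Name: "ls"}
    file := &Command{Name: "file", Subcommands: map[string]*Command{"ls": ls}}
    root := &Command{Name: "ipfs", Subcommands: map[string]*Command{"file": file}}

    cmd, err := resolve(root, []string{"file", "ls"})
    if err != nil {
        panic(err)
    }
    fmt.Println(cmd.Name) // prints "ls"
}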
202 core/commands/unixfs/ls.go Normal file
@@ -0,0 +1,202 @@
package unixfs

import (
    "bytes"
    "fmt"
    "io"
    "sort"
    "text/tabwriter"
    "time"

    context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"

    cmds "github.com/ipfs/go-ipfs/commands"
    core "github.com/ipfs/go-ipfs/core"
    path "github.com/ipfs/go-ipfs/path"
    unixfs "github.com/ipfs/go-ipfs/unixfs"
    unixfspb "github.com/ipfs/go-ipfs/unixfs/pb"
)

type LsLink struct {
    Name, Hash string
    Size       uint64
    Type       string
}

type LsObject struct {
    Hash  string
    Size  uint64
    Type  string
    Links []LsLink
}

type LsOutput struct {
    Arguments map[string]string
    Objects   map[string]*LsObject
}

var LsCmd = &cmds.Command{
    Helptext: cmds.HelpText{
        Tagline: "List directory contents for Unix-filesystem objects",
        ShortDescription: `
Retrieves the object named by <ipfs-or-ipns-path> and displays the
contents with the following format:

  <hash> <type> <size> <name>

For files, the child size is the total size of the file contents. For
directories, the child size is the IPFS link size.
`,
    },

    Arguments: []cmds.Argument{
        cmds.StringArg("ipfs-path", true, true, "The path to the IPFS object(s) to list links from").EnableStdin(),
    },
    Run: func(req cmds.Request, res cmds.Response) {
        node, err := req.Context().GetNode()
        if err != nil {
            res.SetError(err, cmds.ErrNormal)
            return
        }

        paths := req.Arguments()

        output := LsOutput{
            Arguments: map[string]string{},
            Objects:   map[string]*LsObject{},
        }

        for _, fpath := range paths {
            ctx := req.Context().Context
            merkleNode, err := core.Resolve(ctx, node, path.Path(fpath))
            if err != nil {
                res.SetError(err, cmds.ErrNormal)
                return
            }

            key, err := merkleNode.Key()
            if err != nil {
                res.SetError(err, cmds.ErrNormal)
                return
            }

            hash := key.B58String()
            output.Arguments[fpath] = hash

            if _, ok := output.Objects[hash]; ok {
                // duplicate argument for an already-listed node
                continue
            }

            unixFSNode, err := unixfs.FromBytes(merkleNode.Data)
            if err != nil {
                res.SetError(err, cmds.ErrNormal)
                return
            }

            t := unixFSNode.GetType()

            output.Objects[hash] = &LsObject{
                Hash: key.String(),
                Type: t.String(),
                Size: unixFSNode.GetFilesize(),
            }

            switch t {
            default:
                res.SetError(fmt.Errorf("unrecognized type: %s", t), cmds.ErrImplementation)
                return
            case unixfspb.Data_File:
                break
            case unixfspb.Data_Directory:
                links := make([]LsLink, len(merkleNode.Links))
                output.Objects[hash].Links = links
                for i, link := range merkleNode.Links {
                    getCtx, cancel := context.WithTimeout(ctx, time.Minute)
                    defer cancel()
                    link.Node, err = link.GetNode(getCtx, node.DAG)
                    if err != nil {
                        res.SetError(err, cmds.ErrNormal)
                        return
                    }
                    d, err := unixfs.FromBytes(link.Node.Data)
                    if err != nil {
                        res.SetError(err, cmds.ErrNormal)
                        return
                    }
                    t := d.GetType()
                    lsLink := LsLink{
                        Name: link.Name,
                        Hash: link.Hash.B58String(),
                        Type: t.String(),
                    }
                    if t == unixfspb.Data_File {
                        lsLink.Size = d.GetFilesize()
                    } else {
                        lsLink.Size = link.Size
                    }
                    links[i] = lsLink
                }
            }
        }

        res.SetOutput(&output)
    },
    Marshalers: cmds.MarshalerMap{
        cmds.Text: func(res cmds.Response) (io.Reader, error) {

            output := res.Output().(*LsOutput)
            buf := new(bytes.Buffer)
            w := tabwriter.NewWriter(buf, 1, 2, 1, ' ', 0)

            nonDirectories := []string{}
            directories := []string{}
            for argument, hash := range output.Arguments {
                object, ok := output.Objects[hash]
                if !ok {
                    return nil, fmt.Errorf("unresolved hash: %s", hash)
                }

                if object.Type == "Directory" {
                    directories = append(directories, argument)
                } else {
                    nonDirectories = append(nonDirectories, argument)
                }
            }
            sort.Strings(nonDirectories)
            sort.Strings(directories)

            for _, argument := range nonDirectories {
                fmt.Fprintf(w, "%s\n", argument)
            }

            seen := map[string]bool{}
            for i, argument := range directories {
                hash := output.Arguments[argument]
                if _, ok := seen[hash]; ok {
                    continue
                }
                seen[hash] = true

                object := output.Objects[hash]
                if i > 0 || len(nonDirectories) > 0 {
                    fmt.Fprintln(w)
                }
                if len(output.Arguments) > 1 {
                    for _, arg := range directories[i:] {
                        if output.Arguments[arg] == hash {
                            fmt.Fprintf(w, "%s:\n", arg)
                        }
                    }
                }
                for _, link := range object.Links {
                    fmt.Fprintf(w, "%s\n", link.Name)
                }
            }
            w.Flush()

            return buf, nil
        },
    },
    Type: LsOutput{},
}
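To make the --encoding=json expectations in the test script below easier to read, here is a small, self-contained sketch that mirrors the LsLink/LsObject/LsOutput types from ls.go above and prints the JSON shape they marshal to. The path and hash strings are placeholders, not real values, and this is not the real command plumbing:

package main

import (
    "encoding/json"
    "fmt"
)

// Local mirrors of the output types added in ls.go, for illustration only.
type LsLink struct {
    Name, Hash string
    Size       uint64
    Type       string
}

type LsObject struct {
    Hash  string
    Size  uint64
    Type  string
    Links []LsLink
}

type LsOutput struct {
    Arguments map[string]string
    Objects   map[string]*LsObject
}

func main() {
    // "<dir-path>", "<dir-hash>" and "<file-hash>" are placeholders.
    out := LsOutput{
        Arguments: map[string]string{"<dir-path>": "<dir-hash>"},
        Objects: map[string]*LsObject{
            "<dir-hash>": {
                Hash: "<dir-hash>",
                Type: "Directory",
                Links: []LsLink{
                    {Name: "a", Hash: "<file-hash>", Size: 6, Type: "File"},
                },
            },
        },
    }
    b, err := json.MarshalIndent(out, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b)) // Arguments map plus an Objects map keyed by hash
}

Note how Arguments maps each requested path to a hash, while Objects is keyed by hash so that duplicate arguments resolving to the same node are listed only once.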
21 core/commands/unixfs/unixfs.go Normal file
@@ -0,0 +1,21 @@
package unixfs

import cmds "github.com/ipfs/go-ipfs/commands"

var UnixFSCmd = &cmds.Command{
    Helptext: cmds.HelpText{
        Tagline: "Interact with ipfs objects representing Unix filesystems",
        ShortDescription: `
'ipfs file' provides a familiar interface to filesystems represented
by IPFS objects that hides IPFS-implementation details like layout
objects (e.g. fanout and chunking).
`,
        Synopsis: `
ipfs file ls <path>... - List directory contents for <path>...
`,
    },

    Subcommands: map[string]*cmds.Command{
        "ls": LsCmd,
    },
}
185 test/sharness/t0200-unixfs-ls.sh Executable file
@@ -0,0 +1,185 @@
#!/bin/sh
#
# Copyright (c) 2014 Christian Couder
# MIT Licensed; see the LICENSE file in this repository.
#

test_description="Test file ls command"

. lib/test-lib.sh

test_init_ipfs

test_ls_cmd() {

test_expect_success "'ipfs add -r testData' succeeds" '
|
||||
mkdir -p testData testData/d1 testData/d2 &&
|
||||
echo "test" >testData/f1 &&
|
||||
echo "data" >testData/f2 &&
|
||||
echo "hello" >testData/d1/a &&
|
||||
random 128 42 >testData/d1/128 &&
|
||||
echo "world" >testData/d2/a &&
|
||||
random 1024 42 >testData/d2/1024 &&
|
||||
ipfs add -r testData >actual_add
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs add' output looks good" '
|
||||
cat <<-\EOF >expected_add &&
|
||||
added QmQNd6ubRXaNG6Prov8o6vk3bn6eWsj9FxLGrAVDUAGkGe testData/d1/128
|
||||
added QmZULkCELmmk5XNfCgTnCyFgAVxBRBXyDHGGMVoLFLiXEN testData/d1/a
|
||||
added QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss testData/d1
|
||||
added QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd testData/d2/1024
|
||||
added QmaRGe7bVmVaLmxbrMiVNXqW4pRNNp3xq7hFtyRKA3mtJL testData/d2/a
|
||||
added QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy testData/d2
|
||||
added QmeomffUNfmQy76CQGy9NdmqEnnHU9soCexBnGU3ezPHVH testData/f1
|
||||
added QmNtocSs7MoDkJMc1RkyisCSKvLadujPsfJfSdJ3e1eA1M testData/f2
|
||||
added QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj testData
|
||||
EOF
|
||||
test_cmp expected_add actual_add
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <dir>' succeeds" '
|
||||
ipfs file ls QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy >actual_ls_one_directory
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <dir>' output looks good" '
|
||||
cat <<-\EOF >expected_ls_one_directory &&
|
||||
1024
|
||||
a
|
||||
EOF
|
||||
test_cmp expected_ls_one_directory actual_ls_one_directory
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <three dir hashes>' succeeds" '
|
||||
ipfs file ls QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss >actual_ls_three_directories
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <three dir hashes>' output looks good" '
|
||||
cat <<-\EOF >expected_ls_three_directories &&
|
||||
QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy:
|
||||
1024
|
||||
a
|
||||
|
||||
QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss:
|
||||
128
|
||||
a
|
||||
|
||||
QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj:
|
||||
d1
|
||||
d2
|
||||
f1
|
||||
f2
|
||||
EOF
|
||||
test_cmp expected_ls_three_directories actual_ls_three_directories
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <file hashes>' succeeds" '
|
||||
ipfs file ls /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 QmQNd6ubRXaNG6Prov8o6vk3bn6eWsj9FxLGrAVDUAGkGe >actual_ls_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <file hashes>' output looks good" '
|
||||
cat <<-\EOF >expected_ls_file &&
|
||||
/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024
|
||||
QmQNd6ubRXaNG6Prov8o6vk3bn6eWsj9FxLGrAVDUAGkGe
|
||||
EOF
|
||||
test_cmp expected_ls_file actual_ls_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <duplicates>' succeeds" '
|
||||
ipfs file ls /ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1 /ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 /ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd >actual_ls_duplicates_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs file ls <duplicates>' output looks good" '
|
||||
cat <<-\EOF >expected_ls_duplicates_file &&
|
||||
/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024
|
||||
/ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd
|
||||
|
||||
/ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss:
|
||||
/ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1:
|
||||
128
|
||||
a
|
||||
EOF
|
||||
test_cmp expected_ls_duplicates_file actual_ls_duplicates_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs --encoding=json file ls <file hashes>' succeeds" '
|
||||
ipfs --encoding=json file ls /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 >actual_json_ls_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs --encoding=json file ls <file hashes>' output looks good" '
|
||||
cat <<-\EOF >expected_json_ls_file_trailing_newline &&
|
||||
{
|
||||
"Arguments": {
|
||||
"/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd"
|
||||
},
|
||||
"Objects": {
|
||||
"QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": {
|
||||
"Hash": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
|
||||
"Size": 1024,
|
||||
"Type": "File",
|
||||
"Links": null
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
printf %s "$(cat expected_json_ls_file_trailing_newline)" >expected_json_ls_file &&
|
||||
test_cmp expected_json_ls_file actual_json_ls_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs --encoding=json file ls <duplicates>' succeeds" '
|
||||
ipfs --encoding=json file ls /ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1 /ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 /ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd >actual_json_ls_duplicates_file
|
||||
'
|
||||
|
||||
test_expect_success "'ipfs --encoding=json file ls <duplicates>' output looks good" '
|
||||
cat <<-\EOF >expected_json_ls_duplicates_file_trailing_newline &&
|
||||
{
|
||||
"Arguments": {
|
||||
"/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
|
||||
"/ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss": "QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss",
|
||||
"/ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
|
||||
"/ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1": "QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss"
|
||||
},
|
||||
"Objects": {
|
||||
"QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss": {
|
||||
"Hash": "QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss",
|
||||
"Size": 0,
|
||||
"Type": "Directory",
|
||||
"Links": [
|
||||
{
|
||||
"Name": "128",
|
||||
"Hash": "QmQNd6ubRXaNG6Prov8o6vk3bn6eWsj9FxLGrAVDUAGkGe",
|
||||
"Size": 128,
|
||||
"Type": "File"
|
||||
},
|
||||
{
|
||||
"Name": "a",
|
||||
"Hash": "QmZULkCELmmk5XNfCgTnCyFgAVxBRBXyDHGGMVoLFLiXEN",
|
||||
"Size": 6,
|
||||
"Type": "File"
|
||||
}
|
||||
]
|
||||
},
|
||||
"QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": {
|
||||
"Hash": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
|
||||
"Size": 1024,
|
||||
"Type": "File",
|
||||
"Links": null
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
printf %s "$(cat expected_json_ls_duplicates_file_trailing_newline)" >expected_json_ls_duplicates_file &&
|
||||
test_cmp expected_json_ls_duplicates_file actual_json_ls_duplicates_file
|
||||
'
|
||||
}
|
||||
|
||||
|
||||
# should work offline
test_ls_cmd

# should work online
test_launch_ipfs_daemon
test_ls_cmd
test_kill_ipfs_daemon

test_done