Fix Typos
@@ -402,7 +402,7 @@ func daemonFunc(req *cmds.Request, re cmds.ResponseEmitter, env cmds.Environment
 return err
 }
 
-// Add ipfs version info to prometheous metrics
+// Add ipfs version info to prometheus metrics
 var ipfsInfoMetric = promauto.NewGaugeVec(prometheus.GaugeOpts{
 Name: "ipfs_info",
 Help: "IPFS version information.",
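
For reference, a minimal, self-contained sketch of the info-gauge pattern used by ipfs_info above: register a GaugeVec whose labels carry the metadata and pin its value at 1. The "version" label name, the example version string, and the standalone main with an HTTP /metrics endpoint are assumptions for illustration, not the daemon's actual wiring.

package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// Info-style metric: a gauge held at 1 whose labels carry the metadata.
// The "version" label name is an assumption for illustration; the label
// list is truncated in the hunk above.
var ipfsInfoMetric = promauto.NewGaugeVec(prometheus.GaugeOpts{
	Name: "ipfs_info",
	Help: "IPFS version information.",
}, []string{"version"})

func main() {
	// Record the running version once at startup (value 1 means "present").
	ipfsInfoMetric.With(prometheus.Labels{"version": "0.0.0-dev"}).Set(1)

	// Expose /metrics so a scraper sees: ipfs_info{version="0.0.0-dev"} 1
	http.Handle("/metrics", promhttp.Handler())
	log.Fatal(http.ListenAndServe(":8080", nil))
}
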
@@ -35,7 +35,7 @@ You may have to create /ipfs and /ipns before using 'ipfs mount':
 `,
 LongDescription: `
 Mount IPFS at a read-only mountpoint on the OS. The default, /ipfs and /ipns,
-are set in the configuration file, but can be overriden by the options.
+are set in the configuration file, but can be overridden by the options.
 
 All IPFS objects will be accessible under this directory. Note that the
 root will not be listable, as it is virtual. Access known paths directly.
@@ -34,7 +34,7 @@ type Object struct {
 Links []Link `json:"Links,omitempty"`
 }
 
-var ErrDataEncoding = errors.New("unkown data field encoding")
+var ErrDataEncoding = errors.New("unknown data field encoding")
 
 const (
 headersOptionName = "headers"
@@ -197,7 +197,7 @@ This command outputs data in the following encodings:
 * "xml"
 (Specified by the "--encoding" or "--enc" flag)
 
-The encoding of the object's data field can be specifed by using the
+The encoding of the object's data field can be specified by using the
 --data-encoding flag
 
 Supported values are:
@@ -523,7 +523,7 @@ Efficiently pins a new object based on differences from an existing one and,
 by default, removes the old pin.
 
 This commands is useful when the new pin contains many similarities or is a
-derivate of an existing one, particuarly for large objects. This allows a more
+derivative of an existing one, particuarly for large objects. This allows a more
 efficient DAG-traversal which fully skips already-pinned branches from the old
 object. As a requirement, the old object needs to be an existing recursive
 pin.
@@ -338,7 +338,7 @@ func deserializeNode(nd *Node, dataFieldEncoding string) (*dag.ProtoNode, error)
 }
 dagnode.SetData(data)
 default:
-return nil, fmt.Errorf("unkown data field encoding")
+return nil, fmt.Errorf("unknown data field encoding")
 }
 
 links := make([]*ipld.Link, len(nd.Links))
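
For context on the hunk above, here is a minimal sketch of a data-field decoding switch with the corrected error text; the helper name decodeDataField and the supported encodings ("text" and "base64") are assumptions for illustration rather than the actual deserializeNode body.

package main

import (
	"encoding/base64"
	"errors"
	"fmt"
)

// ErrDataEncoding mirrors the corrected error text from the earlier hunk.
var ErrDataEncoding = errors.New("unknown data field encoding")

// decodeDataField is a hypothetical helper: it converts the textual data
// field of a node description into raw bytes, based on the requested encoding.
func decodeDataField(data, dataFieldEncoding string) ([]byte, error) {
	switch dataFieldEncoding {
	case "text":
		return []byte(data), nil
	case "base64":
		return base64.StdEncoding.DecodeString(data)
	default:
		// Unknown encodings fail loudly instead of guessing.
		return nil, ErrDataEncoding
	}
}

func main() {
	raw, err := decodeDataField("aGVsbG8=", "base64")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", raw) // prints "hello"
}
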
@@ -34,10 +34,10 @@ space+=
 comma:=,
 join-with=$(subst $(space),$1,$(strip $2))
 
-# debug target, prints varaible. Example: `make print-GOFLAGS`
+# debug target, prints variable. Example: `make print-GOFLAGS`
 print-%:
 @echo $*=$($*)
 
-# phony target that will mean that recipe is always exectued
+# phony target that will mean that recipe is always executed
 ALWAYS:
 .PHONY: ALWAYS
@@ -52,7 +52,7 @@ directory.
 Please do not change anything in the "lib/sharness" directory.
 
 If you really need some changes in sharness, please fork it from
-[its cannonical repo](https://github.com/mlafeldt/sharness/) and
+[its canonical repo](https://github.com/mlafeldt/sharness/) and
 send pull requests there.
 
 ## Writing Tests
@@ -193,7 +193,7 @@ test_add_cat_file() {
 test_expect_code 1 ipfs add -Q --chunker rabin-12-512-1024 mountdir/hello.txt
 '
 
-test_expect_success "ipfs add --chunker buzhash suceeds" '
+test_expect_success "ipfs add --chunker buzhash succeeds" '
 ipfs add --chunker buzhash mountdir/hello.txt >actual
 '
 
@@ -787,11 +787,11 @@ test_add_cat_5MB '--cid-version=1 --raw-leaves=false' "bafybeieyifrgpjn3yengthr7
 
 # note: --hash=blake2b-256 implies --cid-version=1 which implies --raw-leaves=true
 # the specified hash represents the leaf nodes stored as raw leaves and
-# encoded with the blake2b-256 hash funtion
+# encoded with the blake2b-256 hash function
 test_add_cat_5MB '--hash=blake2b-256' "bafykbzacebnmjcl4sn37b3ehtibvf263oun2w6idghenrvlpehq5w5jqyvhjo"
 
 # the specified hash represents the leaf nodes stored as protoful nodes and
-# encoded with the blake2b-256 hash funtion
+# encoded with the blake2b-256 hash function
 test_add_cat_5MB '--hash=blake2b-256 --raw-leaves=false' "bafykbzaceaxiiykzgpbhnzlecffqm3zbuvhujyvxe5scltksyafagkyw4rjn2"
 
 test_add_cat_expensive "" "QmU9SWAPPmNEKZB8umYMmjYvN7VyHqABNvdA6GUi4MMEz3"
@@ -802,7 +802,7 @@ test_add_cat_expensive "--cid-version=1" "bafybeidkj5ecbhrqmzrcee2rw7qwsx24z3364
 
 # note: --hash=blake2b-256 implies --cid-version=1 which implies --raw-leaves=true
 # the specified hash represents the leaf nodes stored as raw leaves and
-# encoded with the blake2b-256 hash funtion
+# encoded with the blake2b-256 hash function
 test_add_cat_expensive '--hash=blake2b-256' "bafykbzaceb26fnq5hz5iopzamcb4yqykya5x6a4nvzdmcyuu4rj2akzs3z7r6"
 
 test_add_named_pipe
@@ -16,7 +16,7 @@ init_strategy() {
 PEERID_1=$(iptb attr get 1 id)
 '
 
-test_expect_success 'use pinning startegy for reprovider' '
+test_expect_success 'use pinning strategy for reprovider' '
 ipfsi 0 config Reprovider.Strategy '$1'
 '
 
@@ -169,7 +169,7 @@ test_expect_success 'start http server' '
 start_http_server
 '
 
-test_expect_success 'handle proxy http request propogates error response from remote' '
+test_expect_success 'handle proxy http request propagates error response from remote' '
 serve_content "SORRY GUYS, I LOST IT" "404 Not Found" &&
 curl_send_proxy_request_and_check_response 404 "SORRY GUYS, I LOST IT"
 '
@@ -28,7 +28,7 @@ test_urlstore() {
 
 test_launch_ipfs_daemon --offline
 
-test_expect_success "make sure files can be retrived via the gateway" '
+test_expect_success "make sure files can be retrieved via the gateway" '
 curl http://127.0.0.1:$GWAY_PORT/ipfs/$HASH1a -o file1.actual &&
 test_cmp file1 file1.actual &&
 curl http://127.0.0.1:$GWAY_PORT/ipfs/$HASH2a -o file2.actual &&