From 853b3f1e8786ac68470ad5d1cd40644145198a60 Mon Sep 17 00:00:00 2001
From: Russell Centanni
Date: Wed, 16 Aug 2023 18:00:47 -0400
Subject: [PATCH] fix: remove the use of the --short flag in kubectl version

Fixes ENG-1890

Signed-off-by: Russell Centanni
Co-authored-by: JoshInLondon
Co-authored-by: Martin Chatterton
Signed-off-by: Russell Centanni
---
 .github/workflows/e2e-tests.yaml | 11 +-
 go.mod | 18 +
 go.sum | 188 +
 .../deploy/deployer/kubectl/builder.go | 12 +-
 vendor/github.com/MakeNowJust/heredoc/LICENSE | 21 +
 .../github.com/MakeNowJust/heredoc/README.md | 52 +
 .../github.com/MakeNowJust/heredoc/heredoc.go | 105 +
 .../chai2010/gettext-go/.travis.yml | 5 +
 vendor/github.com/chai2010/gettext-go/LICENSE | 27 +
 .../github.com/chai2010/gettext-go/README.md | 191 +
 vendor/github.com/chai2010/gettext-go/doc.go | 67 +
 vendor/github.com/chai2010/gettext-go/fs.go | 84 +
 .../github.com/chai2010/gettext-go/fs_json.go | 66 +
 .../github.com/chai2010/gettext-go/fs_os.go | 91 +
 .../github.com/chai2010/gettext-go/fs_zip.go | 142 +
 .../github.com/chai2010/gettext-go/gettext.go | 219 +
 .../github.com/chai2010/gettext-go/locale.go | 205 +
 .../github.com/chai2010/gettext-go/mo/doc.go | 74 +
 .../chai2010/gettext-go/mo/encoder.go | 105 +
 .../github.com/chai2010/gettext-go/mo/file.go | 197 +
 .../chai2010/gettext-go/mo/header.go | 109 +
 .../chai2010/gettext-go/mo/message.go | 52 +
 .../github.com/chai2010/gettext-go/mo/util.go | 110 +
 .../chai2010/gettext-go/plural/doc.go | 36 +
 .../chai2010/gettext-go/plural/formula.go | 181 +
 .../chai2010/gettext-go/plural/table.go | 55 +
 .../chai2010/gettext-go/po/comment.go | 270 +
 .../github.com/chai2010/gettext-go/po/doc.go | 24 +
 .../github.com/chai2010/gettext-go/po/file.go | 81 +
 .../chai2010/gettext-go/po/header.go | 106 +
 .../chai2010/gettext-go/po/line_reader.go | 62 +
 .../chai2010/gettext-go/po/message.go | 193 +
 .../github.com/chai2010/gettext-go/po/re.go | 58 +
 .../github.com/chai2010/gettext-go/po/util.go | 114 +
 vendor/github.com/chai2010/gettext-go/tr.go | 175 +
 vendor/github.com/chai2010/gettext-go/util.go | 34 +
 .../exponent-io/jsonpath/.gitignore | 24 +
 .../exponent-io/jsonpath/.travis.yml | 5 +
 .../github.com/exponent-io/jsonpath/LICENSE | 21 +
 .../github.com/exponent-io/jsonpath/README.md | 66 +
 .../exponent-io/jsonpath/decoder.go | 210 +
 .../github.com/exponent-io/jsonpath/path.go | 67 +
 .../exponent-io/jsonpath/pathaction.go | 61 +
 .../github.com/go-errors/errors/.travis.yml | 5 +
 .../github.com/go-errors/errors/LICENSE.MIT | 7 +
 vendor/github.com/go-errors/errors/README.md | 66 +
 vendor/github.com/go-errors/errors/cover.out | 89 +
 vendor/github.com/go-errors/errors/error.go | 217 +
 .../go-errors/errors/parse_panic.go | 127 +
 .../github.com/go-errors/errors/stackframe.go | 102 +
 vendor/github.com/google/btree/.travis.yml | 1 +
 vendor/github.com/google/btree/LICENSE | 202 +
 vendor/github.com/google/btree/README.md | 12 +
 vendor/github.com/google/btree/btree.go | 890 +
 .../gregjones/httpcache/.travis.yml | 19 +
 .../gregjones/httpcache/LICENSE.txt | 7 +
 .../github.com/gregjones/httpcache/README.md | 25 +
 .../httpcache/diskcache/diskcache.go | 61 +
 .../gregjones/httpcache/httpcache.go | 551 +
 .../github.com/liggitt/tabwriter/.travis.yml | 11 +
 vendor/github.com/liggitt/tabwriter/LICENSE | 27 +
 vendor/github.com/liggitt/tabwriter/README.md | 7 +
 .../github.com/liggitt/tabwriter/tabwriter.go | 637 +
 .../monochromegane/go-gitignore/.travis.yml | 6 +
 .../monochromegane/go-gitignore/LICENSE | 21 +
 .../monochromegane/go-gitignore/README.md | 95 +
.../go-gitignore/depth_holder.go | 79 + .../go-gitignore/full_scan_patterns.go | 31 + .../monochromegane/go-gitignore/gitignore.go | 80 + .../go-gitignore/index_scan_patterns.go | 35 + .../go-gitignore/initial_holder.go | 62 + .../monochromegane/go-gitignore/match.go | 24 + .../monochromegane/go-gitignore/pattern.go | 69 + .../monochromegane/go-gitignore/patterns.go | 22 + .../monochromegane/go-gitignore/util.go | 45 + vendor/github.com/peterbourgon/diskv/LICENSE | 19 + .../github.com/peterbourgon/diskv/README.md | 141 + .../peterbourgon/diskv/compression.go | 64 + vendor/github.com/peterbourgon/diskv/diskv.go | 624 + vendor/github.com/peterbourgon/diskv/index.go | 115 + vendor/github.com/pmezard/go-difflib/LICENSE | 27 + .../pmezard/go-difflib/difflib/difflib.go | 772 + .../russross/blackfriday/.gitignore | 8 + .../russross/blackfriday/.travis.yml | 18 + .../russross/blackfriday/LICENSE.txt | 28 + .../github.com/russross/blackfriday/README.md | 364 + .../github.com/russross/blackfriday/block.go | 1480 + vendor/github.com/russross/blackfriday/doc.go | 32 + .../github.com/russross/blackfriday/html.go | 945 + .../github.com/russross/blackfriday/inline.go | 1154 + .../github.com/russross/blackfriday/latex.go | 334 + .../russross/blackfriday/markdown.go | 943 + .../russross/blackfriday/smartypants.go | 430 + vendor/github.com/stretchr/testify/LICENSE | 21 + .../testify/assert/assertion_compare.go | 458 + .../assert/assertion_compare_can_convert.go | 16 + .../assert/assertion_compare_legacy.go | 16 + .../testify/assert/assertion_format.go | 763 + .../testify/assert/assertion_format.go.tmpl | 5 + .../testify/assert/assertion_forward.go | 1514 + .../testify/assert/assertion_forward.go.tmpl | 5 + .../testify/assert/assertion_order.go | 81 + .../stretchr/testify/assert/assertions.go | 1856 + .../github.com/stretchr/testify/assert/doc.go | 45 + .../stretchr/testify/assert/errors.go | 10 + .../testify/assert/forward_assertions.go | 16 + .../testify/assert/http_assertions.go | 162 + .../stretchr/testify/require/doc.go | 28 + .../testify/require/forward_requirements.go | 16 + .../stretchr/testify/require/require.go | 1935 + .../stretchr/testify/require/require.go.tmpl | 6 + .../testify/require/require_forward.go | 1515 + .../testify/require/require_forward.go.tmpl | 5 + .../stretchr/testify/require/requirements.go | 29 + vendor/github.com/xlab/treeprint/LICENSE | 20 + vendor/github.com/xlab/treeprint/README.md | 128 + vendor/github.com/xlab/treeprint/helpers.go | 47 + vendor/github.com/xlab/treeprint/struct.go | 322 + vendor/github.com/xlab/treeprint/treeprint.go | 215 + vendor/go.starlark.net/LICENSE | 29 + .../internal/compile/compile.go | 1903 + .../internal/compile/serial.go | 389 + .../go.starlark.net/internal/spell/spell.go | 115 + vendor/go.starlark.net/resolve/binding.go | 74 + vendor/go.starlark.net/resolve/resolve.go | 978 + vendor/go.starlark.net/starlark/debug.go | 42 + vendor/go.starlark.net/starlark/empty.s | 3 + vendor/go.starlark.net/starlark/eval.go | 1497 + vendor/go.starlark.net/starlark/hashtable.go | 373 + vendor/go.starlark.net/starlark/int.go | 350 + vendor/go.starlark.net/starlark/interp.go | 637 + vendor/go.starlark.net/starlark/library.go | 2104 + vendor/go.starlark.net/starlark/profile.go | 449 + vendor/go.starlark.net/starlark/unpack.go | 258 + vendor/go.starlark.net/starlark/value.go | 1293 + .../go.starlark.net/starlarkstruct/module.go | 43 + .../go.starlark.net/starlarkstruct/struct.go | 281 + vendor/go.starlark.net/syntax/grammar.txt | 129 + 
vendor/go.starlark.net/syntax/parse.go | 1029 + vendor/go.starlark.net/syntax/quote.go | 269 + vendor/go.starlark.net/syntax/scan.go | 1089 + vendor/go.starlark.net/syntax/syntax.go | 529 + vendor/go.starlark.net/syntax/walk.go | 163 + .../unstructured/unstructuredscheme/scheme.go | 129 + .../pkg/util/duration/duration.go | 93 + .../apimachinery/pkg/util/version/doc.go | 18 + .../apimachinery/pkg/util/version/version.go | 325 + vendor/k8s.io/cli-runtime/LICENSE | 202 + .../pkg/genericclioptions/builder_flags.go | 231 + .../genericclioptions/builder_flags_fake.go | 54 + .../pkg/genericclioptions/client_config.go | 72 + .../pkg/genericclioptions/command_headers.go | 91 + .../pkg/genericclioptions/config_flags.go | 456 + .../genericclioptions/config_flags_fake.go | 127 + .../cli-runtime/pkg/genericclioptions/doc.go | 19 + .../pkg/genericclioptions/filename_flags.go | 82 + .../pkg/genericclioptions/io_options.go | 57 + .../pkg/genericclioptions/json_yaml_flags.go | 79 + .../pkg/genericclioptions/jsonpath_flags.go | 137 + .../genericclioptions/kube_template_flags.go | 94 + .../pkg/genericclioptions/name_flags.go | 83 + .../pkg/genericclioptions/print_flags.go | 171 + .../pkg/genericclioptions/record_flags.go | 201 + .../pkg/genericclioptions/template_flags.go | 136 + .../cli-runtime/pkg/printers/discard.go | 30 + vendor/k8s.io/cli-runtime/pkg/printers/doc.go | 19 + .../cli-runtime/pkg/printers/interface.go | 54 + .../k8s.io/cli-runtime/pkg/printers/json.go | 79 + .../cli-runtime/pkg/printers/jsonpath.go | 147 + .../cli-runtime/pkg/printers/managedfields.go | 59 + .../k8s.io/cli-runtime/pkg/printers/name.go | 130 + .../cli-runtime/pkg/printers/sourcechecker.go | 60 + .../cli-runtime/pkg/printers/tableprinter.go | 588 + .../cli-runtime/pkg/printers/tabwriter.go | 36 + .../cli-runtime/pkg/printers/template.go | 118 + .../cli-runtime/pkg/printers/typesetter.go | 95 + .../pkg/printers/warningprinter.go | 55 + .../k8s.io/cli-runtime/pkg/printers/yaml.go | 85 + .../cli-runtime/pkg/resource/builder.go | 1247 + .../k8s.io/cli-runtime/pkg/resource/client.go | 69 + .../cli-runtime/pkg/resource/crd_finder.go | 110 + vendor/k8s.io/cli-runtime/pkg/resource/doc.go | 24 + .../k8s.io/cli-runtime/pkg/resource/fake.go | 40 + .../k8s.io/cli-runtime/pkg/resource/helper.go | 321 + .../cli-runtime/pkg/resource/interfaces.go | 103 + .../pkg/resource/kustomizevisitor.go | 54 + .../k8s.io/cli-runtime/pkg/resource/mapper.go | 166 + .../pkg/resource/metadata_decoder.go | 56 + .../pkg/resource/query_param_verifier.go | 166 + .../k8s.io/cli-runtime/pkg/resource/result.go | 242 + .../k8s.io/cli-runtime/pkg/resource/scheme.go | 82 + .../cli-runtime/pkg/resource/selector.go | 92 + .../cli-runtime/pkg/resource/visitor.go | 742 + .../discovery/cached/disk/cached_discovery.go | 312 + .../discovery/cached/disk/round_tripper.go | 65 + vendor/k8s.io/client-go/dynamic/interface.go | 63 + vendor/k8s.io/client-go/dynamic/scheme.go | 108 + vendor/k8s.io/client-go/dynamic/simple.go | 388 + .../k8s.io/client-go/openapi/cached/client.go | 54 + .../client-go/openapi/cached/groupversion.go | 45 + .../restmapper/category_expansion.go | 119 + .../k8s.io/client-go/restmapper/discovery.go | 338 + .../k8s.io/client-go/restmapper/shortcut.go | 187 + vendor/k8s.io/client-go/scale/client.go | 238 + vendor/k8s.io/client-go/scale/doc.go | 21 + vendor/k8s.io/client-go/scale/interfaces.go | 47 + .../client-go/scale/scheme/appsint/doc.go | 22 + .../scale/scheme/appsint/register.go | 55 + .../scale/scheme/appsv1beta1/conversion.go | 73 + 
.../client-go/scale/scheme/appsv1beta1/doc.go | 20 + .../scale/scheme/appsv1beta1/register.go | 45 + .../appsv1beta1/zz_generated.conversion.go | 134 + .../scale/scheme/appsv1beta2/conversion.go | 73 + .../client-go/scale/scheme/appsv1beta2/doc.go | 20 + .../scale/scheme/appsv1beta2/register.go | 45 + .../appsv1beta2/zz_generated.conversion.go | 134 + .../scale/scheme/autoscalingv1/conversion.go | 54 + .../scale/scheme/autoscalingv1/doc.go | 20 + .../scale/scheme/autoscalingv1/register.go | 45 + .../autoscalingv1/zz_generated.conversion.go | 133 + vendor/k8s.io/client-go/scale/scheme/doc.go | 22 + .../scale/scheme/extensionsint/doc.go | 22 + .../scale/scheme/extensionsint/register.go | 55 + .../scheme/extensionsv1beta1/conversion.go | 73 + .../scale/scheme/extensionsv1beta1/doc.go | 20 + .../scheme/extensionsv1beta1/register.go | 45 + .../zz_generated.conversion.go | 134 + .../k8s.io/client-go/scale/scheme/register.go | 54 + vendor/k8s.io/client-go/scale/scheme/types.go | 60 + .../scale/scheme/zz_generated.deepcopy.go | 92 + vendor/k8s.io/client-go/scale/util.go | 197 + .../third_party/forked/golang/LICENSE | 27 + .../third_party/forked/golang/PATENTS | 22 + .../forked/golang/template/exec.go | 52 + .../forked/golang/template/funcs.go | 177 + vendor/k8s.io/client-go/util/jsonpath/doc.go | 20 + .../client-go/util/jsonpath/jsonpath.go | 579 + vendor/k8s.io/client-go/util/jsonpath/node.go | 256 + .../k8s.io/client-go/util/jsonpath/parser.go | 527 + vendor/k8s.io/component-base/LICENSE | 202 + vendor/k8s.io/component-base/version/OWNERS | 16 + vendor/k8s.io/component-base/version/base.go | 63 + .../k8s.io/component-base/version/version.go | 42 + .../pkg/util/proto/validation/errors.go | 79 + .../pkg/util/proto/validation/types.go | 299 + .../pkg/util/proto/validation/validation.go | 30 + .../k8s.io/kubectl/pkg/cmd/util/env_file.go | 103 + vendor/k8s.io/kubectl/pkg/cmd/util/factory.go | 69 + .../pkg/cmd/util/factory_client_access.go | 196 + vendor/k8s.io/kubectl/pkg/cmd/util/helpers.go | 846 + .../pkg/cmd/util/kubectl_match_version.go | 129 + .../kubectl/pkg/cmd/util/override_options.go | 90 + .../k8s.io/kubectl/pkg/cmd/util/printing.go | 29 + .../kubectl/pkg/cmd/version/skew_warning.go | 54 + .../k8s.io/kubectl/pkg/cmd/version/version.go | 203 + vendor/k8s.io/kubectl/pkg/util/i18n/i18n.go | 150 + .../kubectl/pkg/util/i18n/translations/OWNERS | 7 + .../pkg/util/i18n/translations/README.md | 82 + .../pkg/util/i18n/translations/extract.py | 105 + .../pkg/util/i18n/translations/kubectl/OWNERS | 6 + .../kubectl/de_DE/LC_MESSAGES/k8s.mo | Bin 0 -> 17420 bytes .../kubectl/de_DE/LC_MESSAGES/k8s.po | 2920 + .../kubectl/default/LC_MESSAGES/k8s.mo | Bin 0 -> 153024 bytes .../kubectl/default/LC_MESSAGES/k8s.po | 5085 + .../kubectl/en_US/LC_MESSAGES/k8s.mo | Bin 0 -> 153024 bytes .../kubectl/en_US/LC_MESSAGES/k8s.po | 5085 + .../kubectl/fr_FR/LC_MESSAGES/k8s.mo | Bin 0 -> 1233 bytes .../kubectl/fr_FR/LC_MESSAGES/k8s.po | 103 + .../kubectl/it_IT/LC_MESSAGES/k8s.mo | Bin 0 -> 20017 bytes .../kubectl/it_IT/LC_MESSAGES/k8s.po | 3249 + .../kubectl/ja_JP/LC_MESSAGES/k8s.mo | Bin 0 -> 19210 bytes .../kubectl/ja_JP/LC_MESSAGES/k8s.po | 3365 + .../kubectl/ko_KR/LC_MESSAGES/k8s.mo | Bin 0 -> 1274 bytes .../kubectl/ko_KR/LC_MESSAGES/k8s.po | 96 + .../kubectl/pt_BR/LC_MESSAGES/k8s.mo | Bin 0 -> 19980 bytes .../kubectl/pt_BR/LC_MESSAGES/k8s.po | 3250 + .../i18n/translations/kubectl/template.pot | 3183 + .../kubectl/zh_CN/LC_MESSAGES/k8s.mo | Bin 0 -> 18814 bytes .../kubectl/zh_CN/LC_MESSAGES/k8s.po | 3287 + 
.../kubectl/zh_TW/LC_MESSAGES/k8s.mo | Bin 0 -> 1187 bytes .../kubectl/zh_TW/LC_MESSAGES/k8s.po | 81 + .../test/default/LC_MESSAGES/k8s.mo | Bin 0 -> 563 bytes .../test/default/LC_MESSAGES/k8s.po | 28 + .../test/en_US/LC_MESSAGES/k8s.mo | Bin 0 -> 563 bytes .../test/en_US/LC_MESSAGES/k8s.po | 28 + vendor/k8s.io/kubectl/pkg/util/openapi/OWNERS | 6 + vendor/k8s.io/kubectl/pkg/util/openapi/doc.go | 21 + .../kubectl/pkg/util/openapi/extensions.go | 27 + .../kubectl/pkg/util/openapi/openapi.go | 128 + .../pkg/util/openapi/openapi_getter.go | 82 + .../pkg/util/openapi/validation/validation.go | 140 + .../pkg/util/templates/command_groups.go | 59 + .../pkg/util/templates/help_flags_printer.go | 76 + .../kubectl/pkg/util/templates/markdown.go | 150 + .../kubectl/pkg/util/templates/normalizers.go | 97 + .../kubectl/pkg/util/templates/templater.go | 309 + .../kubectl/pkg/util/templates/templates.go | 103 + .../k8s.io/kubectl/pkg/validation/schema.go | 154 + vendor/k8s.io/utils/exec/README.md | 5 + vendor/k8s.io/utils/exec/doc.go | 18 + vendor/k8s.io/utils/exec/exec.go | 256 + vendor/k8s.io/utils/exec/fixup_go118.go | 32 + vendor/k8s.io/utils/exec/fixup_go119.go | 40 + vendor/k8s.io/utils/pointer/OWNERS | 10 + vendor/k8s.io/utils/pointer/README.md | 3 + vendor/k8s.io/utils/pointer/pointer.go | 300 + vendor/modules.txt | 169 + vendor/sigs.k8s.io/kustomize/api/LICENSE | 201 + .../api/filters/annotations/annotations.go | 52 + .../kustomize/api/filters/annotations/doc.go | 6 + .../kustomize/api/filters/fieldspec/doc.go | 6 + .../api/filters/fieldspec/fieldspec.go | 182 + .../api/filters/filtersutil/setters.go | 66 + .../kustomize/api/filters/fsslice/doc.go | 6 + .../kustomize/api/filters/fsslice/fsslice.go | 47 + .../api/filters/iampolicygenerator/doc.go | 3 + .../iampolicygenerator/iampolicygenerator.go | 55 + .../kustomize/api/filters/imagetag/doc.go | 12 + .../api/filters/imagetag/imagetag.go | 72 + .../kustomize/api/filters/imagetag/legacy.go | 105 + .../kustomize/api/filters/imagetag/updater.go | 66 + .../kustomize/api/filters/labels/doc.go | 6 + .../kustomize/api/filters/labels/labels.go | 53 + .../kustomize/api/filters/nameref/doc.go | 3 + .../kustomize/api/filters/nameref/nameref.go | 396 + .../api/filters/nameref/seqfilter.go | 57 + .../kustomize/api/filters/namespace/doc.go | 9 + .../api/filters/namespace/namespace.go | 174 + .../api/filters/patchjson6902/doc.go | 6 + .../filters/patchjson6902/patchjson6902.go | 65 + .../api/filters/patchstrategicmerge/doc.go | 6 + .../patchstrategicmerge.go | 36 + .../kustomize/api/filters/prefix/doc.go | 6 + .../kustomize/api/filters/prefix/prefix.go | 50 + .../kustomize/api/filters/refvar/doc.go | 3 + .../kustomize/api/filters/refvar/expand.go | 147 + .../kustomize/api/filters/refvar/refvar.go | 110 + .../kustomize/api/filters/replacement/doc.go | 4 + .../api/filters/replacement/replacement.go | 249 + .../kustomize/api/filters/replicacount/doc.go | 6 + .../api/filters/replicacount/replicacount.go | 45 + .../kustomize/api/filters/suffix/doc.go | 6 + .../kustomize/api/filters/suffix/suffix.go | 50 + .../api/filters/valueadd/valueadd.go | 134 + .../kustomize/api/hasher/hasher.go | 155 + vendor/sigs.k8s.io/kustomize/api/ifc/ifc.go | 48 + .../sigs.k8s.io/kustomize/api/image/image.go | 66 + .../accumulator/loadconfigfromcrds.go | 198 + .../accumulator/namereferencetransformer.go | 165 + .../internal/accumulator/refvartransformer.go | 57 + .../internal/accumulator/resaccumulator.go | 190 + .../builtins/AnnotationsTransformer.go | 38 + 
.../internal/builtins/ConfigMapGenerator.go | 39 + .../api/internal/builtins/HashTransformer.go | 40 + .../builtins/HelmChartInflationGenerator.go | 339 + .../internal/builtins/IAMPolicyGenerator.go | 33 + .../internal/builtins/ImageTagTransformer.go | 41 + .../api/internal/builtins/LabelTransformer.go | 38 + .../builtins/LegacyOrderTransformer.go | 46 + .../internal/builtins/NamespaceTransformer.go | 55 + .../builtins/PatchJson6902Transformer.go | 105 + .../PatchStrategicMergeTransformer.go | 89 + .../api/internal/builtins/PatchTransformer.go | 153 + .../internal/builtins/PrefixTransformer.go | 96 + .../builtins/ReplacementTransformer.go | 78 + .../builtins/ReplicaCountTransformer.go | 73 + .../api/internal/builtins/SecretGenerator.go | 39 + .../internal/builtins/SuffixTransformer.go | 96 + .../internal/builtins/ValueAddTransformer.go | 141 + .../kustomize/api/internal/builtins/doc.go | 8 + .../api/internal/generators/configmap.go | 52 + .../api/internal/generators/secret.go | 59 + .../api/internal/generators/utils.go | 97 + .../kustomize/api/internal/git/cloner.go | 54 + .../kustomize/api/internal/git/gitrunner.go | 55 + .../kustomize/api/internal/git/repospec.go | 268 + .../api/internal/kusterr/yamlformaterror.go | 55 + .../api/internal/plugins/builtinconfig/doc.go | 10 + .../builtinconfig/loaddefaultconfig.go | 42 + .../builtinconfig/namebackreferences.go | 99 + .../builtinconfig/transformerconfig.go | 148 + .../builtinplugintype_string.go | 42 + .../plugins/builtinhelpers/builtins.go | 114 + .../internal/plugins/execplugin/execplugin.go | 192 + .../api/internal/plugins/fnplugin/fnplugin.go | 199 + .../api/internal/plugins/loader/loader.go | 306 + .../api/internal/plugins/utils/utils.go | 240 + .../target/errmissingkustomization.go | 48 + .../api/internal/target/kusttarget.go | 559 + .../target/kusttarget_configplugin.go | 438 + .../api/internal/target/multitransformer.go | 41 + .../api/internal/utils/annotations.go | 26 + .../api/internal/utils/errtimeout.go | 36 + .../api/internal/utils/makeResIds.go | 64 + .../api/internal/utils/pathsplitter.go | 64 + .../api/internal/utils/stringslice.go | 44 + .../kustomize/api/internal/utils/timedcall.go | 23 + .../api/internal/validate/fieldvalidator.go | 68 + .../builtinpluginconsts/commonannotations.go | 47 + .../builtinpluginconsts/commonlabels.go | 159 + .../builtinpluginconsts/defaultconfig.go | 40 + .../api/konfig/builtinpluginconsts/doc.go | 8 + .../api/konfig/builtinpluginconsts/images.go | 18 + .../konfig/builtinpluginconsts/nameprefix.go | 11 + .../builtinpluginconsts/namereference.go | 427 + .../konfig/builtinpluginconsts/namespace.go | 26 + .../konfig/builtinpluginconsts/namesuffix.go | 11 + .../konfig/builtinpluginconsts/replicas.go | 23 + .../builtinpluginconsts/varreference.go | 223 + .../sigs.k8s.io/kustomize/api/konfig/doc.go | 7 + .../kustomize/api/konfig/general.go | 49 + .../kustomize/api/konfig/plugins.go | 138 + .../sigs.k8s.io/kustomize/api/krusty/doc.go | 11 + .../kustomize/api/krusty/kustomizer.go | 122 + .../kustomize/api/krusty/options.go | 58 + vendor/sigs.k8s.io/kustomize/api/kv/kv.go | 225 + .../kustomize/api/loader/errors.go | 5 + .../kustomize/api/loader/fileloader.go | 343 + .../kustomize/api/loader/loader.go | 34 + .../kustomize/api/loader/loadrestrictions.go | 35 + .../kustomize/api/provenance/provenance.go | 68 + .../kustomize/api/provider/depprovider.go | 42 + .../kustomize/api/resmap/factory.go | 145 + .../kustomize/api/resmap/idslice.go | 37 + .../kustomize/api/resmap/resmap.go | 333 + 
.../kustomize/api/resmap/reswrangler.go | 765 + .../sigs.k8s.io/kustomize/api/resource/doc.go | 5 + .../kustomize/api/resource/factory.go | 293 + .../kustomize/api/resource/idset.go | 30 + .../kustomize/api/resource/origin.go | 106 + .../kustomize/api/resource/resource.go | 532 + .../builtinpluginloadingoptions_string.go | 25 + .../kustomize/api/types/configmapargs.go | 10 + vendor/sigs.k8s.io/kustomize/api/types/doc.go | 9 + .../api/types/erronlybuiltinpluginsallowed.go | 33 + .../kustomize/api/types/errunabletofind.go | 40 + .../kustomize/api/types/fieldspec.go | 91 + vendor/sigs.k8s.io/kustomize/api/types/fix.go | 54 + .../kustomize/api/types/generationbehavior.go | 46 + .../kustomize/api/types/generatorargs.go | 27 + .../kustomize/api/types/generatoroptions.go | 76 + .../kustomize/api/types/helmchartargs.go | 122 + .../kustomize/api/types/iampolicygenerator.go | 36 + .../sigs.k8s.io/kustomize/api/types/image.go | 21 + .../kustomize/api/types/inventory.go | 16 + .../kustomize/api/types/kustomization.go | 274 + .../kustomize/api/types/kvpairsources.go | 36 + .../sigs.k8s.io/kustomize/api/types/labels.go | 25 + .../kustomize/api/types/loadrestrictions.go | 24 + .../api/types/loadrestrictions_string.go | 25 + .../kustomize/api/types/objectmeta.go | 13 + .../sigs.k8s.io/kustomize/api/types/pair.go | 10 + .../sigs.k8s.io/kustomize/api/types/patch.go | 34 + .../api/types/patchstrategicmerge.go | 9 + .../kustomize/api/types/pluginconfig.go | 47 + .../kustomize/api/types/pluginrestrictions.go | 62 + .../api/types/pluginrestrictions_string.go | 25 + .../kustomize/api/types/replacement.go | 87 + .../kustomize/api/types/replacementfield.go | 6 + .../kustomize/api/types/replica.go | 16 + .../kustomize/api/types/secretargs.go | 19 + .../kustomize/api/types/selector.go | 124 + .../kustomize/api/types/typemeta.go | 11 + vendor/sigs.k8s.io/kustomize/api/types/var.go | 211 + vendor/sigs.k8s.io/kustomize/kyaml/LICENSE | 201 + .../kustomize/kyaml/LICENSE_TEMPLATE | 2 + .../kustomize/kyaml/comments/comments.go | 83 + .../kustomize/kyaml/errors/errors.go | 40 + vendor/sigs.k8s.io/kustomize/kyaml/ext/ext.go | 10 + .../kustomize/kyaml/fieldmeta/fieldmeta.go | 275 + .../kustomize/kyaml/filesys/confirmeddir.go | 79 + .../kustomize/kyaml/filesys/doc.go | 7 + .../kustomize/kyaml/filesys/file.go | 15 + .../kustomize/kyaml/filesys/fileinfo.go | 34 + .../kustomize/kyaml/filesys/fileondisk.go | 27 + .../kustomize/kyaml/filesys/filesystem.go | 120 + .../kustomize/kyaml/filesys/fsnode.go | 648 + .../kustomize/kyaml/filesys/fsondisk.go | 137 + .../kustomize/kyaml/filesys/util.go | 143 + .../kyaml/fn/runtime/container/container.go | 201 + .../kustomize/kyaml/fn/runtime/exec/doc.go | 5 + .../kustomize/kyaml/fn/runtime/exec/exec.go | 54 + .../kyaml/fn/runtime/runtimeutil/doc.go | 5 + .../fn/runtime/runtimeutil/functiontypes.go | 305 + .../fn/runtime/runtimeutil/runtimeutil.go | 281 + .../kyaml/fn/runtime/runtimeutil/types.go | 8 + .../kyaml/fn/runtime/starlark/context.go | 79 + .../kyaml/fn/runtime/starlark/doc.go | 36 + .../kyaml/fn/runtime/starlark/starlark.go | 181 + .../forked/github.com/go-yaml/yaml/LICENSE | 50 + .../forked/github.com/go-yaml/yaml/NOTICE | 13 + .../forked/github.com/go-yaml/yaml/README.md | 150 + .../forked/github.com/go-yaml/yaml/apic.go | 747 + .../forked/github.com/go-yaml/yaml/decode.go | 950 + .../github.com/go-yaml/yaml/emitterc.go | 2028 + .../forked/github.com/go-yaml/yaml/encode.go | 577 + .../forked/github.com/go-yaml/yaml/parserc.go | 1249 + .../forked/github.com/go-yaml/yaml/readerc.go | 
434 + .../forked/github.com/go-yaml/yaml/resolve.go | 326 + .../github.com/go-yaml/yaml/scannerc.go | 3038 + .../forked/github.com/go-yaml/yaml/sorter.go | 134 + .../forked/github.com/go-yaml/yaml/writerc.go | 48 + .../forked/github.com/go-yaml/yaml/yaml.go | 708 + .../forked/github.com/go-yaml/yaml/yamlh.go | 809 + .../github.com/go-yaml/yaml/yamlprivateh.go | 198 + .../github.com/qri-io/starlib/util/LICENSE | 21 + .../github.com/qri-io/starlib/util/doc.go | 25 + .../github.com/qri-io/starlib/util/util.go | 273 + .../kustomize/kyaml/kio/byteio_reader.go | 349 + .../kustomize/kyaml/kio/byteio_writer.go | 198 + vendor/sigs.k8s.io/kustomize/kyaml/kio/doc.go | 35 + .../kustomize/kyaml/kio/filters/filters.go | 210 + .../kustomize/kyaml/kio/filters/fmtr.go | 314 + .../kustomize/kyaml/kio/filters/grep.go | 117 + .../kustomize/kyaml/kio/filters/local.go | 38 + .../kustomize/kyaml/kio/filters/merge.go | 86 + .../kustomize/kyaml/kio/filters/merge3.go | 317 + .../kustomize/kyaml/kio/filters/modify.go | 4 + .../kyaml/kio/filters/stripcomments.go | 32 + .../kustomize/kyaml/kio/ignorefilesmatcher.go | 105 + vendor/sigs.k8s.io/kustomize/kyaml/kio/kio.go | 442 + .../kustomize/kyaml/kio/kioutil/kioutil.go | 420 + .../kustomize/kyaml/kio/pkgio_reader.go | 360 + .../kustomize/kyaml/kio/pkgio_writer.go | 150 + .../kustomize/kyaml/kio/testing.go | 45 + .../sigs.k8s.io/kustomize/kyaml/kio/tree.go | 519 + .../kustomize/kyaml/openapi/Makefile | 62 + .../kustomize/kyaml/openapi/README.md | 94 + .../openapi/kubernetesapi/openapiinfo.go | 18 + .../openapi/kubernetesapi/v1212/swagger.go | 249 + .../openapi/kubernetesapi/v1212/swagger.json | 101036 +++++++++++++++ .../kyaml/openapi/kustomizationapi/swagger.go | 249 + .../openapi/kustomizationapi/swagger.json | 130 + .../kustomize/kyaml/openapi/openapi.go | 726 + .../kustomize/kyaml/order/syncorder.go | 124 + .../sigs.k8s.io/kustomize/kyaml/resid/gvk.go | 258 + .../kustomize/kyaml/resid/resid.go | 164 + .../kustomize/kyaml/runfn/runfn.go | 537 + .../kustomize/kyaml/sets/string.go | 64 + .../kustomize/kyaml/sets/stringlist.go | 44 + .../kustomize/kyaml/sliceutil/slice.go | 25 + .../sigs.k8s.io/kustomize/kyaml/yaml/alias.go | 99 + .../kustomize/kyaml/yaml/compatibility.go | 100 + .../sigs.k8s.io/kustomize/kyaml/yaml/const.go | 30 + .../kustomize/kyaml/yaml/datamap.go | 121 + .../sigs.k8s.io/kustomize/kyaml/yaml/doc.go | 49 + .../kustomize/kyaml/yaml/filters.go | 146 + .../sigs.k8s.io/kustomize/kyaml/yaml/fns.go | 866 + .../k8sgen/pkg/labels/copied.deepcopy.go | 44 + .../yaml/internal/k8sgen/pkg/labels/labels.go | 192 + .../internal/k8sgen/pkg/labels/selector.go | 926 + .../internal/k8sgen/pkg/selection/operator.go | 36 + .../internal/k8sgen/pkg/util/errors/errors.go | 252 + .../internal/k8sgen/pkg/util/sets/empty.go | 24 + .../internal/k8sgen/pkg/util/sets/string.go | 206 + .../pkg/util/validation/field/errors.go | 275 + .../k8sgen/pkg/util/validation/field/path.go | 94 + .../k8sgen/pkg/util/validation/validation.go | 506 + .../sigs.k8s.io/kustomize/kyaml/yaml/kfns.go | 137 + .../kustomize/kyaml/yaml/mapnode.go | 40 + .../sigs.k8s.io/kustomize/kyaml/yaml/match.go | 273 + .../kustomize/kyaml/yaml/merge2/merge2.go | 182 + .../kyaml/yaml/merge2/smpdirective.go | 101 + .../kyaml/yaml/merge2/smpdirective_string.go | 26 + .../kustomize/kyaml/yaml/merge3/merge3.go | 45 + .../kustomize/kyaml/yaml/merge3/visitor.go | 172 + .../sigs.k8s.io/kustomize/kyaml/yaml/order.go | 107 + .../sigs.k8s.io/kustomize/kyaml/yaml/rnode.go | 1293 + .../kustomize/kyaml/yaml/schema/schema.go | 44 + 
.../sigs.k8s.io/kustomize/kyaml/yaml/types.go | 240 + .../sigs.k8s.io/kustomize/kyaml/yaml/util.go | 71 + .../kyaml/yaml/walk/associative_sequence.go | 385 + .../kustomize/kyaml/yaml/walk/map.go | 173 + .../yaml/walk/nonassociative_sequence.go | 13 + .../kustomize/kyaml/yaml/walk/scalar.go | 11 + .../kustomize/kyaml/yaml/walk/visitor.go | 28 + .../kustomize/kyaml/yaml/walk/walk.go | 186 + 577 files changed, 230521 insertions(+), 3 deletions(-) create mode 100644 vendor/github.com/MakeNowJust/heredoc/LICENSE create mode 100644 vendor/github.com/MakeNowJust/heredoc/README.md create mode 100644 vendor/github.com/MakeNowJust/heredoc/heredoc.go create mode 100644 vendor/github.com/chai2010/gettext-go/.travis.yml create mode 100644 vendor/github.com/chai2010/gettext-go/LICENSE create mode 100644 vendor/github.com/chai2010/gettext-go/README.md create mode 100644 vendor/github.com/chai2010/gettext-go/doc.go create mode 100644 vendor/github.com/chai2010/gettext-go/fs.go create mode 100644 vendor/github.com/chai2010/gettext-go/fs_json.go create mode 100644 vendor/github.com/chai2010/gettext-go/fs_os.go create mode 100644 vendor/github.com/chai2010/gettext-go/fs_zip.go create mode 100644 vendor/github.com/chai2010/gettext-go/gettext.go create mode 100644 vendor/github.com/chai2010/gettext-go/locale.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/doc.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/encoder.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/file.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/header.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/message.go create mode 100644 vendor/github.com/chai2010/gettext-go/mo/util.go create mode 100644 vendor/github.com/chai2010/gettext-go/plural/doc.go create mode 100644 vendor/github.com/chai2010/gettext-go/plural/formula.go create mode 100644 vendor/github.com/chai2010/gettext-go/plural/table.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/comment.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/doc.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/file.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/header.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/line_reader.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/message.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/re.go create mode 100644 vendor/github.com/chai2010/gettext-go/po/util.go create mode 100644 vendor/github.com/chai2010/gettext-go/tr.go create mode 100644 vendor/github.com/chai2010/gettext-go/util.go create mode 100644 vendor/github.com/exponent-io/jsonpath/.gitignore create mode 100644 vendor/github.com/exponent-io/jsonpath/.travis.yml create mode 100644 vendor/github.com/exponent-io/jsonpath/LICENSE create mode 100644 vendor/github.com/exponent-io/jsonpath/README.md create mode 100644 vendor/github.com/exponent-io/jsonpath/decoder.go create mode 100644 vendor/github.com/exponent-io/jsonpath/path.go create mode 100644 vendor/github.com/exponent-io/jsonpath/pathaction.go create mode 100644 vendor/github.com/go-errors/errors/.travis.yml create mode 100644 vendor/github.com/go-errors/errors/LICENSE.MIT create mode 100644 vendor/github.com/go-errors/errors/README.md create mode 100644 vendor/github.com/go-errors/errors/cover.out create mode 100644 vendor/github.com/go-errors/errors/error.go create mode 100644 vendor/github.com/go-errors/errors/parse_panic.go create mode 100644 
vendor/github.com/go-errors/errors/stackframe.go create mode 100644 vendor/github.com/google/btree/.travis.yml create mode 100644 vendor/github.com/google/btree/LICENSE create mode 100644 vendor/github.com/google/btree/README.md create mode 100644 vendor/github.com/google/btree/btree.go create mode 100644 vendor/github.com/gregjones/httpcache/.travis.yml create mode 100644 vendor/github.com/gregjones/httpcache/LICENSE.txt create mode 100644 vendor/github.com/gregjones/httpcache/README.md create mode 100644 vendor/github.com/gregjones/httpcache/diskcache/diskcache.go create mode 100644 vendor/github.com/gregjones/httpcache/httpcache.go create mode 100644 vendor/github.com/liggitt/tabwriter/.travis.yml create mode 100644 vendor/github.com/liggitt/tabwriter/LICENSE create mode 100644 vendor/github.com/liggitt/tabwriter/README.md create mode 100644 vendor/github.com/liggitt/tabwriter/tabwriter.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/.travis.yml create mode 100644 vendor/github.com/monochromegane/go-gitignore/LICENSE create mode 100644 vendor/github.com/monochromegane/go-gitignore/README.md create mode 100644 vendor/github.com/monochromegane/go-gitignore/depth_holder.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/gitignore.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/initial_holder.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/match.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/pattern.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/patterns.go create mode 100644 vendor/github.com/monochromegane/go-gitignore/util.go create mode 100644 vendor/github.com/peterbourgon/diskv/LICENSE create mode 100644 vendor/github.com/peterbourgon/diskv/README.md create mode 100644 vendor/github.com/peterbourgon/diskv/compression.go create mode 100644 vendor/github.com/peterbourgon/diskv/diskv.go create mode 100644 vendor/github.com/peterbourgon/diskv/index.go create mode 100644 vendor/github.com/pmezard/go-difflib/LICENSE create mode 100644 vendor/github.com/pmezard/go-difflib/difflib/difflib.go create mode 100644 vendor/github.com/russross/blackfriday/.gitignore create mode 100644 vendor/github.com/russross/blackfriday/.travis.yml create mode 100644 vendor/github.com/russross/blackfriday/LICENSE.txt create mode 100644 vendor/github.com/russross/blackfriday/README.md create mode 100644 vendor/github.com/russross/blackfriday/block.go create mode 100644 vendor/github.com/russross/blackfriday/doc.go create mode 100644 vendor/github.com/russross/blackfriday/html.go create mode 100644 vendor/github.com/russross/blackfriday/inline.go create mode 100644 vendor/github.com/russross/blackfriday/latex.go create mode 100644 vendor/github.com/russross/blackfriday/markdown.go create mode 100644 vendor/github.com/russross/blackfriday/smartypants.go create mode 100644 vendor/github.com/stretchr/testify/LICENSE create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_compare.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_compare_can_convert.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_compare_legacy.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_format.go create mode 100644 
vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_forward.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_order.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertions.go create mode 100644 vendor/github.com/stretchr/testify/assert/doc.go create mode 100644 vendor/github.com/stretchr/testify/assert/errors.go create mode 100644 vendor/github.com/stretchr/testify/assert/forward_assertions.go create mode 100644 vendor/github.com/stretchr/testify/assert/http_assertions.go create mode 100644 vendor/github.com/stretchr/testify/require/doc.go create mode 100644 vendor/github.com/stretchr/testify/require/forward_requirements.go create mode 100644 vendor/github.com/stretchr/testify/require/require.go create mode 100644 vendor/github.com/stretchr/testify/require/require.go.tmpl create mode 100644 vendor/github.com/stretchr/testify/require/require_forward.go create mode 100644 vendor/github.com/stretchr/testify/require/require_forward.go.tmpl create mode 100644 vendor/github.com/stretchr/testify/require/requirements.go create mode 100644 vendor/github.com/xlab/treeprint/LICENSE create mode 100644 vendor/github.com/xlab/treeprint/README.md create mode 100644 vendor/github.com/xlab/treeprint/helpers.go create mode 100644 vendor/github.com/xlab/treeprint/struct.go create mode 100644 vendor/github.com/xlab/treeprint/treeprint.go create mode 100644 vendor/go.starlark.net/LICENSE create mode 100644 vendor/go.starlark.net/internal/compile/compile.go create mode 100644 vendor/go.starlark.net/internal/compile/serial.go create mode 100644 vendor/go.starlark.net/internal/spell/spell.go create mode 100644 vendor/go.starlark.net/resolve/binding.go create mode 100644 vendor/go.starlark.net/resolve/resolve.go create mode 100644 vendor/go.starlark.net/starlark/debug.go create mode 100644 vendor/go.starlark.net/starlark/empty.s create mode 100644 vendor/go.starlark.net/starlark/eval.go create mode 100644 vendor/go.starlark.net/starlark/hashtable.go create mode 100644 vendor/go.starlark.net/starlark/int.go create mode 100644 vendor/go.starlark.net/starlark/interp.go create mode 100644 vendor/go.starlark.net/starlark/library.go create mode 100644 vendor/go.starlark.net/starlark/profile.go create mode 100644 vendor/go.starlark.net/starlark/unpack.go create mode 100644 vendor/go.starlark.net/starlark/value.go create mode 100644 vendor/go.starlark.net/starlarkstruct/module.go create mode 100644 vendor/go.starlark.net/starlarkstruct/struct.go create mode 100644 vendor/go.starlark.net/syntax/grammar.txt create mode 100644 vendor/go.starlark.net/syntax/parse.go create mode 100644 vendor/go.starlark.net/syntax/quote.go create mode 100644 vendor/go.starlark.net/syntax/scan.go create mode 100644 vendor/go.starlark.net/syntax/syntax.go create mode 100644 vendor/go.starlark.net/syntax/walk.go create mode 100644 vendor/k8s.io/apimachinery/pkg/apis/meta/v1/unstructured/unstructuredscheme/scheme.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/duration/duration.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/version/doc.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/version/version.go create mode 100644 vendor/k8s.io/cli-runtime/LICENSE create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/builder_flags.go create mode 100644 
vendor/k8s.io/cli-runtime/pkg/genericclioptions/builder_flags_fake.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/client_config.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/command_headers.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/config_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/config_flags_fake.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/doc.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/filename_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/io_options.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/json_yaml_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/jsonpath_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/kube_template_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/name_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/print_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/record_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/genericclioptions/template_flags.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/discard.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/doc.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/interface.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/json.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/jsonpath.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/managedfields.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/name.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/sourcechecker.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/tableprinter.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/tabwriter.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/template.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/typesetter.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/warningprinter.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/printers/yaml.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/builder.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/client.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/crd_finder.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/doc.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/fake.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/helper.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/interfaces.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/kustomizevisitor.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/mapper.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/metadata_decoder.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/query_param_verifier.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/result.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/scheme.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/selector.go create mode 100644 vendor/k8s.io/cli-runtime/pkg/resource/visitor.go create mode 100644 vendor/k8s.io/client-go/discovery/cached/disk/cached_discovery.go create mode 100644 vendor/k8s.io/client-go/discovery/cached/disk/round_tripper.go create mode 100644 vendor/k8s.io/client-go/dynamic/interface.go create mode 100644 
vendor/k8s.io/client-go/dynamic/scheme.go create mode 100644 vendor/k8s.io/client-go/dynamic/simple.go create mode 100644 vendor/k8s.io/client-go/openapi/cached/client.go create mode 100644 vendor/k8s.io/client-go/openapi/cached/groupversion.go create mode 100644 vendor/k8s.io/client-go/restmapper/category_expansion.go create mode 100644 vendor/k8s.io/client-go/restmapper/discovery.go create mode 100644 vendor/k8s.io/client-go/restmapper/shortcut.go create mode 100644 vendor/k8s.io/client-go/scale/client.go create mode 100644 vendor/k8s.io/client-go/scale/doc.go create mode 100644 vendor/k8s.io/client-go/scale/interfaces.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsint/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsint/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta1/conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta1/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta1/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta1/zz_generated.conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta2/conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta2/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta2/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/appsv1beta2/zz_generated.conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/autoscalingv1/conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/autoscalingv1/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/autoscalingv1/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/autoscalingv1/zz_generated.conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsint/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsint/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsv1beta1/conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsv1beta1/doc.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsv1beta1/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/extensionsv1beta1/zz_generated.conversion.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/register.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/types.go create mode 100644 vendor/k8s.io/client-go/scale/scheme/zz_generated.deepcopy.go create mode 100644 vendor/k8s.io/client-go/scale/util.go create mode 100644 vendor/k8s.io/client-go/third_party/forked/golang/LICENSE create mode 100644 vendor/k8s.io/client-go/third_party/forked/golang/PATENTS create mode 100644 vendor/k8s.io/client-go/third_party/forked/golang/template/exec.go create mode 100644 vendor/k8s.io/client-go/third_party/forked/golang/template/funcs.go create mode 100644 vendor/k8s.io/client-go/util/jsonpath/doc.go create mode 100644 vendor/k8s.io/client-go/util/jsonpath/jsonpath.go create mode 100644 vendor/k8s.io/client-go/util/jsonpath/node.go create mode 100644 vendor/k8s.io/client-go/util/jsonpath/parser.go create mode 100644 vendor/k8s.io/component-base/LICENSE create mode 100644 vendor/k8s.io/component-base/version/OWNERS create mode 100644 vendor/k8s.io/component-base/version/base.go create mode 100644 vendor/k8s.io/component-base/version/version.go create mode 100644 
vendor/k8s.io/kube-openapi/pkg/util/proto/validation/errors.go create mode 100644 vendor/k8s.io/kube-openapi/pkg/util/proto/validation/types.go create mode 100644 vendor/k8s.io/kube-openapi/pkg/util/proto/validation/validation.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/env_file.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/factory.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/factory_client_access.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/helpers.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/kubectl_match_version.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/override_options.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/util/printing.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/version/skew_warning.go create mode 100644 vendor/k8s.io/kubectl/pkg/cmd/version/version.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/i18n.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/OWNERS create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/README.md create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/extract.py create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/OWNERS create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/de_DE/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/de_DE/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/default/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/default/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/en_US/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/en_US/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/fr_FR/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/fr_FR/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/it_IT/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/it_IT/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/ja_JP/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/ja_JP/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/ko_KR/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/ko_KR/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/pt_BR/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/pt_BR/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/template.pot create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/zh_CN/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/zh_CN/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/zh_TW/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/kubectl/zh_TW/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/test/default/LC_MESSAGES/k8s.mo create mode 100644 
vendor/k8s.io/kubectl/pkg/util/i18n/translations/test/default/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/test/en_US/LC_MESSAGES/k8s.mo create mode 100644 vendor/k8s.io/kubectl/pkg/util/i18n/translations/test/en_US/LC_MESSAGES/k8s.po create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/OWNERS create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/doc.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/extensions.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/openapi.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/openapi_getter.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/openapi/validation/validation.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/command_groups.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/help_flags_printer.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/markdown.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/normalizers.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/templater.go create mode 100644 vendor/k8s.io/kubectl/pkg/util/templates/templates.go create mode 100644 vendor/k8s.io/kubectl/pkg/validation/schema.go create mode 100644 vendor/k8s.io/utils/exec/README.md create mode 100644 vendor/k8s.io/utils/exec/doc.go create mode 100644 vendor/k8s.io/utils/exec/exec.go create mode 100644 vendor/k8s.io/utils/exec/fixup_go118.go create mode 100644 vendor/k8s.io/utils/exec/fixup_go119.go create mode 100644 vendor/k8s.io/utils/pointer/OWNERS create mode 100644 vendor/k8s.io/utils/pointer/README.md create mode 100644 vendor/k8s.io/utils/pointer/pointer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/LICENSE create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/annotations/annotations.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/annotations/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/fieldspec/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/fieldspec/fieldspec.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/filtersutil/setters.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/fsslice/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/fsslice/fsslice.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/iampolicygenerator/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/iampolicygenerator/iampolicygenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/imagetag/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/imagetag/imagetag.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/imagetag/legacy.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/imagetag/updater.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/labels/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/labels/labels.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/nameref/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/nameref/nameref.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/nameref/seqfilter.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/namespace/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/namespace/namespace.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/patchjson6902/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/patchjson6902/patchjson6902.go create mode 100644 
vendor/sigs.k8s.io/kustomize/api/filters/patchstrategicmerge/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/patchstrategicmerge/patchstrategicmerge.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/prefix/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/prefix/prefix.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/refvar/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/refvar/expand.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/refvar/refvar.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/replacement/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/replacement/replacement.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/replicacount/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/replicacount/replicacount.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/suffix/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/suffix/suffix.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/filters/valueadd/valueadd.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/hasher/hasher.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/ifc/ifc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/image/image.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/accumulator/loadconfigfromcrds.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/accumulator/namereferencetransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/accumulator/refvartransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/accumulator/resaccumulator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/AnnotationsTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/ConfigMapGenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/HashTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/HelmChartInflationGenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/IAMPolicyGenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/ImageTagTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/LabelTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/LegacyOrderTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/NamespaceTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/PatchJson6902Transformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/PatchStrategicMergeTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/PatchTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/PrefixTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/ReplacementTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/ReplicaCountTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/SecretGenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/SuffixTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/ValueAddTransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/builtins/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/generators/configmap.go 
create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/generators/secret.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/generators/utils.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/git/cloner.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/git/gitrunner.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/git/repospec.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/kusterr/yamlformaterror.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinconfig/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinconfig/loaddefaultconfig.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinconfig/namebackreferences.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinconfig/transformerconfig.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinhelpers/builtinplugintype_string.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/builtinhelpers/builtins.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/execplugin/execplugin.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/fnplugin/fnplugin.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/loader/loader.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/plugins/utils/utils.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/target/errmissingkustomization.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/target/kusttarget.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/target/kusttarget_configplugin.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/target/multitransformer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/annotations.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/errtimeout.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/makeResIds.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/pathsplitter.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/stringslice.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/utils/timedcall.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/internal/validate/fieldvalidator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/commonannotations.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/commonlabels.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/defaultconfig.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/images.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/nameprefix.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/namereference.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/namespace.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/namesuffix.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/replicas.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/builtinpluginconsts/varreference.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/general.go create 
mode 100644 vendor/sigs.k8s.io/kustomize/api/konfig/plugins.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/krusty/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/krusty/kustomizer.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/krusty/options.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/kv/kv.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/loader/errors.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/loader/fileloader.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/loader/loader.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/loader/loadrestrictions.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/provenance/provenance.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/provider/depprovider.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resmap/factory.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resmap/idslice.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resmap/resmap.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resmap/reswrangler.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resource/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resource/factory.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resource/idset.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resource/origin.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/resource/resource.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/builtinpluginloadingoptions_string.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/configmapargs.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/erronlybuiltinpluginsallowed.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/errunabletofind.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/fieldspec.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/fix.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/generationbehavior.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/generatorargs.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/generatoroptions.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/helmchartargs.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/iampolicygenerator.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/image.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/inventory.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/kustomization.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/kvpairsources.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/labels.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/loadrestrictions.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/loadrestrictions_string.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/objectmeta.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/pair.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/patch.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/patchstrategicmerge.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/pluginconfig.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/pluginrestrictions.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/pluginrestrictions_string.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/replacement.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/replacementfield.go create mode 
100644 vendor/sigs.k8s.io/kustomize/api/types/replica.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/secretargs.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/selector.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/typemeta.go create mode 100644 vendor/sigs.k8s.io/kustomize/api/types/var.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/LICENSE create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/LICENSE_TEMPLATE create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/comments/comments.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/errors/errors.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/ext/ext.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fieldmeta/fieldmeta.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/confirmeddir.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/file.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/fileinfo.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/fileondisk.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/filesystem.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/fsnode.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/fsondisk.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/filesys/util.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/container/container.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/exec/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/exec/exec.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/runtimeutil/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/runtimeutil/functiontypes.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/runtimeutil/runtimeutil.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/runtimeutil/types.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/starlark/context.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/starlark/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/fn/runtime/starlark/starlark.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/LICENSE create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/NOTICE create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/README.md create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/apic.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/decode.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/emitterc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/encode.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/parserc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/readerc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/resolve.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/scannerc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/sorter.go create mode 100644 
vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/writerc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/yaml.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/yamlh.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/go-yaml/yaml/yamlprivateh.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/qri-io/starlib/util/LICENSE create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/qri-io/starlib/util/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/internal/forked/github.com/qri-io/starlib/util/util.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/byteio_reader.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/byteio_writer.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/filters.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/fmtr.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/grep.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/local.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/merge.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/merge3.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/modify.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/filters/stripcomments.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/ignorefilesmatcher.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/kio.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/kioutil/kioutil.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/pkgio_reader.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/pkgio_writer.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/testing.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/kio/tree.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/Makefile create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/README.md create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/kubernetesapi/openapiinfo.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/kubernetesapi/v1212/swagger.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/kubernetesapi/v1212/swagger.json create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/kustomizationapi/swagger.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/kustomizationapi/swagger.json create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/openapi/openapi.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/order/syncorder.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/resid/gvk.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/resid/resid.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/runfn/runfn.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/sets/string.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/sets/stringlist.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/sliceutil/slice.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/alias.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/compatibility.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/const.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/datamap.go create mode 100644 
vendor/sigs.k8s.io/kustomize/kyaml/yaml/doc.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/filters.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/fns.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/labels/copied.deepcopy.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/labels/labels.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/labels/selector.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/selection/operator.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/errors/errors.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/sets/empty.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/sets/string.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/validation/field/errors.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/validation/field/path.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/internal/k8sgen/pkg/util/validation/validation.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/kfns.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/mapnode.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/match.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/merge2/merge2.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/merge2/smpdirective.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/merge2/smpdirective_string.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/merge3/merge3.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/merge3/visitor.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/order.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/rnode.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/schema/schema.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/types.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/util.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/associative_sequence.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/map.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/nonassociative_sequence.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/scalar.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/visitor.go create mode 100644 vendor/sigs.k8s.io/kustomize/kyaml/yaml/walk/walk.go
diff --git a/.github/workflows/e2e-tests.yaml b/.github/workflows/e2e-tests.yaml
index 77587ce01c..7e5bcd99a2 100644
--- a/.github/workflows/e2e-tests.yaml
+++ b/.github/workflows/e2e-tests.yaml
@@ -21,18 +21,27 @@ env:
 jobs:
   test-e2e:
     runs-on: ubuntu-latest
-
+    strategy:
+      matrix:
+        kubectl_version:
+          - v1.27.4
+          - v1.28.0
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
       - name: Checkout repo
        uses: actions/checkout@v2
+      - name: kubectl install
+        uses: azure/setup-kubectl@v3
+        with:
+          version: ${{ matrix.kubectl_version }}
+      # Creates a KinD cluster; the kubectl versions come from the matrix above
       - name: Set up kind with K8s version v1.22.4
         uses: engineerd/setup-kind@v0.5.0
         with:
           version: "v0.11.1"
           image: kindest/node:v1.22.4
+
       - name: Testing kind cluster set-up
         run: |
           kubectl cluster-info
diff --git a/go.mod b/go.mod
index 985c5e23a1..b33de22d99 100644
--- a/go.mod
+++ b/go.mod
@@ -71,12
+71,14 @@ require ( require ( github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/MakeNowJust/heredoc v1.0.0 // indirect github.com/Microsoft/go-winio v0.6.0 // indirect github.com/PuerkitoBio/purell v1.1.1 // indirect github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chai2010/gettext-go v1.0.2 // indirect github.com/containerd/console v1.0.3 // indirect github.com/containerd/containerd v1.6.18 // indirect github.com/containerd/continuity v0.3.0 // indirect @@ -91,6 +93,8 @@ require ( github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960 // indirect github.com/emicklei/go-restful/v3 v3.10.1 // indirect github.com/emirpasic/gods v1.12.0 // indirect + github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect + github.com/go-errors/errors v1.0.1 // indirect github.com/go-logr/logr v1.2.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect @@ -100,6 +104,7 @@ require ( github.com/gogo/googleapis v1.4.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/protobuf v1.5.2 // indirect + github.com/google/btree v1.0.1 // indirect github.com/google/gnostic v0.5.7-v3refs // indirect github.com/google/go-cmp v0.5.9 // indirect github.com/google/go-github/v30 v30.1.0 // indirect @@ -107,6 +112,7 @@ require ( github.com/google/gofuzz v1.2.0 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/gorilla/mux v1.8.0 // indirect + github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 // indirect @@ -119,6 +125,7 @@ require ( github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd // indirect github.com/klauspost/compress v1.15.12 // indirect github.com/kr/fs v0.1.0 // indirect + github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/mailru/easyjson v0.7.6 // indirect github.com/mattn/go-colorable v0.1.11 // indirect github.com/mattn/go-isatty v0.0.14 // indirect @@ -133,18 +140,23 @@ require ( github.com/moby/sys/symlink v0.2.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect github.com/morikuni/aec v1.0.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0-rc2 // indirect github.com/opencontainers/runc v1.1.5 // indirect + github.com/peterbourgon/diskv v2.0.1+incompatible // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/prometheus/client_golang v1.14.0 // indirect github.com/prometheus/client_model v0.3.0 // indirect github.com/prometheus/common v0.37.0 // indirect github.com/prometheus/procfs v0.8.0 // indirect + github.com/russross/blackfriday v1.6.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.1.0 // indirect github.com/src-d/gcfg v1.4.0 // indirect + github.com/stretchr/testify v1.8.2 
// indirect github.com/syncthing/notify v0.0.0-20210616190510-c6b7342338d2 // indirect github.com/tcnksm/go-gitconfig v0.1.2 // indirect github.com/tonistiigi/fsutil v0.0.0-20230105215944-fb433841cbfa // indirect @@ -156,6 +168,7 @@ require ( github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.29.0 // indirect go.opentelemetry.io/otel v1.4.1 // indirect @@ -163,6 +176,7 @@ require ( go.opentelemetry.io/otel/sdk v1.4.1 // indirect go.opentelemetry.io/otel/trace v1.4.1 // indirect go.opentelemetry.io/proto/otlp v0.12.0 // indirect + go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect golang.org/x/mod v0.8.0 // indirect golang.org/x/oauth2 v0.4.0 // indirect golang.org/x/sync v0.1.0 // indirect @@ -176,8 +190,12 @@ require ( gopkg.in/src-d/go-billy.v4 v4.3.2 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect + k8s.io/cli-runtime v0.25.0-alpha.2 // indirect + k8s.io/component-base v0.25.0-alpha.2 // indirect k8s.io/kube-openapi v0.0.0-20220603121420-31174f50af60 // indirect k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed // indirect sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect + sigs.k8s.io/kustomize/api v0.11.4 // indirect + sigs.k8s.io/kustomize/kyaml v0.13.6 // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect ) diff --git a/go.sum b/go.sum index 9038698b8d..e21ebe54b0 100644 --- a/go.sum +++ b/go.sum @@ -13,6 +13,11 @@ cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKV cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go v0.105.0 h1:DNtEKRBAAzeS4KyIory52wWHuClNaXJ5x1F7xa4q+5Y= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= @@ -24,6 +29,7 @@ cloud.google.com/go/compute v1.15.1 h1:7UGq3QknM33pw5xATlpzeoomNxsacIVvTqTTvbfaj cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -43,6 +49,8 @@ 
github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSW github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= +github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= github.com/Microsoft/hcsshim v0.9.6 h1:VwnDOgLeoi2du6dAznfmspNqTiwczvjv4K7NxuY9jsY= @@ -67,6 +75,9 @@ github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYU github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= @@ -75,7 +86,9 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/bmatcuk/doublestar v1.1.1 h1:YroD6BJCZBYx06yYFEWvUuKVWQn3vLLQAVmDmvTSaiQ= @@ -91,6 +104,8 @@ github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chai2010/gettext-go v1.0.2 h1:1Lwwip6Q2QGsAdl/ZKPCwTe9fe0CjlUbqj5bFNSjIRk= +github.com/chai2010/gettext-go v1.0.2/go.mod h1:y+wnP2cHYaVj19NZhYKAwEMH2CI1gNHeQQ+5AjwawxA= github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= github.com/chzyer/logex v1.1.10/go.mod 
h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= @@ -98,6 +113,7 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn github.com/cilium/ebpf v0.7.0/go.mod h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2usCA= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= @@ -119,8 +135,10 @@ github.com/containerd/stargz-snapshotter/estargz v0.13.0/go.mod h1:m+9VaGJGlhCnr github.com/containerd/ttrpc v1.1.0 h1:GbtyLRxb0gOLR0TYQWt3O6B0NvT8tMdorEHqIQo/lWI= github.com/containerd/typeurl v1.0.2 h1:Chlt8zIieDbzQFzXzAeBEF92KhExuE4p9p92/QmY7aY= github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= @@ -162,6 +180,7 @@ github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/ github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc= +github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful/v3 v3.7.5-0.20220308211933-7c971ca4d0fd/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= @@ -170,19 +189,26 @@ github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= 
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch/v5 v5.1.0 h1:B0aXl1o/1cP8NbviYiBMkcHBtUjIJ1/Ccg6b+SwCLQg= github.com/evanphx/json-patch/v5 v5.1.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM= +github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fujiwara/shapeio v1.0.0 h1:xG5D9oNqCSUUbryZ/jQV3cqe1v2suEjwPIcEg1gKM8M= github.com/fujiwara/shapeio v1.0.0/go.mod h1:LmEmu6L/8jetyj1oewewFb7bZCNRwE7wLCUNzDLaLVA= github.com/gertd/go-pluralize v0.2.0 h1:VzWNnxkUo3wkW2Nmp+3ieHSTQQ0LBHeSVxlKsQPQ+UY= @@ -193,6 +219,8 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= +github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= +github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -246,6 +274,7 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf 
v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -261,11 +290,14 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/gomodule/redigo v1.8.2/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= +github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54= github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -275,6 +307,7 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= @@ -293,6 +326,7 @@ github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -300,31 +334,60 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod 
h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ= github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439ZjprVSFSZLZxcsoAe592sZB1rci2Z8j4wdk= github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY= @@ -353,6 +416,7 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 h1:qGQQKEcAR99REcMpsXCp3lJ03zYT1PkRd3kQGPn9GVg= @@ -381,6 +445,8 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= github.com/loft-sh/go-github-selfupdate v1.0.0 h1:YS8iSsIWXw3BygBdPK2xDO4K84XYu2YuYgVS7eQNtik= github.com/loft-sh/go-github-selfupdate 
v1.0.0/go.mod h1:LDkR6J2QpqQLIMcYvNaSinVwvjPAkg8278oZBPGnrb8= github.com/loft-sh/loft-util v0.0.9-alpha h1:kGcyTQWxWHWy7bbjhS8Hsq/JRdlSztAU++anV6P+sqk= @@ -391,14 +457,18 @@ github.com/loft-sh/programming-language-detection v0.0.5 h1:XiWlxtrf4t6Z7SQiob0J github.com/loft-sh/programming-language-detection v0.0.5/go.mod h1:QGPQGKr9q1+rQS4OyisS5CPGY1a76SdNaZuk9oy+2cE= github.com/loft-sh/utils v0.0.16 h1:XnD6Sb6gRWIHgM34U94dHcQ5MtxN5kAGZQ5eddAxC+c= github.com/loft-sh/utils v0.0.16/go.mod h1:n2L3X4i7d8kb2NF+q5duKa41N+N6fBde6XY2AolgSBI= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= @@ -411,11 +481,19 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4= github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1/go.mod 
h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= @@ -443,6 +521,8 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0= +github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -487,16 +567,22 @@ github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT9 github.com/otiai10/mint v1.3.3 h1:7JgpsBaN0uMkyju4tbYHu0mnM55hNKVYLsXmwr15NQI= github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/package-url/packageurl-go v0.1.1-0.20220428063043-89078438f170 h1:DiLBVp4DAcZlBVBEtJpNWZpZVq0AEeCY7Hqk8URVs4o= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= +github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pkg/sftp v1.13.1 h1:I2qBYMChEhIjOgazfJmV3/mZM256btk6wkCDRmW7JYs= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= @@ -529,12 +615,16 @@ github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0ua github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= 
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg= +github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= +github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sabhiram/go-gitignore v0.0.0-20180611051255-d3107576ba94 h1:G04eS0JkAIVZfaJLjla9dNxkJCPiKIGZlw9AfOhzOD0= github.com/sabhiram/go-gitignore v0.0.0-20180611051255-d3107576ba94/go.mod h1:b18R55ulyQ/h3RaWyloPyER7fWQVZvimKKhnI5OfrJQ= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -551,14 +641,22 @@ github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0 github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skratchdot/open-golang v0.0.0-20160302144031-75fb7ed4208c h1:fyKiXKO1/I/B6Y2U8T7WdQGWzwehOuGIrljPtt7YTTI= github.com/skratchdot/open-golang v0.0.0-20160302144031-75fb7ed4208c/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spdx/tools-golang v0.3.1-0.20230104082527-d6f58551be3f h1:9B623Cfs+mclYK6dsae7gLSwuIBHvlgmEup87qpqsAQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI= github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4= github.com/src-d/gcfg v1.4.0/go.mod 
h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= @@ -566,6 +664,7 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -579,6 +678,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syncthing/notify v0.0.0-20210616190510-c6b7342338d2 h1:F4snRP//nIuTTW9LYEzVH4HVwDG9T3M4t8y/2nqMbiY= github.com/syncthing/notify v0.0.0-20210616190510-c6b7342338d2/go.mod h1:J0q59IWjLtpRIJulohwqEZvjzwOfTEPp8SVhDJl+y0Y= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= @@ -608,6 +709,8 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca h1:1CFlNzQhALwjS9mBAUkycX616GzgsuYUOCHA5+HSlXI= +github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -616,11 +719,16 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= 
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.29.0 h1:n9b7AAdbQtQ0k9dm0Dm2/KUcUqtG8i2O15KzNaDze8c= @@ -639,16 +747,23 @@ go.opentelemetry.io/otel/trace v1.4.1/go.mod h1:iYEVbroFCNut9QkwEczV9vMRPHNKSSwY go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.12.0 h1:CMJ/3Wp7iOWES+CYLfnBv+DVmPbB+kmy9PJ92XvlR6c= go.opentelemetry.io/proto/otlp v0.12.0/go.mod h1:TsIjwGWIx5VFYv9KGVlOpxoBl5Dy+63SUguV7GGvlSQ= +go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 h1:+FNtrFTmVw0YZGpBGX56XDee331t6JAXeK2bcyhLOOc= +go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200128174031-69ecbb4d6d5d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -679,6 +794,8 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint 
v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -687,13 +804,17 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -721,9 +842,13 @@ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net 
v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= @@ -739,6 +864,13 @@ golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.4.0 h1:NF0gk8LVPg1Ml7SSbGyySuoxdsXitj7TvgvuRxIMc/M= @@ -756,10 +888,12 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -775,6 +909,8 @@ golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191002063906-3421d5a6bb1c/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -795,10 +931,19 @@ golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -829,6 +974,7 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -846,6 +992,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools 
v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -857,6 +1004,7 @@ golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDq golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -884,7 +1032,14 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= @@ -909,6 +1064,12 @@ google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0M google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api 
v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -950,7 +1111,18 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f h1:BWUVssLB0HVOSY78gIdvk1dTVYtT1y8SBWtPYuTJ/6w= google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= @@ -966,9 +1138,14 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod 
h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc= @@ -999,6 +1176,7 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg= @@ -1041,8 +1219,13 @@ k8s.io/api v0.25.0-alpha.2 h1:azwXduCht76Ecuv80QzZkCDzcFcLotKPXiE9/+jx5Qk= k8s.io/api v0.25.0-alpha.2/go.mod h1:wOntqHYj8WveLW2sh6q4tkE2vMZTtxe0MrFyVwO8JCM= k8s.io/apimachinery v0.25.0-alpha.2 h1:y6uTWaiqsPTPRewnXJ15IFyGmBo2qPt6enm4zszG8Z0= k8s.io/apimachinery v0.25.0-alpha.2/go.mod h1:h34FtK3eCxige6ZIACdBSYExtDaKAUxoc7hVe2LOxzw= +k8s.io/cli-runtime v0.25.0-alpha.2 h1:mjM3hHapzpWWRL1pUoWAs5+4DtLcqXRgHfo7cbQ8GqE= +k8s.io/cli-runtime v0.25.0-alpha.2/go.mod h1:iZrrniFcz+PC90O7/wpA7uGRp5xgUxnf9UqHvCHlni4= k8s.io/client-go v0.25.0-alpha.2 h1:kXlDl2L/CmdubzbRTPOCXj9JDPv9U0MuEjRXSCltQ00= k8s.io/client-go v0.25.0-alpha.2/go.mod h1:AN5W2BkXTu2lNm2BANn5lC6VnGlv6AM5HNPQLsriBOA= +k8s.io/component-base v0.25.0-alpha.2 h1:/u3UH2T5e8pac0FiCmsyKNh39rzdT2DxAOXww6e9b7M= +k8s.io/component-base v0.25.0-alpha.2/go.mod h1:6wq0/nWr0pBhuDnWjT2MhpcTRdnLBxucqwTShWfjVKM= +k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= @@ -1050,6 +1233,7 @@ k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= k8s.io/kube-openapi v0.0.0-20220603121420-31174f50af60 h1:cE/M8rmDQgibspuSm+X1iW16ByTImtEaapgaHoVSLX4= k8s.io/kube-openapi v0.0.0-20220603121420-31174f50af60/go.mod h1:ouUzE1U2mEv//HRoBwYLFE5pdqjIebvtX361vtEIlBI= k8s.io/kubectl v0.25.0-alpha.2 h1:4PNH8YkSRkRq9cmkua0Oi4/6Qvvf6CXflXdLgKDpD8Q= @@ -1064,6 +1248,10 @@ rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod 
h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/kustomize/api v0.11.4 h1:/0Mr3kfBBNcNPOW5Qwk/3eb8zkswCwnqQxxKtmrTkRo= +sigs.k8s.io/kustomize/api v0.11.4/go.mod h1:k+8RsqYbgpkIrJ4p9jcdPqe8DprLxFUUO0yNOq8C+xI= +sigs.k8s.io/kustomize/kyaml v0.13.6 h1:eF+wsn4J7GOAXlvajv6OknSunxpcOBQQqsnPxObtkGs= +sigs.k8s.io/kustomize/kyaml v0.13.6/go.mod h1:yHP031rn1QX1lr/Xd934Ri/xdVNG8BE2ECa78Ht/kEg= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= diff --git a/pkg/devspace/deploy/deployer/kubectl/builder.go b/pkg/devspace/deploy/deployer/kubectl/builder.go index e715ddaae9..e7922f709d 100644 --- a/pkg/devspace/deploy/deployer/kubectl/builder.go +++ b/pkg/devspace/deploy/deployer/kubectl/builder.go @@ -2,6 +2,7 @@ package kubectl import ( "context" + "encoding/json" "fmt" "os" "os/exec" @@ -18,6 +19,7 @@ import ( "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/client-go/tools/clientcmd" clientcmdapi "k8s.io/client-go/tools/clientcmd/api" + "k8s.io/kubectl/pkg/cmd/version" "mvdan.cc/sh/v3/expand" ) @@ -78,12 +80,18 @@ func NewKubectlBuilder(path string, config *latest.DeploymentConfig, kubeConfig // to decide the --dry-run value var useOldDryRun = func(ctx context.Context, environ expand.Environ, dir, path string) (bool, error) { // compare kubectl version for --dry-run flag value - out, err := command.Output(ctx, dir, environ, path, "version", "--client", "--short") + out, err := command.Output(ctx, dir, environ, path, "version", "--client", "--output=json") if err != nil { return false, err } - v1, err := constraint.NewVersion(strings.TrimPrefix(strings.TrimSpace(string(out)), "Client Version: v")) + kubectlVersion := &version.Version{} + err = json.Unmarshal(out, kubectlVersion) + if err != nil { + return false, err + } + + v1, err := constraint.NewVersion(strings.TrimPrefix(kubectlVersion.ClientVersion.GitVersion, "v")) if err != nil { return false, nil } diff --git a/vendor/github.com/MakeNowJust/heredoc/LICENSE b/vendor/github.com/MakeNowJust/heredoc/LICENSE new file mode 100644 index 0000000000..6d0eb9d5d6 --- /dev/null +++ b/vendor/github.com/MakeNowJust/heredoc/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2019 TSUYUSATO Kitsune + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/MakeNowJust/heredoc/README.md b/vendor/github.com/MakeNowJust/heredoc/README.md new file mode 100644 index 0000000000..289ba31d6a --- /dev/null +++ b/vendor/github.com/MakeNowJust/heredoc/README.md @@ -0,0 +1,52 @@ +# heredoc + +[![Build Status](https://circleci.com/gh/MakeNowJust/heredoc.svg?style=svg)](https://circleci.com/gh/MakeNowJust/heredoc) [![GoDoc](https://godoc.org/github.com/MakeNowJusti/heredoc?status.svg)](https://godoc.org/github.com/MakeNowJust/heredoc) + +## About + +Package heredoc provides the here-document with keeping indent. + +## Install + +```console +$ go get github.com/MakeNowJust/heredoc +``` + +## Import + +```go +// usual +import "github.com/MakeNowJust/heredoc" +``` + +## Example + +```go +package main + +import ( + "fmt" + "github.com/MakeNowJust/heredoc" +) + +func main() { + fmt.Println(heredoc.Doc(` + Lorem ipsum dolor sit amet, consectetur adipisicing elit, + sed do eiusmod tempor incididunt ut labore et dolore magna + aliqua. Ut enim ad minim veniam, ... + `)) + // Output: + // Lorem ipsum dolor sit amet, consectetur adipisicing elit, + // sed do eiusmod tempor incididunt ut labore et dolore magna + // aliqua. Ut enim ad minim veniam, ... + // +} +``` + +## API Document + + - [heredoc - GoDoc](https://godoc.org/github.com/MakeNowJust/heredoc) + +## License + +This software is released under the MIT License, see LICENSE. diff --git a/vendor/github.com/MakeNowJust/heredoc/heredoc.go b/vendor/github.com/MakeNowJust/heredoc/heredoc.go new file mode 100644 index 0000000000..1fc0469555 --- /dev/null +++ b/vendor/github.com/MakeNowJust/heredoc/heredoc.go @@ -0,0 +1,105 @@ +// Copyright (c) 2014-2019 TSUYUSATO Kitsune +// This software is released under the MIT License. +// http://opensource.org/licenses/mit-license.php + +// Package heredoc provides creation of here-documents from raw strings. +// +// Golang supports raw-string syntax. +// +// doc := ` +// Foo +// Bar +// ` +// +// But raw-string cannot recognize indentation. Thus such content is an indented string, equivalent to +// +// "\n\tFoo\n\tBar\n" +// +// I dont't want this! +// +// However this problem is solved by package heredoc. +// +// doc := heredoc.Doc(` +// Foo +// Bar +// `) +// +// Is equivalent to +// +// "Foo\nBar\n" +package heredoc + +import ( + "fmt" + "strings" + "unicode" +) + +const maxInt = int(^uint(0) >> 1) + +// Doc returns un-indented string as here-document. +func Doc(raw string) string { + skipFirstLine := false + if len(raw) > 0 && raw[0] == '\n' { + raw = raw[1:] + } else { + skipFirstLine = true + } + + lines := strings.Split(raw, "\n") + + minIndentSize := getMinIndent(lines, skipFirstLine) + lines = removeIndentation(lines, minIndentSize, skipFirstLine) + + return strings.Join(lines, "\n") +} + +// getMinIndent calculates the minimum indentation in lines, excluding empty lines. +func getMinIndent(lines []string, skipFirstLine bool) int { + minIndentSize := maxInt + + for i, line := range lines { + if i == 0 && skipFirstLine { + continue + } + + indentSize := 0 + for _, r := range []rune(line) { + if unicode.IsSpace(r) { + indentSize += 1 + } else { + break + } + } + + if len(line) == indentSize { + if i == len(lines)-1 && indentSize < minIndentSize { + lines[i] = "" + } + } else if indentSize < minIndentSize { + minIndentSize = indentSize + } + } + return minIndentSize +} + +// removeIndentation removes n characters from the front of each line in lines. +// Skips first line if skipFirstLine is true, skips empty lines. 
+func removeIndentation(lines []string, n int, skipFirstLine bool) []string { + for i, line := range lines { + if i == 0 && skipFirstLine { + continue + } + + if len(lines[i]) >= n { + lines[i] = line[n:] + } + } + return lines +} + +// Docf returns unindented and formatted string as here-document. +// Formatting is done as for fmt.Printf(). +func Docf(raw string, args ...interface{}) string { + return fmt.Sprintf(Doc(raw), args...) +} diff --git a/vendor/github.com/chai2010/gettext-go/.travis.yml b/vendor/github.com/chai2010/gettext-go/.travis.yml new file mode 100644 index 0000000000..4eac3982bc --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - "1.14" + - tip diff --git a/vendor/github.com/chai2010/gettext-go/LICENSE b/vendor/github.com/chai2010/gettext-go/LICENSE new file mode 100644 index 0000000000..8f39408250 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/LICENSE @@ -0,0 +1,27 @@ +Copyright 2013 ChaiShushan . All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/chai2010/gettext-go/README.md b/vendor/github.com/chai2010/gettext-go/README.md new file mode 100644 index 0000000000..9381bd1522 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/README.md @@ -0,0 +1,191 @@ +- *赞助 BTC: 1Cbd6oGAUUyBi7X7MaR4np4nTmQZXVgkCW* +- *赞助 ETH: 0x623A3C3a72186A6336C79b18Ac1eD36e1c71A8a6* +- *Go语言付费QQ群: 1055927514* + +---- + +# gettext-go: GNU gettext for Go ([Imported By Kubernetes](https://pkg.go.dev/github.com/chai2010/gettext-go@v0.1.0/gettext?tab=importedby)) + +- PkgDoc: [http://godoc.org/github.com/chai2010/gettext-go](http://godoc.org/github.com/chai2010/gettext-go) +- PkgDoc: [http://pkg.go.dev/github.com/chai2010/gettext-go](http://pkg.go.dev/github.com/chai2010/gettext-go) + +## Install + +1. `go get github.com/chai2010/gettext-go` +2. `go run hello.go` + +The godoc.org or go.dev has more information. 
+ +## Examples + +```Go +package main + +import ( + "fmt" + + "github.com/chai2010/gettext-go" +) + +func main() { + gettext := gettext.New("hello", "./examples/locale").SetLanguage("zh_CN") + fmt.Println(gettext.Gettext("Hello, world!")) + + // Output: 你好, 世界! +} +``` + +```Go +package main + +import ( + "fmt" + + "github.com/chai2010/gettext-go" +) + +func main() { + gettext.SetLanguage("zh_CN") + gettext.BindLocale(gettext.New("hello", "locale")) + + // gettext.BindLocale("hello", "locale") // from locale dir + // gettext.BindLocale("hello", "locale.zip") // from locale zip file + // gettext.BindLocale("hello", "locale.zip", zipData) // from embedded zip data + + // translate source text + fmt.Println(gettext.Gettext("Hello, world!")) + // Output: 你好, 世界! + + // if no msgctxt in PO file (only msgid and msgstr), + // specify context as "" by + fmt.Println(gettext.PGettext("", "Hello, world!")) + // Output: 你好, 世界! + + // translate resource + fmt.Println(string(gettext.Getdata("poems.txt")))) + // Output: ... +} +``` + +Go file: [hello.go](https://github.com/chai2010/gettext-go/blob/master/examples/hello.go); PO file: [hello.po](https://github.com/chai2010/gettext-go/blob/master/examples/locale/default/LC_MESSAGES/hello.po); + +---- + +## API Changes (v0.1.0 vs v1.0.0) + +### Renamed package path + +| v0.1.0 (old) | v1.0.0 (new) | +| ----------------------------------------------- | --------------------------------------- | +| `github.com/chai2010/gettext-go/gettext` | `github.com/chai2010/gettext-go` | +| `github.com/chai2010/gettext-go/gettext/po` | `github.com/chai2010/gettext-go/po` | +| `github.com/chai2010/gettext-go/gettext/mo` | `github.com/chai2010/gettext-go/mo` | +| `github.com/chai2010/gettext-go/gettext/plural` | `github.com/chai2010/gettext-go/plural` | + +### Renamed functions + +| v0.1.0 (old) | v1.0.0 (new) | +| ---------------------------------- | --------------------------- | +| `gettext-go/gettext.*` | `gettext-go.*` | +| `gettext-go/gettext.DefaultLocal` | `gettext-go.DefaultLanguage`| +| `gettext-go/gettext.BindTextdomain`| `gettext-go.BindLocale` | +| `gettext-go/gettext.Textdomain` | `gettext-go.SetDomain` | +| `gettext-go/gettext.SetLocale` | `gettext-go.SetLanguage` | +| `gettext-go/gettext/po.Load` | `gettext-go/po.LoadFile` | +| `gettext-go/gettext/po.LoadData` | `gettext-go/po.Load` | +| `gettext-go/gettext/mo.Load` | `gettext-go/mo.LoadFile` | +| `gettext-go/gettext/mo.LoadData` | `gettext-go/mo.Load` | + +### Use empty string as the default context for `gettext.Gettext` + +```go +package main + +// v0.1.0 +// if the **context** missing, use `callerName(2)` as the context: + +// v1.0.0 +// if the **context** missing, use empty string as the context: + +func main() { + gettext.Gettext("hello") + // v0.1.0 => gettext.PGettext("main.main", "hello") + // v1.0.0 => gettext.PGettext("", "hello") + + gettext.DGettext("domain", "hello") + // v0.1.0 => gettext.DPGettext("domain", "main.main", "hello") + // v1.0.0 => gettext.DPGettext("domain", "", "hello") + + gettext.NGettext("domain", "hello", "hello2", n) + // v0.1.0 => gettext.PNGettext("domain", "main.main", "hello", "hello2", n) + // v1.0.0 => gettext.PNGettext("domain", "", "hello", "hello2", n) + + gettext.DNGettext("domain", "hello", "hello2", n) + // v0.1.0 => gettext.DPNGettext("domain", "main.main", "hello", "hello2", n) + // v1.0.0 => gettext.DPNGettext("domain", "", "hello", "hello2", n) +} +``` + +### `BindLocale` support `FileSystem` interface + +```go +// Use FileSystem: +// BindLocale(New("poedit", 
"name", OS("path/to/dir"))) // bind "poedit" domain +// BindLocale(New("poedit", "name", OS("path/to.zip"))) // bind "poedit" domain +``` + +## New API in v1.0.0 + +`Gettexter` interface: + +```go +type Gettexter interface { + FileSystem() FileSystem + + GetDomain() string + SetDomain(domain string) Gettexter + + GetLanguage() string + SetLanguage(lang string) Gettexter + + Gettext(msgid string) string + PGettext(msgctxt, msgid string) string + + NGettext(msgid, msgidPlural string, n int) string + PNGettext(msgctxt, msgid, msgidPlural string, n int) string + + DGettext(domain, msgid string) string + DPGettext(domain, msgctxt, msgid string) string + DNGettext(domain, msgid, msgidPlural string, n int) string + DPNGettext(domain, msgctxt, msgid, msgidPlural string, n int) string + + Getdata(name string) []byte + DGetdata(domain, name string) []byte +} + +func New(domain, path string, data ...interface{}) Gettexter +``` + +`FileSystem` interface: + +```go +type FileSystem interface { + LocaleList() []string + LoadMessagesFile(domain, lang, ext string) ([]byte, error) + LoadResourceFile(domain, lang, name string) ([]byte, error) + String() string +} + +func NewFS(name string, x interface{}) FileSystem +func OS(root string) FileSystem +func ZipFS(r *zip.Reader, name string) FileSystem +func NilFS(name string) FileSystem +``` + +---- + +## BUGS + +Please report bugs to . + +Thanks! diff --git a/vendor/github.com/chai2010/gettext-go/doc.go b/vendor/github.com/chai2010/gettext-go/doc.go new file mode 100644 index 0000000000..50dfea3305 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/doc.go @@ -0,0 +1,67 @@ +// Copyright 2013 . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gettext implements a basic GNU's gettext library. + +Example: + import ( + "github.com/chai2010/gettext-go" + ) + + func main() { + gettext.SetLanguage("zh_CN") + + // gettext.BindLocale(gettext.New("hello", "locale")) // from locale dir + // gettext.BindLocale(gettext.New("hello", "locale.zip")) // from locale zip file + // gettext.BindLocale(gettext.New("hello", "locale.zip", zipData)) // from embedded zip data + + gettext.BindLocale(gettext.New("hello", "locale")) + + // translate source text + fmt.Println(gettext.Gettext("Hello, world!")) + // Output: 你好, 世界! + + // translate resource + fmt.Println(string(gettext.Getdata("poems.txt"))) + // Output: ... 
+ } + +Translate directory struct("./examples/locale.zip"): + + Root: "path" or "file.zip/zipBaseName" + +-default # locale: $(LC_MESSAGES) or $(LANG) or "default" + | +-LC_MESSAGES # just for `gettext.Gettext` + | | +-hello.mo # $(Root)/$(lang)/LC_MESSAGES/$(domain).mo + | | +-hello.po # $(Root)/$(lang)/LC_MESSAGES/$(domain).po + | | \-hello.json # $(Root)/$(lang)/LC_MESSAGES/$(domain).json + | | + | \-LC_RESOURCE # just for `gettext.Getdata` + | +-hello # domain map a dir in resource translate + | +-favicon.ico # $(Root)/$(lang)/LC_RESOURCE/$(domain)/$(filename) + | \-poems.txt + | + \-zh_CN # simple chinese translate + +-LC_MESSAGES + | +-hello.po # try "$(domain).po" first + | +-hello.mo # try "$(domain).mo" second + | \-hello.json # try "$(domain).json" third + | + \-LC_RESOURCE + +-hello + +-favicon.ico # $(lang)/$(domain)/favicon.ico + \-poems.txt # $(lang)/$(domain)/poems.txt + +See: + http://en.wikipedia.org/wiki/Gettext + http://www.gnu.org/software/gettext/manual/html_node + http://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html + http://www.gnu.org/software/gettext/manual/html_node/PO-Files.html + http://www.gnu.org/software/gettext/manual/html_node/MO-Files.html + http://www.poedit.net/ + +Please report bugs to . +Thanks! +*/ +package gettext diff --git a/vendor/github.com/chai2010/gettext-go/fs.go b/vendor/github.com/chai2010/gettext-go/fs.go new file mode 100644 index 0000000000..4e66fae7c6 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/fs.go @@ -0,0 +1,84 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gettext + +import ( + "archive/zip" + "bytes" + "fmt" +) + +type FileSystem interface { + LocaleList() []string + LoadMessagesFile(domain, lang, ext string) ([]byte, error) + LoadResourceFile(domain, lang, name string) ([]byte, error) + String() string +} + +func NewFS(name string, x interface{}) FileSystem { + if x == nil { + if name != "" { + return OS(name) + } + return NilFS(name) + } + + switch x := x.(type) { + case []byte: + if len(x) == 0 { + return OS(name) + } + if r, err := zip.NewReader(bytes.NewReader(x), int64(len(x))); err == nil { + return ZipFS(r, name) + } + if fs, err := newJson(x, name); err == nil { + return fs + } + case string: + if len(x) == 0 { + return OS(name) + } + if r, err := zip.NewReader(bytes.NewReader([]byte(x)), int64(len(x))); err == nil { + return ZipFS(r, name) + } + if fs, err := newJson([]byte(x), name); err == nil { + return fs + } + case FileSystem: + return x + } + + return NilFS(name) +} + +func OS(root string) FileSystem { + return newOsFS(root) +} + +func ZipFS(r *zip.Reader, name string) FileSystem { + return newZipFS(r, name) +} + +func NilFS(name string) FileSystem { + return &nilFS{name} +} + +type nilFS struct { + name string +} + +func (p *nilFS) LocaleList() []string { + return nil +} + +func (p *nilFS) LoadMessagesFile(domain, lang, ext string) ([]byte, error) { + return nil, fmt.Errorf("not found") +} +func (p *nilFS) LoadResourceFile(domain, lang, name string) ([]byte, error) { + return nil, fmt.Errorf("not found") +} +func (p *nilFS) String() string { + return "gettext.nilfs(" + p.name + ")" +} diff --git a/vendor/github.com/chai2010/gettext-go/fs_json.go b/vendor/github.com/chai2010/gettext-go/fs_json.go new file mode 100644 index 0000000000..c7138c9954 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/fs_json.go @@ -0,0 +1,66 @@ +// Copyright 2020 ChaiShushan 
. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gettext + +import ( + "encoding/json" + "fmt" + "sort" +) + +type jsonFS struct { + name string + x map[string]struct { + LC_MESSAGES map[string][]struct { + MsgContext string `json:"msgctxt"` // msgctxt context + MsgId string `json:"msgid"` // msgid untranslated-string + MsgIdPlural string `json:"msgid_plural"` // msgid_plural untranslated-string-plural + MsgStr []string `json:"msgstr"` // msgstr translated-string + } + LC_RESOURCE map[string]map[string]string + } +} + +func isJsonData() bool { + return false +} + +func newJson(jsonData []byte, name string) (*jsonFS, error) { + p := &jsonFS{name: name} + if err := json.Unmarshal(jsonData, &p.x); err != nil { + return nil, err + } + + return p, nil +} + +func (p *jsonFS) LocaleList() []string { + var ss []string + for lang := range p.x { + ss = append(ss, lang) + } + sort.Strings(ss) + return ss +} + +func (p *jsonFS) LoadMessagesFile(domain, lang, ext string) ([]byte, error) { + if v, ok := p.x[lang]; ok { + if v, ok := v.LC_MESSAGES[domain+ext]; ok { + return json.Marshal(v) + } + } + return nil, fmt.Errorf("not found") +} +func (p *jsonFS) LoadResourceFile(domain, lang, name string) ([]byte, error) { + if v, ok := p.x[lang]; ok { + if v, ok := v.LC_RESOURCE[domain]; ok { + return []byte(v[name]), nil + } + } + return nil, fmt.Errorf("not found") +} +func (p *jsonFS) String() string { + return "gettext.nilfs(" + p.name + ")" +} diff --git a/vendor/github.com/chai2010/gettext-go/fs_os.go b/vendor/github.com/chai2010/gettext-go/fs_os.go new file mode 100644 index 0000000000..80d4f51bac --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/fs_os.go @@ -0,0 +1,91 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gettext + +import ( + "archive/zip" + "bytes" + "fmt" + "io/ioutil" + "os" + "sort" + "strings" +) + +type osFS struct { + root string +} + +func newOsFS(root string) FileSystem { + // locale zip file + if fi, err := os.Stat(root); err == nil && !fi.IsDir() { + if strings.HasSuffix(strings.ToLower(root), ".zip") { + if x, err := ioutil.ReadFile(root); err == nil { + if r, err := zip.NewReader(bytes.NewReader(x), int64(len(x))); err == nil { + return ZipFS(r, root) + } + } + } + if strings.HasSuffix(strings.ToLower(root), ".json") { + if x, err := ioutil.ReadFile(root); err == nil { + if fs, err := newJson(x, root); err == nil { + return fs + } + } + } + } + + // locale dir + return &osFS{root: root} +} + +func (p *osFS) LocaleList() []string { + list, err := ioutil.ReadDir(p.root) + if err != nil { + return nil + } + ssMap := make(map[string]bool) + for _, dir := range list { + if dir.IsDir() { + ssMap[dir.Name()] = true + } + } + var locales = make([]string, 0, len(ssMap)) + for s := range ssMap { + locales = append(locales, s) + } + sort.Strings(locales) + return locales +} + +func (p *osFS) LoadMessagesFile(domain, locale, ext string) ([]byte, error) { + trName := p.makeMessagesFileName(domain, locale, ext) + rcData, err := ioutil.ReadFile(trName) + if err != nil { + return nil, err + } + return rcData, nil +} + +func (p *osFS) LoadResourceFile(domain, locale, name string) ([]byte, error) { + rcName := p.makeResourceFileName(domain, locale, name) + rcData, err := ioutil.ReadFile(rcName) + if err != nil { + return nil, err + } + return rcData, nil +} + +func (p *osFS) String() string { + return "gettext.localfs(" + p.root + ")" +} + +func (p *osFS) makeMessagesFileName(domain, lang, ext string) string { + return fmt.Sprintf("%s/%s/LC_MESSAGES/%s%s", p.root, lang, domain, ext) +} + +func (p *osFS) makeResourceFileName(domain, lang, name string) string { + return fmt.Sprintf("%s/%s/LC_RESOURCE/%s/%s", p.root, lang, domain, name) +} diff --git a/vendor/github.com/chai2010/gettext-go/fs_zip.go b/vendor/github.com/chai2010/gettext-go/fs_zip.go new file mode 100644 index 0000000000..61eb8359da --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/fs_zip.go @@ -0,0 +1,142 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gettext + +import ( + "archive/zip" + "fmt" + "io/ioutil" + "sort" + "strings" +) + +type zipFS struct { + root string + name string + r *zip.Reader +} + +func newZipFS(r *zip.Reader, name string) *zipFS { + fs := &zipFS{r: r, name: name} + fs.root = fs.zipRoot() + return fs +} + +func (p *zipFS) zipName() string { + name := p.name + if x := strings.LastIndexAny(name, `\/`); x != -1 { + name = name[x+1:] + } + name = strings.TrimSuffix(name, ".zip") + return name +} + +func (p *zipFS) zipRoot() string { + var somepath string + for _, f := range p.r.File { + if x := strings.Index(f.Name, "LC_MESSAGES"); x != -1 { + somepath = f.Name + } + if x := strings.Index(f.Name, "LC_RESOURCE"); x != -1 { + somepath = f.Name + } + } + if somepath == "" { + return p.zipName() + } + + ss := strings.Split(somepath, "/") + for i, s := range ss { + // $(root)/$(lang)/LC_MESSAGES + // $(root)/$(lang)/LC_RESOURCE + if (s == "LC_MESSAGES" || s == "LC_RESOURCE") && i >= 2 { + return strings.Join(ss[:i-1], "/") + } + } + + return p.zipName() +} + +func (p *zipFS) LocaleList() []string { + var locals []string + for s := range p.lsZip(p.r) { + locals = append(locals, s) + } + sort.Strings(locals) + return locals +} + +func (p *zipFS) LoadMessagesFile(domain, lang, ext string) ([]byte, error) { + trName := p.makeMessagesFileName(domain, lang, ext) + for _, f := range p.r.File { + if f.Name != trName { + continue + } + rc, err := f.Open() + if err != nil { + return nil, err + } + rcData, err := ioutil.ReadAll(rc) + rc.Close() + return rcData, err + } + return nil, fmt.Errorf("not found") +} + +func (p *zipFS) LoadResourceFile(domain, lang, name string) ([]byte, error) { + rcName := p.makeResourceFileName(domain, lang, name) + for _, f := range p.r.File { + if f.Name != rcName { + continue + } + rc, err := f.Open() + if err != nil { + return nil, err + } + rcData, err := ioutil.ReadAll(rc) + rc.Close() + return rcData, err + } + return nil, fmt.Errorf("not found") +} + +func (p *zipFS) String() string { + return "gettext.zipfs(" + p.name + ")" +} + +func (p *zipFS) makeMessagesFileName(domain, lang, ext string) string { + return fmt.Sprintf("%s/%s/LC_MESSAGES/%s%s", p.root, lang, domain, ext) +} + +func (p *zipFS) makeResourceFileName(domain, lang, name string) string { + return fmt.Sprintf("%s/%s/LC_RESOURCE/%s/%s", p.root, lang, domain, name) +} + +func (p *zipFS) lsZip(r *zip.Reader) map[string]bool { + ssMap := make(map[string]bool) + for _, f := range r.File { + if x := strings.Index(f.Name, "LC_MESSAGES"); x != -1 { + s := strings.TrimRight(f.Name[:x], `\/`) + if x = strings.LastIndexAny(s, `\/`); x != -1 { + s = s[x+1:] + } + if s != "" { + ssMap[s] = true + } + continue + } + if x := strings.Index(f.Name, "LC_RESOURCE"); x != -1 { + s := strings.TrimRight(f.Name[:x], `\/`) + if x = strings.LastIndexAny(s, `\/`); x != -1 { + s = s[x+1:] + } + if s != "" { + ssMap[s] = true + } + continue + } + } + return ssMap +} diff --git a/vendor/github.com/chai2010/gettext-go/gettext.go b/vendor/github.com/chai2010/gettext-go/gettext.go new file mode 100644 index 0000000000..7747188ab4 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/gettext.go @@ -0,0 +1,219 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gettext + +var ( + DefaultLanguage string = getDefaultLanguage() // use $(LC_MESSAGES) or $(LANG) or "default" +) + +type Gettexter interface { + FileSystem() FileSystem + + GetDomain() string + SetDomain(domain string) Gettexter + + GetLanguage() string + SetLanguage(lang string) Gettexter + + Gettext(msgid string) string + PGettext(msgctxt, msgid string) string + + NGettext(msgid, msgidPlural string, n int) string + PNGettext(msgctxt, msgid, msgidPlural string, n int) string + + DGettext(domain, msgid string) string + DPGettext(domain, msgctxt, msgid string) string + DNGettext(domain, msgid, msgidPlural string, n int) string + DPNGettext(domain, msgctxt, msgid, msgidPlural string, n int) string + + Getdata(name string) []byte + DGetdata(domain, name string) []byte +} + +// New create Interface use default language. +func New(domain, path string, data ...interface{}) Gettexter { + return newLocale(domain, path, data...) +} + +var defaultGettexter struct { + lang string + domain string + Gettexter +} + +func init() { + defaultGettexter.lang = getDefaultLanguage() + defaultGettexter.domain = "default" + defaultGettexter.Gettexter = newLocale("", "") +} + +// BindLocale sets and queries program's domains. +// +// Examples: +// BindLocale(New("poedit", "locale")) // bind "poedit" domain +// +// Use zip file: +// BindLocale(New("poedit", "locale.zip")) // bind "poedit" domain +// BindLocale(New("poedit", "locale.zip", zipData)) // bind "poedit" domain +// +// Use FileSystem: +// BindLocale(New("poedit", "name", OS("path/to/dir"))) // bind "poedit" domain +// BindLocale(New("poedit", "name", OS("path/to.zip"))) // bind "poedit" domain +// +func BindLocale(g Gettexter) { + if g != nil { + defaultGettexter.Gettexter = g + defaultGettexter.SetLanguage(defaultGettexter.lang) + } else { + defaultGettexter.Gettexter = newLocale("", "") + defaultGettexter.SetLanguage(defaultGettexter.lang) + } +} + +// SetLanguage sets and queries the program's current lang. +// +// If the lang is not empty string, set the new locale. +// +// If the lang is empty string, don't change anything. +// +// Returns is the current locale. +// +// Examples: +// SetLanguage("") // get locale: return DefaultLocale +// SetLanguage("zh_CN") // set locale: return zh_CN +// SetLanguage("") // get locale: return zh_CN +func SetLanguage(lang string) string { + defaultGettexter.SetLanguage(lang) + return defaultGettexter.GetLanguage() +} + +// SetDomain sets and retrieves the current message domain. +// +// If the domain is not empty string, set the new domains. +// +// If the domain is empty string, don't change anything. +// +// Returns is the all used domains. +// +// Examples: +// SetDomain("poedit") // set domain: poedit +// SetDomain("") // get domain: return poedit +func SetDomain(domain string) string { + defaultGettexter.SetDomain(domain) + return defaultGettexter.GetDomain() +} + +// Gettext attempt to translate a text string into the user's native language, +// by looking up the translation in a message catalog. +// +// It use the caller's function name as the msgctxt. +// +// Examples: +// func Foo() { +// msg := gettext.Gettext("Hello") // msgctxt is "" +// } +func Gettext(msgid string) string { + return defaultGettexter.Gettext(msgid) +} + +// Getdata attempt to translate a resource file into the user's native language, +// by looking up the translation in a message catalog. 
+// +// Examples: +// func Foo() { +// Textdomain("hello") +// BindLocale("hello", "locale.zip", nilOrZipData) +// poems := gettext.Getdata("poems.txt") +// } +func Getdata(name string) []byte { + return defaultGettexter.Getdata(name) +} + +// NGettext attempt to translate a text string into the user's native language, +// by looking up the appropriate plural form of the translation in a message +// catalog. +// +// It use the caller's function name as the msgctxt. +// +// Examples: +// func Foo() { +// msg := gettext.NGettext("%d people", "%d peoples", 2) +// } +func NGettext(msgid, msgidPlural string, n int) string { + return defaultGettexter.NGettext(msgid, msgidPlural, n) +} + +// PGettext attempt to translate a text string into the user's native language, +// by looking up the translation in a message catalog. +// +// Examples: +// func Foo() { +// msg := gettext.PGettext("gettext-go.example", "Hello") // msgctxt is "gettext-go.example" +// } +func PGettext(msgctxt, msgid string) string { + return defaultGettexter.PGettext(msgctxt, msgid) +} + +// PNGettext attempt to translate a text string into the user's native language, +// by looking up the appropriate plural form of the translation in a message +// catalog. +// +// Examples: +// func Foo() { +// msg := gettext.PNGettext("gettext-go.example", "%d people", "%d peoples", 2) +// } +func PNGettext(msgctxt, msgid, msgidPlural string, n int) string { + return defaultGettexter.PNGettext(msgctxt, msgid, msgidPlural, n) +} + +// DGettext like Gettext(), but looking up the message in the specified domain. +// +// Examples: +// func Foo() { +// msg := gettext.DGettext("poedit", "Hello") +// } +func DGettext(domain, msgid string) string { + return defaultGettexter.DGettext(domain, msgid) +} + +// DNGettext like NGettext(), but looking up the message in the specified domain. +// +// Examples: +// func Foo() { +// msg := gettext.PNGettext("poedit", "gettext-go.example", "%d people", "%d peoples", 2) +// } +func DNGettext(domain, msgid, msgidPlural string, n int) string { + return defaultGettexter.DNGettext(domain, msgid, msgidPlural, n) +} + +// DPGettext like PGettext(), but looking up the message in the specified domain. +// +// Examples: +// func Foo() { +// msg := gettext.DPGettext("poedit", "gettext-go.example", "Hello") +// } +func DPGettext(domain, msgctxt, msgid string) string { + return defaultGettexter.DPGettext(domain, msgctxt, msgid) +} + +// DPNGettext like PNGettext(), but looking up the message in the specified domain. +// +// Examples: +// func Foo() { +// msg := gettext.DPNGettext("poedit", "gettext-go.example", "%d people", "%d peoples", 2) +// } +func DPNGettext(domain, msgctxt, msgid, msgidPlural string, n int) string { + return defaultGettexter.DPNGettext(domain, msgctxt, msgid, msgidPlural, n) +} + +// DGetdata like Getdata(), but looking up the resource in the specified domain. +// +// Examples: +// func Foo() { +// msg := gettext.DGetdata("hello", "poems.txt") +// } +func DGetdata(domain, name string) []byte { + return defaultGettexter.DGetdata(domain, name) +} diff --git a/vendor/github.com/chai2010/gettext-go/locale.go b/vendor/github.com/chai2010/gettext-go/locale.go new file mode 100644 index 0000000000..e7a2d4b37b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/locale.go @@ -0,0 +1,205 @@ +// Copyright 2020 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gettext + +import ( + "fmt" + "sync" +) + +type _Locale struct { + mutex sync.Mutex + fs FileSystem + lang string + domain string + trMap map[string]*translator + trCurrent *translator +} + +var _ Gettexter = (*_Locale)(nil) + +func newLocale(domain, path string, data ...interface{}) *_Locale { + if domain == "" { + domain = "default" + } + p := &_Locale{ + lang: DefaultLanguage, + domain: domain, + } + if len(data) > 0 { + p.fs = NewFS(path, data[0]) + } else { + p.fs = NewFS(path, nil) + } + + p.syncTrMap() + return p +} + +func (p *_Locale) makeTrMapKey(domain, _Locale string) string { + return domain + "_$$$_" + _Locale +} + +func (p *_Locale) FileSystem() FileSystem { + return p.fs +} + +func (p *_Locale) GetLanguage() string { + p.mutex.Lock() + defer p.mutex.Unlock() + + return p.lang +} +func (p *_Locale) SetLanguage(lang string) Gettexter { + p.mutex.Lock() + defer p.mutex.Unlock() + + if lang == "" { + lang = DefaultLanguage + } + if lang == p.lang { + return p + } + + p.lang = lang + p.syncTrMap() + return p +} + +func (p *_Locale) GetDomain() string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.domain +} + +func (p *_Locale) SetDomain(domain string) Gettexter { + p.mutex.Lock() + defer p.mutex.Unlock() + + if domain == "" || domain == p.domain { + return p + } + + p.domain = domain + p.syncTrMap() + return p +} + +func (p *_Locale) syncTrMap() { + p.trMap = make(map[string]*translator) + trMapKey := p.makeTrMapKey(p.domain, p.lang) + + if tr, ok := p.trMap[trMapKey]; ok { + p.trCurrent = tr + return + } + + // try load po file + if data, err := p.fs.LoadMessagesFile(p.domain, p.lang, ".po"); err == nil { + if tr, err := newPoTranslator(fmt.Sprintf("%s_%s.po", p.domain, p.lang), data); err == nil { + p.trMap[trMapKey] = tr + p.trCurrent = tr + return + } + } + + // try load mo file + if data, err := p.fs.LoadMessagesFile(p.domain, p.lang, ".mo"); err == nil { + if tr, err := newMoTranslator(fmt.Sprintf("%s_%s.mo", p.domain, p.lang), data); err == nil { + p.trMap[trMapKey] = tr + p.trCurrent = tr + return + } + } + + // try load json file + if data, err := p.fs.LoadMessagesFile(p.domain, p.lang, ".json"); err == nil { + if tr, err := newJsonTranslator(p.lang, fmt.Sprintf("%s_%s.json", p.domain, p.lang), data); err == nil { + p.trMap[trMapKey] = tr + p.trCurrent = tr + return + } + } + + // no po/mo file + p.trMap[trMapKey] = nilTranslator + p.trCurrent = nilTranslator + return +} + +func (p *_Locale) Gettext(msgid string) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.trCurrent.PGettext("", msgid) +} + +func (p *_Locale) PGettext(msgctxt, msgid string) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.trCurrent.PGettext(msgctxt, msgid) +} + +func (p *_Locale) NGettext(msgid, msgidPlural string, n int) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.trCurrent.PNGettext("", msgid, msgidPlural, n) +} + +func (p *_Locale) PNGettext(msgctxt, msgid, msgidPlural string, n int) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.trCurrent.PNGettext(msgctxt, msgid, msgidPlural, n) +} + +func (p *_Locale) DGettext(domain, msgid string) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.gettext(domain, "", msgid, "", 0) +} + +func (p *_Locale) DNGettext(domain, msgid, msgidPlural string, n int) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.gettext(domain, "", msgid, msgidPlural, n) +} + +func (p *_Locale) DPGettext(domain, msgctxt, msgid string) string { + p.mutex.Lock() + defer 
p.mutex.Unlock() + return p.gettext(domain, msgctxt, msgid, "", 0) +} + +func (p *_Locale) DPNGettext(domain, msgctxt, msgid, msgidPlural string, n int) string { + p.mutex.Lock() + defer p.mutex.Unlock() + return p.gettext(domain, msgctxt, msgid, msgidPlural, n) +} + +func (p *_Locale) Getdata(name string) []byte { + return p.getdata(p.domain, name) +} + +func (p *_Locale) DGetdata(domain, name string) []byte { + return p.getdata(domain, name) +} + +func (p *_Locale) gettext(domain, msgctxt, msgid, msgidPlural string, n int) string { + if f, ok := p.trMap[p.makeTrMapKey(domain, p.lang)]; ok { + return f.PNGettext(msgctxt, msgid, msgidPlural, n) + } + return msgid +} + +func (p *_Locale) getdata(domain, name string) []byte { + if data, err := p.fs.LoadResourceFile(domain, p.lang, name); err == nil { + return data + } + if p.lang != "default" { + if data, err := p.fs.LoadResourceFile(domain, "default", name); err == nil { + return data + } + } + return nil +} diff --git a/vendor/github.com/chai2010/gettext-go/mo/doc.go b/vendor/github.com/chai2010/gettext-go/mo/doc.go new file mode 100644 index 0000000000..5fefc18930 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/doc.go @@ -0,0 +1,74 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package mo provides support for reading and writing GNU MO file. + +Examples: + import ( + "github.com/chai2010/gettext-go/mo" + ) + + func main() { + moFile, err := mo.LoadFile("test.mo") + if err != nil { + log.Fatal(err) + } + fmt.Printf("%v", moFile) + } + +GNU MO file struct: + + byte + +------------------------------------------+ + 0 | magic number = 0x950412de | + | | + 4 | file format revision = 0 | + | | + 8 | number of strings | == N + | | + 12 | offset of table with original strings | == O + | | + 16 | offset of table with translation strings | == T + | | + 20 | size of hashing table | == S + | | + 24 | offset of hashing table | == H + | | + . . + . (possibly more entries later) . + . . + | | + O | length & offset 0th string ----------------. + O + 8 | length & offset 1st string ------------------. + ... ... | | + O + ((N-1)*8)| length & offset (N-1)th string | | | + | | | | + T | length & offset 0th translation ---------------. + T + 8 | length & offset 1st translation -----------------. + ... ... | | | | + T + ((N-1)*8)| length & offset (N-1)th translation | | | | | + | | | | | | + H | start hash table | | | | | + ... ... | | | | + H + S * 4 | end hash table | | | | | + | | | | | | + | NUL terminated 0th string <----------------' | | | + | | | | | + | NUL terminated 1st string <------------------' | | + | | | | + ... ... | | + | | | | + | NUL terminated 0th translation <---------------' | + | | | + | NUL terminated 1st translation <-----------------' + | | + ... ... + | | + +------------------------------------------+ + +The GNU MO file specification is at +http://www.gnu.org/software/gettext/manual/html_node/MO-Files.html. +*/ +package mo diff --git a/vendor/github.com/chai2010/gettext-go/mo/encoder.go b/vendor/github.com/chai2010/gettext-go/mo/encoder.go new file mode 100644 index 0000000000..f953fd3cb8 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/encoder.go @@ -0,0 +1,105 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
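+
+// Editor's note: the example below is an illustrative sketch added in review,
+// not upstream documentation. It drives this encoder through File.Data/File.Save
+// (defined in file.go); the catalog contents and the file name "hello_de.mo"
+// are hypothetical, and the usual imports ("log") are assumed.
+//
+//	f := &mo.File{
+//		MimeHeader: mo.Header{Language: "de", ContentType: "text/plain; charset=UTF-8"},
+//		Messages:   []mo.Message{{MsgId: "Hello", MsgStr: "Hallo"}},
+//	}
+//	if err := f.Save("hello_de.mo"); err != nil {
+//		log.Fatal(err)
+//	}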
+ +package mo + +import ( + "bytes" + "encoding/binary" + "sort" + "strings" +) + +type moHeader struct { + MagicNumber uint32 + MajorVersion uint16 + MinorVersion uint16 + MsgIdCount uint32 + MsgIdOffset uint32 + MsgStrOffset uint32 + HashSize uint32 + HashOffset uint32 +} + +type moStrPos struct { + Size uint32 // must keep fields order + Addr uint32 +} + +func encodeFile(f *File) []byte { + hdr := &moHeader{ + MagicNumber: MoMagicLittleEndian, + } + data := encodeData(hdr, f) + data = append(encodeHeader(hdr), data...) + return data +} + +// encode data and init moHeader +func encodeData(hdr *moHeader, f *File) []byte { + msgList := []Message{f.MimeHeader.toMessage()} + for _, v := range f.Messages { + if len(v.MsgId) == 0 { + continue + } + if len(v.MsgStr) == 0 && len(v.MsgStrPlural) == 0 { + continue + } + msgList = append(msgList, v) + } + sort.Slice(msgList, func(i, j int) bool { + return msgList[i].less(&msgList[j]) + }) + + var buf bytes.Buffer + var msgIdPosList = make([]moStrPos, len(msgList)) + var msgStrPosList = make([]moStrPos, len(msgList)) + for i, v := range msgList { + // write msgid + msgId := encodeMsgId(v) + msgIdPosList[i].Addr = uint32(buf.Len() + MoHeaderSize) + msgIdPosList[i].Size = uint32(len(msgId)) + buf.WriteString(msgId) + // write msgstr + msgStr := encodeMsgStr(v) + msgStrPosList[i].Addr = uint32(buf.Len() + MoHeaderSize) + msgStrPosList[i].Size = uint32(len(msgStr)) + buf.WriteString(msgStr) + } + + hdr.MsgIdOffset = uint32(buf.Len() + MoHeaderSize) + binary.Write(&buf, binary.LittleEndian, msgIdPosList) + hdr.MsgStrOffset = uint32(buf.Len() + MoHeaderSize) + binary.Write(&buf, binary.LittleEndian, msgStrPosList) + + hdr.MsgIdCount = uint32(len(msgList)) + return buf.Bytes() +} + +// must called after encodeData +func encodeHeader(hdr *moHeader) []byte { + var buf bytes.Buffer + binary.Write(&buf, binary.LittleEndian, hdr) + return buf.Bytes() +} + +func encodeMsgId(v Message) string { + if v.MsgContext != "" && v.MsgIdPlural != "" { + return v.MsgContext + EotSeparator + v.MsgId + NulSeparator + v.MsgIdPlural + } + if v.MsgContext != "" && v.MsgIdPlural == "" { + return v.MsgContext + EotSeparator + v.MsgId + } + if v.MsgContext == "" && v.MsgIdPlural != "" { + return v.MsgId + NulSeparator + v.MsgIdPlural + } + return v.MsgId +} + +func encodeMsgStr(v Message) string { + if v.MsgIdPlural != "" { + return strings.Join(v.MsgStrPlural, NulSeparator) + } + return v.MsgStr +} diff --git a/vendor/github.com/chai2010/gettext-go/mo/file.go b/vendor/github.com/chai2010/gettext-go/mo/file.go new file mode 100644 index 0000000000..6f7ed161c1 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/file.go @@ -0,0 +1,197 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mo + +import ( + "bytes" + "encoding/binary" + "fmt" + "io/ioutil" + "strings" +) + +const ( + MoHeaderSize = 28 + MoMagicLittleEndian = 0x950412de + MoMagicBigEndian = 0xde120495 + + EotSeparator = "\x04" // msgctxt and msgid separator + NulSeparator = "\x00" // msgid and msgstr separator +) + +// File represents an MO File. +// +// See http://www.gnu.org/software/gettext/manual/html_node/MO-Files.html +type File struct { + MagicNumber uint32 + MajorVersion uint16 + MinorVersion uint16 + MsgIdCount uint32 + MsgIdOffset uint32 + MsgStrOffset uint32 + HashSize uint32 + HashOffset uint32 + MimeHeader Header + Messages []Message +} + +// Load loads mo file format data. 
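+//
+// Editor's note: an illustrative usage sketch added in review, not upstream
+// documentation; the byte slice raw is assumed to hold the contents of a
+// .mo catalog, and the usual imports ("fmt", "log") are assumed.
+//
+//	f, err := mo.Load(raw)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	for _, m := range f.Messages {
+//		fmt.Println(m.MsgId, "=>", m.MsgStr)
+//	}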
+func Load(data []byte) (*File, error) { + return loadData(data) +} + +// Load loads a named mo file. +func LoadFile(path string) (*File, error) { + data, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return loadData(data) +} + +func loadData(data []byte) (*File, error) { + r := bytes.NewReader(data) + + var magicNumber uint32 + if err := binary.Read(r, binary.LittleEndian, &magicNumber); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + var bo binary.ByteOrder + switch magicNumber { + case MoMagicLittleEndian: + bo = binary.LittleEndian + case MoMagicBigEndian: + bo = binary.BigEndian + default: + return nil, fmt.Errorf("gettext: %v", "invalid magic number") + } + + var header struct { + MajorVersion uint16 + MinorVersion uint16 + MsgIdCount uint32 + MsgIdOffset uint32 + MsgStrOffset uint32 + HashSize uint32 + HashOffset uint32 + } + if err := binary.Read(r, bo, &header); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + if v := header.MajorVersion; v != 0 && v != 1 { + return nil, fmt.Errorf("gettext: %v", "invalid version number") + } + if v := header.MinorVersion; v != 0 && v != 1 { + return nil, fmt.Errorf("gettext: %v", "invalid version number") + } + + msgIdStart := make([]uint32, header.MsgIdCount) + msgIdLen := make([]uint32, header.MsgIdCount) + if _, err := r.Seek(int64(header.MsgIdOffset), 0); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + for i := 0; i < int(header.MsgIdCount); i++ { + if err := binary.Read(r, bo, &msgIdLen[i]); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + if err := binary.Read(r, bo, &msgIdStart[i]); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + } + + msgStrStart := make([]int32, header.MsgIdCount) + msgStrLen := make([]int32, header.MsgIdCount) + if _, err := r.Seek(int64(header.MsgStrOffset), 0); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + for i := 0; i < int(header.MsgIdCount); i++ { + if err := binary.Read(r, bo, &msgStrLen[i]); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + if err := binary.Read(r, bo, &msgStrStart[i]); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + } + + file := &File{ + MagicNumber: magicNumber, + MajorVersion: header.MajorVersion, + MinorVersion: header.MinorVersion, + MsgIdCount: header.MsgIdCount, + MsgIdOffset: header.MsgIdOffset, + MsgStrOffset: header.MsgStrOffset, + HashSize: header.HashSize, + HashOffset: header.HashOffset, + } + for i := 0; i < int(header.MsgIdCount); i++ { + if _, err := r.Seek(int64(msgIdStart[i]), 0); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + msgIdData := make([]byte, msgIdLen[i]) + if _, err := r.Read(msgIdData); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + + if _, err := r.Seek(int64(msgStrStart[i]), 0); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + msgStrData := make([]byte, msgStrLen[i]) + if _, err := r.Read(msgStrData); err != nil { + return nil, fmt.Errorf("gettext: %v", err) + } + + if len(msgIdData) == 0 { + var msg = Message{ + MsgId: string(msgIdData), + MsgStr: string(msgStrData), + } + file.MimeHeader.fromMessage(&msg) + } else { + var msg = Message{ + MsgId: string(msgIdData), + MsgStr: string(msgStrData), + } + // Is this a context message? + if idx := strings.Index(msg.MsgId, EotSeparator); idx != -1 { + msg.MsgContext, msg.MsgId = msg.MsgId[:idx], msg.MsgId[idx+1:] + } + // Is this a plural message? 
+ if idx := strings.Index(msg.MsgId, NulSeparator); idx != -1 { + msg.MsgId, msg.MsgIdPlural = msg.MsgId[:idx], msg.MsgId[idx+1:] + msg.MsgStrPlural = strings.Split(msg.MsgStr, NulSeparator) + msg.MsgStr = "" + } + file.Messages = append(file.Messages, msg) + } + } + + return file, nil +} + +// Save saves a mo file. +func (f *File) Save(name string) error { + return ioutil.WriteFile(name, f.Data(), 0666) +} + +// Save returns a mo file format data. +func (f *File) Data() []byte { + return encodeFile(f) +} + +// String returns the po format file string. +func (f *File) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "# version: %d.%d\n", f.MajorVersion, f.MinorVersion) + fmt.Fprintf(&buf, "%s\n", f.MimeHeader.String()) + fmt.Fprintf(&buf, "\n") + + for k, v := range f.Messages { + fmt.Fprintf(&buf, `msgid "%v"`+"\n", k) + fmt.Fprintf(&buf, `msgstr "%s"`+"\n", v.MsgStr) + fmt.Fprintf(&buf, "\n") + } + + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/mo/header.go b/vendor/github.com/chai2010/gettext-go/mo/header.go new file mode 100644 index 0000000000..d8c7a5e3a3 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/header.go @@ -0,0 +1,109 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mo + +import ( + "bytes" + "fmt" + "strings" +) + +// Header is the initial comments "SOME DESCRIPTIVE TITLE", "YEAR" +// and "FIRST AUTHOR , YEAR" ought to be replaced by sensible information. +// +// See http://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html#Header-Entry +type Header struct { + ProjectIdVersion string // Project-Id-Version: PACKAGE VERSION + ReportMsgidBugsTo string // Report-Msgid-Bugs-To: FIRST AUTHOR + POTCreationDate string // POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE + PORevisionDate string // PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE + LastTranslator string // Last-Translator: FIRST AUTHOR + LanguageTeam string // Language-Team: golang-china + Language string // Language: zh_CN + MimeVersion string // MIME-Version: 1.0 + ContentType string // Content-Type: text/plain; charset=UTF-8 + ContentTransferEncoding string // Content-Transfer-Encoding: 8bit + PluralForms string // Plural-Forms: nplurals=2; plural=n == 1 ? 
0 : 1; + XGenerator string // X-Generator: Poedit 1.5.5 + UnknowFields map[string]string +} + +func (p *Header) fromMessage(msg *Message) { + if msg.MsgId != "" || msg.MsgStr == "" { + return + } + lines := strings.Split(msg.MsgStr, "\n") + for i := 0; i < len(lines); i++ { + idx := strings.Index(lines[i], ":") + if idx < 0 { + continue + } + key := strings.TrimSpace(lines[i][:idx]) + val := strings.TrimSpace(lines[i][idx+1:]) + switch strings.ToUpper(key) { + case strings.ToUpper("Project-Id-Version"): + p.ProjectIdVersion = val + case strings.ToUpper("Report-Msgid-Bugs-To"): + p.ReportMsgidBugsTo = val + case strings.ToUpper("POT-Creation-Date"): + p.POTCreationDate = val + case strings.ToUpper("PO-Revision-Date"): + p.PORevisionDate = val + case strings.ToUpper("Last-Translator"): + p.LastTranslator = val + case strings.ToUpper("Language-Team"): + p.LanguageTeam = val + case strings.ToUpper("Language"): + p.Language = val + case strings.ToUpper("MIME-Version"): + p.MimeVersion = val + case strings.ToUpper("Content-Type"): + p.ContentType = val + case strings.ToUpper("Content-Transfer-Encoding"): + p.ContentTransferEncoding = val + case strings.ToUpper("Plural-Forms"): + p.PluralForms = val + case strings.ToUpper("X-Generator"): + p.XGenerator = val + default: + if p.UnknowFields == nil { + p.UnknowFields = make(map[string]string) + } + p.UnknowFields[key] = val + } + } +} + +func (p *Header) toMessage() Message { + return Message{ + MsgStr: p.String(), + } +} + +// String returns the po format header string. +func (p Header) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, `msgid ""`+"\n") + fmt.Fprintf(&buf, `msgstr ""`+"\n") + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Project-Id-Version", p.ProjectIdVersion) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Report-Msgid-Bugs-To", p.ReportMsgidBugsTo) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "POT-Creation-Date", p.POTCreationDate) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "PO-Revision-Date", p.PORevisionDate) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Last-Translator", p.LastTranslator) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Language-Team", p.LanguageTeam) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Language", p.Language) + if p.MimeVersion != "" { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "MIME-Version", p.MimeVersion) + } + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Content-Type", p.ContentType) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Content-Transfer-Encoding", p.ContentTransferEncoding) + if p.XGenerator != "" { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "X-Generator", p.XGenerator) + } + for k, v := range p.UnknowFields { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", k, v) + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/mo/message.go b/vendor/github.com/chai2010/gettext-go/mo/message.go new file mode 100644 index 0000000000..b67bde0b70 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/message.go @@ -0,0 +1,52 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mo + +import ( + "bytes" + "fmt" +) + +// A MO file is made up of many entries, +// each entry holding the relation between an original untranslated string +// and its corresponding translation. 
+// +// See http://www.gnu.org/software/gettext/manual/html_node/MO-Files.html +type Message struct { + MsgContext string // msgctxt context + MsgId string // msgid untranslated-string + MsgIdPlural string // msgid_plural untranslated-string-plural + MsgStr string // msgstr translated-string + MsgStrPlural []string // msgstr[0] translated-string-case-0 +} + +// String returns the po format entry string. +func (p Message) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "msgid %s", encodePoString(p.MsgId)) + if p.MsgIdPlural != "" { + fmt.Fprintf(&buf, "msgid_plural %s", encodePoString(p.MsgIdPlural)) + } + if p.MsgStr != "" { + fmt.Fprintf(&buf, "msgstr %s", encodePoString(p.MsgStr)) + } + for i := 0; i < len(p.MsgStrPlural); i++ { + fmt.Fprintf(&buf, "msgstr[%d] %s", i, encodePoString(p.MsgStrPlural[i])) + } + return buf.String() +} + +func (m_i *Message) less(m_j *Message) bool { + if a, b := m_i.MsgContext, m_j.MsgContext; a != b { + return a < b + } + if a, b := m_i.MsgId, m_j.MsgId; a != b { + return a < b + } + if a, b := m_i.MsgIdPlural, m_j.MsgIdPlural; a != b { + return a < b + } + return false +} diff --git a/vendor/github.com/chai2010/gettext-go/mo/util.go b/vendor/github.com/chai2010/gettext-go/mo/util.go new file mode 100644 index 0000000000..3804511053 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/mo/util.go @@ -0,0 +1,110 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mo + +import ( + "bytes" + "strings" +) + +func decodePoString(text string) string { + lines := strings.Split(text, "\n") + for i := 0; i < len(lines); i++ { + left := strings.Index(lines[i], `"`) + right := strings.LastIndex(lines[i], `"`) + if left < 0 || right < 0 || left == right { + lines[i] = "" + continue + } + line := lines[i][left+1 : right] + data := make([]byte, 0, len(line)) + for i := 0; i < len(line); i++ { + if line[i] != '\\' { + data = append(data, line[i]) + continue + } + if i+1 >= len(line) { + break + } + switch line[i+1] { + case 'n': // \\n -> \n + data = append(data, '\n') + i++ + case 't': // \\t -> \n + data = append(data, '\t') + i++ + case '\\': // \\\ -> ? 
+ data = append(data, '\\') + i++ + } + } + lines[i] = string(data) + } + return strings.Join(lines, "") +} + +func encodePoString(text string) string { + var buf bytes.Buffer + lines := strings.Split(text, "\n") + for i := 0; i < len(lines); i++ { + if lines[i] == "" { + if i != len(lines)-1 { + buf.WriteString(`"\n"` + "\n") + } + continue + } + buf.WriteRune('"') + for _, r := range lines[i] { + switch r { + case '\\': + buf.WriteString(`\\`) + case '"': + buf.WriteString(`\"`) + case '\n': + buf.WriteString(`\n`) + case '\t': + buf.WriteString(`\t`) + default: + buf.WriteRune(r) + } + } + buf.WriteString(`\n"` + "\n") + } + return buf.String() +} + +func encodeCommentPoString(text string) string { + var buf bytes.Buffer + lines := strings.Split(text, "\n") + if len(lines) > 1 { + buf.WriteString(`""` + "\n") + } + for i := 0; i < len(lines); i++ { + if len(lines) > 0 { + buf.WriteString("#| ") + } + buf.WriteRune('"') + for _, r := range lines[i] { + switch r { + case '\\': + buf.WriteString(`\\`) + case '"': + buf.WriteString(`\"`) + case '\n': + buf.WriteString(`\n`) + case '\t': + buf.WriteString(`\t`) + default: + buf.WriteRune(r) + } + } + if i < len(lines)-1 { + buf.WriteString(`\n"` + "\n") + } else { + buf.WriteString(`"`) + } + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/plural/doc.go b/vendor/github.com/chai2010/gettext-go/plural/doc.go new file mode 100644 index 0000000000..31cb8fae9f --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/plural/doc.go @@ -0,0 +1,36 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package plural provides standard plural formulas. + +Examples: + import ( + "github.com/chai2010/gettext-go/plural" + ) + + func main() { + enFormula := plural.Formula("en_US") + xxFormula := plural.Formula("zh_CN") + + fmt.Printf("%s: %d\n", "en", enFormula(0)) + fmt.Printf("%s: %d\n", "en", enFormula(1)) + fmt.Printf("%s: %d\n", "en", enFormula(2)) + fmt.Printf("%s: %d\n", "??", xxFormula(0)) + fmt.Printf("%s: %d\n", "??", xxFormula(1)) + fmt.Printf("%s: %d\n", "??", xxFormula(2)) + fmt.Printf("%s: %d\n", "??", xxFormula(9)) + // Output: + // en: 0 + // en: 0 + // en: 1 + // ??: 0 + // ??: 0 + // ??: 1 + // ??: 8 + } + +See http://www.gnu.org/software/gettext/manual/html_node/Plural-forms.html +*/ +package plural diff --git a/vendor/github.com/chai2010/gettext-go/plural/formula.go b/vendor/github.com/chai2010/gettext-go/plural/formula.go new file mode 100644 index 0000000000..679a1cd50d --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/plural/formula.go @@ -0,0 +1,181 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package plural + +import ( + "strings" +) + +// Formula provides the language's standard plural formula. 
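+//
+// Editor's note: an illustrative sketch added in review, not upstream
+// documentation. Plural-form indexes follow FormsTable in table.go; Russian,
+// for example, has three forms while English has two:
+//
+//	f := plural.Formula("ru")
+//	fmt.Println(f(1), f(3), f(5)) // 0 1 2
+//	g := plural.Formula("en")
+//	fmt.Println(g(1), g(2)) // 0 1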
+func Formula(lang string) func(n int) int { + if idx := index(lang); idx != -1 { + return formulaTable[fmtForms(FormsTable[idx].Value)] + } + if idx := index("??"); idx != -1 { + return formulaTable[fmtForms(FormsTable[idx].Value)] + } + return func(n int) int { + return n + } +} + +func index(lang string) int { + for i := 0; i < len(FormsTable); i++ { + if strings.HasPrefix(lang, FormsTable[i].Lang) { + return i + } + } + return -1 +} + +func fmtForms(forms string) string { + forms = strings.TrimSpace(forms) + forms = strings.Replace(forms, " ", "", -1) + return forms +} + +var formulaTable = map[string]func(n int) int{ + fmtForms("nplurals=n; plural=n-1;"): func(n int) int { + if n > 0 { + return n - 1 + } + return 0 + }, + fmtForms("nplurals=1; plural=0;"): func(n int) int { + return 0 + }, + fmtForms("nplurals=2; plural=(n != 1);"): func(n int) int { + if n <= 1 { + return 0 + } + return 1 + }, + fmtForms("nplurals=2; plural=(n > 1);"): func(n int) int { + if n <= 1 { + return 0 + } + return 1 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n != 0 { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=n==1 ? 0 : n==2 ? 1 : 2;"): func(n int) int { + if n == 1 { + return 0 + } + if n == 2 { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2;"): func(n int) int { + if n == 1 { + return 0 + } + if n == 0 || (n%100 > 0 && n%100 < 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;"): func(n int) int { + if n == 1 { + return 0 + } + if n >= 2 && n <= 4 { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 
1 : 2;"): func(n int) int { + if n == 1 { + return 0 + } + if n >= 2 && n <= 4 { + return 1 + } + return 2 + }, + fmtForms("nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"): func(n int) int { + if n == 1 { + return 0 + } + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + return 2 + }, + fmtForms("nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3);"): func(n int) int { + if n%100 == 1 { + return 0 + } + if n%100 == 2 { + return 1 + } + if n%100 == 3 || n%100 == 4 { + return 2 + } + return 3 + }, +} diff --git a/vendor/github.com/chai2010/gettext-go/plural/table.go b/vendor/github.com/chai2010/gettext-go/plural/table.go new file mode 100644 index 0000000000..cdc50d2110 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/plural/table.go @@ -0,0 +1,55 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package plural + +// FormsTable are standard hard-coded plural rules. +// The application developers and the translators need to understand them. +// +// See GNU's gettext library source code: gettext/gettext-tools/src/plural-table.c +var FormsTable = []struct { + Lang string + Language string + Value string +}{ + {"??", "Unknown", "nplurals=1; plural=0;"}, + {"ja", "Japanese", "nplurals=1; plural=0;"}, + {"vi", "Vietnamese", "nplurals=1; plural=0;"}, + {"ko", "Korean", "nplurals=1; plural=0;"}, + {"en", "English", "nplurals=2; plural=(n != 1);"}, + {"de", "German", "nplurals=2; plural=(n != 1);"}, + {"nl", "Dutch", "nplurals=2; plural=(n != 1);"}, + {"sv", "Swedish", "nplurals=2; plural=(n != 1);"}, + {"da", "Danish", "nplurals=2; plural=(n != 1);"}, + {"no", "Norwegian", "nplurals=2; plural=(n != 1);"}, + {"nb", "Norwegian Bokmal", "nplurals=2; plural=(n != 1);"}, + {"nn", "Norwegian Nynorsk", "nplurals=2; plural=(n != 1);"}, + {"fo", "Faroese", "nplurals=2; plural=(n != 1);"}, + {"es", "Spanish", "nplurals=2; plural=(n != 1);"}, + {"pt", "Portuguese", "nplurals=2; plural=(n != 1);"}, + {"it", "Italian", "nplurals=2; plural=(n != 1);"}, + {"bg", "Bulgarian", "nplurals=2; plural=(n != 1);"}, + {"el", "Greek", "nplurals=2; plural=(n != 1);"}, + {"fi", "Finnish", "nplurals=2; plural=(n != 1);"}, + {"et", "Estonian", "nplurals=2; plural=(n != 1);"}, + {"he", "Hebrew", "nplurals=2; plural=(n != 1);"}, + {"eo", "Esperanto", "nplurals=2; plural=(n != 1);"}, + {"hu", "Hungarian", "nplurals=2; plural=(n != 1);"}, + {"tr", "Turkish", "nplurals=2; plural=(n != 1);"}, + {"pt_BR", "Brazilian", "nplurals=2; plural=(n > 1);"}, + {"fr", "French", "nplurals=2; plural=(n > 1);"}, + {"lv", "Latvian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2);"}, + {"ga", "Irish", "nplurals=3; plural=n==1 ? 0 : n==2 ? 1 : 2;"}, + {"ro", "Romanian", "nplurals=3; plural=n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2;"}, + {"lt", "Lithuanian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"ru", "Russian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"uk", "Ukrainian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"be", "Belarusian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"sr", "Serbian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 
0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"hr", "Croatian", "nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"cs", "Czech", "nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;"}, + {"sk", "Slovak", "nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;"}, + {"pl", "Polish", "nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);"}, + {"sl", "Slovenian", "nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3);"}, +} diff --git a/vendor/github.com/chai2010/gettext-go/po/comment.go b/vendor/github.com/chai2010/gettext-go/po/comment.go new file mode 100644 index 0000000000..d4abe7c106 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/comment.go @@ -0,0 +1,270 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "bytes" + "fmt" + "io" + "strconv" + "strings" +) + +// Comment represents every message's comments. +type Comment struct { + StartLine int // comment start line + TranslatorComment string // # translator-comments // TrimSpace + ExtractedComment string // #. extracted-comments + ReferenceFile []string // #: src/msgcmp.c:338 src/po-lex.c:699 + ReferenceLine []int // #: src/msgcmp.c:338 src/po-lex.c:699 + Flags []string // #, fuzzy,c-format,range:0..10 + PrevMsgContext string // #| msgctxt previous-context + PrevMsgId string // #| msgid previous-untranslated-string +} + +func (p *Comment) less(q *Comment) bool { + if p.StartLine != 0 || q.StartLine != 0 { + return p.StartLine < q.StartLine + } + if a, b := len(p.ReferenceFile), len(q.ReferenceFile); a != b { + return a < b + } + for i := 0; i < len(p.ReferenceFile); i++ { + if a, b := p.ReferenceFile[i], q.ReferenceFile[i]; a != b { + return a < b + } + if a, b := p.ReferenceLine[i], q.ReferenceLine[i]; a != b { + return a < b + } + } + return false +} + +func (p *Comment) readPoComment(r *lineReader) (err error) { + *p = Comment{} + if err = r.skipBlankLine(); err != nil { + return err + } + defer func(oldPos int) { + newPos := r.currentPos() + if newPos != oldPos && err == io.EOF { + err = nil + } + }(r.currentPos()) + + p.StartLine = r.currentPos() + 1 + for { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if len(s) == 0 || s[0] != '#' { + return + } + + if err = p.readTranslatorComment(r); err != nil { + return + } + if err = p.readExtractedComment(r); err != nil { + return + } + if err = p.readReferenceComment(r); err != nil { + return + } + if err = p.readFlagsComment(r); err != nil { + return + } + if err = p.readPrevMsgContext(r); err != nil { + return + } + if err = p.readPrevMsgId(r); err != nil { + return + } + } +} + +func (p *Comment) readTranslatorComment(r *lineReader) (err error) { + const prefix = "# " // .,:| + for { + var s string + if s, _, err = r.readLine(); err != nil { + return err + } + if len(s) < 1 || s[0] != '#' { + r.unreadLine() + return nil + } + if len(s) >= 2 { + switch s[1] { + case '.', ',', ':', '|': + r.unreadLine() + return nil + } + } + if p.TranslatorComment != "" { + p.TranslatorComment += "\n" + } + p.TranslatorComment += strings.TrimSpace(s[1:]) + } +} + +func (p *Comment) readExtractedComment(r *lineReader) (err error) { + const prefix = "#." 
+ for { + var s string + if s, _, err = r.readLine(); err != nil { + return err + } + if len(s) < len(prefix) || s[:len(prefix)] != prefix { + r.unreadLine() + return nil + } + if p.ExtractedComment != "" { + p.ExtractedComment += "\n" + } + p.ExtractedComment += strings.TrimSpace(s[len(prefix):]) + } +} + +func (p *Comment) readReferenceComment(r *lineReader) (err error) { + const prefix = "#:" + for { + var s string + if s, _, err = r.readLine(); err != nil { + return err + } + if len(s) < len(prefix) || s[:len(prefix)] != prefix { + r.unreadLine() + return nil + } + ss := strings.Split(strings.TrimSpace(s[len(prefix):]), " ") + for i := 0; i < len(ss); i++ { + idx := strings.Index(ss[i], ":") + if idx <= 0 { + continue + } + name := strings.TrimSpace(ss[i][:idx]) + line, _ := strconv.Atoi(strings.TrimSpace(ss[i][idx+1:])) + p.ReferenceFile = append(p.ReferenceFile, name) + p.ReferenceLine = append(p.ReferenceLine, line) + } + } +} + +func (p *Comment) readFlagsComment(r *lineReader) (err error) { + const prefix = "#," + for { + var s string + if s, _, err = r.readLine(); err != nil { + return err + } + if len(s) < len(prefix) || s[:len(prefix)] != prefix { + r.unreadLine() + return nil + } + ss := strings.Split(strings.TrimSpace(s[len(prefix):]), ",") + for i := 0; i < len(ss); i++ { + p.Flags = append(p.Flags, strings.TrimSpace(ss[i])) + } + } +} + +func (p *Comment) readPrevMsgContext(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !rePrevMsgContextComments.MatchString(s) { + return + } + p.PrevMsgContext, err = p.readString(r) + return +} + +func (p *Comment) readPrevMsgId(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !rePrevMsgIdComments.MatchString(s) { + return + } + p.PrevMsgId, err = p.readString(r) + return +} + +func (p *Comment) readString(r *lineReader) (msg string, err error) { + var s string + if s, _, err = r.readLine(); err != nil { + return + } + msg += decodePoString(s) + for { + if s, _, err = r.readLine(); err != nil { + return + } + if !reStringLineComments.MatchString(s) { + r.unreadLine() + break + } + msg += decodePoString(s) + } + return +} + +// GetFuzzy gets the fuzzy flag. +func (p *Comment) GetFuzzy() bool { + for _, s := range p.Flags { + if s == "fuzzy" { + return true + } + } + return false +} + +// SetFuzzy sets the fuzzy flag. +func (p *Comment) SetFuzzy(fuzzy bool) { + // +} + +// String returns the po format comment string. +func (p Comment) String() string { + var buf bytes.Buffer + if p.TranslatorComment != "" { + ss := strings.Split(p.TranslatorComment, "\n") + for i := 0; i < len(ss); i++ { + fmt.Fprintf(&buf, "# %s\n", ss[i]) + } + } + if p.ExtractedComment != "" { + ss := strings.Split(p.ExtractedComment, "\n") + for i := 0; i < len(ss); i++ { + fmt.Fprintf(&buf, "#. 
%s\n", ss[i]) + } + } + if a, b := len(p.ReferenceFile), len(p.ReferenceLine); a != 0 && a == b { + fmt.Fprintf(&buf, "#:") + for i := 0; i < len(p.ReferenceFile); i++ { + fmt.Fprintf(&buf, " %s:%d", p.ReferenceFile[i], p.ReferenceLine[i]) + } + fmt.Fprintf(&buf, "\n") + } + if len(p.Flags) != 0 { + fmt.Fprintf(&buf, "#, %s", p.Flags[0]) + for i := 1; i < len(p.Flags); i++ { + fmt.Fprintf(&buf, ", %s", p.Flags[i]) + } + fmt.Fprintf(&buf, "\n") + } + if p.PrevMsgContext != "" { + s := encodeCommentPoString(p.PrevMsgContext) + fmt.Fprintf(&buf, "#| msgctxt %s\n", s) + } + if p.PrevMsgId != "" { + s := encodeCommentPoString(p.PrevMsgId) + fmt.Fprintf(&buf, "#| msgid %s\n", s) + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/po/doc.go b/vendor/github.com/chai2010/gettext-go/po/doc.go new file mode 100644 index 0000000000..6cfa2a24be --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/doc.go @@ -0,0 +1,24 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package po provides support for reading and writing GNU PO file. + +Examples: + import ( + "github.com/chai2010/gettext-go/po" + ) + + func main() { + poFile, err := po.LoadFile("test.po") + if err != nil { + log.Fatal(err) + } + fmt.Printf("%v", poFile) + } + +The GNU PO file specification is at +http://www.gnu.org/software/gettext/manual/html_node/PO-Files.html. +*/ +package po diff --git a/vendor/github.com/chai2010/gettext-go/po/file.go b/vendor/github.com/chai2010/gettext-go/po/file.go new file mode 100644 index 0000000000..4a122eeb8b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/file.go @@ -0,0 +1,81 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "sort" +) + +// File represents an PO File. +// +// See http://www.gnu.org/software/gettext/manual/html_node/PO-Files.html +type File struct { + MimeHeader Header + Messages []Message +} + +// Load loads po file format data. +func Load(data []byte) (*File, error) { + return loadData(data) +} + +// LoadFile loads a named po file. +func LoadFile(path string) (*File, error) { + data, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return loadData(data) +} + +func loadData(data []byte) (*File, error) { + r := newLineReader(string(data)) + var file File + for { + var msg Message + if err := msg.readPoEntry(r); err != nil { + if err == io.EOF { + return &file, nil + } + return nil, err + } + if msg.MsgId == "" { + file.MimeHeader.parseHeader(&msg) + continue + } + file.Messages = append(file.Messages, msg) + } +} + +// Save saves a po file. +func (f *File) Save(name string) error { + return ioutil.WriteFile(name, []byte(f.String()), 0666) +} + +// Save returns a po file format data. +func (f *File) Data() []byte { + // sort the massge as ReferenceFile/ReferenceLine field + var messages []Message + messages = append(messages, f.Messages...) + sort.Slice(messages, func(i, j int) bool { + return messages[i].less(&messages[j]) + }) + + var buf bytes.Buffer + fmt.Fprintf(&buf, "%s\n", f.MimeHeader.String()) + for i := 0; i < len(messages); i++ { + fmt.Fprintf(&buf, "%s\n", messages[i].String()) + } + return buf.Bytes() +} + +// String returns the po format file string. 
+func (f *File) String() string { + return string(f.Data()) +} diff --git a/vendor/github.com/chai2010/gettext-go/po/header.go b/vendor/github.com/chai2010/gettext-go/po/header.go new file mode 100644 index 0000000000..a9b5b6671b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/header.go @@ -0,0 +1,106 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "bytes" + "fmt" + "strings" +) + +// Header is the initial comments "SOME DESCRIPTIVE TITLE", "YEAR" +// and "FIRST AUTHOR , YEAR" ought to be replaced by sensible information. +// +// See http://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html#Header-Entry +type Header struct { + Comment // Header Comments + ProjectIdVersion string // Project-Id-Version: PACKAGE VERSION + ReportMsgidBugsTo string // Report-Msgid-Bugs-To: FIRST AUTHOR + POTCreationDate string // POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE + PORevisionDate string // PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE + LastTranslator string // Last-Translator: FIRST AUTHOR + LanguageTeam string // Language-Team: golang-china + Language string // Language: zh_CN + MimeVersion string // MIME-Version: 1.0 + ContentType string // Content-Type: text/plain; charset=UTF-8 + ContentTransferEncoding string // Content-Transfer-Encoding: 8bit + PluralForms string // Plural-Forms: nplurals=2; plural=n == 1 ? 0 : 1; + XGenerator string // X-Generator: Poedit 1.5.5 + UnknowFields map[string]string +} + +func (p *Header) parseHeader(msg *Message) { + if msg.MsgId != "" || msg.MsgStr == "" { + return + } + lines := strings.Split(msg.MsgStr, "\n") + for i := 0; i < len(lines); i++ { + idx := strings.Index(lines[i], ":") + if idx < 0 { + continue + } + key := strings.TrimSpace(lines[i][:idx]) + val := strings.TrimSpace(lines[i][idx+1:]) + switch strings.ToUpper(key) { + case strings.ToUpper("Project-Id-Version"): + p.ProjectIdVersion = val + case strings.ToUpper("Report-Msgid-Bugs-To"): + p.ReportMsgidBugsTo = val + case strings.ToUpper("POT-Creation-Date"): + p.POTCreationDate = val + case strings.ToUpper("PO-Revision-Date"): + p.PORevisionDate = val + case strings.ToUpper("Last-Translator"): + p.LastTranslator = val + case strings.ToUpper("Language-Team"): + p.LanguageTeam = val + case strings.ToUpper("Language"): + p.Language = val + case strings.ToUpper("MIME-Version"): + p.MimeVersion = val + case strings.ToUpper("Content-Type"): + p.ContentType = val + case strings.ToUpper("Content-Transfer-Encoding"): + p.ContentTransferEncoding = val + case strings.ToUpper("Plural-Forms"): + p.PluralForms = val + case strings.ToUpper("X-Generator"): + p.XGenerator = val + default: + if p.UnknowFields == nil { + p.UnknowFields = make(map[string]string) + } + p.UnknowFields[key] = val + } + } + p.Comment = msg.Comment +} + +// String returns the po format header string. 
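+//
+// Editor's note: an illustrative sketch added in review, not upstream
+// documentation. The unconditional fields are emitted even when empty, so a
+// Header with only Language set to "de" renders as:
+//
+//	msgid ""
+//	msgstr ""
+//	"Project-Id-Version: \n"
+//	"Report-Msgid-Bugs-To: \n"
+//	"POT-Creation-Date: \n"
+//	"PO-Revision-Date: \n"
+//	"Last-Translator: \n"
+//	"Language-Team: \n"
+//	"Language: de\n"
+//	"Content-Type: \n"
+//	"Content-Transfer-Encoding: \n"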
+func (p Header) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "%s", p.Comment.String()) + fmt.Fprintf(&buf, `msgid ""`+"\n") + fmt.Fprintf(&buf, `msgstr ""`+"\n") + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Project-Id-Version", p.ProjectIdVersion) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Report-Msgid-Bugs-To", p.ReportMsgidBugsTo) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "POT-Creation-Date", p.POTCreationDate) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "PO-Revision-Date", p.PORevisionDate) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Last-Translator", p.LastTranslator) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Language-Team", p.LanguageTeam) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Language", p.Language) + if p.MimeVersion != "" { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "MIME-Version", p.MimeVersion) + } + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Content-Type", p.ContentType) + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "Content-Transfer-Encoding", p.ContentTransferEncoding) + if p.XGenerator != "" { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", "X-Generator", p.XGenerator) + } + for k, v := range p.UnknowFields { + fmt.Fprintf(&buf, `"%s: %s\n"`+"\n", k, v) + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/po/line_reader.go b/vendor/github.com/chai2010/gettext-go/po/line_reader.go new file mode 100644 index 0000000000..8597273a2b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/line_reader.go @@ -0,0 +1,62 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "io" + "strings" +) + +type lineReader struct { + lines []string + pos int +} + +func newLineReader(data string) *lineReader { + data = strings.Replace(data, "\r", "", -1) + lines := strings.Split(data, "\n") + return &lineReader{lines: lines} +} + +func (r *lineReader) skipBlankLine() error { + for ; r.pos < len(r.lines); r.pos++ { + if strings.TrimSpace(r.lines[r.pos]) != "" { + break + } + } + if r.pos >= len(r.lines) { + return io.EOF + } + return nil +} + +func (r *lineReader) currentPos() int { + return r.pos +} + +func (r *lineReader) currentLine() (s string, pos int, err error) { + if r.pos >= len(r.lines) { + err = io.EOF + return + } + s, pos = r.lines[r.pos], r.pos + return +} + +func (r *lineReader) readLine() (s string, pos int, err error) { + if r.pos >= len(r.lines) { + err = io.EOF + return + } + s, pos = r.lines[r.pos], r.pos + r.pos++ + return +} + +func (r *lineReader) unreadLine() { + if r.pos >= 0 { + r.pos-- + } +} diff --git a/vendor/github.com/chai2010/gettext-go/po/message.go b/vendor/github.com/chai2010/gettext-go/po/message.go new file mode 100644 index 0000000000..39936dcc7b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/message.go @@ -0,0 +1,193 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "bytes" + "fmt" + "io" + "strconv" + "strings" +) + +// A PO file is made up of many entries, +// each entry holding the relation between an original untranslated string +// and its corresponding translation. 
+// +// See http://www.gnu.org/software/gettext/manual/html_node/PO-Files.html +type Message struct { + Comment // Coments + MsgContext string // msgctxt context + MsgId string // msgid untranslated-string + MsgIdPlural string // msgid_plural untranslated-string-plural + MsgStr string // msgstr translated-string + MsgStrPlural []string // msgstr[0] translated-string-case-0 +} + +func (p *Message) less(q *Message) bool { + if p.Comment.less(&q.Comment) { + return true + } + if a, b := p.MsgContext, q.MsgContext; a != b { + return a < b + } + if a, b := p.MsgId, q.MsgId; a != b { + return a < b + } + if a, b := p.MsgIdPlural, q.MsgIdPlural; a != b { + return a < b + } + return false +} + +func (p *Message) readPoEntry(r *lineReader) (err error) { + *p = Message{} + if err = r.skipBlankLine(); err != nil { + return + } + defer func(oldPos int) { + newPos := r.currentPos() + if newPos != oldPos && err == io.EOF { + err = nil + } + }(r.currentPos()) + + if err = p.Comment.readPoComment(r); err != nil { + return + } + for { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + + if p.isInvalidLine(s) { + err = fmt.Errorf("gettext: line %d, %v", r.currentPos(), "invalid line") + return + } + if reComment.MatchString(s) || reBlankLine.MatchString(s) { + return + } + + if err = p.readMsgContext(r); err != nil { + return + } + if err = p.readMsgId(r); err != nil { + return + } + if err = p.readMsgIdPlural(r); err != nil { + return + } + if err = p.readMsgStrOrPlural(r); err != nil { + return + } + } +} + +func (p *Message) readMsgContext(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !reMsgContext.MatchString(s) { + return + } + p.MsgContext, err = p.readString(r) + return +} + +func (p *Message) readMsgId(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !reMsgId.MatchString(s) { + return + } + p.MsgId, err = p.readString(r) + return +} + +func (p *Message) readMsgIdPlural(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !reMsgIdPlural.MatchString(s) { + return + } + p.MsgIdPlural, err = p.readString(r) + return nil +} + +func (p *Message) readMsgStrOrPlural(r *lineReader) (err error) { + var s string + if s, _, err = r.currentLine(); err != nil { + return + } + if !reMsgStr.MatchString(s) && !reMsgStrPlural.MatchString(s) { + return + } + if reMsgStrPlural.MatchString(s) { + left, right := strings.Index(s, `[`), strings.LastIndex(s, `]`) + idx, _ := strconv.Atoi(s[left+1 : right]) + s, err = p.readString(r) + if n := len(p.MsgStrPlural); (idx + 1) > n { + p.MsgStrPlural = append(p.MsgStrPlural, make([]string, (idx+1)-n)...) + } + p.MsgStrPlural[idx] = s + } else { + p.MsgStr, err = p.readString(r) + } + return nil +} + +func (p *Message) readString(r *lineReader) (msg string, err error) { + var s string + if s, _, err = r.readLine(); err != nil { + return + } + msg += decodePoString(s) + for { + if s, _, err = r.readLine(); err != nil { + return + } + if !reStringLine.MatchString(s) { + r.unreadLine() + break + } + msg += decodePoString(s) + } + return +} + +// String returns the po format entry string. 
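+//
+// Editor's note: an illustrative sketch added in review, not upstream
+// documentation. A minimal entry such as
+//
+//	po.Message{MsgId: "Hello", MsgStr: "Hallo"}
+//
+// renders (with an empty comment block) as:
+//
+//	msgid "Hello"
+//	msgstr "Hallo"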
+func (p Message) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "%s", p.Comment.String()) + if p.MsgContext != "" { + fmt.Fprintf(&buf, "msgctxt %s", encodePoString(p.MsgContext)) + } + fmt.Fprintf(&buf, "msgid %s", encodePoString(p.MsgId)) + if p.MsgIdPlural != "" { + fmt.Fprintf(&buf, "msgid_plural %s", encodePoString(p.MsgIdPlural)) + } + if len(p.MsgStrPlural) == 0 { + if p.MsgStr != "" { + fmt.Fprintf(&buf, "msgstr %s", encodePoString(p.MsgStr)) + } else { + fmt.Fprintf(&buf, "msgstr %s", `""`+"\n") + } + } else { + for i := 0; i < len(p.MsgStrPlural); i++ { + if p.MsgStrPlural[i] != "" { + fmt.Fprintf(&buf, "msgstr[%d] %s", i, encodePoString(p.MsgStrPlural[i])) + } else { + fmt.Fprintf(&buf, "msgstr[%d] %s", i, `""`+"\n") + } + } + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/po/re.go b/vendor/github.com/chai2010/gettext-go/po/re.go new file mode 100644 index 0000000000..67c240a57b --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/re.go @@ -0,0 +1,58 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package po + +import ( + "regexp" +) + +var ( + reComment = regexp.MustCompile(`^#`) // # + reExtractedComments = regexp.MustCompile(`^#\.`) // #. + reReferenceComments = regexp.MustCompile(`^#:`) // #: + reFlagsComments = regexp.MustCompile(`^#,`) // #, fuzzy,c-format + rePrevMsgContextComments = regexp.MustCompile(`^#\|\s+msgctxt`) // #| msgctxt + rePrevMsgIdComments = regexp.MustCompile(`^#\|\s+msgid`) // #| msgid + reStringLineComments = regexp.MustCompile(`^#\|\s+".*"\s*$`) // #| "message" + + reMsgContext = regexp.MustCompile(`^msgctxt\s+".*"\s*$`) // msgctxt + reMsgId = regexp.MustCompile(`^msgid\s+".*"\s*$`) // msgid + reMsgIdPlural = regexp.MustCompile(`^msgid_plural\s+".*"\s*$`) // msgid_plural + reMsgStr = regexp.MustCompile(`^msgstr\s*".*"\s*$`) // msgstr + reMsgStrPlural = regexp.MustCompile(`^msgstr\s*(\[\d+\])\s*".*"\s*$`) // msgstr[0] + reStringLine = regexp.MustCompile(`^\s*".*"\s*$`) // "message" + reBlankLine = regexp.MustCompile(`^\s*$`) // +) + +func (p *Message) isInvalidLine(s string) bool { + if reComment.MatchString(s) { + return false + } + if reBlankLine.MatchString(s) { + return false + } + + if reMsgContext.MatchString(s) { + return false + } + if reMsgId.MatchString(s) { + return false + } + if reMsgIdPlural.MatchString(s) { + return false + } + if reMsgStr.MatchString(s) { + return false + } + if reMsgStrPlural.MatchString(s) { + return false + } + + if reStringLine.MatchString(s) { + return false + } + + return true +} diff --git a/vendor/github.com/chai2010/gettext-go/po/util.go b/vendor/github.com/chai2010/gettext-go/po/util.go new file mode 100644 index 0000000000..d8b3b0e254 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/po/util.go @@ -0,0 +1,114 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
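+//
+// Editor's note: the sketch below is illustrative, added in review, and not
+// upstream documentation. It shows how the helpers in this file fold a
+// multi-line value into the quoted, escaped PO form and back:
+//
+//	s := encodePoString("Hello\nWorld")
+//	// s is:
+//	//	"Hello\n"
+//	//	"World"
+//	fmt.Println(decodePoString(s) == "Hello\nWorld") // true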
+ +package po + +import ( + "bytes" + "strings" +) + +func decodePoString(text string) string { + lines := strings.Split(text, "\n") + for i := 0; i < len(lines); i++ { + left := strings.Index(lines[i], `"`) + right := strings.LastIndex(lines[i], `"`) + if left < 0 || right < 0 || left == right { + lines[i] = "" + continue + } + line := lines[i][left+1 : right] + data := make([]byte, 0, len(line)) + for i := 0; i < len(line); i++ { + if line[i] != '\\' { + data = append(data, line[i]) + continue + } + if i+1 >= len(line) { + break + } + switch line[i+1] { + case 'n': // \\n -> \n + data = append(data, '\n') + i++ + case 't': // \\t -> \n + data = append(data, '\t') + i++ + case '\\': // \\\ -> ? + data = append(data, '\\') + i++ + } + } + lines[i] = string(data) + } + return strings.Join(lines, "") +} + +func encodePoString(text string) string { + var buf bytes.Buffer + lines := strings.Split(text, "\n") + for i := 0; i < len(lines); i++ { + if lines[i] == "" { + if i != len(lines)-1 { + buf.WriteString(`"\n"` + "\n") + } + continue + } + buf.WriteRune('"') + for _, r := range lines[i] { + switch r { + case '\\': + buf.WriteString(`\\`) + case '"': + buf.WriteString(`\"`) + case '\n': + buf.WriteString(`\n`) + case '\t': + buf.WriteString(`\t`) + default: + buf.WriteRune(r) + } + } + if i < len(lines)-1 { + buf.WriteString(`\n"` + "\n") + } else { + buf.WriteString(`"` + "\n") + } + } + return buf.String() +} + +func encodeCommentPoString(text string) string { + var buf bytes.Buffer + lines := strings.Split(text, "\n") + if len(lines) > 1 { + buf.WriteString(`""` + "\n") + } + for i := 0; i < len(lines); i++ { + if len(lines) > 0 { + buf.WriteString("#| ") + } + buf.WriteRune('"') + for _, r := range lines[i] { + switch r { + case '\\': + buf.WriteString(`\\`) + case '"': + buf.WriteString(`\"`) + case '\n': + buf.WriteString(`\n`) + case '\t': + buf.WriteString(`\t`) + default: + buf.WriteRune(r) + } + } + if i < len(lines)-1 { + buf.WriteString(`\n"` + "\n") + } else { + buf.WriteString(`"`) + } + } + return buf.String() +} diff --git a/vendor/github.com/chai2010/gettext-go/tr.go b/vendor/github.com/chai2010/gettext-go/tr.go new file mode 100644 index 0000000000..5b9d08f426 --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/tr.go @@ -0,0 +1,175 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
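+//
+// Editor's note: an illustrative sketch added in review, not upstream
+// documentation. The JSON translator in this file accepts an array of entries
+// shaped like the struct tags used by newJsonTranslator below; a hypothetical
+// hello.json catalog could look like:
+//
+//	[
+//		{"msgctxt": "", "msgid": "Hello", "msgid_plural": "", "msgstr": ["Hallo"]},
+//		{"msgctxt": "", "msgid": "%d person", "msgid_plural": "%d people",
+//		 "msgstr": ["%d Person", "%d Personen"]}
+//	]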
+ +package gettext + +import ( + "encoding/json" + + "github.com/chai2010/gettext-go/mo" + "github.com/chai2010/gettext-go/plural" + "github.com/chai2010/gettext-go/po" +) + +var nilTranslator = &translator{ + MessageMap: make(map[string]mo.Message), + PluralFormula: plural.Formula("??"), +} + +type translator struct { + MessageMap map[string]mo.Message + PluralFormula func(n int) int +} + +func newMoTranslator(name string, data []byte) (*translator, error) { + var ( + f *mo.File + err error + ) + if len(data) != 0 { + f, err = mo.Load(data) + } else { + f, err = mo.LoadFile(name) + } + if err != nil { + return nil, err + } + var tr = &translator{ + MessageMap: make(map[string]mo.Message), + } + for _, v := range f.Messages { + tr.MessageMap[tr.makeMapKey(v.MsgContext, v.MsgId)] = v + } + if lang := f.MimeHeader.Language; lang != "" { + tr.PluralFormula = plural.Formula(lang) + } else { + tr.PluralFormula = plural.Formula("??") + } + return tr, nil +} + +func newPoTranslator(name string, data []byte) (*translator, error) { + var ( + f *po.File + err error + ) + if len(data) != 0 { + f, err = po.Load(data) + } else { + f, err = po.LoadFile(name) + } + if err != nil { + return nil, err + } + var tr = &translator{ + MessageMap: make(map[string]mo.Message), + } + for _, v := range f.Messages { + tr.MessageMap[tr.makeMapKey(v.MsgContext, v.MsgId)] = mo.Message{ + MsgContext: v.MsgContext, + MsgId: v.MsgId, + MsgIdPlural: v.MsgIdPlural, + MsgStr: v.MsgStr, + MsgStrPlural: v.MsgStrPlural, + } + } + if lang := f.MimeHeader.Language; lang != "" { + tr.PluralFormula = plural.Formula(lang) + } else { + tr.PluralFormula = plural.Formula("??") + } + return tr, nil +} + +func newJsonTranslator(lang, name string, jsonData []byte) (*translator, error) { + var msgList []struct { + MsgContext string `json:"msgctxt"` // msgctxt context + MsgId string `json:"msgid"` // msgid untranslated-string + MsgIdPlural string `json:"msgid_plural"` // msgid_plural untranslated-string-plural + MsgStr []string `json:"msgstr"` // msgstr translated-string + } + if err := json.Unmarshal(jsonData, &msgList); err != nil { + return nil, err + } + + var tr = &translator{ + MessageMap: make(map[string]mo.Message), + PluralFormula: plural.Formula(lang), + } + + for _, v := range msgList { + var v_MsgStr string + var v_MsgStrPlural = v.MsgStr + + if len(v.MsgStr) != 0 { + v_MsgStr = v.MsgStr[0] + } + + tr.MessageMap[tr.makeMapKey(v.MsgContext, v.MsgId)] = mo.Message{ + MsgContext: v.MsgContext, + MsgId: v.MsgId, + MsgIdPlural: v.MsgIdPlural, + MsgStr: v_MsgStr, + MsgStrPlural: v_MsgStrPlural, + } + } + return tr, nil +} + +func (p *translator) PGettext(msgctxt, msgid string) string { + return p.findMsgStr(msgctxt, msgid) +} + +func (p *translator) PNGettext(msgctxt, msgid, msgidPlural string, n int) string { + n = p.PluralFormula(n) + if ss := p.findMsgStrPlural(msgctxt, msgid, msgidPlural); len(ss) != 0 { + if n >= len(ss) { + n = len(ss) - 1 + } + if ss[n] != "" { + return ss[n] + } + } + if msgidPlural != "" && n > 0 { + return msgidPlural + } + return msgid +} + +func (p *translator) findMsgStr(msgctxt, msgid string) string { + key := p.makeMapKey(msgctxt, msgid) + if v, ok := p.MessageMap[key]; ok { + if v.MsgStr != "" { + return v.MsgStr + } + } + return msgid +} + +func (p *translator) findMsgStrPlural(msgctxt, msgid, msgidPlural string) []string { + key := p.makeMapKey(msgctxt, msgid) + if v, ok := p.MessageMap[key]; ok { + if len(v.MsgIdPlural) != 0 { + if len(v.MsgStrPlural) != 0 { + return v.MsgStrPlural + } else { + 
return nil + } + } else { + if len(v.MsgStr) != 0 { + return []string{v.MsgStr} + } else { + return nil + } + } + } + return nil +} + +func (p *translator) makeMapKey(msgctxt, msgid string) string { + if msgctxt != "" { + return msgctxt + mo.EotSeparator + msgid + } + return msgid +} diff --git a/vendor/github.com/chai2010/gettext-go/util.go b/vendor/github.com/chai2010/gettext-go/util.go new file mode 100644 index 0000000000..b8269a605c --- /dev/null +++ b/vendor/github.com/chai2010/gettext-go/util.go @@ -0,0 +1,34 @@ +// Copyright 2013 ChaiShushan . All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gettext + +import ( + "os" + "strings" +) + +func getDefaultLanguage() string { + if v := os.Getenv("LC_MESSAGES"); v != "" { + return simplifiedLanguage(v) + } + if v := os.Getenv("LANG"); v != "" { + return simplifiedLanguage(v) + } + return "default" +} + +func simplifiedLanguage(lang string) string { + // en_US/en_US.UTF-8/zh_CN/zh_TW/el_GR@euro/... + if idx := strings.Index(lang, ":"); idx != -1 { + lang = lang[:idx] + } + if idx := strings.Index(lang, "@"); idx != -1 { + lang = lang[:idx] + } + if idx := strings.Index(lang, "."); idx != -1 { + lang = lang[:idx] + } + return strings.TrimSpace(lang) +} diff --git a/vendor/github.com/exponent-io/jsonpath/.gitignore b/vendor/github.com/exponent-io/jsonpath/.gitignore new file mode 100644 index 0000000000..daf913b1b3 --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/exponent-io/jsonpath/.travis.yml b/vendor/github.com/exponent-io/jsonpath/.travis.yml new file mode 100644 index 0000000000..f4f458a416 --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - 1.5 + - tip diff --git a/vendor/github.com/exponent-io/jsonpath/LICENSE b/vendor/github.com/exponent-io/jsonpath/LICENSE new file mode 100644 index 0000000000..5419772507 --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Exponent Labs LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/exponent-io/jsonpath/README.md b/vendor/github.com/exponent-io/jsonpath/README.md new file mode 100644 index 0000000000..382fb3138c --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/README.md @@ -0,0 +1,66 @@ +[![GoDoc](https://godoc.org/github.com/exponent-io/jsonpath?status.svg)](https://godoc.org/github.com/exponent-io/jsonpath) +[![Build Status](https://travis-ci.org/exponent-io/jsonpath.svg?branch=master)](https://travis-ci.org/exponent-io/jsonpath) + +# jsonpath + +This package extends the [json.Decoder](https://golang.org/pkg/encoding/json/#Decoder) to support navigating a stream of JSON tokens. You should be able to use this extended Decoder places where a json.Decoder would have been used. + +This Decoder has the following enhancements... + * The [Scan](https://godoc.org/github.com/exponent-io/jsonpath/#Decoder.Scan) method supports scanning a JSON stream while extracting particular values along the way using [PathActions](https://godoc.org/github.com/exponent-io/jsonpath#PathActions). + * The [SeekTo](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.SeekTo) method supports seeking forward in a JSON token stream to a particular path. + * The [Path](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.Path) method returns the path of the most recently parsed token. + * The [Token](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.Token) method has been modified to distinguish between strings that are object keys and strings that are values. Object key strings are returned as the [KeyString](https://godoc.org/github.com/exponent-io/jsonpath#KeyString) type rather than a native string. + +## Installation + + go get -u github.com/exponent-io/jsonpath + +## Example Usage + +#### SeekTo + +```go +import "github.com/exponent-io/jsonpath" + +var j = []byte(`[ + {"Space": "YCbCr", "Point": {"Y": 255, "Cb": 0, "Cr": -10}}, + {"Space": "RGB", "Point": {"R": 98, "G": 218, "B": 255}} +]`) + +w := json.NewDecoder(bytes.NewReader(j)) +var v interface{} + +w.SeekTo(1, "Point", "G") +w.Decode(&v) // v is 218 +``` + +#### Scan with PathActions + +```go +var j = []byte(`{"colors":[ + {"Space": "YCbCr", "Point": {"Y": 255, "Cb": 0, "Cr": -10, "A": 58}}, + {"Space": "RGB", "Point": {"R": 98, "G": 218, "B": 255, "A": 231}} +]}`) + +var actions PathActions + +// Extract the value at Point.A +actions.Add(func(d *Decoder) error { + var alpha int + err := d.Decode(&alpha) + fmt.Printf("Alpha: %v\n", alpha) + return err +}, "Point", "A") + +w := NewDecoder(bytes.NewReader(j)) +w.SeekTo("colors", 0) + +var ok = true +var err error +for ok { + ok, err = w.Scan(&actions) + if err != nil && err != io.EOF { + panic(err) + } +} +``` diff --git a/vendor/github.com/exponent-io/jsonpath/decoder.go b/vendor/github.com/exponent-io/jsonpath/decoder.go new file mode 100644 index 0000000000..31de46c738 --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/decoder.go @@ -0,0 +1,210 @@ +package jsonpath + +import ( + "encoding/json" + "io" +) + +// KeyString is returned from Decoder.Token to represent each key in a JSON object value. +type KeyString string + +// Decoder extends the Go runtime's encoding/json.Decoder to support navigating in a stream of JSON tokens. +type Decoder struct { + json.Decoder + + path JsonPath + context jsonContext +} + +// NewDecoder creates a new instance of the extended JSON Decoder. 
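+// The embedded json.Decoder performs the tokenizing; Token and Decode are redefined on this type to keep the current JSON path up to date.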
+func NewDecoder(r io.Reader) *Decoder { + return &Decoder{Decoder: *json.NewDecoder(r)} +} + +// SeekTo causes the Decoder to move forward to a given path in the JSON structure. +// +// The path argument must consist of strings or integers. Each string specifies an JSON object key, and +// each integer specifies an index into a JSON array. +// +// Consider the JSON structure +// +// { "a": [0,"s",12e4,{"b":0,"v":35} ] } +// +// SeekTo("a",3,"v") will move to the value referenced by the "a" key in the current object, +// followed by a move to the 4th value (index 3) in the array, followed by a move to the value at key "v". +// In this example, a subsequent call to the decoder's Decode() would unmarshal the value 35. +// +// SeekTo returns a boolean value indicating whether a match was found. +// +// Decoder is intended to be used with a stream of tokens. As a result it navigates forward only. +func (d *Decoder) SeekTo(path ...interface{}) (bool, error) { + + if len(path) == 0 { + return len(d.path) == 0, nil + } + last := len(path) - 1 + if i, ok := path[last].(int); ok { + path[last] = i - 1 + } + + for { + if d.path.Equal(path) { + return true, nil + } + _, err := d.Token() + if err == io.EOF { + return false, nil + } else if err != nil { + return false, err + } + } +} + +// Decode reads the next JSON-encoded value from its input and stores it in the value pointed to by v. This is +// equivalent to encoding/json.Decode(). +func (d *Decoder) Decode(v interface{}) error { + switch d.context { + case objValue: + d.context = objKey + break + case arrValue: + d.path.incTop() + break + } + return d.Decoder.Decode(v) +} + +// Path returns a slice of string and/or int values representing the path from the root of the JSON object to the +// position of the most-recently parsed token. +func (d *Decoder) Path() JsonPath { + p := make(JsonPath, len(d.path)) + copy(p, d.path) + return p +} + +// Token is equivalent to the Token() method on json.Decoder. The primary difference is that it distinguishes +// between strings that are keys and and strings that are values. String tokens that are object keys are returned as a +// KeyString rather than as a native string. +func (d *Decoder) Token() (json.Token, error) { + t, err := d.Decoder.Token() + if err != nil { + return t, err + } + + if t == nil { + switch d.context { + case objValue: + d.context = objKey + break + case arrValue: + d.path.incTop() + break + } + return t, err + } + + switch t := t.(type) { + case json.Delim: + switch t { + case json.Delim('{'): + if d.context == arrValue { + d.path.incTop() + } + d.path.push("") + d.context = objKey + break + case json.Delim('}'): + d.path.pop() + d.context = d.path.inferContext() + break + case json.Delim('['): + if d.context == arrValue { + d.path.incTop() + } + d.path.push(-1) + d.context = arrValue + break + case json.Delim(']'): + d.path.pop() + d.context = d.path.inferContext() + break + } + case float64, json.Number, bool: + switch d.context { + case objValue: + d.context = objKey + break + case arrValue: + d.path.incTop() + break + } + break + case string: + switch d.context { + case objKey: + d.path.nameTop(t) + d.context = objValue + return KeyString(t), err + case objValue: + d.context = objKey + case arrValue: + d.path.incTop() + } + break + } + + return t, err +} + +// Scan moves forward over the JSON stream consuming all the tokens at the current level (current object, current array) +// invoking each matching PathAction along the way. 
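+// Matching is performed against paths relative to the position at which Scan was called.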
+// +// Scan returns true if there are more contiguous values to scan (for example in an array). +func (d *Decoder) Scan(ext *PathActions) (bool, error) { + + rootPath := d.Path() + + // If this is an array path, increment the root path in our local copy. + if rootPath.inferContext() == arrValue { + rootPath.incTop() + } + + for { + // advance the token position + _, err := d.Token() + if err != nil { + return false, err + } + + match: + var relPath JsonPath + + // capture the new JSON path + path := d.Path() + + if len(path) > len(rootPath) { + // capture the path relative to where the scan started + relPath = path[len(rootPath):] + } else { + // if the path is not longer than the root, then we are done with this scan + // return boolean flag indicating if there are more items to scan at the same level + return d.Decoder.More(), nil + } + + // match the relative path against the path actions + if node := ext.node.match(relPath); node != nil { + if node.action != nil { + // we have a match so execute the action + err = node.action(d) + if err != nil { + return d.Decoder.More(), err + } + // The action may have advanced the decoder. If we are in an array, advancing it further would + // skip tokens. So, if we are scanning an array, jump to the top without advancing the token. + if d.path.inferContext() == arrValue && d.Decoder.More() { + goto match + } + } + } + } +} diff --git a/vendor/github.com/exponent-io/jsonpath/path.go b/vendor/github.com/exponent-io/jsonpath/path.go new file mode 100644 index 0000000000..d7db2ad336 --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/path.go @@ -0,0 +1,67 @@ +// Extends the Go runtime's json.Decoder enabling navigation of a stream of json tokens. +package jsonpath + +import "fmt" + +type jsonContext int + +const ( + none jsonContext = iota + objKey + objValue + arrValue +) + +// AnyIndex can be used in a pattern to match any array index. +const AnyIndex = -2 + +// JsonPath is a slice of strings and/or integers. Each string specifies an JSON object key, and +// each integer specifies an index into a JSON array. +type JsonPath []interface{} + +func (p *JsonPath) push(n interface{}) { *p = append(*p, n) } +func (p *JsonPath) pop() { *p = (*p)[:len(*p)-1] } + +// increment the index at the top of the stack (must be an array index) +func (p *JsonPath) incTop() { (*p)[len(*p)-1] = (*p)[len(*p)-1].(int) + 1 } + +// name the key at the top of the stack (must be an object key) +func (p *JsonPath) nameTop(n string) { (*p)[len(*p)-1] = n } + +// infer the context from the item at the top of the stack +func (p *JsonPath) inferContext() jsonContext { + if len(*p) == 0 { + return none + } + t := (*p)[len(*p)-1] + switch t.(type) { + case string: + return objKey + case int: + return arrValue + default: + panic(fmt.Sprintf("Invalid stack type %T", t)) + } +} + +// Equal tests for equality between two JsonPath types. 
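+// Two paths are equal when they have the same length and each element compares equal.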
+func (p *JsonPath) Equal(o JsonPath) bool { + if len(*p) != len(o) { + return false + } + for i, v := range *p { + if v != o[i] { + return false + } + } + return true +} + +func (p *JsonPath) HasPrefix(o JsonPath) bool { + for i, v := range o { + if v != (*p)[i] { + return false + } + } + return true +} diff --git a/vendor/github.com/exponent-io/jsonpath/pathaction.go b/vendor/github.com/exponent-io/jsonpath/pathaction.go new file mode 100644 index 0000000000..497ed686ca --- /dev/null +++ b/vendor/github.com/exponent-io/jsonpath/pathaction.go @@ -0,0 +1,61 @@ +package jsonpath + +// pathNode is used to construct a trie of paths to be matched +type pathNode struct { + matchOn interface{} // string, or integer + childNodes []pathNode + action DecodeAction +} + +// match climbs the trie to find a node that matches the given JSON path. +func (n *pathNode) match(path JsonPath) *pathNode { + var node *pathNode = n + for _, ps := range path { + found := false + for i, n := range node.childNodes { + if n.matchOn == ps { + node = &node.childNodes[i] + found = true + break + } else if _, ok := ps.(int); ok && n.matchOn == AnyIndex { + node = &node.childNodes[i] + found = true + break + } + } + if !found { + return nil + } + } + return node +} + +// PathActions represents a collection of DecodeAction functions that should be called at certain path positions +// when scanning the JSON stream. PathActions can be created once and used many times in one or more JSON streams. +type PathActions struct { + node pathNode +} + +// DecodeAction handlers are called by the Decoder when scanning objects. See PathActions.Add for more detail. +type DecodeAction func(d *Decoder) error + +// Add specifies an action to call on the Decoder when the specified path is encountered. +func (je *PathActions) Add(action DecodeAction, path ...interface{}) { + + var node *pathNode = &je.node + for _, ps := range path { + found := false + for i, n := range node.childNodes { + if n.matchOn == ps { + node = &node.childNodes[i] + found = true + break + } + } + if !found { + node.childNodes = append(node.childNodes, pathNode{matchOn: ps}) + node = &node.childNodes[len(node.childNodes)-1] + } + } + node.action = action +} diff --git a/vendor/github.com/go-errors/errors/.travis.yml b/vendor/github.com/go-errors/errors/.travis.yml new file mode 100644 index 0000000000..9d00fdd5d6 --- /dev/null +++ b/vendor/github.com/go-errors/errors/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - "1.8.x" + - "1.10.x" diff --git a/vendor/github.com/go-errors/errors/LICENSE.MIT b/vendor/github.com/go-errors/errors/LICENSE.MIT new file mode 100644 index 0000000000..c9a5b2eeb7 --- /dev/null +++ b/vendor/github.com/go-errors/errors/LICENSE.MIT @@ -0,0 +1,7 @@ +Copyright (c) 2015 Conrad Irwin + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/go-errors/errors/README.md b/vendor/github.com/go-errors/errors/README.md new file mode 100644 index 0000000000..5d4f1873dd --- /dev/null +++ b/vendor/github.com/go-errors/errors/README.md @@ -0,0 +1,66 @@ +go-errors/errors +================ + +[![Build Status](https://travis-ci.org/go-errors/errors.svg?branch=master)](https://travis-ci.org/go-errors/errors) + +Package errors adds stacktrace support to errors in go. + +This is particularly useful when you want to understand the state of execution +when an error was returned unexpectedly. + +It provides the type \*Error which implements the standard golang error +interface, so you can use this library interchangably with code that is +expecting a normal error return. + +Usage +----- + +Full documentation is available on +[godoc](https://godoc.org/github.com/go-errors/errors), but here's a simple +example: + +```go +package crashy + +import "github.com/go-errors/errors" + +var Crashed = errors.Errorf("oh dear") + +func Crash() error { + return errors.New(Crashed) +} +``` + +This can be called as follows: + +```go +package main + +import ( + "crashy" + "fmt" + "github.com/go-errors/errors" +) + +func main() { + err := crashy.Crash() + if err != nil { + if errors.Is(err, crashy.Crashed) { + fmt.Println(err.(*errors.Error).ErrorStack()) + } else { + panic(err) + } + } +} +``` + +Meta-fu +------- + +This package was original written to allow reporting to +[Bugsnag](https://bugsnag.com/) from +[bugsnag-go](https://github.com/bugsnag/bugsnag-go), but after I found similar +packages by Facebook and Dropbox, it was moved to one canonical location so +everyone can benefit. + +This package is licensed under the MIT license, see LICENSE.MIT for details. 
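The README above only demonstrates New and Is. As a hedged sketch of the WrapPrefix and ErrorStack methods defined in error.go later in this patch (the function names come from the vendored source; the failing call and messages are invented):

```go
package main

import (
	"fmt"

	"github.com/go-errors/errors"
)

// loadConfig stands in for any call that can fail.
func loadConfig() error {
	return errors.Errorf("connection refused")
}

func main() {
	if err := loadConfig(); err != nil {
		// Attach context while keeping the stack captured by Errorf.
		wrapped := errors.WrapPrefix(err, "loading config", 0)
		fmt.Println(wrapped.Error())      // loading config: connection refused
		fmt.Println(wrapped.ErrorStack()) // type name, message, then the call stack
	}
}
```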
diff --git a/vendor/github.com/go-errors/errors/cover.out b/vendor/github.com/go-errors/errors/cover.out new file mode 100644 index 0000000000..ab18b0519f --- /dev/null +++ b/vendor/github.com/go-errors/errors/cover.out @@ -0,0 +1,89 @@ +mode: set +github.com/go-errors/errors/stackframe.go:27.51,30.25 2 1 +github.com/go-errors/errors/stackframe.go:33.2,38.8 3 1 +github.com/go-errors/errors/stackframe.go:30.25,32.3 1 0 +github.com/go-errors/errors/stackframe.go:43.47,44.31 1 1 +github.com/go-errors/errors/stackframe.go:47.2,47.48 1 1 +github.com/go-errors/errors/stackframe.go:44.31,46.3 1 1 +github.com/go-errors/errors/stackframe.go:52.42,56.16 3 1 +github.com/go-errors/errors/stackframe.go:60.2,60.60 1 1 +github.com/go-errors/errors/stackframe.go:56.16,58.3 1 0 +github.com/go-errors/errors/stackframe.go:64.55,67.16 2 1 +github.com/go-errors/errors/stackframe.go:71.2,72.61 2 1 +github.com/go-errors/errors/stackframe.go:76.2,76.66 1 1 +github.com/go-errors/errors/stackframe.go:67.16,69.3 1 0 +github.com/go-errors/errors/stackframe.go:72.61,74.3 1 0 +github.com/go-errors/errors/stackframe.go:79.56,91.63 3 1 +github.com/go-errors/errors/stackframe.go:95.2,95.53 1 1 +github.com/go-errors/errors/stackframe.go:100.2,101.18 2 1 +github.com/go-errors/errors/stackframe.go:91.63,94.3 2 1 +github.com/go-errors/errors/stackframe.go:95.53,98.3 2 1 +github.com/go-errors/errors/error.go:70.32,73.23 2 1 +github.com/go-errors/errors/error.go:80.2,85.3 3 1 +github.com/go-errors/errors/error.go:74.2,75.10 1 1 +github.com/go-errors/errors/error.go:76.2,77.28 1 1 +github.com/go-errors/errors/error.go:92.43,95.23 2 1 +github.com/go-errors/errors/error.go:104.2,109.3 3 1 +github.com/go-errors/errors/error.go:96.2,97.11 1 1 +github.com/go-errors/errors/error.go:98.2,99.10 1 1 +github.com/go-errors/errors/error.go:100.2,101.28 1 1 +github.com/go-errors/errors/error.go:115.39,117.19 1 1 +github.com/go-errors/errors/error.go:121.2,121.29 1 1 +github.com/go-errors/errors/error.go:125.2,125.43 1 1 +github.com/go-errors/errors/error.go:129.2,129.14 1 1 +github.com/go-errors/errors/error.go:117.19,119.3 1 1 +github.com/go-errors/errors/error.go:121.29,123.3 1 1 +github.com/go-errors/errors/error.go:125.43,127.3 1 1 +github.com/go-errors/errors/error.go:135.53,137.2 1 1 +github.com/go-errors/errors/error.go:140.34,142.2 1 1 +github.com/go-errors/errors/error.go:146.34,149.42 2 1 +github.com/go-errors/errors/error.go:153.2,153.20 1 1 +github.com/go-errors/errors/error.go:149.42,151.3 1 1 +github.com/go-errors/errors/error.go:158.39,160.2 1 1 +github.com/go-errors/errors/error.go:164.46,165.23 1 1 +github.com/go-errors/errors/error.go:173.2,173.19 1 1 +github.com/go-errors/errors/error.go:165.23,168.32 2 1 +github.com/go-errors/errors/error.go:168.32,170.4 1 1 +github.com/go-errors/errors/error.go:177.37,178.42 1 1 +github.com/go-errors/errors/error.go:181.2,181.41 1 1 +github.com/go-errors/errors/error.go:178.42,180.3 1 1 +github.com/go-errors/errors/parse_panic.go:10.39,12.2 1 1 +github.com/go-errors/errors/parse_panic.go:16.46,24.34 5 1 +github.com/go-errors/errors/parse_panic.go:70.2,70.43 1 1 +github.com/go-errors/errors/parse_panic.go:73.2,73.55 1 0 +github.com/go-errors/errors/parse_panic.go:24.34,27.23 2 1 +github.com/go-errors/errors/parse_panic.go:27.23,28.42 1 1 +github.com/go-errors/errors/parse_panic.go:28.42,31.5 2 1 +github.com/go-errors/errors/parse_panic.go:31.6,33.5 1 0 +github.com/go-errors/errors/parse_panic.go:35.5,35.29 1 1 +github.com/go-errors/errors/parse_panic.go:35.29,36.86 1 1 
+github.com/go-errors/errors/parse_panic.go:36.86,38.5 1 1 +github.com/go-errors/errors/parse_panic.go:40.5,40.32 1 1 +github.com/go-errors/errors/parse_panic.go:40.32,41.18 1 1 +github.com/go-errors/errors/parse_panic.go:45.4,46.46 2 1 +github.com/go-errors/errors/parse_panic.go:51.4,53.23 2 1 +github.com/go-errors/errors/parse_panic.go:57.4,58.18 2 1 +github.com/go-errors/errors/parse_panic.go:62.4,63.17 2 1 +github.com/go-errors/errors/parse_panic.go:41.18,43.10 2 1 +github.com/go-errors/errors/parse_panic.go:46.46,49.5 2 1 +github.com/go-errors/errors/parse_panic.go:53.23,55.5 1 0 +github.com/go-errors/errors/parse_panic.go:58.18,60.5 1 0 +github.com/go-errors/errors/parse_panic.go:63.17,65.10 2 1 +github.com/go-errors/errors/parse_panic.go:70.43,72.3 1 1 +github.com/go-errors/errors/parse_panic.go:80.85,82.29 2 1 +github.com/go-errors/errors/parse_panic.go:85.2,85.15 1 1 +github.com/go-errors/errors/parse_panic.go:88.2,90.63 2 1 +github.com/go-errors/errors/parse_panic.go:94.2,94.53 1 1 +github.com/go-errors/errors/parse_panic.go:99.2,101.36 2 1 +github.com/go-errors/errors/parse_panic.go:105.2,106.15 2 1 +github.com/go-errors/errors/parse_panic.go:109.2,112.49 3 1 +github.com/go-errors/errors/parse_panic.go:116.2,117.16 2 1 +github.com/go-errors/errors/parse_panic.go:121.2,126.8 1 1 +github.com/go-errors/errors/parse_panic.go:82.29,84.3 1 0 +github.com/go-errors/errors/parse_panic.go:85.15,87.3 1 1 +github.com/go-errors/errors/parse_panic.go:90.63,93.3 2 1 +github.com/go-errors/errors/parse_panic.go:94.53,97.3 2 1 +github.com/go-errors/errors/parse_panic.go:101.36,103.3 1 0 +github.com/go-errors/errors/parse_panic.go:106.15,108.3 1 0 +github.com/go-errors/errors/parse_panic.go:112.49,114.3 1 1 +github.com/go-errors/errors/parse_panic.go:117.16,119.3 1 0 diff --git a/vendor/github.com/go-errors/errors/error.go b/vendor/github.com/go-errors/errors/error.go new file mode 100644 index 0000000000..60062a4372 --- /dev/null +++ b/vendor/github.com/go-errors/errors/error.go @@ -0,0 +1,217 @@ +// Package errors provides errors that have stack-traces. +// +// This is particularly useful when you want to understand the +// state of execution when an error was returned unexpectedly. +// +// It provides the type *Error which implements the standard +// golang error interface, so you can use this library interchangably +// with code that is expecting a normal error return. +// +// For example: +// +// package crashy +// +// import "github.com/go-errors/errors" +// +// var Crashed = errors.Errorf("oh dear") +// +// func Crash() error { +// return errors.New(Crashed) +// } +// +// This can be called as follows: +// +// package main +// +// import ( +// "crashy" +// "fmt" +// "github.com/go-errors/errors" +// ) +// +// func main() { +// err := crashy.Crash() +// if err != nil { +// if errors.Is(err, crashy.Crashed) { +// fmt.Println(err.(*errors.Error).ErrorStack()) +// } else { +// panic(err) +// } +// } +// } +// +// This package was original written to allow reporting to Bugsnag, +// but after I found similar packages by Facebook and Dropbox, it +// was moved to one canonical location so everyone can benefit. +package errors + +import ( + "bytes" + "fmt" + "reflect" + "runtime" +) + +// The maximum number of stackframes on any error. +var MaxStackDepth = 50 + +// Error is an error with an attached stacktrace. It can be used +// wherever the builtin error interface is expected. 
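+// Values should be created with New, Wrap, WrapPrefix or Errorf so that the stack is captured at construction time.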
+type Error struct { + Err error + stack []uintptr + frames []StackFrame + prefix string +} + +// New makes an Error from the given value. If that value is already an +// error then it will be used directly, if not, it will be passed to +// fmt.Errorf("%v"). The stacktrace will point to the line of code that +// called New. +func New(e interface{}) *Error { + var err error + + switch e := e.(type) { + case error: + err = e + default: + err = fmt.Errorf("%v", e) + } + + stack := make([]uintptr, MaxStackDepth) + length := runtime.Callers(2, stack[:]) + return &Error{ + Err: err, + stack: stack[:length], + } +} + +// Wrap makes an Error from the given value. If that value is already an +// error then it will be used directly, if not, it will be passed to +// fmt.Errorf("%v"). The skip parameter indicates how far up the stack +// to start the stacktrace. 0 is from the current call, 1 from its caller, etc. +func Wrap(e interface{}, skip int) *Error { + var err error + + switch e := e.(type) { + case *Error: + return e + case error: + err = e + default: + err = fmt.Errorf("%v", e) + } + + stack := make([]uintptr, MaxStackDepth) + length := runtime.Callers(2+skip, stack[:]) + return &Error{ + Err: err, + stack: stack[:length], + } +} + +// WrapPrefix makes an Error from the given value. If that value is already an +// error then it will be used directly, if not, it will be passed to +// fmt.Errorf("%v"). The prefix parameter is used to add a prefix to the +// error message when calling Error(). The skip parameter indicates how far +// up the stack to start the stacktrace. 0 is from the current call, +// 1 from its caller, etc. +func WrapPrefix(e interface{}, prefix string, skip int) *Error { + + err := Wrap(e, 1+skip) + + if err.prefix != "" { + prefix = fmt.Sprintf("%s: %s", prefix, err.prefix) + } + + return &Error{ + Err: err.Err, + stack: err.stack, + prefix: prefix, + } + +} + +// Is detects whether the error is equal to a given error. Errors +// are considered equal by this function if they are the same object, +// or if they both contain the same error inside an errors.Error. +func Is(e error, original error) bool { + + if e == original { + return true + } + + if e, ok := e.(*Error); ok { + return Is(e.Err, original) + } + + if original, ok := original.(*Error); ok { + return Is(e, original.Err) + } + + return false +} + +// Errorf creates a new error with the given message. You can use it +// as a drop-in replacement for fmt.Errorf() to provide descriptive +// errors in return values. +func Errorf(format string, a ...interface{}) *Error { + return Wrap(fmt.Errorf(format, a...), 1) +} + +// Error returns the underlying error's message. +func (err *Error) Error() string { + + msg := err.Err.Error() + if err.prefix != "" { + msg = fmt.Sprintf("%s: %s", err.prefix, msg) + } + + return msg +} + +// Stack returns the callstack formatted the same way that go does +// in runtime/debug.Stack() +func (err *Error) Stack() []byte { + buf := bytes.Buffer{} + + for _, frame := range err.StackFrames() { + buf.WriteString(frame.String()) + } + + return buf.Bytes() +} + +// Callers satisfies the bugsnag ErrorWithCallerS() interface +// so that the stack can be read out. +func (err *Error) Callers() []uintptr { + return err.stack +} + +// ErrorStack returns a string that contains both the +// error message and the callstack. 
+func (err *Error) ErrorStack() string { + return err.TypeName() + " " + err.Error() + "\n" + string(err.Stack()) +} + +// StackFrames returns an array of frames containing information about the +// stack. +func (err *Error) StackFrames() []StackFrame { + if err.frames == nil { + err.frames = make([]StackFrame, len(err.stack)) + + for i, pc := range err.stack { + err.frames[i] = NewStackFrame(pc) + } + } + + return err.frames +} + +// TypeName returns the type this error. e.g. *errors.stringError. +func (err *Error) TypeName() string { + if _, ok := err.Err.(uncaughtPanic); ok { + return "panic" + } + return reflect.TypeOf(err.Err).String() +} diff --git a/vendor/github.com/go-errors/errors/parse_panic.go b/vendor/github.com/go-errors/errors/parse_panic.go new file mode 100644 index 0000000000..cc37052d78 --- /dev/null +++ b/vendor/github.com/go-errors/errors/parse_panic.go @@ -0,0 +1,127 @@ +package errors + +import ( + "strconv" + "strings" +) + +type uncaughtPanic struct{ message string } + +func (p uncaughtPanic) Error() string { + return p.message +} + +// ParsePanic allows you to get an error object from the output of a go program +// that panicked. This is particularly useful with https://github.com/mitchellh/panicwrap. +func ParsePanic(text string) (*Error, error) { + lines := strings.Split(text, "\n") + + state := "start" + + var message string + var stack []StackFrame + + for i := 0; i < len(lines); i++ { + line := lines[i] + + if state == "start" { + if strings.HasPrefix(line, "panic: ") { + message = strings.TrimPrefix(line, "panic: ") + state = "seek" + } else { + return nil, Errorf("bugsnag.panicParser: Invalid line (no prefix): %s", line) + } + + } else if state == "seek" { + if strings.HasPrefix(line, "goroutine ") && strings.HasSuffix(line, "[running]:") { + state = "parsing" + } + + } else if state == "parsing" { + if line == "" { + state = "done" + break + } + createdBy := false + if strings.HasPrefix(line, "created by ") { + line = strings.TrimPrefix(line, "created by ") + createdBy = true + } + + i++ + + if i >= len(lines) { + return nil, Errorf("bugsnag.panicParser: Invalid line (unpaired): %s", line) + } + + frame, err := parsePanicFrame(line, lines[i], createdBy) + if err != nil { + return nil, err + } + + stack = append(stack, *frame) + if createdBy { + state = "done" + break + } + } + } + + if state == "done" || state == "parsing" { + return &Error{Err: uncaughtPanic{message}, frames: stack}, nil + } + return nil, Errorf("could not parse panic: %v", text) +} + +// The lines we're passing look like this: +// +// main.(*foo).destruct(0xc208067e98) +// /0/go/src/github.com/bugsnag/bugsnag-go/pan/main.go:22 +0x151 +func parsePanicFrame(name string, line string, createdBy bool) (*StackFrame, error) { + idx := strings.LastIndex(name, "(") + if idx == -1 && !createdBy { + return nil, Errorf("bugsnag.panicParser: Invalid line (no call): %s", name) + } + if idx != -1 { + name = name[:idx] + } + pkg := "" + + if lastslash := strings.LastIndex(name, "/"); lastslash >= 0 { + pkg += name[:lastslash] + "/" + name = name[lastslash+1:] + } + if period := strings.Index(name, "."); period >= 0 { + pkg += name[:period] + name = name[period+1:] + } + + name = strings.Replace(name, "·", ".", -1) + + if !strings.HasPrefix(line, "\t") { + return nil, Errorf("bugsnag.panicParser: Invalid line (no tab): %s", line) + } + + idx = strings.LastIndex(line, ":") + if idx == -1 { + return nil, Errorf("bugsnag.panicParser: Invalid line (no line number): %s", line) + } + file := line[1:idx] + + 
number := line[idx+1:] + if idx = strings.Index(number, " +"); idx > -1 { + number = number[:idx] + } + + lno, err := strconv.ParseInt(number, 10, 32) + if err != nil { + return nil, Errorf("bugsnag.panicParser: Invalid line (bad line number): %s", line) + } + + return &StackFrame{ + File: file, + LineNumber: int(lno), + Package: pkg, + Name: name, + }, nil +} diff --git a/vendor/github.com/go-errors/errors/stackframe.go b/vendor/github.com/go-errors/errors/stackframe.go new file mode 100644 index 0000000000..750ab9a521 --- /dev/null +++ b/vendor/github.com/go-errors/errors/stackframe.go @@ -0,0 +1,102 @@ +package errors + +import ( + "bytes" + "fmt" + "io/ioutil" + "runtime" + "strings" +) + +// A StackFrame contains all necessary information about to generate a line +// in a callstack. +type StackFrame struct { + // The path to the file containing this ProgramCounter + File string + // The LineNumber in that file + LineNumber int + // The Name of the function that contains this ProgramCounter + Name string + // The Package that contains this function + Package string + // The underlying ProgramCounter + ProgramCounter uintptr +} + +// NewStackFrame popoulates a stack frame object from the program counter. +func NewStackFrame(pc uintptr) (frame StackFrame) { + + frame = StackFrame{ProgramCounter: pc} + if frame.Func() == nil { + return + } + frame.Package, frame.Name = packageAndName(frame.Func()) + + // pc -1 because the program counters we use are usually return addresses, + // and we want to show the line that corresponds to the function call + frame.File, frame.LineNumber = frame.Func().FileLine(pc - 1) + return + +} + +// Func returns the function that contained this frame. +func (frame *StackFrame) Func() *runtime.Func { + if frame.ProgramCounter == 0 { + return nil + } + return runtime.FuncForPC(frame.ProgramCounter) +} + +// String returns the stackframe formatted in the same way as go does +// in runtime/debug.Stack() +func (frame *StackFrame) String() string { + str := fmt.Sprintf("%s:%d (0x%x)\n", frame.File, frame.LineNumber, frame.ProgramCounter) + + source, err := frame.SourceLine() + if err != nil { + return str + } + + return str + fmt.Sprintf("\t%s: %s\n", frame.Name, source) +} + +// SourceLine gets the line of code (from File and Line) of the original source if possible. +func (frame *StackFrame) SourceLine() (string, error) { + data, err := ioutil.ReadFile(frame.File) + + if err != nil { + return "", New(err) + } + + lines := bytes.Split(data, []byte{'\n'}) + if frame.LineNumber <= 0 || frame.LineNumber >= len(lines) { + return "???", nil + } + // -1 because line-numbers are 1 based, but our array is 0 based + return string(bytes.Trim(lines[frame.LineNumber-1], " \t")), nil +} + +func packageAndName(fn *runtime.Func) (string, string) { + name := fn.Name() + pkg := "" + + // The name includes the path name to the package, which is unnecessary + // since the file name is already included. Plus, it has center dots. + // That is, we see + // runtime/debug.*T·ptrmethod + // and want + // *T.ptrmethod + // Since the package path might contains dots (e.g. code.google.com/...), + // we first remove the path prefix if there is one. 
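+	// For example, "github.com/go-errors/errors.(*Error).Error" yields the
+	// pkg "github.com/go-errors/errors" and the name "(*Error).Error".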
+ if lastslash := strings.LastIndex(name, "/"); lastslash >= 0 { + pkg += name[:lastslash] + "/" + name = name[lastslash+1:] + } + if period := strings.Index(name, "."); period >= 0 { + pkg += name[:period] + name = name[period+1:] + } + + name = strings.Replace(name, "·", ".", -1) + return pkg, name +} diff --git a/vendor/github.com/google/btree/.travis.yml b/vendor/github.com/google/btree/.travis.yml new file mode 100644 index 0000000000..4f2ee4d973 --- /dev/null +++ b/vendor/github.com/google/btree/.travis.yml @@ -0,0 +1 @@ +language: go diff --git a/vendor/github.com/google/btree/LICENSE b/vendor/github.com/google/btree/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/vendor/github.com/google/btree/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/google/btree/README.md b/vendor/github.com/google/btree/README.md new file mode 100644 index 0000000000..6062a4dacd --- /dev/null +++ b/vendor/github.com/google/btree/README.md @@ -0,0 +1,12 @@ +# BTree implementation for Go + +![Travis CI Build Status](https://api.travis-ci.org/google/btree.svg?branch=master) + +This package provides an in-memory B-Tree implementation for Go, useful as +an ordered, mutable data structure. + +The API is based off of the wonderful +http://godoc.org/github.com/petar/GoLLRB/llrb, and is meant to allow btree to +act as a drop-in replacement for gollrb trees. + +See http://godoc.org/github.com/google/btree for documentation. diff --git a/vendor/github.com/google/btree/btree.go b/vendor/github.com/google/btree/btree.go new file mode 100644 index 0000000000..b83acdbc6d --- /dev/null +++ b/vendor/github.com/google/btree/btree.go @@ -0,0 +1,890 @@ +// Copyright 2014 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package btree implements in-memory B-Trees of arbitrary degree. +// +// btree implements an in-memory B-Tree for use as an ordered data structure. +// It is not meant for persistent storage solutions. +// +// It has a flatter structure than an equivalent red-black or other binary tree, +// which in some cases yields better memory usage and/or performance. +// See some discussion on the matter here: +// http://google-opensource.blogspot.com/2013/01/c-containers-that-save-memory-and-time.html +// Note, though, that this project is in no way related to the C++ B-Tree +// implementation written about there. +// +// Within this tree, each node contains a slice of items and a (possibly nil) +// slice of children. 
For basic numeric values or raw structs, this can cause +// efficiency differences when compared to equivalent C++ template code that +// stores values in arrays within the node: +// * Due to the overhead of storing values as interfaces (each +// value needs to be stored as the value itself, then 2 words for the +// interface pointing to that value and its type), resulting in higher +// memory use. +// * Since interfaces can point to values anywhere in memory, values are +// most likely not stored in contiguous blocks, resulting in a higher +// number of cache misses. +// These issues don't tend to matter, though, when working with strings or other +// heap-allocated structures, since C++-equivalent structures also must store +// pointers and also distribute their values across the heap. +// +// This implementation is designed to be a drop-in replacement to gollrb.LLRB +// trees, (http://github.com/petar/gollrb), an excellent and probably the most +// widely used ordered tree implementation in the Go ecosystem currently. +// Its functions, therefore, exactly mirror those of +// llrb.LLRB where possible. Unlike gollrb, though, we currently don't +// support storing multiple equivalent values. +package btree + +import ( + "fmt" + "io" + "sort" + "strings" + "sync" +) + +// Item represents a single object in the tree. +type Item interface { + // Less tests whether the current item is less than the given argument. + // + // This must provide a strict weak ordering. + // If !a.Less(b) && !b.Less(a), we treat this to mean a == b (i.e. we can only + // hold one of either a or b in the tree). + Less(than Item) bool +} + +const ( + DefaultFreeListSize = 32 +) + +var ( + nilItems = make(items, 16) + nilChildren = make(children, 16) +) + +// FreeList represents a free list of btree nodes. By default each +// BTree has its own FreeList, but multiple BTrees can share the same +// FreeList. +// Two Btrees using the same freelist are safe for concurrent write access. +type FreeList struct { + mu sync.Mutex + freelist []*node +} + +// NewFreeList creates a new free list. +// size is the maximum size of the returned free list. +func NewFreeList(size int) *FreeList { + return &FreeList{freelist: make([]*node, 0, size)} +} + +func (f *FreeList) newNode() (n *node) { + f.mu.Lock() + index := len(f.freelist) - 1 + if index < 0 { + f.mu.Unlock() + return new(node) + } + n = f.freelist[index] + f.freelist[index] = nil + f.freelist = f.freelist[:index] + f.mu.Unlock() + return +} + +// freeNode adds the given node to the list, returning true if it was added +// and false if it was discarded. +func (f *FreeList) freeNode(n *node) (out bool) { + f.mu.Lock() + if len(f.freelist) < cap(f.freelist) { + f.freelist = append(f.freelist, n) + out = true + } + f.mu.Unlock() + return +} + +// ItemIterator allows callers of Ascend* to iterate in-order over portions of +// the tree. When this function returns false, iteration will stop and the +// associated Ascend* function will immediately return. +type ItemIterator func(i Item) bool + +// New creates a new B-Tree with the given degree. +// +// New(2), for example, will create a 2-3-4 tree (each node contains 1-3 items +// and 2-4 children). +func New(degree int) *BTree { + return NewWithFreeList(degree, NewFreeList(DefaultFreeListSize)) +} + +// NewWithFreeList creates a new B-Tree that uses the given node free list. 
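+// It panics if the given degree is less than 2.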
+func NewWithFreeList(degree int, f *FreeList) *BTree { + if degree <= 1 { + panic("bad degree") + } + return &BTree{ + degree: degree, + cow: ©OnWriteContext{freelist: f}, + } +} + +// items stores items in a node. +type items []Item + +// insertAt inserts a value into the given index, pushing all subsequent values +// forward. +func (s *items) insertAt(index int, item Item) { + *s = append(*s, nil) + if index < len(*s) { + copy((*s)[index+1:], (*s)[index:]) + } + (*s)[index] = item +} + +// removeAt removes a value at a given index, pulling all subsequent values +// back. +func (s *items) removeAt(index int) Item { + item := (*s)[index] + copy((*s)[index:], (*s)[index+1:]) + (*s)[len(*s)-1] = nil + *s = (*s)[:len(*s)-1] + return item +} + +// pop removes and returns the last element in the list. +func (s *items) pop() (out Item) { + index := len(*s) - 1 + out = (*s)[index] + (*s)[index] = nil + *s = (*s)[:index] + return +} + +// truncate truncates this instance at index so that it contains only the +// first index items. index must be less than or equal to length. +func (s *items) truncate(index int) { + var toClear items + *s, toClear = (*s)[:index], (*s)[index:] + for len(toClear) > 0 { + toClear = toClear[copy(toClear, nilItems):] + } +} + +// find returns the index where the given item should be inserted into this +// list. 'found' is true if the item already exists in the list at the given +// index. +func (s items) find(item Item) (index int, found bool) { + i := sort.Search(len(s), func(i int) bool { + return item.Less(s[i]) + }) + if i > 0 && !s[i-1].Less(item) { + return i - 1, true + } + return i, false +} + +// children stores child nodes in a node. +type children []*node + +// insertAt inserts a value into the given index, pushing all subsequent values +// forward. +func (s *children) insertAt(index int, n *node) { + *s = append(*s, nil) + if index < len(*s) { + copy((*s)[index+1:], (*s)[index:]) + } + (*s)[index] = n +} + +// removeAt removes a value at a given index, pulling all subsequent values +// back. +func (s *children) removeAt(index int) *node { + n := (*s)[index] + copy((*s)[index:], (*s)[index+1:]) + (*s)[len(*s)-1] = nil + *s = (*s)[:len(*s)-1] + return n +} + +// pop removes and returns the last element in the list. +func (s *children) pop() (out *node) { + index := len(*s) - 1 + out = (*s)[index] + (*s)[index] = nil + *s = (*s)[:index] + return +} + +// truncate truncates this instance at index so that it contains only the +// first index children. index must be less than or equal to length. +func (s *children) truncate(index int) { + var toClear children + *s, toClear = (*s)[:index], (*s)[index:] + for len(toClear) > 0 { + toClear = toClear[copy(toClear, nilChildren):] + } +} + +// node is an internal node in a tree. 
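+// Items are kept in sorted order, and the subtree at children[i] holds items that sort before items[i].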
+// +// It must at all times maintain the invariant that either +// * len(children) == 0, len(items) unconstrained +// * len(children) == len(items) + 1 +type node struct { + items items + children children + cow *copyOnWriteContext +} + +func (n *node) mutableFor(cow *copyOnWriteContext) *node { + if n.cow == cow { + return n + } + out := cow.newNode() + if cap(out.items) >= len(n.items) { + out.items = out.items[:len(n.items)] + } else { + out.items = make(items, len(n.items), cap(n.items)) + } + copy(out.items, n.items) + // Copy children + if cap(out.children) >= len(n.children) { + out.children = out.children[:len(n.children)] + } else { + out.children = make(children, len(n.children), cap(n.children)) + } + copy(out.children, n.children) + return out +} + +func (n *node) mutableChild(i int) *node { + c := n.children[i].mutableFor(n.cow) + n.children[i] = c + return c +} + +// split splits the given node at the given index. The current node shrinks, +// and this function returns the item that existed at that index and a new node +// containing all items/children after it. +func (n *node) split(i int) (Item, *node) { + item := n.items[i] + next := n.cow.newNode() + next.items = append(next.items, n.items[i+1:]...) + n.items.truncate(i) + if len(n.children) > 0 { + next.children = append(next.children, n.children[i+1:]...) + n.children.truncate(i + 1) + } + return item, next +} + +// maybeSplitChild checks if a child should be split, and if so splits it. +// Returns whether or not a split occurred. +func (n *node) maybeSplitChild(i, maxItems int) bool { + if len(n.children[i].items) < maxItems { + return false + } + first := n.mutableChild(i) + item, second := first.split(maxItems / 2) + n.items.insertAt(i, item) + n.children.insertAt(i+1, second) + return true +} + +// insert inserts an item into the subtree rooted at this node, making sure +// no nodes in the subtree exceed maxItems items. Should an equivalent item be +// be found/replaced by insert, it will be returned. +func (n *node) insert(item Item, maxItems int) Item { + i, found := n.items.find(item) + if found { + out := n.items[i] + n.items[i] = item + return out + } + if len(n.children) == 0 { + n.items.insertAt(i, item) + return nil + } + if n.maybeSplitChild(i, maxItems) { + inTree := n.items[i] + switch { + case item.Less(inTree): + // no change, we want first split node + case inTree.Less(item): + i++ // we want second split node + default: + out := n.items[i] + n.items[i] = item + return out + } + } + return n.mutableChild(i).insert(item, maxItems) +} + +// get finds the given key in the subtree and returns it. +func (n *node) get(key Item) Item { + i, found := n.items.find(key) + if found { + return n.items[i] + } else if len(n.children) > 0 { + return n.children[i].get(key) + } + return nil +} + +// min returns the first item in the subtree. +func min(n *node) Item { + if n == nil { + return nil + } + for len(n.children) > 0 { + n = n.children[0] + } + if len(n.items) == 0 { + return nil + } + return n.items[0] +} + +// max returns the last item in the subtree. +func max(n *node) Item { + if n == nil { + return nil + } + for len(n.children) > 0 { + n = n.children[len(n.children)-1] + } + if len(n.items) == 0 { + return nil + } + return n.items[len(n.items)-1] +} + +// toRemove details what item to remove in a node.remove call. 
+type toRemove int + +const ( + removeItem toRemove = iota // removes the given item + removeMin // removes smallest item in the subtree + removeMax // removes largest item in the subtree +) + +// remove removes an item from the subtree rooted at this node. +func (n *node) remove(item Item, minItems int, typ toRemove) Item { + var i int + var found bool + switch typ { + case removeMax: + if len(n.children) == 0 { + return n.items.pop() + } + i = len(n.items) + case removeMin: + if len(n.children) == 0 { + return n.items.removeAt(0) + } + i = 0 + case removeItem: + i, found = n.items.find(item) + if len(n.children) == 0 { + if found { + return n.items.removeAt(i) + } + return nil + } + default: + panic("invalid type") + } + // If we get to here, we have children. + if len(n.children[i].items) <= minItems { + return n.growChildAndRemove(i, item, minItems, typ) + } + child := n.mutableChild(i) + // Either we had enough items to begin with, or we've done some + // merging/stealing, because we've got enough now and we're ready to return + // stuff. + if found { + // The item exists at index 'i', and the child we've selected can give us a + // predecessor, since if we've gotten here it's got > minItems items in it. + out := n.items[i] + // We use our special-case 'remove' call with typ=maxItem to pull the + // predecessor of item i (the rightmost leaf of our immediate left child) + // and set it into where we pulled the item from. + n.items[i] = child.remove(nil, minItems, removeMax) + return out + } + // Final recursive call. Once we're here, we know that the item isn't in this + // node and that the child is big enough to remove from. + return child.remove(item, minItems, typ) +} + +// growChildAndRemove grows child 'i' to make sure it's possible to remove an +// item from it while keeping it at minItems, then calls remove to actually +// remove it. +// +// Most documentation says we have to do two sets of special casing: +// 1) item is in this node +// 2) item is in child +// In both cases, we need to handle the two subcases: +// A) node has enough values that it can spare one +// B) node doesn't have enough values +// For the latter, we have to check: +// a) left sibling has node to spare +// b) right sibling has node to spare +// c) we must merge +// To simplify our code here, we handle cases #1 and #2 the same: +// If a node doesn't have enough items, we make sure it does (using a,b,c). +// We then simply redo our remove call, and the second time (regardless of +// whether we're in case 1 or 2), we'll have enough items and can guarantee +// that we hit case A. 
+func (n *node) growChildAndRemove(i int, item Item, minItems int, typ toRemove) Item { + if i > 0 && len(n.children[i-1].items) > minItems { + // Steal from left child + child := n.mutableChild(i) + stealFrom := n.mutableChild(i - 1) + stolenItem := stealFrom.items.pop() + child.items.insertAt(0, n.items[i-1]) + n.items[i-1] = stolenItem + if len(stealFrom.children) > 0 { + child.children.insertAt(0, stealFrom.children.pop()) + } + } else if i < len(n.items) && len(n.children[i+1].items) > minItems { + // steal from right child + child := n.mutableChild(i) + stealFrom := n.mutableChild(i + 1) + stolenItem := stealFrom.items.removeAt(0) + child.items = append(child.items, n.items[i]) + n.items[i] = stolenItem + if len(stealFrom.children) > 0 { + child.children = append(child.children, stealFrom.children.removeAt(0)) + } + } else { + if i >= len(n.items) { + i-- + } + child := n.mutableChild(i) + // merge with right child + mergeItem := n.items.removeAt(i) + mergeChild := n.children.removeAt(i + 1) + child.items = append(child.items, mergeItem) + child.items = append(child.items, mergeChild.items...) + child.children = append(child.children, mergeChild.children...) + n.cow.freeNode(mergeChild) + } + return n.remove(item, minItems, typ) +} + +type direction int + +const ( + descend = direction(-1) + ascend = direction(+1) +) + +// iterate provides a simple method for iterating over elements in the tree. +// +// When ascending, the 'start' should be less than 'stop' and when descending, +// the 'start' should be greater than 'stop'. Setting 'includeStart' to true +// will force the iterator to include the first item when it equals 'start', +// thus creating a "greaterOrEqual" or "lessThanEqual" rather than just a +// "greaterThan" or "lessThan" queries. +func (n *node) iterate(dir direction, start, stop Item, includeStart bool, hit bool, iter ItemIterator) (bool, bool) { + var ok, found bool + var index int + switch dir { + case ascend: + if start != nil { + index, _ = n.items.find(start) + } + for i := index; i < len(n.items); i++ { + if len(n.children) > 0 { + if hit, ok = n.children[i].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + if !includeStart && !hit && start != nil && !start.Less(n.items[i]) { + hit = true + continue + } + hit = true + if stop != nil && !n.items[i].Less(stop) { + return hit, false + } + if !iter(n.items[i]) { + return hit, false + } + } + if len(n.children) > 0 { + if hit, ok = n.children[len(n.children)-1].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + case descend: + if start != nil { + index, found = n.items.find(start) + if !found { + index = index - 1 + } + } else { + index = len(n.items) - 1 + } + for i := index; i >= 0; i-- { + if start != nil && !n.items[i].Less(start) { + if !includeStart || hit || start.Less(n.items[i]) { + continue + } + } + if len(n.children) > 0 { + if hit, ok = n.children[i+1].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + if stop != nil && !stop.Less(n.items[i]) { + return hit, false // continue + } + hit = true + if !iter(n.items[i]) { + return hit, false + } + } + if len(n.children) > 0 { + if hit, ok = n.children[0].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + } + return hit, true +} + +// Used for testing/debugging purposes. 
+func (n *node) print(w io.Writer, level int) { + fmt.Fprintf(w, "%sNODE:%v\n", strings.Repeat(" ", level), n.items) + for _, c := range n.children { + c.print(w, level+1) + } +} + +// BTree is an implementation of a B-Tree. +// +// BTree stores Item instances in an ordered structure, allowing easy insertion, +// removal, and iteration. +// +// Write operations are not safe for concurrent mutation by multiple +// goroutines, but Read operations are. +type BTree struct { + degree int + length int + root *node + cow *copyOnWriteContext +} + +// copyOnWriteContext pointers determine node ownership... a tree with a write +// context equivalent to a node's write context is allowed to modify that node. +// A tree whose write context does not match a node's is not allowed to modify +// it, and must create a new, writable copy (IE: it's a Clone). +// +// When doing any write operation, we maintain the invariant that the current +// node's context is equal to the context of the tree that requested the write. +// We do this by, before we descend into any node, creating a copy with the +// correct context if the contexts don't match. +// +// Since the node we're currently visiting on any write has the requesting +// tree's context, that node is modifiable in place. Children of that node may +// not share context, but before we descend into them, we'll make a mutable +// copy. +type copyOnWriteContext struct { + freelist *FreeList +} + +// Clone clones the btree, lazily. Clone should not be called concurrently, +// but the original tree (t) and the new tree (t2) can be used concurrently +// once the Clone call completes. +// +// The internal tree structure of b is marked read-only and shared between t and +// t2. Writes to both t and t2 use copy-on-write logic, creating new nodes +// whenever one of b's original nodes would have been modified. Read operations +// should have no performance degredation. Write operations for both t and t2 +// will initially experience minor slow-downs caused by additional allocs and +// copies due to the aforementioned copy-on-write logic, but should converge to +// the original performance characteristics of the original tree. +func (t *BTree) Clone() (t2 *BTree) { + // Create two entirely new copy-on-write contexts. + // This operation effectively creates three trees: + // the original, shared nodes (old b.cow) + // the new b.cow nodes + // the new out.cow nodes + cow1, cow2 := *t.cow, *t.cow + out := *t + t.cow = &cow1 + out.cow = &cow2 + return &out +} + +// maxItems returns the max number of items to allow per node. +func (t *BTree) maxItems() int { + return t.degree*2 - 1 +} + +// minItems returns the min number of items to allow per node (ignored for the +// root node). +func (t *BTree) minItems() int { + return t.degree - 1 +} + +func (c *copyOnWriteContext) newNode() (n *node) { + n = c.freelist.newNode() + n.cow = c + return +} + +type freeType int + +const ( + ftFreelistFull freeType = iota // node was freed (available for GC, not stored in freelist) + ftStored // node was stored in the freelist for later use + ftNotOwned // node was ignored by COW, since it's owned by another one +) + +// freeNode frees a node within a given COW context, if it's owned by that +// context. It returns what happened to the node (see freeType const +// documentation). 
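+// As a rough sketch of the ownership rules above (x stands for an arbitrary
+// Item defined by the caller):
+//
+//	t2 := t.Clone()       // t and t2 share every node at this point
+//	t2.ReplaceOrInsert(x) // t2 copies only the path it touches; shared nodes
+//	                      // it does not own are never freed, just abandoned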
+func (c *copyOnWriteContext) freeNode(n *node) freeType { + if n.cow == c { + // clear to allow GC + n.items.truncate(0) + n.children.truncate(0) + n.cow = nil + if c.freelist.freeNode(n) { + return ftStored + } else { + return ftFreelistFull + } + } else { + return ftNotOwned + } +} + +// ReplaceOrInsert adds the given item to the tree. If an item in the tree +// already equals the given one, it is removed from the tree and returned. +// Otherwise, nil is returned. +// +// nil cannot be added to the tree (will panic). +func (t *BTree) ReplaceOrInsert(item Item) Item { + if item == nil { + panic("nil item being added to BTree") + } + if t.root == nil { + t.root = t.cow.newNode() + t.root.items = append(t.root.items, item) + t.length++ + return nil + } else { + t.root = t.root.mutableFor(t.cow) + if len(t.root.items) >= t.maxItems() { + item2, second := t.root.split(t.maxItems() / 2) + oldroot := t.root + t.root = t.cow.newNode() + t.root.items = append(t.root.items, item2) + t.root.children = append(t.root.children, oldroot, second) + } + } + out := t.root.insert(item, t.maxItems()) + if out == nil { + t.length++ + } + return out +} + +// Delete removes an item equal to the passed in item from the tree, returning +// it. If no such item exists, returns nil. +func (t *BTree) Delete(item Item) Item { + return t.deleteItem(item, removeItem) +} + +// DeleteMin removes the smallest item in the tree and returns it. +// If no such item exists, returns nil. +func (t *BTree) DeleteMin() Item { + return t.deleteItem(nil, removeMin) +} + +// DeleteMax removes the largest item in the tree and returns it. +// If no such item exists, returns nil. +func (t *BTree) DeleteMax() Item { + return t.deleteItem(nil, removeMax) +} + +func (t *BTree) deleteItem(item Item, typ toRemove) Item { + if t.root == nil || len(t.root.items) == 0 { + return nil + } + t.root = t.root.mutableFor(t.cow) + out := t.root.remove(item, t.minItems(), typ) + if len(t.root.items) == 0 && len(t.root.children) > 0 { + oldroot := t.root + t.root = t.root.children[0] + t.cow.freeNode(oldroot) + } + if out != nil { + t.length-- + } + return out +} + +// AscendRange calls the iterator for every value in the tree within the range +// [greaterOrEqual, lessThan), until iterator returns false. +func (t *BTree) AscendRange(greaterOrEqual, lessThan Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, greaterOrEqual, lessThan, true, false, iterator) +} + +// AscendLessThan calls the iterator for every value in the tree within the range +// [first, pivot), until iterator returns false. +func (t *BTree) AscendLessThan(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, nil, pivot, false, false, iterator) +} + +// AscendGreaterOrEqual calls the iterator for every value in the tree within +// the range [pivot, last], until iterator returns false. +func (t *BTree) AscendGreaterOrEqual(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, pivot, nil, true, false, iterator) +} + +// Ascend calls the iterator for every value in the tree within the range +// [first, last], until iterator returns false. +func (t *BTree) Ascend(iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, nil, nil, false, false, iterator) +} + +// DescendRange calls the iterator for every value in the tree within the range +// [lessOrEqual, greaterThan), until iterator returns false. 
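+// A minimal sketch, assuming a tree of Int items built with New(2):
+//
+//	tr := New(2)
+//	for i := 0; i < 10; i++ {
+//		tr.ReplaceOrInsert(Int(i))
+//	}
+//	// Visits 7, 6, 5, 4: begins at the item <= lessOrEqual and stops
+//	// before reaching greaterThan.
+//	tr.DescendRange(Int(7), Int(3), func(i Item) bool {
+//		fmt.Println(i)
+//		return true
+//	})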
+func (t *BTree) DescendRange(lessOrEqual, greaterThan Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, lessOrEqual, greaterThan, true, false, iterator) +} + +// DescendLessOrEqual calls the iterator for every value in the tree within the range +// [pivot, first], until iterator returns false. +func (t *BTree) DescendLessOrEqual(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, pivot, nil, true, false, iterator) +} + +// DescendGreaterThan calls the iterator for every value in the tree within +// the range [last, pivot), until iterator returns false. +func (t *BTree) DescendGreaterThan(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, nil, pivot, false, false, iterator) +} + +// Descend calls the iterator for every value in the tree within the range +// [last, first], until iterator returns false. +func (t *BTree) Descend(iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, nil, nil, false, false, iterator) +} + +// Get looks for the key item in the tree, returning it. It returns nil if +// unable to find that item. +func (t *BTree) Get(key Item) Item { + if t.root == nil { + return nil + } + return t.root.get(key) +} + +// Min returns the smallest item in the tree, or nil if the tree is empty. +func (t *BTree) Min() Item { + return min(t.root) +} + +// Max returns the largest item in the tree, or nil if the tree is empty. +func (t *BTree) Max() Item { + return max(t.root) +} + +// Has returns true if the given key is in the tree. +func (t *BTree) Has(key Item) bool { + return t.Get(key) != nil +} + +// Len returns the number of items currently in the tree. +func (t *BTree) Len() int { + return t.length +} + +// Clear removes all items from the btree. If addNodesToFreelist is true, +// t's nodes are added to its freelist as part of this call, until the freelist +// is full. Otherwise, the root node is simply dereferenced and the subtree +// left to Go's normal GC processes. +// +// This can be much faster +// than calling Delete on all elements, because that requires finding/removing +// each element in the tree and updating the tree accordingly. It also is +// somewhat faster than creating a new tree to replace the old one, because +// nodes from the old tree are reclaimed into the freelist for use by the new +// one, instead of being lost to the garbage collector. +// +// This call takes: +// O(1): when addNodesToFreelist is false, this is a single operation. +// O(1): when the freelist is already full, it breaks out immediately +// O(freelist size): when the freelist is empty and the nodes are all owned +// by this tree, nodes are added to the freelist until full. +// O(tree size): when all nodes are owned by another tree, all nodes are +// iterated over looking for nodes to add to the freelist, and due to +// ownership, none are. +func (t *BTree) Clear(addNodesToFreelist bool) { + if t.root != nil && addNodesToFreelist { + t.root.reset(t.cow) + } + t.root, t.length = nil, 0 +} + +// reset returns a subtree to the freelist. It breaks out immediately if the +// freelist is full, since the only benefit of iterating is to fill that +// freelist up. Returns true if parent reset call should continue. 
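+// As a rough sketch of the two Clear modes documented above:
+//
+//	t.Clear(true)  // walks the tree via reset, recycling owned nodes into the freelist
+//	t.Clear(false) // O(1): drops the root and leaves the nodes to the garbage collector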
+func (n *node) reset(c *copyOnWriteContext) bool { + for _, child := range n.children { + if !child.reset(c) { + return false + } + } + return c.freeNode(n) != ftFreelistFull +} + +// Int implements the Item interface for integers. +type Int int + +// Less returns true if int(a) < int(b). +func (a Int) Less(b Item) bool { + return a < b.(Int) +} diff --git a/vendor/github.com/gregjones/httpcache/.travis.yml b/vendor/github.com/gregjones/httpcache/.travis.yml new file mode 100644 index 0000000000..b5ffbe03d8 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/.travis.yml @@ -0,0 +1,19 @@ +sudo: false +language: go +go: + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d .) + - go tool vet . + - go test -v -race ./... diff --git a/vendor/github.com/gregjones/httpcache/LICENSE.txt b/vendor/github.com/gregjones/httpcache/LICENSE.txt new file mode 100644 index 0000000000..81316beb0c --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/LICENSE.txt @@ -0,0 +1,7 @@ +Copyright © 2012 Greg Jones (greg.jones@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/gregjones/httpcache/README.md b/vendor/github.com/gregjones/httpcache/README.md new file mode 100644 index 0000000000..09c9e7c173 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/README.md @@ -0,0 +1,25 @@ +httpcache +========= + +[![Build Status](https://travis-ci.org/gregjones/httpcache.svg?branch=master)](https://travis-ci.org/gregjones/httpcache) [![GoDoc](https://godoc.org/github.com/gregjones/httpcache?status.svg)](https://godoc.org/github.com/gregjones/httpcache) + +Package httpcache provides a http.RoundTripper implementation that works as a mostly [RFC 7234](https://tools.ietf.org/html/rfc7234) compliant cache for http responses. + +It is only suitable for use as a 'private' cache (i.e. for a web-browser or an API-client and not for a shared proxy). + +Cache Backends +-------------- + +- The built-in 'memory' cache stores responses in an in-memory map. +- [`github.com/gregjones/httpcache/diskcache`](https://github.com/gregjones/httpcache/tree/master/diskcache) provides a filesystem-backed cache using the [diskv](https://github.com/peterbourgon/diskv) library. 
+- [`github.com/gregjones/httpcache/memcache`](https://github.com/gregjones/httpcache/tree/master/memcache) provides memcache implementations, for both App Engine and 'normal' memcache servers. +- [`sourcegraph.com/sourcegraph/s3cache`](https://sourcegraph.com/github.com/sourcegraph/s3cache) uses Amazon S3 for storage. +- [`github.com/gregjones/httpcache/leveldbcache`](https://github.com/gregjones/httpcache/tree/master/leveldbcache) provides a filesystem-backed cache using [leveldb](https://github.com/syndtr/goleveldb/leveldb). +- [`github.com/die-net/lrucache`](https://github.com/die-net/lrucache) provides an in-memory cache that will evict least-recently used entries. +- [`github.com/die-net/lrucache/twotier`](https://github.com/die-net/lrucache/tree/master/twotier) allows caches to be combined, for example to use lrucache above with a persistent disk-cache. +- [`github.com/birkelund/boltdbcache`](https://github.com/birkelund/boltdbcache) provides a BoltDB implementation (based on the [bbolt](https://github.com/coreos/bbolt) fork). + +License +------- + +- [MIT License](LICENSE.txt) diff --git a/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go b/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go new file mode 100644 index 0000000000..42e3129d82 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go @@ -0,0 +1,61 @@ +// Package diskcache provides an implementation of httpcache.Cache that uses the diskv package +// to supplement an in-memory map with persistent storage +// +package diskcache + +import ( + "bytes" + "crypto/md5" + "encoding/hex" + "github.com/peterbourgon/diskv" + "io" +) + +// Cache is an implementation of httpcache.Cache that supplements the in-memory map with persistent storage +type Cache struct { + d *diskv.Diskv +} + +// Get returns the response corresponding to key if present +func (c *Cache) Get(key string) (resp []byte, ok bool) { + key = keyToFilename(key) + resp, err := c.d.Read(key) + if err != nil { + return []byte{}, false + } + return resp, true +} + +// Set saves a response to the cache as key +func (c *Cache) Set(key string, resp []byte) { + key = keyToFilename(key) + c.d.WriteStream(key, bytes.NewReader(resp), true) +} + +// Delete removes the response with key from the cache +func (c *Cache) Delete(key string) { + key = keyToFilename(key) + c.d.Erase(key) +} + +func keyToFilename(key string) string { + h := md5.New() + io.WriteString(h, key) + return hex.EncodeToString(h.Sum(nil)) +} + +// New returns a new Cache that will store files in basePath +func New(basePath string) *Cache { + return &Cache{ + d: diskv.New(diskv.Options{ + BasePath: basePath, + CacheSizeMax: 100 * 1024 * 1024, // 100MB + }), + } +} + +// NewWithDiskv returns a new Cache using the provided Diskv as underlying +// storage. +func NewWithDiskv(d *diskv.Diskv) *Cache { + return &Cache{d} +} diff --git a/vendor/github.com/gregjones/httpcache/httpcache.go b/vendor/github.com/gregjones/httpcache/httpcache.go new file mode 100644 index 0000000000..f6a2ec4a53 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/httpcache.go @@ -0,0 +1,551 @@ +// Package httpcache provides a http.RoundTripper implementation that works as a +// mostly RFC-compliant cache for http responses. +// +// It is only suitable for use as a 'private' cache (i.e. for a web-browser or an API-client +// and not for a shared proxy). 
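+//
+// A minimal sketch of typical use, with the built-in in-memory cache backend:
+//
+//	client := NewMemoryCacheTransport().Client()
+//	resp, err := client.Get("https://example.com/")
+//	if err != nil {
+//		// handle the error
+//	}
+//	defer resp.Body.Close()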
+// +package httpcache + +import ( + "bufio" + "bytes" + "errors" + "io" + "io/ioutil" + "net/http" + "net/http/httputil" + "strings" + "sync" + "time" +) + +const ( + stale = iota + fresh + transparent + // XFromCache is the header added to responses that are returned from the cache + XFromCache = "X-From-Cache" +) + +// A Cache interface is used by the Transport to store and retrieve responses. +type Cache interface { + // Get returns the []byte representation of a cached response and a bool + // set to true if the value isn't empty + Get(key string) (responseBytes []byte, ok bool) + // Set stores the []byte representation of a response against a key + Set(key string, responseBytes []byte) + // Delete removes the value associated with the key + Delete(key string) +} + +// cacheKey returns the cache key for req. +func cacheKey(req *http.Request) string { + if req.Method == http.MethodGet { + return req.URL.String() + } else { + return req.Method + " " + req.URL.String() + } +} + +// CachedResponse returns the cached http.Response for req if present, and nil +// otherwise. +func CachedResponse(c Cache, req *http.Request) (resp *http.Response, err error) { + cachedVal, ok := c.Get(cacheKey(req)) + if !ok { + return + } + + b := bytes.NewBuffer(cachedVal) + return http.ReadResponse(bufio.NewReader(b), req) +} + +// MemoryCache is an implemtation of Cache that stores responses in an in-memory map. +type MemoryCache struct { + mu sync.RWMutex + items map[string][]byte +} + +// Get returns the []byte representation of the response and true if present, false if not +func (c *MemoryCache) Get(key string) (resp []byte, ok bool) { + c.mu.RLock() + resp, ok = c.items[key] + c.mu.RUnlock() + return resp, ok +} + +// Set saves response resp to the cache with key +func (c *MemoryCache) Set(key string, resp []byte) { + c.mu.Lock() + c.items[key] = resp + c.mu.Unlock() +} + +// Delete removes key from the cache +func (c *MemoryCache) Delete(key string) { + c.mu.Lock() + delete(c.items, key) + c.mu.Unlock() +} + +// NewMemoryCache returns a new Cache that will store items in an in-memory map +func NewMemoryCache() *MemoryCache { + c := &MemoryCache{items: map[string][]byte{}} + return c +} + +// Transport is an implementation of http.RoundTripper that will return values from a cache +// where possible (avoiding a network request) and will additionally add validators (etag/if-modified-since) +// to repeated requests allowing servers to return 304 / Not Modified +type Transport struct { + // The RoundTripper interface actually used to make requests + // If nil, http.DefaultTransport is used + Transport http.RoundTripper + Cache Cache + // If true, responses returned from the cache will be given an extra header, X-From-Cache + MarkCachedResponses bool +} + +// NewTransport returns a new Transport with the +// provided Cache implementation and MarkCachedResponses set to true +func NewTransport(c Cache) *Transport { + return &Transport{Cache: c, MarkCachedResponses: true} +} + +// Client returns an *http.Client that caches responses. 
+func (t *Transport) Client() *http.Client { + return &http.Client{Transport: t} +} + +// varyMatches will return false unless all of the cached values for the headers listed in Vary +// match the new request +func varyMatches(cachedResp *http.Response, req *http.Request) bool { + for _, header := range headerAllCommaSepValues(cachedResp.Header, "vary") { + header = http.CanonicalHeaderKey(header) + if header != "" && req.Header.Get(header) != cachedResp.Header.Get("X-Varied-"+header) { + return false + } + } + return true +} + +// RoundTrip takes a Request and returns a Response +// +// If there is a fresh Response already in cache, then it will be returned without connecting to +// the server. +// +// If there is a stale Response, then any validators it contains will be set on the new request +// to give the server a chance to respond with NotModified. If this happens, then the cached Response +// will be returned. +func (t *Transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { + cacheKey := cacheKey(req) + cacheable := (req.Method == "GET" || req.Method == "HEAD") && req.Header.Get("range") == "" + var cachedResp *http.Response + if cacheable { + cachedResp, err = CachedResponse(t.Cache, req) + } else { + // Need to invalidate an existing value + t.Cache.Delete(cacheKey) + } + + transport := t.Transport + if transport == nil { + transport = http.DefaultTransport + } + + if cacheable && cachedResp != nil && err == nil { + if t.MarkCachedResponses { + cachedResp.Header.Set(XFromCache, "1") + } + + if varyMatches(cachedResp, req) { + // Can only use cached value if the new request doesn't Vary significantly + freshness := getFreshness(cachedResp.Header, req.Header) + if freshness == fresh { + return cachedResp, nil + } + + if freshness == stale { + var req2 *http.Request + // Add validators if caller hasn't already done so + etag := cachedResp.Header.Get("etag") + if etag != "" && req.Header.Get("etag") == "" { + req2 = cloneRequest(req) + req2.Header.Set("if-none-match", etag) + } + lastModified := cachedResp.Header.Get("last-modified") + if lastModified != "" && req.Header.Get("last-modified") == "" { + if req2 == nil { + req2 = cloneRequest(req) + } + req2.Header.Set("if-modified-since", lastModified) + } + if req2 != nil { + req = req2 + } + } + } + + resp, err = transport.RoundTrip(req) + if err == nil && req.Method == "GET" && resp.StatusCode == http.StatusNotModified { + // Replace the 304 response with the one from cache, but update with some new headers + endToEndHeaders := getEndToEndHeaders(resp.Header) + for _, header := range endToEndHeaders { + cachedResp.Header[header] = resp.Header[header] + } + resp = cachedResp + } else if (err != nil || (cachedResp != nil && resp.StatusCode >= 500)) && + req.Method == "GET" && canStaleOnError(cachedResp.Header, req.Header) { + // In case of transport failure and stale-if-error activated, returns cached content + // when available + return cachedResp, nil + } else { + if err != nil || resp.StatusCode != http.StatusOK { + t.Cache.Delete(cacheKey) + } + if err != nil { + return nil, err + } + } + } else { + reqCacheControl := parseCacheControl(req.Header) + if _, ok := reqCacheControl["only-if-cached"]; ok { + resp = newGatewayTimeoutResponse(req) + } else { + resp, err = transport.RoundTrip(req) + if err != nil { + return nil, err + } + } + } + + if cacheable && canStore(parseCacheControl(req.Header), parseCacheControl(resp.Header)) { + for _, varyKey := range headerAllCommaSepValues(resp.Header, "vary") { + varyKey = 
http.CanonicalHeaderKey(varyKey) + fakeHeader := "X-Varied-" + varyKey + reqValue := req.Header.Get(varyKey) + if reqValue != "" { + resp.Header.Set(fakeHeader, reqValue) + } + } + switch req.Method { + case "GET": + // Delay caching until EOF is reached. + resp.Body = &cachingReadCloser{ + R: resp.Body, + OnEOF: func(r io.Reader) { + resp := *resp + resp.Body = ioutil.NopCloser(r) + respBytes, err := httputil.DumpResponse(&resp, true) + if err == nil { + t.Cache.Set(cacheKey, respBytes) + } + }, + } + default: + respBytes, err := httputil.DumpResponse(resp, true) + if err == nil { + t.Cache.Set(cacheKey, respBytes) + } + } + } else { + t.Cache.Delete(cacheKey) + } + return resp, nil +} + +// ErrNoDateHeader indicates that the HTTP headers contained no Date header. +var ErrNoDateHeader = errors.New("no Date header") + +// Date parses and returns the value of the Date header. +func Date(respHeaders http.Header) (date time.Time, err error) { + dateHeader := respHeaders.Get("date") + if dateHeader == "" { + err = ErrNoDateHeader + return + } + + return time.Parse(time.RFC1123, dateHeader) +} + +type realClock struct{} + +func (c *realClock) since(d time.Time) time.Duration { + return time.Since(d) +} + +type timer interface { + since(d time.Time) time.Duration +} + +var clock timer = &realClock{} + +// getFreshness will return one of fresh/stale/transparent based on the cache-control +// values of the request and the response +// +// fresh indicates the response can be returned +// stale indicates that the response needs validating before it is returned +// transparent indicates the response should not be used to fulfil the request +// +// Because this is only a private cache, 'public' and 'private' in cache-control aren't +// signficant. Similarly, smax-age isn't used. +func getFreshness(respHeaders, reqHeaders http.Header) (freshness int) { + respCacheControl := parseCacheControl(respHeaders) + reqCacheControl := parseCacheControl(reqHeaders) + if _, ok := reqCacheControl["no-cache"]; ok { + return transparent + } + if _, ok := respCacheControl["no-cache"]; ok { + return stale + } + if _, ok := reqCacheControl["only-if-cached"]; ok { + return fresh + } + + date, err := Date(respHeaders) + if err != nil { + return stale + } + currentAge := clock.since(date) + + var lifetime time.Duration + var zeroDuration time.Duration + + // If a response includes both an Expires header and a max-age directive, + // the max-age directive overrides the Expires header, even if the Expires header is more restrictive. + if maxAge, ok := respCacheControl["max-age"]; ok { + lifetime, err = time.ParseDuration(maxAge + "s") + if err != nil { + lifetime = zeroDuration + } + } else { + expiresHeader := respHeaders.Get("Expires") + if expiresHeader != "" { + expires, err := time.Parse(time.RFC1123, expiresHeader) + if err != nil { + lifetime = zeroDuration + } else { + lifetime = expires.Sub(date) + } + } + } + + if maxAge, ok := reqCacheControl["max-age"]; ok { + // the client is willing to accept a response whose age is no greater than the specified time in seconds + lifetime, err = time.ParseDuration(maxAge + "s") + if err != nil { + lifetime = zeroDuration + } + } + if minfresh, ok := reqCacheControl["min-fresh"]; ok { + // the client wants a response that will still be fresh for at least the specified number of seconds. 
+ minfreshDuration, err := time.ParseDuration(minfresh + "s") + if err == nil { + currentAge = time.Duration(currentAge + minfreshDuration) + } + } + + if maxstale, ok := reqCacheControl["max-stale"]; ok { + // Indicates that the client is willing to accept a response that has exceeded its expiration time. + // If max-stale is assigned a value, then the client is willing to accept a response that has exceeded + // its expiration time by no more than the specified number of seconds. + // If no value is assigned to max-stale, then the client is willing to accept a stale response of any age. + // + // Responses served only because of a max-stale value are supposed to have a Warning header added to them, + // but that seems like a hassle, and is it actually useful? If so, then there needs to be a different + // return-value available here. + if maxstale == "" { + return fresh + } + maxstaleDuration, err := time.ParseDuration(maxstale + "s") + if err == nil { + currentAge = time.Duration(currentAge - maxstaleDuration) + } + } + + if lifetime > currentAge { + return fresh + } + + return stale +} + +// Returns true if either the request or the response includes the stale-if-error +// cache control extension: https://tools.ietf.org/html/rfc5861 +func canStaleOnError(respHeaders, reqHeaders http.Header) bool { + respCacheControl := parseCacheControl(respHeaders) + reqCacheControl := parseCacheControl(reqHeaders) + + var err error + lifetime := time.Duration(-1) + + if staleMaxAge, ok := respCacheControl["stale-if-error"]; ok { + if staleMaxAge != "" { + lifetime, err = time.ParseDuration(staleMaxAge + "s") + if err != nil { + return false + } + } else { + return true + } + } + if staleMaxAge, ok := reqCacheControl["stale-if-error"]; ok { + if staleMaxAge != "" { + lifetime, err = time.ParseDuration(staleMaxAge + "s") + if err != nil { + return false + } + } else { + return true + } + } + + if lifetime >= 0 { + date, err := Date(respHeaders) + if err != nil { + return false + } + currentAge := clock.since(date) + if lifetime > currentAge { + return true + } + } + + return false +} + +func getEndToEndHeaders(respHeaders http.Header) []string { + // These headers are always hop-by-hop + hopByHopHeaders := map[string]struct{}{ + "Connection": struct{}{}, + "Keep-Alive": struct{}{}, + "Proxy-Authenticate": struct{}{}, + "Proxy-Authorization": struct{}{}, + "Te": struct{}{}, + "Trailers": struct{}{}, + "Transfer-Encoding": struct{}{}, + "Upgrade": struct{}{}, + } + + for _, extra := range strings.Split(respHeaders.Get("connection"), ",") { + // any header listed in connection, if present, is also considered hop-by-hop + if strings.Trim(extra, " ") != "" { + hopByHopHeaders[http.CanonicalHeaderKey(extra)] = struct{}{} + } + } + endToEndHeaders := []string{} + for respHeader, _ := range respHeaders { + if _, ok := hopByHopHeaders[respHeader]; !ok { + endToEndHeaders = append(endToEndHeaders, respHeader) + } + } + return endToEndHeaders +} + +func canStore(reqCacheControl, respCacheControl cacheControl) (canStore bool) { + if _, ok := respCacheControl["no-store"]; ok { + return false + } + if _, ok := reqCacheControl["no-store"]; ok { + return false + } + return true +} + +func newGatewayTimeoutResponse(req *http.Request) *http.Response { + var braw bytes.Buffer + braw.WriteString("HTTP/1.1 504 Gateway Timeout\r\n\r\n") + resp, err := http.ReadResponse(bufio.NewReader(&braw), req) + if err != nil { + panic(err) + } + return resp +} + +// cloneRequest returns a clone of the provided *http.Request. 
+// The clone is a shallow copy of the struct and its Header map. +// (This function copyright goauth2 authors: https://code.google.com/p/goauth2) +func cloneRequest(r *http.Request) *http.Request { + // shallow copy of the struct + r2 := new(http.Request) + *r2 = *r + // deep copy of the Header + r2.Header = make(http.Header) + for k, s := range r.Header { + r2.Header[k] = s + } + return r2 +} + +type cacheControl map[string]string + +func parseCacheControl(headers http.Header) cacheControl { + cc := cacheControl{} + ccHeader := headers.Get("Cache-Control") + for _, part := range strings.Split(ccHeader, ",") { + part = strings.Trim(part, " ") + if part == "" { + continue + } + if strings.ContainsRune(part, '=') { + keyval := strings.Split(part, "=") + cc[strings.Trim(keyval[0], " ")] = strings.Trim(keyval[1], ",") + } else { + cc[part] = "" + } + } + return cc +} + +// headerAllCommaSepValues returns all comma-separated values (each +// with whitespace trimmed) for header name in headers. According to +// Section 4.2 of the HTTP/1.1 spec +// (http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2), +// values from multiple occurrences of a header should be concatenated, if +// the header's value is a comma-separated list. +func headerAllCommaSepValues(headers http.Header, name string) []string { + var vals []string + for _, val := range headers[http.CanonicalHeaderKey(name)] { + fields := strings.Split(val, ",") + for i, f := range fields { + fields[i] = strings.TrimSpace(f) + } + vals = append(vals, fields...) + } + return vals +} + +// cachingReadCloser is a wrapper around ReadCloser R that calls OnEOF +// handler with a full copy of the content read from R when EOF is +// reached. +type cachingReadCloser struct { + // Underlying ReadCloser. + R io.ReadCloser + // OnEOF is called with a copy of the content of R when EOF is reached. + OnEOF func(io.Reader) + + buf bytes.Buffer // buf stores a copy of the content of R. +} + +// Read reads the next len(p) bytes from R or until R is drained. The +// return value n is the number of bytes read. If R has no data to +// return, err is io.EOF and OnEOF is called with a full copy of what +// has been read so far. +func (r *cachingReadCloser) Read(p []byte) (n int, err error) { + n, err = r.R.Read(p) + r.buf.Write(p[:n]) + if err == io.EOF { + r.OnEOF(bytes.NewReader(r.buf.Bytes())) + } + return n, err +} + +func (r *cachingReadCloser) Close() error { + return r.R.Close() +} + +// NewMemoryCacheTransport returns a new Transport using the in-memory cache implementation +func NewMemoryCacheTransport() *Transport { + c := NewMemoryCache() + t := NewTransport(c) + return t +} diff --git a/vendor/github.com/liggitt/tabwriter/.travis.yml b/vendor/github.com/liggitt/tabwriter/.travis.yml new file mode 100644 index 0000000000..2768dc0727 --- /dev/null +++ b/vendor/github.com/liggitt/tabwriter/.travis.yml @@ -0,0 +1,11 @@ +language: go + +go: + - "1.8" + - "1.9" + - "1.10" + - "1.11" + - "1.12" + - master + +script: go test -v ./... diff --git a/vendor/github.com/liggitt/tabwriter/LICENSE b/vendor/github.com/liggitt/tabwriter/LICENSE new file mode 100644 index 0000000000..6a66aea5ea --- /dev/null +++ b/vendor/github.com/liggitt/tabwriter/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/liggitt/tabwriter/README.md b/vendor/github.com/liggitt/tabwriter/README.md new file mode 100644 index 0000000000..e75d35672e --- /dev/null +++ b/vendor/github.com/liggitt/tabwriter/README.md @@ -0,0 +1,7 @@ +This repo is a drop-in replacement for the golang [text/tabwriter](https://golang.org/pkg/text/tabwriter/) package. + +It is based on that package at [cf2c2ea8](https://github.com/golang/go/tree/cf2c2ea89d09d486bb018b1817c5874388038c3a/src/text/tabwriter) and inherits its license. + +The following additional features are supported: +* `RememberWidths` flag allows remembering maximum widths seen per column even after Flush() is called. +* `RememberedWidths() []int` and `SetRememberedWidths([]int) *Writer` allows obtaining and transferring remembered column width between writers. diff --git a/vendor/github.com/liggitt/tabwriter/tabwriter.go b/vendor/github.com/liggitt/tabwriter/tabwriter.go new file mode 100644 index 0000000000..fd3431fb03 --- /dev/null +++ b/vendor/github.com/liggitt/tabwriter/tabwriter.go @@ -0,0 +1,637 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package tabwriter implements a write filter (tabwriter.Writer) that +// translates tabbed columns in input into properly aligned text. +// +// It is a drop-in replacement for the golang text/tabwriter package (https://golang.org/pkg/text/tabwriter), +// based on that package at https://github.com/golang/go/tree/cf2c2ea89d09d486bb018b1817c5874388038c3a +// with support for additional features. +// +// The package is using the Elastic Tabstops algorithm described at +// http://nickgravgaard.com/elastictabstops/index.html. +package tabwriter + +import ( + "io" + "unicode/utf8" +) + +// ---------------------------------------------------------------------------- +// Filter implementation + +// A cell represents a segment of text terminated by tabs or line breaks. 
+// The text itself is stored in a separate buffer; cell only describes the +// segment's size in bytes, its width in runes, and whether it's an htab +// ('\t') terminated cell. +// +type cell struct { + size int // cell size in bytes + width int // cell width in runes + htab bool // true if the cell is terminated by an htab ('\t') +} + +// A Writer is a filter that inserts padding around tab-delimited +// columns in its input to align them in the output. +// +// The Writer treats incoming bytes as UTF-8-encoded text consisting +// of cells terminated by horizontal ('\t') or vertical ('\v') tabs, +// and newline ('\n') or formfeed ('\f') characters; both newline and +// formfeed act as line breaks. +// +// Tab-terminated cells in contiguous lines constitute a column. The +// Writer inserts padding as needed to make all cells in a column have +// the same width, effectively aligning the columns. It assumes that +// all characters have the same width, except for tabs for which a +// tabwidth must be specified. Column cells must be tab-terminated, not +// tab-separated: non-tab terminated trailing text at the end of a line +// forms a cell but that cell is not part of an aligned column. +// For instance, in this example (where | stands for a horizontal tab): +// +// aaaa|bbb|d +// aa |b |dd +// a | +// aa |cccc|eee +// +// the b and c are in distinct columns (the b column is not contiguous +// all the way). The d and e are not in a column at all (there's no +// terminating tab, nor would the column be contiguous). +// +// The Writer assumes that all Unicode code points have the same width; +// this may not be true in some fonts or if the string contains combining +// characters. +// +// If DiscardEmptyColumns is set, empty columns that are terminated +// entirely by vertical (or "soft") tabs are discarded. Columns +// terminated by horizontal (or "hard") tabs are not affected by +// this flag. +// +// If a Writer is configured to filter HTML, HTML tags and entities +// are passed through. The widths of tags and entities are +// assumed to be zero (tags) and one (entities) for formatting purposes. +// +// A segment of text may be escaped by bracketing it with Escape +// characters. The tabwriter passes escaped text segments through +// unchanged. In particular, it does not interpret any tabs or line +// breaks within the segment. If the StripEscape flag is set, the +// Escape characters are stripped from the output; otherwise they +// are passed through as well. For the purpose of formatting, the +// width of the escaped text is always computed excluding the Escape +// characters. +// +// The formfeed character acts like a newline but it also terminates +// all columns in the current line (effectively calling Flush). Tab- +// terminated cells in the next line start new columns. Unless found +// inside an HTML tag or inside an escaped text segment, formfeed +// characters appear as newlines in the output. +// +// The Writer must buffer input internally, because proper spacing +// of one line may depend on the cells in future lines. Clients must +// call Flush when done calling Write. 
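+//
+// A minimal sketch of typical use (widths, padding, and flags here are
+// arbitrary):
+//
+//	w := NewWriter(os.Stdout, 0, 8, 2, ' ', 0)
+//	fmt.Fprintln(w, "NAME\tREADY\tAGE")
+//	fmt.Fprintln(w, "web\t1/1\t2d")
+//	w.Flush()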
+// +type Writer struct { + // configuration + output io.Writer + minwidth int + tabwidth int + padding int + padbytes [8]byte + flags uint + + // current state + buf []byte // collected text excluding tabs or line breaks + pos int // buffer position up to which cell.width of incomplete cell has been computed + cell cell // current incomplete cell; cell.width is up to buf[pos] excluding ignored sections + endChar byte // terminating char of escaped sequence (Escape for escapes, '>', ';' for HTML tags/entities, or 0) + lines [][]cell // list of lines; each line is a list of cells + widths []int // list of column widths in runes - re-used during formatting + + maxwidths []int // list of max column widths in runes +} + +// addLine adds a new line. +// flushed is a hint indicating whether the underlying writer was just flushed. +// If so, the previous line is not likely to be a good indicator of the new line's cells. +func (b *Writer) addLine(flushed bool) { + // Grow slice instead of appending, + // as that gives us an opportunity + // to re-use an existing []cell. + if n := len(b.lines) + 1; n <= cap(b.lines) { + b.lines = b.lines[:n] + b.lines[n-1] = b.lines[n-1][:0] + } else { + b.lines = append(b.lines, nil) + } + + if !flushed { + // The previous line is probably a good indicator + // of how many cells the current line will have. + // If the current line's capacity is smaller than that, + // abandon it and make a new one. + if n := len(b.lines); n >= 2 { + if prev := len(b.lines[n-2]); prev > cap(b.lines[n-1]) { + b.lines[n-1] = make([]cell, 0, prev) + } + } + } +} + +// Reset the current state. +func (b *Writer) reset() { + b.buf = b.buf[:0] + b.pos = 0 + b.cell = cell{} + b.endChar = 0 + b.lines = b.lines[0:0] + b.widths = b.widths[0:0] + b.addLine(true) +} + +// Internal representation (current state): +// +// - all text written is appended to buf; tabs and line breaks are stripped away +// - at any given time there is a (possibly empty) incomplete cell at the end +// (the cell starts after a tab or line break) +// - cell.size is the number of bytes belonging to the cell so far +// - cell.width is text width in runes of that cell from the start of the cell to +// position pos; html tags and entities are excluded from this width if html +// filtering is enabled +// - the sizes and widths of processed text are kept in the lines list +// which contains a list of cells for each line +// - the widths list is a temporary list with current widths used during +// formatting; it is kept in Writer because it's re-used +// +// |<---------- size ---------->| +// | | +// |<- width ->|<- ignored ->| | +// | | | | +// [---processed---tab------------......] +// ^ ^ ^ +// | | | +// buf start of incomplete cell pos + +// Formatting can be controlled with these flags. +const ( + // Ignore html tags and treat entities (starting with '&' + // and ending in ';') as single characters (width = 1). + FilterHTML uint = 1 << iota + + // Strip Escape characters bracketing escaped text segments + // instead of passing them through unchanged with the text. + StripEscape + + // Force right-alignment of cell content. + // Default is left-alignment. + AlignRight + + // Handle empty columns as if they were not present in + // the input in the first place. + DiscardEmptyColumns + + // Always use tabs for indentation columns (i.e., padding of + // leading empty cells on the left) independent of padchar. + TabIndent + + // Print a vertical bar ('|') between columns (after formatting). 
+ // Discarded columns appear as zero-width columns ("||"). + Debug + + // Remember maximum widths seen per column even after Flush() is called. + RememberWidths +) + +// A Writer must be initialized with a call to Init. The first parameter (output) +// specifies the filter output. The remaining parameters control the formatting: +// +// minwidth minimal cell width including any padding +// tabwidth width of tab characters (equivalent number of spaces) +// padding padding added to a cell before computing its width +// padchar ASCII char used for padding +// if padchar == '\t', the Writer will assume that the +// width of a '\t' in the formatted output is tabwidth, +// and cells are left-aligned independent of align_left +// (for correct-looking results, tabwidth must correspond +// to the tab width in the viewer displaying the result) +// flags formatting control +// +func (b *Writer) Init(output io.Writer, minwidth, tabwidth, padding int, padchar byte, flags uint) *Writer { + if minwidth < 0 || tabwidth < 0 || padding < 0 { + panic("negative minwidth, tabwidth, or padding") + } + b.output = output + b.minwidth = minwidth + b.tabwidth = tabwidth + b.padding = padding + for i := range b.padbytes { + b.padbytes[i] = padchar + } + if padchar == '\t' { + // tab padding enforces left-alignment + flags &^= AlignRight + } + b.flags = flags + + b.reset() + + return b +} + +// debugging support (keep code around) +func (b *Writer) dump() { + pos := 0 + for i, line := range b.lines { + print("(", i, ") ") + for _, c := range line { + print("[", string(b.buf[pos:pos+c.size]), "]") + pos += c.size + } + print("\n") + } + print("\n") +} + +// local error wrapper so we can distinguish errors we want to return +// as errors from genuine panics (which we don't want to return as errors) +type osError struct { + err error +} + +func (b *Writer) write0(buf []byte) { + n, err := b.output.Write(buf) + if n != len(buf) && err == nil { + err = io.ErrShortWrite + } + if err != nil { + panic(osError{err}) + } +} + +func (b *Writer) writeN(src []byte, n int) { + for n > len(src) { + b.write0(src) + n -= len(src) + } + b.write0(src[0:n]) +} + +var ( + newline = []byte{'\n'} + tabs = []byte("\t\t\t\t\t\t\t\t") +) + +func (b *Writer) writePadding(textw, cellw int, useTabs bool) { + if b.padbytes[0] == '\t' || useTabs { + // padding is done with tabs + if b.tabwidth == 0 { + return // tabs have no width - can't do any padding + } + // make cellw the smallest multiple of b.tabwidth + cellw = (cellw + b.tabwidth - 1) / b.tabwidth * b.tabwidth + n := cellw - textw // amount of padding + if n < 0 { + panic("internal error") + } + b.writeN(tabs, (n+b.tabwidth-1)/b.tabwidth) + return + } + + // padding is done with non-tab characters + b.writeN(b.padbytes[0:], cellw-textw) +} + +var vbar = []byte{'|'} + +func (b *Writer) writeLines(pos0 int, line0, line1 int) (pos int) { + pos = pos0 + for i := line0; i < line1; i++ { + line := b.lines[i] + + // if TabIndent is set, use tabs to pad leading empty cells + useTabs := b.flags&TabIndent != 0 + + for j, c := range line { + if j > 0 && b.flags&Debug != 0 { + // indicate column break + b.write0(vbar) + } + + if c.size == 0 { + // empty cell + if j < len(b.widths) { + b.writePadding(c.width, b.widths[j], useTabs) + } + } else { + // non-empty cell + useTabs = false + if b.flags&AlignRight == 0 { // align left + b.write0(b.buf[pos : pos+c.size]) + pos += c.size + if j < len(b.widths) { + b.writePadding(c.width, b.widths[j], false) + } + } else { // align right + if j < len(b.widths) { 
+ b.writePadding(c.width, b.widths[j], false) + } + b.write0(b.buf[pos : pos+c.size]) + pos += c.size + } + } + } + + if i+1 == len(b.lines) { + // last buffered line - we don't have a newline, so just write + // any outstanding buffered data + b.write0(b.buf[pos : pos+b.cell.size]) + pos += b.cell.size + } else { + // not the last line - write newline + b.write0(newline) + } + } + return +} + +// Format the text between line0 and line1 (excluding line1); pos +// is the buffer position corresponding to the beginning of line0. +// Returns the buffer position corresponding to the beginning of +// line1 and an error, if any. +// +func (b *Writer) format(pos0 int, line0, line1 int) (pos int) { + pos = pos0 + column := len(b.widths) + for this := line0; this < line1; this++ { + line := b.lines[this] + + if column >= len(line)-1 { + continue + } + // cell exists in this column => this line + // has more cells than the previous line + // (the last cell per line is ignored because cells are + // tab-terminated; the last cell per line describes the + // text before the newline/formfeed and does not belong + // to a column) + + // print unprinted lines until beginning of block + pos = b.writeLines(pos, line0, this) + line0 = this + + // column block begin + width := b.minwidth // minimal column width + discardable := true // true if all cells in this column are empty and "soft" + for ; this < line1; this++ { + line = b.lines[this] + if column >= len(line)-1 { + break + } + // cell exists in this column + c := line[column] + // update width + if w := c.width + b.padding; w > width { + width = w + } + // update discardable + if c.width > 0 || c.htab { + discardable = false + } + } + // column block end + + // discard empty columns if necessary + if discardable && b.flags&DiscardEmptyColumns != 0 { + width = 0 + } + + if b.flags&RememberWidths != 0 { + if len(b.maxwidths) < len(b.widths) { + b.maxwidths = append(b.maxwidths, b.widths[len(b.maxwidths):]...) + } + + switch { + case len(b.maxwidths) == len(b.widths): + b.maxwidths = append(b.maxwidths, width) + case b.maxwidths[len(b.widths)] > width: + width = b.maxwidths[len(b.widths)] + case b.maxwidths[len(b.widths)] < width: + b.maxwidths[len(b.widths)] = width + } + } + + // format and print all columns to the right of this column + // (we know the widths of this column and all columns to the left) + b.widths = append(b.widths, width) // push width + pos = b.format(pos, line0, this) + b.widths = b.widths[0 : len(b.widths)-1] // pop width + line0 = this + } + + // print unprinted lines until end + return b.writeLines(pos, line0, line1) +} + +// Append text to current cell. +func (b *Writer) append(text []byte) { + b.buf = append(b.buf, text...) + b.cell.size += len(text) +} + +// Update the cell width. +func (b *Writer) updateWidth() { + b.cell.width += utf8.RuneCount(b.buf[b.pos:]) + b.pos = len(b.buf) +} + +// To escape a text segment, bracket it with Escape characters. +// For instance, the tab in this string "Ignore this tab: \xff\t\xff" +// does not terminate a cell and constitutes a single character of +// width one for formatting purposes. +// +// The value 0xff was chosen because it cannot appear in a valid UTF-8 sequence. +// +const Escape = '\xff' + +// Start escaped mode. +func (b *Writer) startEscape(ch byte) { + switch ch { + case Escape: + b.endChar = Escape + case '<': + b.endChar = '>' + case '&': + b.endChar = ';' + } +} + +// Terminate escaped mode. 
If the escaped text was an HTML tag, its width +// is assumed to be zero for formatting purposes; if it was an HTML entity, +// its width is assumed to be one. In all other cases, the width is the +// unicode width of the text. +// +func (b *Writer) endEscape() { + switch b.endChar { + case Escape: + b.updateWidth() + if b.flags&StripEscape == 0 { + b.cell.width -= 2 // don't count the Escape chars + } + case '>': // tag of zero width + case ';': + b.cell.width++ // entity, count as one rune + } + b.pos = len(b.buf) + b.endChar = 0 +} + +// Terminate the current cell by adding it to the list of cells of the +// current line. Returns the number of cells in that line. +// +func (b *Writer) terminateCell(htab bool) int { + b.cell.htab = htab + line := &b.lines[len(b.lines)-1] + *line = append(*line, b.cell) + b.cell = cell{} + return len(*line) +} + +func handlePanic(err *error, op string) { + if e := recover(); e != nil { + if nerr, ok := e.(osError); ok { + *err = nerr.err + return + } + panic("tabwriter: panic during " + op) + } +} + +// RememberedWidths returns a copy of the remembered per-column maximum widths. +// Requires use of the RememberWidths flag, and is not threadsafe. +func (b *Writer) RememberedWidths() []int { + retval := make([]int, len(b.maxwidths)) + copy(retval, b.maxwidths) + return retval +} + +// SetRememberedWidths sets the remembered per-column maximum widths. +// Requires use of the RememberWidths flag, and is not threadsafe. +func (b *Writer) SetRememberedWidths(widths []int) *Writer { + b.maxwidths = make([]int, len(widths)) + copy(b.maxwidths, widths) + return b +} + +// Flush should be called after the last call to Write to ensure +// that any data buffered in the Writer is written to output. Any +// incomplete escape sequence at the end is considered +// complete for formatting purposes. +func (b *Writer) Flush() error { + return b.flush() +} + +func (b *Writer) flush() (err error) { + defer b.reset() // even in the presence of errors + defer handlePanic(&err, "Flush") + + // add current cell if not empty + if b.cell.size > 0 { + if b.endChar != 0 { + // inside escape - terminate it even if incomplete + b.endEscape() + } + b.terminateCell(false) + } + + // format contents of buffer + b.format(0, 0, len(b.lines)) + return nil +} + +var hbar = []byte("---\n") + +// Write writes buf to the writer b. +// The only errors returned are ones encountered +// while writing to the underlying output stream. +// +func (b *Writer) Write(buf []byte) (n int, err error) { + defer handlePanic(&err, "Write") + + // split text into cells + n = 0 + for i, ch := range buf { + if b.endChar == 0 { + // outside escape + switch ch { + case '\t', '\v', '\n', '\f': + // end of cell + b.append(buf[n:i]) + b.updateWidth() + n = i + 1 // ch consumed + ncells := b.terminateCell(ch == '\t') + if ch == '\n' || ch == '\f' { + // terminate line + b.addLine(ch == '\f') + if ch == '\f' || ncells == 1 { + // A '\f' always forces a flush. Otherwise, if the previous + // line has only one cell which does not have an impact on + // the formatting of the following lines (the last cell per + // line is ignored by format()), thus we can flush the + // Writer contents. 
+ if err = b.Flush(); err != nil { + return + } + if ch == '\f' && b.flags&Debug != 0 { + // indicate section break + b.write0(hbar) + } + } + } + + case Escape: + // start of escaped sequence + b.append(buf[n:i]) + b.updateWidth() + n = i + if b.flags&StripEscape != 0 { + n++ // strip Escape + } + b.startEscape(Escape) + + case '<', '&': + // possibly an html tag/entity + if b.flags&FilterHTML != 0 { + // begin of tag/entity + b.append(buf[n:i]) + b.updateWidth() + n = i + b.startEscape(ch) + } + } + + } else { + // inside escape + if ch == b.endChar { + // end of tag/entity + j := i + 1 + if ch == Escape && b.flags&StripEscape != 0 { + j = i // strip Escape + } + b.append(buf[n:j]) + n = i + 1 // ch consumed + b.endEscape() + } + } + } + + // append leftover text + b.append(buf[n:]) + n = len(buf) + return +} + +// NewWriter allocates and initializes a new tabwriter.Writer. +// The parameters are the same as for the Init function. +// +func NewWriter(output io.Writer, minwidth, tabwidth, padding int, padchar byte, flags uint) *Writer { + return new(Writer).Init(output, minwidth, tabwidth, padding, padchar, flags) +} diff --git a/vendor/github.com/monochromegane/go-gitignore/.travis.yml b/vendor/github.com/monochromegane/go-gitignore/.travis.yml new file mode 100644 index 0000000000..b06a36a466 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/.travis.yml @@ -0,0 +1,6 @@ +language: go +go: + - 1.14.x + - master +script: + - go test -v ./... diff --git a/vendor/github.com/monochromegane/go-gitignore/LICENSE b/vendor/github.com/monochromegane/go-gitignore/LICENSE new file mode 100644 index 0000000000..91b84e9277 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) [2015] [go-gitignore] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/monochromegane/go-gitignore/README.md b/vendor/github.com/monochromegane/go-gitignore/README.md new file mode 100644 index 0000000000..51a480747b --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/README.md @@ -0,0 +1,95 @@ +# go-gitignore [![Build Status](https://travis-ci.org/monochromegane/go-gitignore.svg)](https://travis-ci.org/monochromegane/go-gitignore) + +A fast gitignore matching library for Go. + +This library use simple tree index for matching, so keep fast if gitignore file has many pattern. 
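+
+For example, here is a minimal sketch that feeds a handful of illustrative rules to `NewGitIgnoreFromReader` (the rule set and the `"."` base directory are arbitrary choices for the illustration):
+
+```go
+package main
+
+import (
+	"fmt"
+	"strings"
+
+	gitignore "github.com/monochromegane/go-gitignore"
+)
+
+func main() {
+	// Illustrative rules: a glob, a rooted directory pattern, and a negation.
+	rules := strings.NewReader("*.log\n/build/\n!keep.log\n")
+	matcher := gitignore.NewGitIgnoreFromReader(".", rules)
+
+	fmt.Println(matcher.Match("debug.log", false)) // true: matches *.log
+	fmt.Println(matcher.Match("keep.log", false))  // false: re-accepted by !keep.log
+	fmt.Println(matcher.Match("build", true))      // true: matches the /build/ directory pattern
+}
+```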
+ +## Usage + +```go +gitignore, _ := gitignore.NewGitIgnore("/path/to/gitignore") + +path := "/path/to/file" +isDir := false +gitignore.Match(path, isDir) +``` + +### Specify base directory + +go-gitignore treat `path` as a base directory. +If you want to specify other base (e.g. current directory and Global gitignore), you can like the following. + +```go +gitignore, _ := gitignore.NewGitIgnore("/home/you/.gitignore", ".") +``` + +### From io.Reader + +go-gitignore can initialize from io.Reader. + +```go +gitignore, _ := gitignore.NewGitIgnoreFromReader(base, reader) +``` + +## Simple tree index + +go-gitignore parse gitignore file, and generate a simple tree index for matching like the following. + +``` +. +├── accept +│   ├── absolute +│   │   └── depth +│   │   ├── initial +│   │   └── other +│   └── relative +│   └── depth +│   ├── initial +│   └── other +└── ignore + ├── absolute + │   └── depth + │   ├── initial + │   └── other + └── relative + └── depth + ├── initial + └── other +``` + +## Features + +- Support absolute path (/path/to/ignore) +- Support relative path (path/to/ignore) +- Support accept pattern (!path/to/accept) +- Support directory pattern (path/to/directory/) +- Support glob pattern (path/to/\*.txt) + +*note: glob pattern* + +go-gitignore use [filepath.Match](https://golang.org/pkg/path/filepath/#Match) for matching meta char pattern, so not support recursive pattern (path/`**`/file). + +## Installation + +```sh +$ go get github.com/monochromegane/go-gitignore +``` + +## Contribution + +1. Fork it +2. Create a feature branch +3. Commit your changes +4. Rebase your local changes against the master branch +5. Run test suite with the `go test ./...` command and confirm that it passes +6. Run `gofmt -s` +7. Create new Pull Request + +## License + +[MIT](https://github.com/monochromegane/go-gitignore/blob/master/LICENSE) + +## Author + +[monochromegane](https://github.com/monochromegane) + diff --git a/vendor/github.com/monochromegane/go-gitignore/depth_holder.go b/vendor/github.com/monochromegane/go-gitignore/depth_holder.go new file mode 100644 index 0000000000..9805b325d4 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/depth_holder.go @@ -0,0 +1,79 @@ +package gitignore + +import "strings" + +const ( + asc = iota + desc +) + +type depthPatternHolder struct { + patterns depthPatterns + order int +} + +func newDepthPatternHolder(order int) depthPatternHolder { + return depthPatternHolder{ + patterns: depthPatterns{m: map[int]initialPatternHolder{}}, + order: order, + } +} + +func (h *depthPatternHolder) add(pattern string) { + count := strings.Count(strings.Trim(pattern, "/"), "/") + h.patterns.set(count+1, pattern) +} + +func (h depthPatternHolder) match(path string, isDir bool) bool { + if h.patterns.size() == 0 { + return false + } + + for depth := 1; ; depth++ { + var part string + var isLast, isDirCurrent bool + if h.order == asc { + part, isLast = cutN(path, depth) + if isLast { + isDirCurrent = isDir + } else { + isDirCurrent = false + } + } else { + part, isLast = cutLastN(path, depth) + isDirCurrent = isDir + } + if patterns, ok := h.patterns.get(depth); ok { + if patterns.match(part, isDirCurrent) { + return true + } + } + if isLast { + break + } + } + return false +} + +type depthPatterns struct { + m map[int]initialPatternHolder +} + +func (p *depthPatterns) set(depth int, pattern string) { + if ps, ok := p.m[depth]; ok { + ps.add(pattern) + } else { + holder := newInitialPatternHolder() + holder.add(pattern) + p.m[depth] = holder + } +} 
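+
+// depthPatterns buckets patterns by the number of path separators they contain
+// ("a/b/c.txt" lands in the depth-3 bucket), so a lookup only consults the
+// bucket for the depth currently being examined instead of scanning every
+// pattern. This is the "simple tree index" described in the README.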
+ +func (p depthPatterns) get(depth int) (initialPatternHolder, bool) { + patterns, ok := p.m[depth] + return patterns, ok +} + +func (p depthPatterns) size() int { + return len(p.m) +} diff --git a/vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go b/vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go new file mode 100644 index 0000000000..8c04ef3a77 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go @@ -0,0 +1,31 @@ +package gitignore + +import "strings" + +// Only benchmark use +type fullScanPatterns struct { + absolute patterns + relative patterns +} + +func newFullScanPatterns() *fullScanPatterns { + return &fullScanPatterns{ + absolute: patterns{}, + relative: patterns{}, + } +} + +func (ps *fullScanPatterns) add(pattern string) { + if strings.HasPrefix(pattern, "/") { + ps.absolute.add(newPattern(pattern)) + } else { + ps.relative.add(newPattern(pattern)) + } +} + +func (ps fullScanPatterns) match(path string, isDir bool) bool { + if ps.absolute.match(path, isDir) { + return true + } + return ps.relative.match(path, isDir) +} diff --git a/vendor/github.com/monochromegane/go-gitignore/gitignore.go b/vendor/github.com/monochromegane/go-gitignore/gitignore.go new file mode 100644 index 0000000000..9c719a6cab --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/gitignore.go @@ -0,0 +1,80 @@ +package gitignore + +import ( + "bufio" + "io" + "os" + "path/filepath" + "strings" +) + +type IgnoreMatcher interface { + Match(path string, isDir bool) bool +} + +type DummyIgnoreMatcher bool + +func (d DummyIgnoreMatcher) Match(path string, isDir bool) bool { + return bool(d) +} + +type gitIgnore struct { + ignorePatterns scanStrategy + acceptPatterns scanStrategy + path string +} + +func NewGitIgnore(gitignore string, base ...string) (IgnoreMatcher, error) { + var path string + if len(base) > 0 { + path = base[0] + } else { + path = filepath.Dir(gitignore) + } + + file, err := os.Open(gitignore) + if err != nil { + return nil, err + } + defer file.Close() + + return NewGitIgnoreFromReader(path, file), nil +} + +func NewGitIgnoreFromReader(path string, r io.Reader) IgnoreMatcher { + g := gitIgnore{ + ignorePatterns: newIndexScanPatterns(), + acceptPatterns: newIndexScanPatterns(), + path: path, + } + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := strings.Trim(scanner.Text(), " ") + if len(line) == 0 || strings.HasPrefix(line, "#") { + continue + } + if strings.HasPrefix(line, `\#`) { + line = strings.TrimPrefix(line, `\`) + } + + if strings.HasPrefix(line, "!") { + g.acceptPatterns.add(strings.TrimPrefix(line, "!")) + } else { + g.ignorePatterns.add(line) + } + } + return g +} + +func (g gitIgnore) Match(path string, isDir bool) bool { + relativePath, err := filepath.Rel(g.path, path) + if err != nil { + return false + } + relativePath = filepath.ToSlash(relativePath) + + if g.acceptPatterns.match(relativePath, isDir) { + return false + } + return g.ignorePatterns.match(relativePath, isDir) +} diff --git a/vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go b/vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go new file mode 100644 index 0000000000..882280e953 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go @@ -0,0 +1,35 @@ +package gitignore + +import "strings" + +type indexScanPatterns struct { + absolute depthPatternHolder + relative depthPatternHolder +} + +func newIndexScanPatterns() *indexScanPatterns { + return 
&indexScanPatterns{ + absolute: newDepthPatternHolder(asc), + relative: newDepthPatternHolder(desc), + } +} + +func (ps *indexScanPatterns) add(pattern string) { + if strings.HasPrefix(pattern, "/") { + ps.absolute.add(pattern) + } else { + ps.relative.add(pattern) + } +} + +func (ps indexScanPatterns) match(path string, isDir bool) bool { + if ps.absolute.match(path, isDir) { + return true + } + return ps.relative.match(path, isDir) +} + +type scanStrategy interface { + add(pattern string) + match(path string, isDir bool) bool +} diff --git a/vendor/github.com/monochromegane/go-gitignore/initial_holder.go b/vendor/github.com/monochromegane/go-gitignore/initial_holder.go new file mode 100644 index 0000000000..86f0bfee2b --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/initial_holder.go @@ -0,0 +1,62 @@ +package gitignore + +import "strings" + +const initials = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ." + +type initialPatternHolder struct { + patterns initialPatterns + otherPatterns *patterns +} + +func newInitialPatternHolder() initialPatternHolder { + return initialPatternHolder{ + patterns: initialPatterns{m: map[byte]*patterns{}}, + otherPatterns: &patterns{}, + } +} + +func (h *initialPatternHolder) add(pattern string) { + trimedPattern := strings.TrimPrefix(pattern, "/") + if strings.IndexAny(trimedPattern[0:1], initials) != -1 { + h.patterns.set(trimedPattern[0], newPatternForEqualizedPath(pattern)) + } else { + h.otherPatterns.add(newPatternForEqualizedPath(pattern)) + } +} + +func (h initialPatternHolder) match(path string, isDir bool) bool { + if h.patterns.size() == 0 && h.otherPatterns.size() == 0 { + return false + } + if patterns, ok := h.patterns.get(path[0]); ok { + if patterns.match(path, isDir) { + return true + } + } + return h.otherPatterns.match(path, isDir) +} + +type initialPatterns struct { + m map[byte]*patterns +} + +func (p *initialPatterns) set(initial byte, pattern pattern) { + if ps, ok := p.m[initial]; ok { + ps.add(pattern) + } else { + patterns := &patterns{} + patterns.add(pattern) + p.m[initial] = patterns + + } +} + +func (p initialPatterns) get(initial byte) (*patterns, bool) { + patterns, ok := p.m[initial] + return patterns, ok +} + +func (p initialPatterns) size() int { + return len(p.m) +} diff --git a/vendor/github.com/monochromegane/go-gitignore/match.go b/vendor/github.com/monochromegane/go-gitignore/match.go new file mode 100644 index 0000000000..4140a9bdc5 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/match.go @@ -0,0 +1,24 @@ +package gitignore + +import "path/filepath" + +type pathMatcher interface { + match(path string) bool +} + +type simpleMatcher struct { + path string +} + +func (m simpleMatcher) match(path string) bool { + return m.path == path +} + +type filepathMatcher struct { + path string +} + +func (m filepathMatcher) match(path string) bool { + match, _ := filepath.Match(m.path, path) + return match +} diff --git a/vendor/github.com/monochromegane/go-gitignore/pattern.go b/vendor/github.com/monochromegane/go-gitignore/pattern.go new file mode 100644 index 0000000000..93adbf7636 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/pattern.go @@ -0,0 +1,69 @@ +package gitignore + +import ( + "path/filepath" + "strings" +) + +var Separator = string(filepath.Separator) + +type pattern struct { + hasRootPrefix bool + hasDirSuffix bool + pathDepth int + matcher pathMatcher + onlyEqualizedPath bool +} + +func newPattern(path string) pattern { + hasRootPrefix := 
path[0] == '/' + hasDirSuffix := path[len(path)-1] == '/' + + var pathDepth int + if !hasRootPrefix { + pathDepth = strings.Count(path, "/") + } + + var matcher pathMatcher + matchingPath := strings.Trim(path, "/") + if hasMeta(path) { + matcher = filepathMatcher{path: matchingPath} + } else { + matcher = simpleMatcher{path: matchingPath} + } + + return pattern{ + hasRootPrefix: hasRootPrefix, + hasDirSuffix: hasDirSuffix, + pathDepth: pathDepth, + matcher: matcher, + } +} + +func newPatternForEqualizedPath(path string) pattern { + pattern := newPattern(path) + pattern.onlyEqualizedPath = true + return pattern +} + +func (p pattern) match(path string, isDir bool) bool { + + if p.hasDirSuffix && !isDir { + return false + } + + var targetPath string + if p.hasRootPrefix || p.onlyEqualizedPath { + // absolute pattern or only equalized path mode + targetPath = path + } else { + // relative pattern + targetPath = p.equalizeDepth(path) + } + return p.matcher.match(targetPath) +} + +func (p pattern) equalizeDepth(path string) string { + equalizedPath, _ := cutLastN(path, p.pathDepth+1) + return equalizedPath +} diff --git a/vendor/github.com/monochromegane/go-gitignore/patterns.go b/vendor/github.com/monochromegane/go-gitignore/patterns.go new file mode 100644 index 0000000000..6770fb4655 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/patterns.go @@ -0,0 +1,22 @@ +package gitignore + +type patterns struct { + patterns []pattern +} + +func (ps *patterns) add(pattern pattern) { + ps.patterns = append(ps.patterns, pattern) +} + +func (ps *patterns) size() int { + return len(ps.patterns) +} + +func (ps patterns) match(path string, isDir bool) bool { + for _, p := range ps.patterns { + if match := p.match(path, isDir); match { + return true + } + } + return false +} diff --git a/vendor/github.com/monochromegane/go-gitignore/util.go b/vendor/github.com/monochromegane/go-gitignore/util.go new file mode 100644 index 0000000000..b5ab9bbfd2 --- /dev/null +++ b/vendor/github.com/monochromegane/go-gitignore/util.go @@ -0,0 +1,45 @@ +package gitignore + +import ( + "os" + "strings" +) + +func cutN(path string, n int) (string, bool) { + isLast := true + + var i, count int + for i < len(path)-1 { + if os.IsPathSeparator(path[i]) { + count++ + if count >= n { + isLast = false + break + } + } + i++ + } + return path[:i+1], isLast +} + +func cutLastN(path string, n int) (string, bool) { + isLast := true + i := len(path) - 1 + + var count int + for i >= 0 { + if os.IsPathSeparator(path[i]) { + count++ + if count >= n { + isLast = false + break + } + } + i-- + } + return path[i+1:], isLast +} + +func hasMeta(path string) bool { + return strings.IndexAny(path, "*?[") >= 0 +} diff --git a/vendor/github.com/peterbourgon/diskv/LICENSE b/vendor/github.com/peterbourgon/diskv/LICENSE new file mode 100644 index 0000000000..41ce7f16e1 --- /dev/null +++ b/vendor/github.com/peterbourgon/diskv/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011-2012 Peter Bourgon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the 
Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/peterbourgon/diskv/README.md b/vendor/github.com/peterbourgon/diskv/README.md new file mode 100644 index 0000000000..3474739edc --- /dev/null +++ b/vendor/github.com/peterbourgon/diskv/README.md @@ -0,0 +1,141 @@ +# What is diskv? + +Diskv (disk-vee) is a simple, persistent key-value store written in the Go +language. It starts with an incredibly simple API for storing arbitrary data on +a filesystem by key, and builds several layers of performance-enhancing +abstraction on top. The end result is a conceptually simple, but highly +performant, disk-backed storage system. + +[![Build Status][1]][2] + +[1]: https://drone.io/github.com/peterbourgon/diskv/status.png +[2]: https://drone.io/github.com/peterbourgon/diskv/latest + + +# Installing + +Install [Go 1][3], either [from source][4] or [with a prepackaged binary][5]. +Then, + +```bash +$ go get github.com/peterbourgon/diskv +``` + +[3]: http://golang.org +[4]: http://golang.org/doc/install/source +[5]: http://golang.org/doc/install + + +# Usage + +```go +package main + +import ( + "fmt" + "github.com/peterbourgon/diskv" +) + +func main() { + // Simplest transform function: put all the data files into the base dir. + flatTransform := func(s string) []string { return []string{} } + + // Initialize a new diskv store, rooted at "my-data-dir", with a 1MB cache. + d := diskv.New(diskv.Options{ + BasePath: "my-data-dir", + Transform: flatTransform, + CacheSizeMax: 1024 * 1024, + }) + + // Write three bytes to the key "alpha". + key := "alpha" + d.Write(key, []byte{'1', '2', '3'}) + + // Read the value back out of the store. + value, _ := d.Read(key) + fmt.Printf("%v\n", value) + + // Erase the key+value from the store (and the disk). + d.Erase(key) +} +``` + +More complex examples can be found in the "examples" subdirectory. + + +# Theory + +## Basic idea + +At its core, diskv is a map of a key (`string`) to arbitrary data (`[]byte`). +The data is written to a single file on disk, with the same name as the key. +The key determines where that file will be stored, via a user-provided +`TransformFunc`, which takes a key and returns a slice (`[]string`) +corresponding to a path list where the key file will be stored. The simplest +TransformFunc, + +```go +func SimpleTransform (key string) []string { + return []string{} +} +``` + +will place all keys in the same, base directory. The design is inspired by +[Redis diskstore][6]; a TransformFunc which emulates the default diskstore +behavior is available in the content-addressable-storage example. + +[6]: http://groups.google.com/group/redis-db/browse_thread/thread/d444bc786689bde9?pli=1 + +**Note** that your TransformFunc should ensure that one valid key doesn't +transform to a subset of another valid key. That is, it shouldn't be possible +to construct valid keys that resolve to directory names. 
As a concrete example, +if your TransformFunc splits on every 3 characters, then + +```go +d.Write("abcabc", val) // OK: written to /abc/abc/abcabc +d.Write("abc", val) // Error: attempted write to /abc/abc, but it's a directory +``` + +This will be addressed in an upcoming version of diskv. + +Probably the most important design principle behind diskv is that your data is +always flatly available on the disk. diskv will never do anything that would +prevent you from accessing, copying, backing up, or otherwise interacting with +your data via common UNIX commandline tools. + +## Adding a cache + +An in-memory caching layer is provided by combining the BasicStore +functionality with a simple map structure, and keeping it up-to-date as +appropriate. Since the map structure in Go is not threadsafe, it's combined +with a RWMutex to provide safe concurrent access. + +## Adding order + +diskv is a key-value store and therefore inherently unordered. An ordering +system can be injected into the store by passing something which satisfies the +diskv.Index interface. (A default implementation, using Google's +[btree][7] package, is provided.) Basically, diskv keeps an ordered (by a +user-provided Less function) index of the keys, which can be queried. + +[7]: https://github.com/google/btree + +## Adding compression + +Something which implements the diskv.Compression interface may be passed +during store creation, so that all Writes and Reads are filtered through +a compression/decompression pipeline. Several default implementations, +using stdlib compression algorithms, are provided. Note that data is cached +compressed; the cost of decompression is borne with each Read. + +## Streaming + +diskv also now provides ReadStream and WriteStream methods, to allow very large +data to be handled efficiently. + + +# Future plans + + * Needs plenty of robust testing: huge datasets, etc... + * More thorough benchmarking + * Your suggestions for use-cases I haven't thought of diff --git a/vendor/github.com/peterbourgon/diskv/compression.go b/vendor/github.com/peterbourgon/diskv/compression.go new file mode 100644 index 0000000000..5192b02733 --- /dev/null +++ b/vendor/github.com/peterbourgon/diskv/compression.go @@ -0,0 +1,64 @@ +package diskv + +import ( + "compress/flate" + "compress/gzip" + "compress/zlib" + "io" +) + +// Compression is an interface that Diskv uses to implement compression of +// data. Writer takes a destination io.Writer and returns a WriteCloser that +// compresses all data written through it. Reader takes a source io.Reader and +// returns a ReadCloser that decompresses all data read through it. You may +// define these methods on your own type, or use one of the NewCompression +// helpers. +type Compression interface { + Writer(dst io.Writer) (io.WriteCloser, error) + Reader(src io.Reader) (io.ReadCloser, error) +} + +// NewGzipCompression returns a Gzip-based Compression. +func NewGzipCompression() Compression { + return NewGzipCompressionLevel(flate.DefaultCompression) +} + +// NewGzipCompressionLevel returns a Gzip-based Compression with the given level. +func NewGzipCompressionLevel(level int) Compression { + return &genericCompression{ + wf: func(w io.Writer) (io.WriteCloser, error) { return gzip.NewWriterLevel(w, level) }, + rf: func(r io.Reader) (io.ReadCloser, error) { return gzip.NewReader(r) }, + } +} + +// NewZlibCompression returns a Zlib-based Compression. 
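+//
+// A minimal usage sketch (illustrative, echoing the README's "Adding
+// compression" section): pass the result in via Options when constructing
+// the store:
+//
+//	d := diskv.New(diskv.Options{
+//		BasePath:    "my-data-dir",
+//		Compression: diskv.NewZlibCompression(),
+//	})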
+func NewZlibCompression() Compression { + return NewZlibCompressionLevel(flate.DefaultCompression) +} + +// NewZlibCompressionLevel returns a Zlib-based Compression with the given level. +func NewZlibCompressionLevel(level int) Compression { + return NewZlibCompressionLevelDict(level, nil) +} + +// NewZlibCompressionLevelDict returns a Zlib-based Compression with the given +// level, based on the given dictionary. +func NewZlibCompressionLevelDict(level int, dict []byte) Compression { + return &genericCompression{ + func(w io.Writer) (io.WriteCloser, error) { return zlib.NewWriterLevelDict(w, level, dict) }, + func(r io.Reader) (io.ReadCloser, error) { return zlib.NewReaderDict(r, dict) }, + } +} + +type genericCompression struct { + wf func(w io.Writer) (io.WriteCloser, error) + rf func(r io.Reader) (io.ReadCloser, error) +} + +func (g *genericCompression) Writer(dst io.Writer) (io.WriteCloser, error) { + return g.wf(dst) +} + +func (g *genericCompression) Reader(src io.Reader) (io.ReadCloser, error) { + return g.rf(src) +} diff --git a/vendor/github.com/peterbourgon/diskv/diskv.go b/vendor/github.com/peterbourgon/diskv/diskv.go new file mode 100644 index 0000000000..524dc0a6e3 --- /dev/null +++ b/vendor/github.com/peterbourgon/diskv/diskv.go @@ -0,0 +1,624 @@ +// Diskv (disk-vee) is a simple, persistent, key-value store. +// It stores all data flatly on the filesystem. + +package diskv + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "sync" + "syscall" +) + +const ( + defaultBasePath = "diskv" + defaultFilePerm os.FileMode = 0666 + defaultPathPerm os.FileMode = 0777 +) + +var ( + defaultTransform = func(s string) []string { return []string{} } + errCanceled = errors.New("canceled") + errEmptyKey = errors.New("empty key") + errBadKey = errors.New("bad key") + errImportDirectory = errors.New("can't import a directory") +) + +// TransformFunction transforms a key into a slice of strings, with each +// element in the slice representing a directory in the file path where the +// key's entry will eventually be stored. +// +// For example, if TransformFunc transforms "abcdef" to ["ab", "cde", "f"], +// the final location of the data file will be /ab/cde/f/abcdef +type TransformFunction func(s string) []string + +// Options define a set of properties that dictate Diskv behavior. +// All values are optional. +type Options struct { + BasePath string + Transform TransformFunction + CacheSizeMax uint64 // bytes + PathPerm os.FileMode + FilePerm os.FileMode + // If TempDir is set, it will enable filesystem atomic writes by + // writing temporary files to that location before being moved + // to BasePath. + // Note that TempDir MUST be on the same device/partition as + // BasePath. + TempDir string + + Index Index + IndexLess LessFunction + + Compression Compression +} + +// Diskv implements the Diskv interface. You shouldn't construct Diskv +// structures directly; instead, use the New constructor. +type Diskv struct { + Options + mu sync.RWMutex + cache map[string][]byte + cacheSize uint64 +} + +// New returns an initialized Diskv structure, ready to use. +// If the path identified by baseDir already contains data, +// it will be accessible, but not yet cached. 
+func New(o Options) *Diskv { + if o.BasePath == "" { + o.BasePath = defaultBasePath + } + if o.Transform == nil { + o.Transform = defaultTransform + } + if o.PathPerm == 0 { + o.PathPerm = defaultPathPerm + } + if o.FilePerm == 0 { + o.FilePerm = defaultFilePerm + } + + d := &Diskv{ + Options: o, + cache: map[string][]byte{}, + cacheSize: 0, + } + + if d.Index != nil && d.IndexLess != nil { + d.Index.Initialize(d.IndexLess, d.Keys(nil)) + } + + return d +} + +// Write synchronously writes the key-value pair to disk, making it immediately +// available for reads. Write relies on the filesystem to perform an eventual +// sync to physical media. If you need stronger guarantees, see WriteStream. +func (d *Diskv) Write(key string, val []byte) error { + return d.WriteStream(key, bytes.NewBuffer(val), false) +} + +// WriteStream writes the data represented by the io.Reader to the disk, under +// the provided key. If sync is true, WriteStream performs an explicit sync on +// the file as soon as it's written. +// +// bytes.Buffer provides io.Reader semantics for basic data types. +func (d *Diskv) WriteStream(key string, r io.Reader, sync bool) error { + if len(key) <= 0 { + return errEmptyKey + } + + d.mu.Lock() + defer d.mu.Unlock() + + return d.writeStreamWithLock(key, r, sync) +} + +// createKeyFileWithLock either creates the key file directly, or +// creates a temporary file in TempDir if it is set. +func (d *Diskv) createKeyFileWithLock(key string) (*os.File, error) { + if d.TempDir != "" { + if err := os.MkdirAll(d.TempDir, d.PathPerm); err != nil { + return nil, fmt.Errorf("temp mkdir: %s", err) + } + f, err := ioutil.TempFile(d.TempDir, "") + if err != nil { + return nil, fmt.Errorf("temp file: %s", err) + } + + if err := f.Chmod(d.FilePerm); err != nil { + f.Close() // error deliberately ignored + os.Remove(f.Name()) // error deliberately ignored + return nil, fmt.Errorf("chmod: %s", err) + } + return f, nil + } + + mode := os.O_WRONLY | os.O_CREATE | os.O_TRUNC // overwrite if exists + f, err := os.OpenFile(d.completeFilename(key), mode, d.FilePerm) + if err != nil { + return nil, fmt.Errorf("open file: %s", err) + } + return f, nil +} + +// writeStream does no input validation checking. 
+func (d *Diskv) writeStreamWithLock(key string, r io.Reader, sync bool) error { + if err := d.ensurePathWithLock(key); err != nil { + return fmt.Errorf("ensure path: %s", err) + } + + f, err := d.createKeyFileWithLock(key) + if err != nil { + return fmt.Errorf("create key file: %s", err) + } + + wc := io.WriteCloser(&nopWriteCloser{f}) + if d.Compression != nil { + wc, err = d.Compression.Writer(f) + if err != nil { + f.Close() // error deliberately ignored + os.Remove(f.Name()) // error deliberately ignored + return fmt.Errorf("compression writer: %s", err) + } + } + + if _, err := io.Copy(wc, r); err != nil { + f.Close() // error deliberately ignored + os.Remove(f.Name()) // error deliberately ignored + return fmt.Errorf("i/o copy: %s", err) + } + + if err := wc.Close(); err != nil { + f.Close() // error deliberately ignored + os.Remove(f.Name()) // error deliberately ignored + return fmt.Errorf("compression close: %s", err) + } + + if sync { + if err := f.Sync(); err != nil { + f.Close() // error deliberately ignored + os.Remove(f.Name()) // error deliberately ignored + return fmt.Errorf("file sync: %s", err) + } + } + + if err := f.Close(); err != nil { + return fmt.Errorf("file close: %s", err) + } + + if f.Name() != d.completeFilename(key) { + if err := os.Rename(f.Name(), d.completeFilename(key)); err != nil { + os.Remove(f.Name()) // error deliberately ignored + return fmt.Errorf("rename: %s", err) + } + } + + if d.Index != nil { + d.Index.Insert(key) + } + + d.bustCacheWithLock(key) // cache only on read + + return nil +} + +// Import imports the source file into diskv under the destination key. If the +// destination key already exists, it's overwritten. If move is true, the +// source file is removed after a successful import. +func (d *Diskv) Import(srcFilename, dstKey string, move bool) (err error) { + if dstKey == "" { + return errEmptyKey + } + + if fi, err := os.Stat(srcFilename); err != nil { + return err + } else if fi.IsDir() { + return errImportDirectory + } + + d.mu.Lock() + defer d.mu.Unlock() + + if err := d.ensurePathWithLock(dstKey); err != nil { + return fmt.Errorf("ensure path: %s", err) + } + + if move { + if err := syscall.Rename(srcFilename, d.completeFilename(dstKey)); err == nil { + d.bustCacheWithLock(dstKey) + return nil + } else if err != syscall.EXDEV { + // If it failed due to being on a different device, fall back to copying + return err + } + } + + f, err := os.Open(srcFilename) + if err != nil { + return err + } + defer f.Close() + err = d.writeStreamWithLock(dstKey, f, false) + if err == nil && move { + err = os.Remove(srcFilename) + } + return err +} + +// Read reads the key and returns the value. +// If the key is available in the cache, Read won't touch the disk. +// If the key is not in the cache, Read will have the side-effect of +// lazily caching the value. +func (d *Diskv) Read(key string) ([]byte, error) { + rc, err := d.ReadStream(key, false) + if err != nil { + return []byte{}, err + } + defer rc.Close() + return ioutil.ReadAll(rc) +} + +// ReadStream reads the key and returns the value (data) as an io.ReadCloser. +// If the value is cached from a previous read, and direct is false, +// ReadStream will use the cached value. Otherwise, it will return a handle to +// the file on disk, and cache the data on read. +// +// If direct is true, ReadStream will lazily delete any cached value for the +// key, and return a direct handle to the file on disk. 
+// +// If compression is enabled, ReadStream taps into the io.Reader stream prior +// to decompression, and caches the compressed data. +func (d *Diskv) ReadStream(key string, direct bool) (io.ReadCloser, error) { + d.mu.RLock() + defer d.mu.RUnlock() + + if val, ok := d.cache[key]; ok { + if !direct { + buf := bytes.NewBuffer(val) + if d.Compression != nil { + return d.Compression.Reader(buf) + } + return ioutil.NopCloser(buf), nil + } + + go func() { + d.mu.Lock() + defer d.mu.Unlock() + d.uncacheWithLock(key, uint64(len(val))) + }() + } + + return d.readWithRLock(key) +} + +// read ignores the cache, and returns an io.ReadCloser representing the +// decompressed data for the given key, streamed from the disk. Clients should +// acquire a read lock on the Diskv and check the cache themselves before +// calling read. +func (d *Diskv) readWithRLock(key string) (io.ReadCloser, error) { + filename := d.completeFilename(key) + + fi, err := os.Stat(filename) + if err != nil { + return nil, err + } + if fi.IsDir() { + return nil, os.ErrNotExist + } + + f, err := os.Open(filename) + if err != nil { + return nil, err + } + + var r io.Reader + if d.CacheSizeMax > 0 { + r = newSiphon(f, d, key) + } else { + r = &closingReader{f} + } + + var rc = io.ReadCloser(ioutil.NopCloser(r)) + if d.Compression != nil { + rc, err = d.Compression.Reader(r) + if err != nil { + return nil, err + } + } + + return rc, nil +} + +// closingReader provides a Reader that automatically closes the +// embedded ReadCloser when it reaches EOF +type closingReader struct { + rc io.ReadCloser +} + +func (cr closingReader) Read(p []byte) (int, error) { + n, err := cr.rc.Read(p) + if err == io.EOF { + if closeErr := cr.rc.Close(); closeErr != nil { + return n, closeErr // close must succeed for Read to succeed + } + } + return n, err +} + +// siphon is like a TeeReader: it copies all data read through it to an +// internal buffer, and moves that buffer to the cache at EOF. +type siphon struct { + f *os.File + d *Diskv + key string + buf *bytes.Buffer +} + +// newSiphon constructs a siphoning reader that represents the passed file. +// When a successful series of reads ends in an EOF, the siphon will write +// the buffered data to Diskv's cache under the given key. +func newSiphon(f *os.File, d *Diskv, key string) io.Reader { + return &siphon{ + f: f, + d: d, + key: key, + buf: &bytes.Buffer{}, + } +} + +// Read implements the io.Reader interface for siphon. +func (s *siphon) Read(p []byte) (int, error) { + n, err := s.f.Read(p) + + if err == nil { + return s.buf.Write(p[0:n]) // Write must succeed for Read to succeed + } + + if err == io.EOF { + s.d.cacheWithoutLock(s.key, s.buf.Bytes()) // cache may fail + if closeErr := s.f.Close(); closeErr != nil { + return n, closeErr // close must succeed for Read to succeed + } + return n, err + } + + return n, err +} + +// Erase synchronously erases the given key from the disk and the cache. +func (d *Diskv) Erase(key string) error { + d.mu.Lock() + defer d.mu.Unlock() + + d.bustCacheWithLock(key) + + // erase from index + if d.Index != nil { + d.Index.Delete(key) + } + + // erase from disk + filename := d.completeFilename(key) + if s, err := os.Stat(filename); err == nil { + if s.IsDir() { + return errBadKey + } + if err = os.Remove(filename); err != nil { + return err + } + } else { + // Return err as-is so caller can do os.IsNotExist(err). 
+ return err + } + + // clean up and return + d.pruneDirsWithLock(key) + return nil +} + +// EraseAll will delete all of the data from the store, both in the cache and on +// the disk. Note that EraseAll doesn't distinguish diskv-related data from non- +// diskv-related data. Care should be taken to always specify a diskv base +// directory that is exclusively for diskv data. +func (d *Diskv) EraseAll() error { + d.mu.Lock() + defer d.mu.Unlock() + d.cache = make(map[string][]byte) + d.cacheSize = 0 + if d.TempDir != "" { + os.RemoveAll(d.TempDir) // errors ignored + } + return os.RemoveAll(d.BasePath) +} + +// Has returns true if the given key exists. +func (d *Diskv) Has(key string) bool { + d.mu.Lock() + defer d.mu.Unlock() + + if _, ok := d.cache[key]; ok { + return true + } + + filename := d.completeFilename(key) + s, err := os.Stat(filename) + if err != nil { + return false + } + if s.IsDir() { + return false + } + + return true +} + +// Keys returns a channel that will yield every key accessible by the store, +// in undefined order. If a cancel channel is provided, closing it will +// terminate and close the keys channel. +func (d *Diskv) Keys(cancel <-chan struct{}) <-chan string { + return d.KeysPrefix("", cancel) +} + +// KeysPrefix returns a channel that will yield every key accessible by the +// store with the given prefix, in undefined order. If a cancel channel is +// provided, closing it will terminate and close the keys channel. If the +// provided prefix is the empty string, all keys will be yielded. +func (d *Diskv) KeysPrefix(prefix string, cancel <-chan struct{}) <-chan string { + var prepath string + if prefix == "" { + prepath = d.BasePath + } else { + prepath = d.pathFor(prefix) + } + c := make(chan string) + go func() { + filepath.Walk(prepath, walker(c, prefix, cancel)) + close(c) + }() + return c +} + +// walker returns a function which satisfies the filepath.WalkFunc interface. +// It sends every non-directory file entry down the channel c. +func walker(c chan<- string, prefix string, cancel <-chan struct{}) filepath.WalkFunc { + return func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + if info.IsDir() || !strings.HasPrefix(info.Name(), prefix) { + return nil // "pass" + } + + select { + case c <- info.Name(): + case <-cancel: + return errCanceled + } + + return nil + } +} + +// pathFor returns the absolute path for location on the filesystem where the +// data for the given key will be stored. +func (d *Diskv) pathFor(key string) string { + return filepath.Join(d.BasePath, filepath.Join(d.Transform(key)...)) +} + +// ensurePathWithLock is a helper function that generates all necessary +// directories on the filesystem for the given key. +func (d *Diskv) ensurePathWithLock(key string) error { + return os.MkdirAll(d.pathFor(key), d.PathPerm) +} + +// completeFilename returns the absolute path to the file for the given key. +func (d *Diskv) completeFilename(key string) string { + return filepath.Join(d.pathFor(key), key) +} + +// cacheWithLock attempts to cache the given key-value pair in the store's +// cache. It can fail if the value is larger than the cache's maximum size. 
+func (d *Diskv) cacheWithLock(key string, val []byte) error { + valueSize := uint64(len(val)) + if err := d.ensureCacheSpaceWithLock(valueSize); err != nil { + return fmt.Errorf("%s; not caching", err) + } + + // be very strict about memory guarantees + if (d.cacheSize + valueSize) > d.CacheSizeMax { + panic(fmt.Sprintf("failed to make room for value (%d/%d)", valueSize, d.CacheSizeMax)) + } + + d.cache[key] = val + d.cacheSize += valueSize + return nil +} + +// cacheWithoutLock acquires the store's (write) mutex and calls cacheWithLock. +func (d *Diskv) cacheWithoutLock(key string, val []byte) error { + d.mu.Lock() + defer d.mu.Unlock() + return d.cacheWithLock(key, val) +} + +func (d *Diskv) bustCacheWithLock(key string) { + if val, ok := d.cache[key]; ok { + d.uncacheWithLock(key, uint64(len(val))) + } +} + +func (d *Diskv) uncacheWithLock(key string, sz uint64) { + d.cacheSize -= sz + delete(d.cache, key) +} + +// pruneDirsWithLock deletes empty directories in the path walk leading to the +// key k. Typically this function is called after an Erase is made. +func (d *Diskv) pruneDirsWithLock(key string) error { + pathlist := d.Transform(key) + for i := range pathlist { + dir := filepath.Join(d.BasePath, filepath.Join(pathlist[:len(pathlist)-i]...)) + + // thanks to Steven Blenkinsop for this snippet + switch fi, err := os.Stat(dir); true { + case err != nil: + return err + case !fi.IsDir(): + panic(fmt.Sprintf("corrupt dirstate at %s", dir)) + } + + nlinks, err := filepath.Glob(filepath.Join(dir, "*")) + if err != nil { + return err + } else if len(nlinks) > 0 { + return nil // has subdirs -- do not prune + } + if err = os.Remove(dir); err != nil { + return err + } + } + + return nil +} + +// ensureCacheSpaceWithLock deletes entries from the cache in arbitrary order +// until the cache has at least valueSize bytes available. +func (d *Diskv) ensureCacheSpaceWithLock(valueSize uint64) error { + if valueSize > d.CacheSizeMax { + return fmt.Errorf("value size (%d bytes) too large for cache (%d bytes)", valueSize, d.CacheSizeMax) + } + + safe := func() bool { return (d.cacheSize + valueSize) <= d.CacheSizeMax } + + for key, val := range d.cache { + if safe() { + break + } + + d.uncacheWithLock(key, uint64(len(val))) + } + + if !safe() { + panic(fmt.Sprintf("%d bytes still won't fit in the cache! (max %d bytes)", valueSize, d.CacheSizeMax)) + } + + return nil +} + +// nopWriteCloser wraps an io.Writer and provides a no-op Close method to +// satisfy the io.WriteCloser interface. +type nopWriteCloser struct { + io.Writer +} + +func (wc *nopWriteCloser) Write(p []byte) (int, error) { return wc.Writer.Write(p) } +func (wc *nopWriteCloser) Close() error { return nil } diff --git a/vendor/github.com/peterbourgon/diskv/index.go b/vendor/github.com/peterbourgon/diskv/index.go new file mode 100644 index 0000000000..96fee5152b --- /dev/null +++ b/vendor/github.com/peterbourgon/diskv/index.go @@ -0,0 +1,115 @@ +package diskv + +import ( + "sync" + + "github.com/google/btree" +) + +// Index is a generic interface for things that can +// provide an ordered list of keys. +type Index interface { + Initialize(less LessFunction, keys <-chan string) + Insert(key string) + Delete(key string) + Keys(from string, n int) []string +} + +// LessFunction is used to initialize an Index of keys in a specific order. +type LessFunction func(string, string) bool + +// btreeString is a custom data type that satisfies the BTree Less interface, +// making the strings it wraps sortable by the BTree package. 
+type btreeString struct { + s string + l LessFunction +} + +// Less satisfies the BTree.Less interface using the btreeString's LessFunction. +func (s btreeString) Less(i btree.Item) bool { + return s.l(s.s, i.(btreeString).s) +} + +// BTreeIndex is an implementation of the Index interface using google/btree. +type BTreeIndex struct { + sync.RWMutex + LessFunction + *btree.BTree +} + +// Initialize populates the BTree tree with data from the keys channel, +// according to the passed less function. It's destructive to the BTreeIndex. +func (i *BTreeIndex) Initialize(less LessFunction, keys <-chan string) { + i.Lock() + defer i.Unlock() + i.LessFunction = less + i.BTree = rebuild(less, keys) +} + +// Insert inserts the given key (only) into the BTree tree. +func (i *BTreeIndex) Insert(key string) { + i.Lock() + defer i.Unlock() + if i.BTree == nil || i.LessFunction == nil { + panic("uninitialized index") + } + i.BTree.ReplaceOrInsert(btreeString{s: key, l: i.LessFunction}) +} + +// Delete removes the given key (only) from the BTree tree. +func (i *BTreeIndex) Delete(key string) { + i.Lock() + defer i.Unlock() + if i.BTree == nil || i.LessFunction == nil { + panic("uninitialized index") + } + i.BTree.Delete(btreeString{s: key, l: i.LessFunction}) +} + +// Keys yields a maximum of n keys in order. If the passed 'from' key is empty, +// Keys will return the first n keys. If the passed 'from' key is non-empty, the +// first key in the returned slice will be the key that immediately follows the +// passed key, in key order. +func (i *BTreeIndex) Keys(from string, n int) []string { + i.RLock() + defer i.RUnlock() + + if i.BTree == nil || i.LessFunction == nil { + panic("uninitialized index") + } + + if i.BTree.Len() <= 0 { + return []string{} + } + + btreeFrom := btreeString{s: from, l: i.LessFunction} + skipFirst := true + if len(from) <= 0 || !i.BTree.Has(btreeFrom) { + // no such key, so fabricate an always-smallest item + btreeFrom = btreeString{s: "", l: func(string, string) bool { return true }} + skipFirst = false + } + + keys := []string{} + iterator := func(i btree.Item) bool { + keys = append(keys, i.(btreeString).s) + return len(keys) < n + } + i.BTree.AscendGreaterOrEqual(btreeFrom, iterator) + + if skipFirst && len(keys) > 0 { + keys = keys[1:] + } + + return keys +} + +// rebuildIndex does the work of regenerating the index +// with the given keys. +func rebuild(less LessFunction, keys <-chan string) *btree.BTree { + tree := btree.New(2) + for key := range keys { + tree.ReplaceOrInsert(btreeString{s: key, l: less}) + } + return tree +} diff --git a/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/pmezard/go-difflib/LICENSE new file mode 100644 index 0000000000..c67dad612a --- /dev/null +++ b/vendor/github.com/pmezard/go-difflib/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Patrick Mezard +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + The names of its contributors may not be used to endorse or promote +products derived from this software without specific prior written +permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go new file mode 100644 index 0000000000..003e99fadb --- /dev/null +++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go @@ -0,0 +1,772 @@ +// Package difflib is a partial port of Python difflib module. +// +// It provides tools to compare sequences of strings and generate textual diffs. +// +// The following class and functions have been ported: +// +// - SequenceMatcher +// +// - unified_diff +// +// - context_diff +// +// Getting unified diffs was the main goal of the port. Keep in mind this code +// is mostly suitable to output text differences in a human friendly way, there +// are no guarantees generated diffs are consumable by patch(1). +package difflib + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strings" +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func calculateRatio(matches, length int) float64 { + if length > 0 { + return 2.0 * float64(matches) / float64(length) + } + return 1.0 +} + +type Match struct { + A int + B int + Size int +} + +type OpCode struct { + Tag byte + I1 int + I2 int + J1 int + J2 int +} + +// SequenceMatcher compares sequence of strings. The basic +// algorithm predates, and is a little fancier than, an algorithm +// published in the late 1980's by Ratcliff and Obershelp under the +// hyperbolic name "gestalt pattern matching". The basic idea is to find +// the longest contiguous matching subsequence that contains no "junk" +// elements (R-O doesn't address junk). The same idea is then applied +// recursively to the pieces of the sequences to the left and to the right +// of the matching subsequence. This does not yield minimal edit +// sequences, but does tend to yield matches that "look right" to people. +// +// SequenceMatcher tries to compute a "human-friendly diff" between two +// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the +// longest *contiguous* & junk-free matching subsequence. That's what +// catches peoples' eyes. The Windows(tm) windiff has another interesting +// notion, pairing up elements that appear uniquely in each sequence. +// That, and the method here, appear to yield more intuitive difference +// reports than does diff. This method appears to be the least vulnerable +// to synching up on blocks of "junk lines", though (like blank lines in +// ordinary text files, or maybe "
<P>
" lines in HTML files). That may be +// because this is the only method of the 3 that has a *concept* of +// "junk" . +// +// Timing: Basic R-O is cubic time worst case and quadratic time expected +// case. SequenceMatcher is quadratic time for the worst case and has +// expected-case behavior dependent in a complicated way on how many +// elements the sequences have in common; best case time is linear. +type SequenceMatcher struct { + a []string + b []string + b2j map[string][]int + IsJunk func(string) bool + autoJunk bool + bJunk map[string]struct{} + matchingBlocks []Match + fullBCount map[string]int + bPopular map[string]struct{} + opCodes []OpCode +} + +func NewMatcher(a, b []string) *SequenceMatcher { + m := SequenceMatcher{autoJunk: true} + m.SetSeqs(a, b) + return &m +} + +func NewMatcherWithJunk(a, b []string, autoJunk bool, + isJunk func(string) bool) *SequenceMatcher { + + m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} + m.SetSeqs(a, b) + return &m +} + +// Set two sequences to be compared. +func (m *SequenceMatcher) SetSeqs(a, b []string) { + m.SetSeq1(a) + m.SetSeq2(b) +} + +// Set the first sequence to be compared. The second sequence to be compared is +// not changed. +// +// SequenceMatcher computes and caches detailed information about the second +// sequence, so if you want to compare one sequence S against many sequences, +// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other +// sequences. +// +// See also SetSeqs() and SetSeq2(). +func (m *SequenceMatcher) SetSeq1(a []string) { + if &a == &m.a { + return + } + m.a = a + m.matchingBlocks = nil + m.opCodes = nil +} + +// Set the second sequence to be compared. The first sequence to be compared is +// not changed. +func (m *SequenceMatcher) SetSeq2(b []string) { + if &b == &m.b { + return + } + m.b = b + m.matchingBlocks = nil + m.opCodes = nil + m.fullBCount = nil + m.chainB() +} + +func (m *SequenceMatcher) chainB() { + // Populate line -> index mapping + b2j := map[string][]int{} + for i, s := range m.b { + indices := b2j[s] + indices = append(indices, i) + b2j[s] = indices + } + + // Purge junk elements + m.bJunk = map[string]struct{}{} + if m.IsJunk != nil { + junk := m.bJunk + for s, _ := range b2j { + if m.IsJunk(s) { + junk[s] = struct{}{} + } + } + for s, _ := range junk { + delete(b2j, s) + } + } + + // Purge remaining popular elements + popular := map[string]struct{}{} + n := len(m.b) + if m.autoJunk && n >= 200 { + ntest := n/100 + 1 + for s, indices := range b2j { + if len(indices) > ntest { + popular[s] = struct{}{} + } + } + for s, _ := range popular { + delete(b2j, s) + } + } + m.bPopular = popular + m.b2j = b2j +} + +func (m *SequenceMatcher) isBJunk(s string) bool { + _, ok := m.bJunk[s] + return ok +} + +// Find longest matching block in a[alo:ahi] and b[blo:bhi]. +// +// If IsJunk is not defined: +// +// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where +// alo <= i <= i+k <= ahi +// blo <= j <= j+k <= bhi +// and for all (i',j',k') meeting those conditions, +// k >= k' +// i <= i' +// and if i == i', j <= j' +// +// In other words, of all maximal matching blocks, return one that +// starts earliest in a, and of all those maximal matching blocks that +// start earliest in a, return the one that starts earliest in b. +// +// If IsJunk is defined, first the longest matching block is +// determined as above, but with the additional restriction that no +// junk element appears in the block. 
Then that block is extended as +// far as possible by matching (only) junk elements on both sides. So +// the resulting block never matches on junk except as identical junk +// happens to be adjacent to an "interesting" match. +// +// If no blocks match, return (alo, blo, 0). +func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { + // CAUTION: stripping common prefix or suffix would be incorrect. + // E.g., + // ab + // acab + // Longest matching block is "ab", but if common prefix is + // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so + // strip, so ends up claiming that ab is changed to acab by + // inserting "ca" in the middle. That's minimal but unintuitive: + // "it's obvious" that someone inserted "ac" at the front. + // Windiff ends up at the same place as diff, but by pairing up + // the unique 'b's and then matching the first two 'a's. + besti, bestj, bestsize := alo, blo, 0 + + // find longest junk-free match + // during an iteration of the loop, j2len[j] = length of longest + // junk-free match ending with a[i-1] and b[j] + j2len := map[int]int{} + for i := alo; i != ahi; i++ { + // look at all instances of a[i] in b; note that because + // b2j has no junk keys, the loop is skipped if a[i] is junk + newj2len := map[int]int{} + for _, j := range m.b2j[m.a[i]] { + // a[i] matches b[j] + if j < blo { + continue + } + if j >= bhi { + break + } + k := j2len[j-1] + 1 + newj2len[j] = k + if k > bestsize { + besti, bestj, bestsize = i-k+1, j-k+1, k + } + } + j2len = newj2len + } + + // Extend the best by non-junk elements on each end. In particular, + // "popular" non-junk elements aren't in b2j, which greatly speeds + // the inner loop above, but also means "the best" match so far + // doesn't contain any junk *or* popular non-junk elements. + for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + !m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + // Now that we have a wholly interesting match (albeit possibly + // empty!), we may as well suck up the matching junk on each + // side of it too. Can't think of a good reason not to, and it + // saves post-processing the (possibly considerable) expense of + // figuring out what to do with it. In the case of an empty + // interesting match, this is clearly the right thing to do, + // because no other kind of match is possible in the regions. + for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + return Match{A: besti, B: bestj, Size: bestsize} +} + +// Return list of triples describing matching subsequences. +// +// Each triple is of the form (i, j, n), and means that +// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in +// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are +// adjacent triples in the list, and the second is not the last triple in the +// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe +// adjacent equal blocks. +// +// The last triple is a dummy, (len(a), len(b), 0), and is the only +// triple with n==0. 
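+//
+// For example, comparing a = ["x", "y", "z"] with b = ["x", "q", "z"] yields
+// the triples (0, 0, 1) and (2, 2, 1), followed by the dummy (3, 3, 0).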
+func (m *SequenceMatcher) GetMatchingBlocks() []Match { + if m.matchingBlocks != nil { + return m.matchingBlocks + } + + var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match + matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { + match := m.findLongestMatch(alo, ahi, blo, bhi) + i, j, k := match.A, match.B, match.Size + if match.Size > 0 { + if alo < i && blo < j { + matched = matchBlocks(alo, i, blo, j, matched) + } + matched = append(matched, match) + if i+k < ahi && j+k < bhi { + matched = matchBlocks(i+k, ahi, j+k, bhi, matched) + } + } + return matched + } + matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) + + // It's possible that we have adjacent equal blocks in the + // matching_blocks list now. + nonAdjacent := []Match{} + i1, j1, k1 := 0, 0, 0 + for _, b := range matched { + // Is this block adjacent to i1, j1, k1? + i2, j2, k2 := b.A, b.B, b.Size + if i1+k1 == i2 && j1+k1 == j2 { + // Yes, so collapse them -- this just increases the length of + // the first block by the length of the second, and the first + // block so lengthened remains the block to compare against. + k1 += k2 + } else { + // Not adjacent. Remember the first block (k1==0 means it's + // the dummy we started with), and make the second block the + // new block to compare against. + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + i1, j1, k1 = i2, j2, k2 + } + } + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + + nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) + m.matchingBlocks = nonAdjacent + return m.matchingBlocks +} + +// Return list of 5-tuples describing how to turn a into b. +// +// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple +// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the +// tuple preceding it, and likewise for j1 == the previous j2. +// +// The tags are characters, with these meanings: +// +// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] +// +// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. +// +// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. +// +// 'e' (equal): a[i1:i2] == b[j1:j2] +func (m *SequenceMatcher) GetOpCodes() []OpCode { + if m.opCodes != nil { + return m.opCodes + } + i, j := 0, 0 + matching := m.GetMatchingBlocks() + opCodes := make([]OpCode, 0, len(matching)) + for _, m := range matching { + // invariant: we've pumped out correct diffs to change + // a[:i] into b[:j], and the next matching block is + // a[ai:ai+size] == b[bj:bj+size]. So we need to pump + // out a diff to change a[i:ai] into b[j:bj], pump out + // the matching block, and move (i,j) beyond the match + ai, bj, size := m.A, m.B, m.Size + tag := byte(0) + if i < ai && j < bj { + tag = 'r' + } else if i < ai { + tag = 'd' + } else if j < bj { + tag = 'i' + } + if tag > 0 { + opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) + } + i, j = ai+size, bj+size + // the list of matching blocks is terminated by a + // sentinel with size 0 + if size > 0 { + opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) + } + } + m.opCodes = opCodes + return m.opCodes +} + +// Isolate change clusters by eliminating ranges with no changes. +// +// Return a generator of groups with up to n lines of context. +// Each group is in the same format as returned by GetOpCodes(). 
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { + if n < 0 { + n = 3 + } + codes := m.GetOpCodes() + if len(codes) == 0 { + codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} + } + // Fixup leading and trailing groups if they show no changes. + if codes[0].Tag == 'e' { + c := codes[0] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} + } + if codes[len(codes)-1].Tag == 'e' { + c := codes[len(codes)-1] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} + } + nn := n + n + groups := [][]OpCode{} + group := []OpCode{} + for _, c := range codes { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + // End the current group and start a new one whenever + // there is a large range with no changes. + if c.Tag == 'e' && i2-i1 > nn { + group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), + j1, min(j2, j1+n)}) + groups = append(groups, group) + group = []OpCode{} + i1, j1 = max(i1, i2-n), max(j1, j2-n) + } + group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) + } + if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { + groups = append(groups, group) + } + return groups +} + +// Return a measure of the sequences' similarity (float in [0,1]). +// +// Where T is the total number of elements in both sequences, and +// M is the number of matches, this is 2.0*M / T. +// Note that this is 1 if the sequences are identical, and 0 if +// they have nothing in common. +// +// .Ratio() is expensive to compute if you haven't already computed +// .GetMatchingBlocks() or .GetOpCodes(), in which case you may +// want to try .QuickRatio() or .RealQuickRation() first to get an +// upper bound. +func (m *SequenceMatcher) Ratio() float64 { + matches := 0 + for _, m := range m.GetMatchingBlocks() { + matches += m.Size + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() relatively quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute. +func (m *SequenceMatcher) QuickRatio() float64 { + // viewing a and b as multisets, set matches to the cardinality + // of their intersection; this counts the number of matches + // without regard to order, so is clearly an upper bound + if m.fullBCount == nil { + m.fullBCount = map[string]int{} + for _, s := range m.b { + m.fullBCount[s] = m.fullBCount[s] + 1 + } + } + + // avail[x] is the number of times x appears in 'b' less the + // number of times we've seen it in 'a' so far ... kinda + avail := map[string]int{} + matches := 0 + for _, s := range m.a { + n, ok := avail[s] + if !ok { + n = m.fullBCount[s] + } + avail[s] = n - 1 + if n > 0 { + matches += 1 + } + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() very quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute than either .Ratio() or .QuickRatio(). 
+func (m *SequenceMatcher) RealQuickRatio() float64 { + la, lb := len(m.a), len(m.b) + return calculateRatio(min(la, lb), la+lb) +} + +// Convert range to the "ed" format +func formatRangeUnified(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 1 { + return fmt.Sprintf("%d", beginning) + } + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + return fmt.Sprintf("%d,%d", beginning, length) +} + +// Unified diff parameters +type UnifiedDiff struct { + A []string // First sequence lines + FromFile string // First file name + FromDate string // First file time + B []string // Second sequence lines + ToFile string // Second file name + ToDate string // Second file time + Eol string // Headers end of line, defaults to LF + Context int // Number of context lines +} + +// Compare two sequences of lines; generate the delta as a unified diff. +// +// Unified diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by 'n' which +// defaults to three. +// +// By default, the diff control lines (those with ---, +++, or @@) are +// created with a trailing newline. This is helpful so that inputs +// created from file.readlines() result in diffs that are suitable for +// file.writelines() since both the inputs and outputs have trailing +// newlines. +// +// For inputs that do not have trailing newlines, set the lineterm +// argument to "" so that the output will be uniformly newline free. +// +// The unidiff format normally has a header for filenames and modification +// times. Any or all of these may be specified using strings for +// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. +// The modification times are normally expressed in the ISO 8601 format. 
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + wf := func(format string, args ...interface{}) error { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + return err + } + ws := func(s string) error { + _, err := buf.WriteString(s) + return err + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) + if err != nil { + return err + } + err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) + if err != nil { + return err + } + } + } + first, last := g[0], g[len(g)-1] + range1 := formatRangeUnified(first.I1, last.I2) + range2 := formatRangeUnified(first.J1, last.J2) + if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { + return err + } + for _, c := range g { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + if c.Tag == 'e' { + for _, line := range diff.A[i1:i2] { + if err := ws(" " + line); err != nil { + return err + } + } + continue + } + if c.Tag == 'r' || c.Tag == 'd' { + for _, line := range diff.A[i1:i2] { + if err := ws("-" + line); err != nil { + return err + } + } + } + if c.Tag == 'r' || c.Tag == 'i' { + for _, line := range diff.B[j1:j2] { + if err := ws("+" + line); err != nil { + return err + } + } + } + } + } + return nil +} + +// Like WriteUnifiedDiff but returns the diff a string. +func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteUnifiedDiff(w, diff) + return string(w.Bytes()), err +} + +// Convert range to the "ed" format. +func formatRangeContext(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + if length <= 1 { + return fmt.Sprintf("%d", beginning) + } + return fmt.Sprintf("%d,%d", beginning, beginning+length-1) +} + +type ContextDiff UnifiedDiff + +// Compare two sequences of lines; generate the delta as a context diff. +// +// Context diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by diff.Context +// which defaults to three. +// +// By default, the diff control lines (those with *** or ---) are +// created with a trailing newline. +// +// For inputs that do not have trailing newlines, set the diff.Eol +// argument to "" so that the output will be uniformly newline free. +// +// The context diff format normally has a header for filenames and +// modification times. Any or all of these may be specified using +// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. +// The modification times are normally expressed in the ISO 8601 format. +// If not specified, the strings default to blanks. 
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + var diffErr error + wf := func(format string, args ...interface{}) { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + if diffErr == nil && err != nil { + diffErr = err + } + } + ws := func(s string) { + _, err := buf.WriteString(s) + if diffErr == nil && err != nil { + diffErr = err + } + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + prefix := map[byte]string{ + 'i': "+ ", + 'd': "- ", + 'r': "! ", + 'e': " ", + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) + wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) + } + } + + first, last := g[0], g[len(g)-1] + ws("***************" + diff.Eol) + + range1 := formatRangeContext(first.I1, last.I2) + wf("*** %s ****%s", range1, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'd' { + for _, cc := range g { + if cc.Tag == 'i' { + continue + } + for _, line := range diff.A[cc.I1:cc.I2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + + range2 := formatRangeContext(first.J1, last.J2) + wf("--- %s ----%s", range2, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'i' { + for _, cc := range g { + if cc.Tag == 'd' { + continue + } + for _, line := range diff.B[cc.J1:cc.J2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + } + return diffErr +} + +// Like WriteContextDiff but returns the diff a string. +func GetContextDiffString(diff ContextDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteContextDiff(w, diff) + return string(w.Bytes()), err +} + +// Split a string on "\n" while preserving them. The output can be used +// as input for UnifiedDiff and ContextDiff structures. +func SplitLines(s string) []string { + lines := strings.SplitAfter(s, "\n") + lines[len(lines)-1] += "\n" + return lines +} diff --git a/vendor/github.com/russross/blackfriday/.gitignore b/vendor/github.com/russross/blackfriday/.gitignore new file mode 100644 index 0000000000..75623dcccb --- /dev/null +++ b/vendor/github.com/russross/blackfriday/.gitignore @@ -0,0 +1,8 @@ +*.out +*.swp +*.8 +*.6 +_obj +_test* +markdown +tags diff --git a/vendor/github.com/russross/blackfriday/.travis.yml b/vendor/github.com/russross/blackfriday/.travis.yml new file mode 100644 index 0000000000..a49fff15ac --- /dev/null +++ b/vendor/github.com/russross/blackfriday/.travis.yml @@ -0,0 +1,18 @@ +sudo: false +language: go +go: + - "1.9.x" + - "1.10.x" + - "1.11.x" + - tip +matrix: + fast_finish: true + allow_failures: + - go: tip +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v -race ./... 
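The `go-difflib` package vendored in this patch (its `difflib.go` appears above) is what testify's assertion output uses to print readable diffs. As reviewer orientation only, and not part of the patch itself, here is a minimal sketch of how that API is typically driven, using the `UnifiedDiff`, `SplitLines`, `GetUnifiedDiffString`, `NewMatcher`, and `Ratio` entry points defined in `difflib.go`; the file names and input strings are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	// Build a unified diff between two line-based sequences. SplitLines keeps
	// the trailing "\n" on each line, which is what UnifiedDiff expects.
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\nthree\nfour\n"),
		FromFile: "a.txt", // illustrative names, not from this patch
		ToFile:   "b.txt",
		Context:  3,
	}
	text, err := difflib.GetUnifiedDiffString(diff)
	if err != nil {
		panic(err)
	}
	fmt.Print(text)

	// The underlying SequenceMatcher can also be used directly, e.g. for a
	// quick similarity score between the two sequences.
	m := difflib.NewMatcher(diff.A, diff.B)
	fmt.Printf("similarity: %.2f\n", m.Ratio())
}
```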
diff --git a/vendor/github.com/russross/blackfriday/LICENSE.txt b/vendor/github.com/russross/blackfriday/LICENSE.txt new file mode 100644 index 0000000000..7fbb253a8e --- /dev/null +++ b/vendor/github.com/russross/blackfriday/LICENSE.txt @@ -0,0 +1,28 @@ +Blackfriday is distributed under the Simplified BSD License: + +Copyright © 2011 Russ Ross +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/russross/blackfriday/README.md b/vendor/github.com/russross/blackfriday/README.md new file mode 100644 index 0000000000..997ef5d429 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/README.md @@ -0,0 +1,364 @@ +Blackfriday +[![Build Status][BuildV2SVG]][BuildV2URL] +[![PkgGoDev][PkgGoDevV2SVG]][PkgGoDevV2URL] +=========== + +Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It +is paranoid about its input (so you can safely feed it user-supplied +data), it is fast, it supports common extensions (tables, smart +punctuation substitutions, etc.), and it is safe for all utf-8 +(unicode) input. + +HTML output is currently supported, along with Smartypants +extensions. + +It started as a translation from C of [Sundown][3]. + + +Installation +------------ + +Blackfriday is compatible with modern Go releases in module mode. +With Go installed: + + go get github.com/russross/blackfriday + +will resolve and add the package to the current development module, +then build and install it. Alternatively, you can achieve the same +if you import it in a package: + + import "github.com/russross/blackfriday" + +and `go get` without parameters. + +Old versions of Go and legacy GOPATH mode might work, +but no effort is made to keep them working. + + +Versions +-------- + +Currently maintained and recommended version of Blackfriday is `v2`. It's being +developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the +documentation is available at +https://pkg.go.dev/github.com/russross/blackfriday/v2. + +It is `go get`-able in module mode at `github.com/russross/blackfriday/v2`. 
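Since the files added under `vendor/github.com/russross/blackfriday` correspond to the legacy v1 API, while the README above also documents v2, the distinction is easiest to see in the import paths. A minimal illustration (the import aliases are mine, and standard Go module resolution is assumed):

```go
import (
	// Legacy v1 — the version vendored by this change.
	blackfridayv1 "github.com/russross/blackfriday"

	// v2 — lives behind a /v2 module path and exposes a different API
	// (blackfriday.Run instead of Markdown/MarkdownCommon).
	blackfridayv2 "github.com/russross/blackfriday/v2"
)
```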
+ +Version 2 offers a number of improvements over v1: + +* Cleaned up API +* A separate call to [`Parse`][4], which produces an abstract syntax tree for + the document +* Latest bug fixes +* Flexibility to easily add your own rendering extensions + +Potential drawbacks: + +* Our benchmarks show v2 to be slightly slower than v1. Currently in the + ballpark of around 15%. +* API breakage. If you can't afford modifying your code to adhere to the new API + and don't care too much about the new features, v2 is probably not for you. +* Several bug fixes are trailing behind and still need to be forward-ported to + v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for + tracking. + +If you are still interested in the legacy `v1`, you can import it from +`github.com/russross/blackfriday`. Documentation for the legacy v1 can be found +here: https://pkg.go.dev/github.com/russross/blackfriday. + + +Usage +----- + +### v1 + +For basic usage, it is as simple as getting your input into a byte +slice and calling: + +```go +output := blackfriday.MarkdownBasic(input) +``` + +This renders it with no extensions enabled. To get a more useful +feature set, use this instead: + +```go +output := blackfriday.MarkdownCommon(input) +``` + +### v2 + +For the most sensible markdown processing, it is as simple as getting your input +into a byte slice and calling: + +```go +output := blackfriday.Run(input) +``` + +Your input will be parsed and the output rendered with a set of most popular +extensions enabled. If you want the most basic feature set, corresponding with +the bare Markdown specification, use: + +```go +output := blackfriday.Run(input, blackfriday.WithNoExtensions()) +``` + +### Sanitize untrusted content + +Blackfriday itself does nothing to protect against malicious content. If you are +dealing with user-supplied markdown, we recommend running Blackfriday's output +through HTML sanitizer such as [Bluemonday][5]. + +Here's an example of simple usage of Blackfriday together with Bluemonday: + +```go +import ( + "github.com/microcosm-cc/bluemonday" + "github.com/russross/blackfriday" +) + +// ... +unsafe := blackfriday.Run(input) +html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) +``` + +### Custom options, v1 + +If you want to customize the set of options, first get a renderer +(currently only the HTML output engine), then use it to +call the more general `Markdown` function. For examples, see the +implementations of `MarkdownBasic` and `MarkdownCommon` in +`markdown.go`. + +### Custom options, v2 + +If you want to customize the set of options, use `blackfriday.WithExtensions`, +`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. + +### `blackfriday-tool` + +You can also check out `blackfriday-tool` for a more complete example +of how to use it. Download and install it using: + + go get github.com/russross/blackfriday-tool + +This is a simple command-line tool that allows you to process a +markdown file using a standalone program. You can also browse the +source directly on github if you are just looking for some example +code: + +* + +Note that if you have not already done so, installing +`blackfriday-tool` will be sufficient to download and install +blackfriday in addition to the tool itself. The tool binary will be +installed in `$GOPATH/bin`. This is a statically-linked binary that +can be copied to wherever you need it without worrying about +dependencies and library versions. 
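The "Custom options, v1" section above defers to the implementations of `MarkdownBasic` and `MarkdownCommon` in `markdown.go` rather than showing code. Because v1 is the version vendored here, a short sketch of that pattern may save reviewers a lookup; the particular flag combination is illustrative only, and the constant names are the v1 ones from `html.go`/`markdown.go`:

```go
import "github.com/russross/blackfriday"

// renderWithCustomOptions is a hypothetical helper showing the v1 pattern:
// build an HTML renderer with the desired flags, then call the general
// Markdown function with an explicit extension set.
func renderWithCustomOptions(input []byte) []byte {
	htmlFlags := blackfriday.HTML_USE_XHTML | blackfriday.HTML_USE_SMARTYPANTS
	renderer := blackfriday.HtmlRenderer(htmlFlags, "", "")

	extensions := blackfriday.EXTENSION_TABLES |
		blackfriday.EXTENSION_FENCED_CODE |
		blackfriday.EXTENSION_AUTOLINK |
		blackfriday.EXTENSION_STRIKETHROUGH

	return blackfriday.Markdown(input, renderer, extensions)
}
```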
+ +### Sanitized anchor names + +Blackfriday includes an algorithm for creating sanitized anchor names +corresponding to a given input text. This algorithm is used to create +anchors for headings when `EXTENSION_AUTO_HEADER_IDS` is enabled. The +algorithm has a specification, so that other packages can create +compatible anchor names and links to those anchors. + +The specification is located at https://pkg.go.dev/github.com/russross/blackfriday#hdr-Sanitized_Anchor_Names. + +[`SanitizedAnchorName`](https://pkg.go.dev/github.com/russross/blackfriday#SanitizedAnchorName) exposes this functionality, and can be used to +create compatible links to the anchor names generated by blackfriday. +This algorithm is also implemented in a small standalone package at +[`github.com/shurcooL/sanitized_anchor_name`](https://pkg.go.dev/github.com/shurcooL/sanitized_anchor_name). It can be useful for clients +that want a small package and don't need full functionality of blackfriday. + + +Features +-------- + +All features of Sundown are supported, including: + +* **Compatibility**. The Markdown v1.0.3 test suite passes with + the `--tidy` option. Without `--tidy`, the differences are + mostly in whitespace and entity escaping, where blackfriday is + more consistent and cleaner. + +* **Common extensions**, including table support, fenced code + blocks, autolinks, strikethroughs, non-strict emphasis, etc. + +* **Safety**. Blackfriday is paranoid when parsing, making it safe + to feed untrusted user input without fear of bad things + happening. The test suite stress tests this and there are no + known inputs that make it crash. If you find one, please let me + know and send me the input that does it. + + NOTE: "safety" in this context means *runtime safety only*. In order to + protect yourself against JavaScript injection in untrusted content, see + [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). + +* **Fast processing**. It is fast enough to render on-demand in + most web applications without having to cache the output. + +* **Thread safety**. You can run multiple parsers in different + goroutines without ill effect. There is no dependence on global + shared state. + +* **Minimal dependencies**. Blackfriday only depends on standard + library packages in Go. The source code is pretty + self-contained, so it is easy to add to any project, including + Google App Engine projects. + +* **Standards compliant**. Output successfully validates using the + W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. + + +Extensions +---------- + +In addition to the standard markdown syntax, this package +implements the following extensions: + +* **Intra-word emphasis supression**. The `_` character is + commonly used inside words when discussing code, so having + markdown interpret it as an emphasis command is usually the + wrong thing. Blackfriday lets you treat all emphasis markers as + normal characters when they occur inside a word. + +* **Tables**. Tables can be created by drawing them in the input + using a simple syntax: + + ``` + Name | Age + --------|------ + Bob | 27 + Alice | 23 + ``` + +* **Fenced code blocks**. In addition to the normal 4-space + indentation to mark code blocks, you can explicitly mark them + and supply a language (to make syntax highlighting simple). 
Just + mark it like this: + + ```go + func getTrue() bool { + return true + } + ``` + + You can use 3 or more backticks to mark the beginning of the + block, and the same number to mark the end of the block. + + To preserve classes of fenced code blocks while using the bluemonday + HTML sanitizer, use the following policy: + + ```go + p := bluemonday.UGCPolicy() + p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code") + html := p.SanitizeBytes(unsafe) + ``` + +* **Definition lists**. A simple definition list is made of a single-line + term followed by a colon and the definition for that term. + + Cat + : Fluffy animal everyone likes + + Internet + : Vector of transmission for pictures of cats + + Terms must be separated from the previous definition by a blank line. + +* **Footnotes**. A marker in the text that will become a superscript number; + a footnote definition that will be placed in a list of footnotes at the + end of the document. A footnote looks like this: + + This is a footnote.[^1] + + [^1]: the footnote text. + +* **Autolinking**. Blackfriday can find URLs that have not been + explicitly marked as links and turn them into links. + +* **Strikethrough**. Use two tildes (`~~`) to mark text that + should be crossed out. + +* **Hard line breaks**. With this extension enabled (it is off by + default in the `MarkdownBasic` and `MarkdownCommon` convenience + functions), newlines in the input translate into line breaks in + the output. + +* **Smart quotes**. Smartypants-style punctuation substitution is + supported, turning normal double- and single-quote marks into + curly quotes, etc. + +* **LaTeX-style dash parsing** is an additional option, where `--` + is translated into `–`, and `---` is translated into + `—`. This differs from most smartypants processors, which + turn a single hyphen into an ndash and a double hyphen into an + mdash. + +* **Smart fractions**, where anything that looks like a fraction + is translated into suitable HTML (instead of just a few special + cases like most smartypant processors). For example, `4/5` + becomes `45`, which renders as + 45. + + +Other renderers +--------------- + +Blackfriday is structured to allow alternative rendering engines. Here +are a few of note: + +* [github_flavored_markdown](https://pkg.go.dev/github.com/shurcooL/github_flavored_markdown): + provides a GitHub Flavored Markdown renderer with fenced code block + highlighting, clickable heading anchor links. + + It's not customizable, and its goal is to produce HTML output + equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), + except the rendering is performed locally. + +* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, + but for markdown. + +* [LaTeX output](https://gitlab.com/ambrevar/blackfriday-latex): + renders output as LaTeX. + +* [bfchroma](https://github.com/Depado/bfchroma/): provides convenience + integration with the [Chroma](https://github.com/alecthomas/chroma) code + highlighting library. bfchroma is only compatible with v2 of Blackfriday and + provides a drop-in renderer ready to use with Blackfriday, as well as + options and means for further customization. + +* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. 
+ +* [Blackfriday-Slack](https://github.com/karriereat/blackfriday-slack): converts markdown to slack message style + + +TODO +---- + +* More unit testing +* Improve Unicode support. It does not understand all Unicode + rules (about what constitutes a letter, a punctuation symbol, + etc.), so it may fail to detect word boundaries correctly in + some instances. It is safe on all UTF-8 input. + + +License +------- + +[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) + + + [1]: https://daringfireball.net/projects/markdown/ "Markdown" + [2]: https://golang.org/ "Go Language" + [3]: https://github.com/vmg/sundown "Sundown" + [4]: https://pkg.go.dev/github.com/russross/blackfriday/v2#Parse "Parse func" + [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" + + [BuildV2SVG]: https://travis-ci.org/russross/blackfriday.svg?branch=v2 + [BuildV2URL]: https://travis-ci.org/russross/blackfriday + [PkgGoDevV2SVG]: https://pkg.go.dev/badge/github.com/russross/blackfriday/v2 + [PkgGoDevV2URL]: https://pkg.go.dev/github.com/russross/blackfriday/v2 diff --git a/vendor/github.com/russross/blackfriday/block.go b/vendor/github.com/russross/blackfriday/block.go new file mode 100644 index 0000000000..563cb29038 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/block.go @@ -0,0 +1,1480 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// Functions to parse block-level elements. +// + +package blackfriday + +import ( + "bytes" + "strings" + "unicode" +) + +// Parse block-level data. +// Note: this function and many that it calls assume that +// the input buffer ends with a newline. +func (p *parser) block(out *bytes.Buffer, data []byte) { + if len(data) == 0 || data[len(data)-1] != '\n' { + panic("block input is missing terminating newline") + } + + // this is called recursively: enforce a maximum depth + if p.nesting >= p.maxNesting { + return + } + p.nesting++ + + // parse out one block-level construct at a time + for len(data) > 0 { + // prefixed header: + // + // # Header 1 + // ## Header 2 + // ... + // ###### Header 6 + if p.isPrefixHeader(data) { + data = data[p.prefixHeader(out, data):] + continue + } + + // block of preformatted HTML: + // + //

<div> + // ... + // </div>
+ if data[0] == '<' { + if i := p.html(out, data, true); i > 0 { + data = data[i:] + continue + } + } + + // title block + // + // % stuff + // % more stuff + // % even more stuff + if p.flags&EXTENSION_TITLEBLOCK != 0 { + if data[0] == '%' { + if i := p.titleBlock(out, data, true); i > 0 { + data = data[i:] + continue + } + } + } + + // blank lines. note: returns the # of bytes to skip + if i := p.isEmpty(data); i > 0 { + data = data[i:] + continue + } + + // indented code block: + // + // func max(a, b int) int { + // if a > b { + // return a + // } + // return b + // } + if p.codePrefix(data) > 0 { + data = data[p.code(out, data):] + continue + } + + // fenced code block: + // + // ``` go info string here + // func fact(n int) int { + // if n <= 1 { + // return n + // } + // return n * fact(n-1) + // } + // ``` + if p.flags&EXTENSION_FENCED_CODE != 0 { + if i := p.fencedCodeBlock(out, data, true); i > 0 { + data = data[i:] + continue + } + } + + // horizontal rule: + // + // ------ + // or + // ****** + // or + // ______ + if p.isHRule(data) { + p.r.HRule(out) + var i int + for i = 0; data[i] != '\n'; i++ { + } + data = data[i:] + continue + } + + // block quote: + // + // > A big quote I found somewhere + // > on the web + if p.quotePrefix(data) > 0 { + data = data[p.quote(out, data):] + continue + } + + // table: + // + // Name | Age | Phone + // ------|-----|--------- + // Bob | 31 | 555-1234 + // Alice | 27 | 555-4321 + if p.flags&EXTENSION_TABLES != 0 { + if i := p.table(out, data); i > 0 { + data = data[i:] + continue + } + } + + // an itemized/unordered list: + // + // * Item 1 + // * Item 2 + // + // also works with + or - + if p.uliPrefix(data) > 0 { + data = data[p.list(out, data, 0):] + continue + } + + // a numbered/ordered list: + // + // 1. Item 1 + // 2. 
Item 2 + if p.oliPrefix(data) > 0 { + data = data[p.list(out, data, LIST_TYPE_ORDERED):] + continue + } + + // definition lists: + // + // Term 1 + // : Definition a + // : Definition b + // + // Term 2 + // : Definition c + if p.flags&EXTENSION_DEFINITION_LISTS != 0 { + if p.dliPrefix(data) > 0 { + data = data[p.list(out, data, LIST_TYPE_DEFINITION):] + continue + } + } + + // anything else must look like a normal paragraph + // note: this finds underlined headers, too + data = data[p.paragraph(out, data):] + } + + p.nesting-- +} + +func (p *parser) isPrefixHeader(data []byte) bool { + if data[0] != '#' { + return false + } + + if p.flags&EXTENSION_SPACE_HEADERS != 0 { + level := 0 + for level < 6 && data[level] == '#' { + level++ + } + if data[level] != ' ' { + return false + } + } + return true +} + +func (p *parser) prefixHeader(out *bytes.Buffer, data []byte) int { + level := 0 + for level < 6 && data[level] == '#' { + level++ + } + i := skipChar(data, level, ' ') + end := skipUntilChar(data, i, '\n') + skip := end + id := "" + if p.flags&EXTENSION_HEADER_IDS != 0 { + j, k := 0, 0 + // find start/end of header id + for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { + } + for k = j + 1; k < end && data[k] != '}'; k++ { + } + // extract header id iff found + if j < end && k < end { + id = string(data[j+2 : k]) + end = j + skip = k + 1 + for end > 0 && data[end-1] == ' ' { + end-- + } + } + } + for end > 0 && data[end-1] == '#' { + if isBackslashEscaped(data, end-1) { + break + } + end-- + } + for end > 0 && data[end-1] == ' ' { + end-- + } + if end > i { + if id == "" && p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { + id = SanitizedAnchorName(string(data[i:end])) + } + work := func() bool { + p.inline(out, data[i:end]) + return true + } + p.r.Header(out, work, level, id) + } + return skip +} + +func (p *parser) isUnderlinedHeader(data []byte) int { + // test of level 1 header + if data[0] == '=' { + i := skipChar(data, 1, '=') + i = skipChar(data, i, ' ') + if data[i] == '\n' { + return 1 + } else { + return 0 + } + } + + // test of level 2 header + if data[0] == '-' { + i := skipChar(data, 1, '-') + i = skipChar(data, i, ' ') + if data[i] == '\n' { + return 2 + } else { + return 0 + } + } + + return 0 +} + +func (p *parser) titleBlock(out *bytes.Buffer, data []byte, doRender bool) int { + if data[0] != '%' { + return 0 + } + splitData := bytes.Split(data, []byte("\n")) + var i int + for idx, b := range splitData { + if !bytes.HasPrefix(b, []byte("%")) { + i = idx // - 1 + break + } + } + + data = bytes.Join(splitData[0:i], []byte("\n")) + p.r.TitleBlock(out, data) + + return len(data) +} + +func (p *parser) html(out *bytes.Buffer, data []byte, doRender bool) int { + var i, j int + + // identify the opening tag + if data[0] != '<' { + return 0 + } + curtag, tagfound := p.htmlFindTag(data[1:]) + + // handle special cases + if !tagfound { + // check for an HTML comment + if size := p.htmlComment(out, data, doRender); size > 0 { + return size + } + + // check for an
tag + if size := p.htmlHr(out, data, doRender); size > 0 { + return size + } + + // check for HTML CDATA + if size := p.htmlCDATA(out, data, doRender); size > 0 { + return size + } + + // no special case recognized + return 0 + } + + // look for an unindented matching closing tag + // followed by a blank line + found := false + /* + closetag := []byte("\n") + j = len(curtag) + 1 + for !found { + // scan for a closing tag at the beginning of a line + if skip := bytes.Index(data[j:], closetag); skip >= 0 { + j += skip + len(closetag) + } else { + break + } + + // see if it is the only thing on the line + if skip := p.isEmpty(data[j:]); skip > 0 { + // see if it is followed by a blank line/eof + j += skip + if j >= len(data) { + found = true + i = j + } else { + if skip := p.isEmpty(data[j:]); skip > 0 { + j += skip + found = true + i = j + } + } + } + } + */ + + // if not found, try a second pass looking for indented match + // but not if tag is "ins" or "del" (following original Markdown.pl) + if !found && curtag != "ins" && curtag != "del" { + i = 1 + for i < len(data) { + i++ + for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { + i++ + } + + if i+2+len(curtag) >= len(data) { + break + } + + j = p.htmlFindEnd(curtag, data[i-1:]) + + if j > 0 { + i += j - 1 + found = true + break + } + } + } + + if !found { + return 0 + } + + // the end of the block has been found + if doRender { + // trim newlines + end := i + for end > 0 && data[end-1] == '\n' { + end-- + } + p.r.BlockHtml(out, data[:end]) + } + + return i +} + +func (p *parser) renderHTMLBlock(out *bytes.Buffer, data []byte, start int, doRender bool) int { + // html block needs to end with a blank line + if i := p.isEmpty(data[start:]); i > 0 { + size := start + i + if doRender { + // trim trailing newlines + end := size + for end > 0 && data[end-1] == '\n' { + end-- + } + p.r.BlockHtml(out, data[:end]) + } + return size + } + return 0 +} + +// HTML comment, lax form +func (p *parser) htmlComment(out *bytes.Buffer, data []byte, doRender bool) int { + i := p.inlineHTMLComment(out, data) + return p.renderHTMLBlock(out, data, i, doRender) +} + +// HTML CDATA section +func (p *parser) htmlCDATA(out *bytes.Buffer, data []byte, doRender bool) int { + const cdataTag = "') { + i++ + } + i++ + // no end-of-comment marker + if i >= len(data) { + return 0 + } + return p.renderHTMLBlock(out, data, i, doRender) +} + +// HR, which is the only self-closing block tag considered +func (p *parser) htmlHr(out *bytes.Buffer, data []byte, doRender bool) int { + if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { + return 0 + } + if data[3] != ' ' && data[3] != '/' && data[3] != '>' { + // not an
tag after all; at least not a valid one + return 0 + } + + i := 3 + for data[i] != '>' && data[i] != '\n' { + i++ + } + + if data[i] == '>' { + return p.renderHTMLBlock(out, data, i+1, doRender) + } + + return 0 +} + +func (p *parser) htmlFindTag(data []byte) (string, bool) { + i := 0 + for isalnum(data[i]) { + i++ + } + key := string(data[:i]) + if _, ok := blockTags[key]; ok { + return key, true + } + return "", false +} + +func (p *parser) htmlFindEnd(tag string, data []byte) int { + // assume data[0] == '<' && data[1] == '/' already tested + + // check if tag is a match + closetag := []byte("") + if !bytes.HasPrefix(data, closetag) { + return 0 + } + i := len(closetag) + + // check that the rest of the line is blank + skip := 0 + if skip = p.isEmpty(data[i:]); skip == 0 { + return 0 + } + i += skip + skip = 0 + + if i >= len(data) { + return i + } + + if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { + return i + } + if skip = p.isEmpty(data[i:]); skip == 0 { + // following line must be blank + return 0 + } + + return i + skip +} + +func (*parser) isEmpty(data []byte) int { + // it is okay to call isEmpty on an empty buffer + if len(data) == 0 { + return 0 + } + + var i int + for i = 0; i < len(data) && data[i] != '\n'; i++ { + if data[i] != ' ' && data[i] != '\t' { + return 0 + } + } + return i + 1 +} + +func (*parser) isHRule(data []byte) bool { + i := 0 + + // skip up to three spaces + for i < 3 && data[i] == ' ' { + i++ + } + + // look at the hrule char + if data[i] != '*' && data[i] != '-' && data[i] != '_' { + return false + } + c := data[i] + + // the whole line must be the char or whitespace + n := 0 + for data[i] != '\n' { + switch { + case data[i] == c: + n++ + case data[i] != ' ': + return false + } + i++ + } + + return n >= 3 +} + +// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, +// and returns the end index if so, or 0 otherwise. It also returns the marker found. +// If syntax is not nil, it gets set to the syntax specified in the fence line. +// A final newline is mandatory to recognize the fence line, unless newlineOptional is true. +func isFenceLine(data []byte, info *string, oldmarker string, newlineOptional bool) (end int, marker string) { + i, size := 0, 0 + + // skip up to three spaces + for i < len(data) && i < 3 && data[i] == ' ' { + i++ + } + + // check for the marker characters: ~ or ` + if i >= len(data) { + return 0, "" + } + if data[i] != '~' && data[i] != '`' { + return 0, "" + } + + c := data[i] + + // the whole line must be the same char or whitespace + for i < len(data) && data[i] == c { + size++ + i++ + } + + // the marker char must occur at least 3 times + if size < 3 { + return 0, "" + } + marker = string(data[i-size : i]) + + // if this is the end marker, it must match the beginning marker + if oldmarker != "" && marker != oldmarker { + return 0, "" + } + + // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here + // into one, always get the info string, and discard it if the caller doesn't care. 
+ if info != nil { + infoLength := 0 + i = skipChar(data, i, ' ') + + if i >= len(data) { + if newlineOptional && i == len(data) { + return i, marker + } + return 0, "" + } + + infoStart := i + + if data[i] == '{' { + i++ + infoStart++ + + for i < len(data) && data[i] != '}' && data[i] != '\n' { + infoLength++ + i++ + } + + if i >= len(data) || data[i] != '}' { + return 0, "" + } + + // strip all whitespace at the beginning and the end + // of the {} block + for infoLength > 0 && isspace(data[infoStart]) { + infoStart++ + infoLength-- + } + + for infoLength > 0 && isspace(data[infoStart+infoLength-1]) { + infoLength-- + } + + i++ + } else { + for i < len(data) && !isverticalspace(data[i]) { + infoLength++ + i++ + } + } + + *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength])) + } + + i = skipChar(data, i, ' ') + if i >= len(data) { + if newlineOptional { + return i, marker + } + return 0, "" + } + if data[i] == '\n' { + i++ // Take newline into account + } + + return i, marker +} + +// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, +// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. +// If doRender is true, a final newline is mandatory to recognize the fenced code block. +func (p *parser) fencedCodeBlock(out *bytes.Buffer, data []byte, doRender bool) int { + var infoString string + beg, marker := isFenceLine(data, &infoString, "", false) + if beg == 0 || beg >= len(data) { + return 0 + } + + var work bytes.Buffer + + for { + // safe to assume beg < len(data) + + // check for the end of the code block + newlineOptional := !doRender + fenceEnd, _ := isFenceLine(data[beg:], nil, marker, newlineOptional) + if fenceEnd != 0 { + beg += fenceEnd + break + } + + // copy the current line + end := skipUntilChar(data, beg, '\n') + 1 + + // did we reach the end of the buffer without a closing marker? 
+ if end >= len(data) { + return 0 + } + + // verbatim copy to the working buffer + if doRender { + work.Write(data[beg:end]) + } + beg = end + } + + if doRender { + p.r.BlockCode(out, work.Bytes(), infoString) + } + + return beg +} + +func (p *parser) table(out *bytes.Buffer, data []byte) int { + var header bytes.Buffer + i, columns := p.tableHeader(&header, data) + if i == 0 { + return 0 + } + + var body bytes.Buffer + + for i < len(data) { + pipes, rowStart := 0, i + for ; data[i] != '\n'; i++ { + if data[i] == '|' { + pipes++ + } + } + + if pipes == 0 { + i = rowStart + break + } + + // include the newline in data sent to tableRow + i++ + p.tableRow(&body, data[rowStart:i], columns, false) + } + + p.r.Table(out, header.Bytes(), body.Bytes(), columns) + + return i +} + +// check if the specified position is preceded by an odd number of backslashes +func isBackslashEscaped(data []byte, i int) bool { + backslashes := 0 + for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { + backslashes++ + } + return backslashes&1 == 1 +} + +func (p *parser) tableHeader(out *bytes.Buffer, data []byte) (size int, columns []int) { + i := 0 + colCount := 1 + for i = 0; data[i] != '\n'; i++ { + if data[i] == '|' && !isBackslashEscaped(data, i) { + colCount++ + } + } + + // doesn't look like a table header + if colCount == 1 { + return + } + + // include the newline in the data sent to tableRow + header := data[:i+1] + + // column count ignores pipes at beginning or end of line + if data[0] == '|' { + colCount-- + } + if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { + colCount-- + } + + columns = make([]int, colCount) + + // move on to the header underline + i++ + if i >= len(data) { + return + } + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + i = skipChar(data, i, ' ') + + // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 + // and trailing | optional on last column + col := 0 + for data[i] != '\n' { + dashes := 0 + + if data[i] == ':' { + i++ + columns[col] |= TABLE_ALIGNMENT_LEFT + dashes++ + } + for data[i] == '-' { + i++ + dashes++ + } + if data[i] == ':' { + i++ + columns[col] |= TABLE_ALIGNMENT_RIGHT + dashes++ + } + for data[i] == ' ' { + i++ + } + + // end of column test is messy + switch { + case dashes < 3: + // not a valid column + return + + case data[i] == '|' && !isBackslashEscaped(data, i): + // marker found, now skip past trailing whitespace + col++ + i++ + for data[i] == ' ' { + i++ + } + + // trailing junk found after last column + if col >= colCount && data[i] != '\n' { + return + } + + case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: + // something else found where marker was required + return + + case data[i] == '\n': + // marker is optional for the last column + col++ + + default: + // trailing junk found after last column + return + } + } + if col != colCount { + return + } + + p.tableRow(out, header, columns, true) + size = i + 1 + return +} + +func (p *parser) tableRow(out *bytes.Buffer, data []byte, columns []int, header bool) { + i, col := 0, 0 + var rowWork bytes.Buffer + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + + for col = 0; col < len(columns) && i < len(data); col++ { + for data[i] == ' ' { + i++ + } + + cellStart := i + + for (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { + i++ + } + + cellEnd := i + + // skip the end-of-cell marker, possibly taking us past end of buffer + i++ + + for cellEnd > cellStart && data[cellEnd-1] == ' ' { + cellEnd-- + } + + var cellWork bytes.Buffer + p.inline(&cellWork, data[cellStart:cellEnd]) + + if header { + p.r.TableHeaderCell(&rowWork, cellWork.Bytes(), columns[col]) + } else { + p.r.TableCell(&rowWork, cellWork.Bytes(), columns[col]) + } + } + + // pad it out with empty columns to get the right number + for ; col < len(columns); col++ { + if header { + p.r.TableHeaderCell(&rowWork, nil, columns[col]) + } else { + p.r.TableCell(&rowWork, nil, columns[col]) + } + } + + // silently ignore rows with too many cells + + p.r.TableRow(out, rowWork.Bytes()) +} + +// returns blockquote prefix length +func (p *parser) quotePrefix(data []byte) int { + i := 0 + for i < 3 && data[i] == ' ' { + i++ + } + if data[i] == '>' { + if data[i+1] == ' ' { + return i + 2 + } + return i + 1 + } + return 0 +} + +// blockquote ends with at least one blank line +// followed by something without a blockquote prefix +func (p *parser) terminateBlockquote(data []byte, beg, end int) bool { + if p.isEmpty(data[beg:]) <= 0 { + return false + } + if end >= len(data) { + return true + } + return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 +} + +// parse a blockquote fragment +func (p *parser) quote(out *bytes.Buffer, data []byte) int { + var raw bytes.Buffer + beg, end := 0, 0 + for beg < len(data) { + end = beg + // Step over whole lines, collecting them. 
While doing that, check for + // fenced code and if one's found, incorporate it altogether, + // irregardless of any contents inside it + for data[end] != '\n' { + if p.flags&EXTENSION_FENCED_CODE != 0 { + if i := p.fencedCodeBlock(out, data[end:], false); i > 0 { + // -1 to compensate for the extra end++ after the loop: + end += i - 1 + break + } + } + end++ + } + end++ + + if pre := p.quotePrefix(data[beg:]); pre > 0 { + // skip the prefix + beg += pre + } else if p.terminateBlockquote(data, beg, end) { + break + } + + // this line is part of the blockquote + raw.Write(data[beg:end]) + beg = end + } + + var cooked bytes.Buffer + p.block(&cooked, raw.Bytes()) + p.r.BlockQuote(out, cooked.Bytes()) + return end +} + +// returns prefix length for block code +func (p *parser) codePrefix(data []byte) int { + if data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' { + return 4 + } + return 0 +} + +func (p *parser) code(out *bytes.Buffer, data []byte) int { + var work bytes.Buffer + + i := 0 + for i < len(data) { + beg := i + for data[i] != '\n' { + i++ + } + i++ + + blankline := p.isEmpty(data[beg:i]) > 0 + if pre := p.codePrefix(data[beg:i]); pre > 0 { + beg += pre + } else if !blankline { + // non-empty, non-prefixed line breaks the pre + i = beg + break + } + + // verbatim copy to the working buffeu + if blankline { + work.WriteByte('\n') + } else { + work.Write(data[beg:i]) + } + } + + // trim all the \n off the end of work + workbytes := work.Bytes() + eol := len(workbytes) + for eol > 0 && workbytes[eol-1] == '\n' { + eol-- + } + if eol != len(workbytes) { + work.Truncate(eol) + } + + work.WriteByte('\n') + + p.r.BlockCode(out, work.Bytes(), "") + + return i +} + +// returns unordered list item prefix +func (p *parser) uliPrefix(data []byte) int { + i := 0 + + // start with up to 3 spaces + for i < 3 && data[i] == ' ' { + i++ + } + + // need a *, +, or - followed by a space + if (data[i] != '*' && data[i] != '+' && data[i] != '-') || + data[i+1] != ' ' { + return 0 + } + return i + 2 +} + +// returns ordered list item prefix +func (p *parser) oliPrefix(data []byte) int { + i := 0 + + // start with up to 3 spaces + for i < 3 && data[i] == ' ' { + i++ + } + + // count the digits + start := i + for data[i] >= '0' && data[i] <= '9' { + i++ + } + + // we need >= 1 digits followed by a dot and a space + if start == i || data[i] != '.' || data[i+1] != ' ' { + return 0 + } + return i + 2 +} + +// returns definition list item prefix +func (p *parser) dliPrefix(data []byte) int { + i := 0 + + // need a : followed by a spaces + if data[i] != ':' || data[i+1] != ' ' { + return 0 + } + for data[i] == ' ' { + i++ + } + return i + 2 +} + +// parse ordered or unordered list block +func (p *parser) list(out *bytes.Buffer, data []byte, flags int) int { + i := 0 + flags |= LIST_ITEM_BEGINNING_OF_LIST + work := func() bool { + for i < len(data) { + skip := p.listItem(out, data[i:], &flags) + i += skip + + if skip == 0 || flags&LIST_ITEM_END_OF_LIST != 0 { + break + } + flags &= ^LIST_ITEM_BEGINNING_OF_LIST + } + return true + } + + p.r.List(out, work, flags) + return i +} + +// Parse a single list item. +// Assumes initial prefix is already removed if this is a sublist. 
+func (p *parser) listItem(out *bytes.Buffer, data []byte, flags *int) int { + // keep track of the indentation of the first line + itemIndent := 0 + for itemIndent < 3 && data[itemIndent] == ' ' { + itemIndent++ + } + + i := p.uliPrefix(data) + if i == 0 { + i = p.oliPrefix(data) + } + if i == 0 { + i = p.dliPrefix(data) + // reset definition term flag + if i > 0 { + *flags &= ^LIST_TYPE_TERM + } + } + if i == 0 { + // if in defnition list, set term flag and continue + if *flags&LIST_TYPE_DEFINITION != 0 { + *flags |= LIST_TYPE_TERM + } else { + return 0 + } + } + + // skip leading whitespace on first line + for data[i] == ' ' { + i++ + } + + // find the end of the line + line := i + for i > 0 && data[i-1] != '\n' { + i++ + } + + // process the following lines + containsBlankLine := false + sublist := 0 + codeBlockMarker := "" + if p.flags&EXTENSION_FENCED_CODE != 0 && i > line { + // determine if codeblock starts on the first line + _, codeBlockMarker = isFenceLine(data[line:i], nil, "", false) + } + + // get working buffer + var raw bytes.Buffer + + // put the first line into the working buffer + raw.Write(data[line:i]) + line = i + +gatherlines: + for line < len(data) { + i++ + + // find the end of this line + for data[i-1] != '\n' { + i++ + } + // if it is an empty line, guess that it is part of this item + // and move on to the next line + if p.isEmpty(data[line:i]) > 0 { + containsBlankLine = true + raw.Write(data[line:i]) + line = i + continue + } + + // calculate the indentation + indent := 0 + for indent < 4 && line+indent < i && data[line+indent] == ' ' { + indent++ + } + + chunk := data[line+indent : i] + + if p.flags&EXTENSION_FENCED_CODE != 0 { + // determine if in or out of codeblock + // if in codeblock, ignore normal list processing + _, marker := isFenceLine(chunk, nil, codeBlockMarker, false) + if marker != "" { + if codeBlockMarker == "" { + // start of codeblock + codeBlockMarker = marker + } else { + // end of codeblock. + *flags |= LIST_ITEM_CONTAINS_BLOCK + codeBlockMarker = "" + } + } + // we are in a codeblock, write line, and continue + if codeBlockMarker != "" || marker != "" { + raw.Write(data[line+indent : i]) + line = i + continue gatherlines + } + } + + // evaluate how this line fits in + switch { + // is this a nested list item? + case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) || + p.oliPrefix(chunk) > 0 || + p.dliPrefix(chunk) > 0: + + if containsBlankLine { + // end the list if the type changed after a blank line + if indent <= itemIndent && + ((*flags&LIST_TYPE_ORDERED != 0 && p.uliPrefix(chunk) > 0) || + (*flags&LIST_TYPE_ORDERED == 0 && p.oliPrefix(chunk) > 0)) { + + *flags |= LIST_ITEM_END_OF_LIST + break gatherlines + } + *flags |= LIST_ITEM_CONTAINS_BLOCK + } + + // to be a nested list, it must be indented more + // if not, it is the next item in the same list + if indent <= itemIndent { + break gatherlines + } + + // is this the first item in the nested list? + if sublist == 0 { + sublist = raw.Len() + } + + // is this a nested prefix header? 
+ case p.isPrefixHeader(chunk): + // if the header is not indented, it is not nested in the list + // and thus ends the list + if containsBlankLine && indent < 4 { + *flags |= LIST_ITEM_END_OF_LIST + break gatherlines + } + *flags |= LIST_ITEM_CONTAINS_BLOCK + + // anything following an empty line is only part + // of this item if it is indented 4 spaces + // (regardless of the indentation of the beginning of the item) + case containsBlankLine && indent < 4: + if *flags&LIST_TYPE_DEFINITION != 0 && i < len(data)-1 { + // is the next item still a part of this list? + next := i + for data[next] != '\n' { + next++ + } + for next < len(data)-1 && data[next] == '\n' { + next++ + } + if i < len(data)-1 && data[i] != ':' && data[next] != ':' { + *flags |= LIST_ITEM_END_OF_LIST + } + } else { + *flags |= LIST_ITEM_END_OF_LIST + } + break gatherlines + + // a blank line means this should be parsed as a block + case containsBlankLine: + *flags |= LIST_ITEM_CONTAINS_BLOCK + } + + containsBlankLine = false + + // add the line into the working buffer without prefix + raw.Write(data[line+indent : i]) + + line = i + } + + // If reached end of data, the Renderer.ListItem call we're going to make below + // is definitely the last in the list. + if line >= len(data) { + *flags |= LIST_ITEM_END_OF_LIST + } + + rawBytes := raw.Bytes() + + // render the contents of the list item + var cooked bytes.Buffer + if *flags&LIST_ITEM_CONTAINS_BLOCK != 0 && *flags&LIST_TYPE_TERM == 0 { + // intermediate render of block item, except for definition term + if sublist > 0 { + p.block(&cooked, rawBytes[:sublist]) + p.block(&cooked, rawBytes[sublist:]) + } else { + p.block(&cooked, rawBytes) + } + } else { + // intermediate render of inline item + if sublist > 0 { + p.inline(&cooked, rawBytes[:sublist]) + p.block(&cooked, rawBytes[sublist:]) + } else { + p.inline(&cooked, rawBytes) + } + } + + // render the actual list item + cookedBytes := cooked.Bytes() + parsedEnd := len(cookedBytes) + + // strip trailing newlines + for parsedEnd > 0 && cookedBytes[parsedEnd-1] == '\n' { + parsedEnd-- + } + p.r.ListItem(out, cookedBytes[:parsedEnd], *flags) + + return line +} + +// render a single paragraph that has already been parsed out +func (p *parser) renderParagraph(out *bytes.Buffer, data []byte) { + if len(data) == 0 { + return + } + + // trim leading spaces + beg := 0 + for data[beg] == ' ' { + beg++ + } + + // trim trailing newline + end := len(data) - 1 + + // trim trailing spaces + for end > beg && data[end-1] == ' ' { + end-- + } + + work := func() bool { + p.inline(out, data[beg:end]) + return true + } + p.r.Paragraph(out, work) +} + +func (p *parser) paragraph(out *bytes.Buffer, data []byte) int { + // prev: index of 1st char of previous line + // line: index of 1st char of current line + // i: index of cursor/end of current line + var prev, line, i int + + // keep going until we find something to mark the end of the paragraph + for i < len(data) { + // mark the beginning of the current line + prev = line + current := data[i:] + line = i + + // did we find a blank line marking the end of the paragraph? + if n := p.isEmpty(current); n > 0 { + // did this blank line followed by a definition list item? 
+			if p.flags&EXTENSION_DEFINITION_LISTS != 0 {
+				if i < len(data)-1 && data[i+1] == ':' {
+					return p.list(out, data[prev:], LIST_TYPE_DEFINITION)
+				}
+			}
+
+			p.renderParagraph(out, data[:i])
+			return i + n
+		}
+
+		// an underline under some text marks a header, so our paragraph ended on prev line
+		if i > 0 {
+			if level := p.isUnderlinedHeader(current); level > 0 {
+				// render the paragraph
+				p.renderParagraph(out, data[:prev])
+
+				// ignore leading and trailing whitespace
+				eol := i - 1
+				for prev < eol && data[prev] == ' ' {
+					prev++
+				}
+				for eol > prev && data[eol-1] == ' ' {
+					eol--
+				}
+
+				// render the header
+				// this ugly double closure avoids forcing variables onto the heap
+				work := func(o *bytes.Buffer, pp *parser, d []byte) func() bool {
+					return func() bool {
+						pp.inline(o, d)
+						return true
+					}
+				}(out, p, data[prev:eol])
+
+				id := ""
+				if p.flags&EXTENSION_AUTO_HEADER_IDS != 0 {
+					id = SanitizedAnchorName(string(data[prev:eol]))
+				}
+
+				p.r.Header(out, work, level, id)
+
+				// find the end of the underline
+				for data[i] != '\n' {
+					i++
+				}
+				return i
+			}
+		}
+
+		// if the next line starts a block of HTML, then the paragraph ends here
+		if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 {
+			if data[i] == '<' && p.html(out, current, false) > 0 {
+				// rewind to before the HTML block
+				p.renderParagraph(out, data[:i])
+				return i
+			}
+		}
+
+		// if there's a prefixed header or a horizontal rule after this, paragraph is over
+		if p.isPrefixHeader(current) || p.isHRule(current) {
+			p.renderParagraph(out, data[:i])
+			return i
+		}
+
+		// if there's a fenced code block, paragraph is over
+		if p.flags&EXTENSION_FENCED_CODE != 0 {
+			if p.fencedCodeBlock(out, current, false) > 0 {
+				p.renderParagraph(out, data[:i])
+				return i
+			}
+		}
+
+		// if there's a definition list item, prev line is a definition term
+		if p.flags&EXTENSION_DEFINITION_LISTS != 0 {
+			if p.dliPrefix(current) != 0 {
+				return p.list(out, data[prev:], LIST_TYPE_DEFINITION)
+			}
+		}
+
+		// if there's a list after this, paragraph is over
+		if p.flags&EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK != 0 {
+			if p.uliPrefix(current) != 0 ||
+				p.oliPrefix(current) != 0 ||
+				p.quotePrefix(current) != 0 ||
+				p.codePrefix(current) != 0 {
+				p.renderParagraph(out, data[:i])
+				return i
+			}
+		}
+
+		// otherwise, scan to the beginning of the next line
+		for data[i] != '\n' {
+			i++
+		}
+		i++
+	}
+
+	p.renderParagraph(out, data[:i])
+	return i
+}
+
+// SanitizedAnchorName returns a sanitized anchor name for the given text.
+//
+// It implements the algorithm specified in the package comment.
+func SanitizedAnchorName(text string) string {
+	var anchorName []rune
+	futureDash := false
+	for _, r := range text {
+		switch {
+		case unicode.IsLetter(r) || unicode.IsNumber(r):
+			if futureDash && len(anchorName) > 0 {
+				anchorName = append(anchorName, '-')
+			}
+			futureDash = false
+			anchorName = append(anchorName, unicode.ToLower(r))
+		default:
+			futureDash = true
+		}
+	}
+	return string(anchorName)
+}
diff --git a/vendor/github.com/russross/blackfriday/doc.go b/vendor/github.com/russross/blackfriday/doc.go
new file mode 100644
index 0000000000..9656c42a19
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/doc.go
@@ -0,0 +1,32 @@
+// Package blackfriday is a Markdown processor.
+//
+// It translates plain text with simple formatting rules into HTML or LaTeX.
+//
+// Sanitized Anchor Names
+//
+// Blackfriday includes an algorithm for creating sanitized anchor names
+// corresponding to a given input text. This algorithm is used to create
+// anchors for headings when EXTENSION_AUTO_HEADER_IDS is enabled. The
+// algorithm is specified below, so that other packages can create
+// compatible anchor names and links to those anchors.
+//
+// The algorithm iterates over the input text, interpreted as UTF-8,
+// one Unicode code point (rune) at a time. All runes that are letters (category L)
+// or numbers (category N) are considered valid characters. They are mapped to
+// lower case, and included in the output. All other runes are considered
+// invalid characters. Invalid characters that precede the first valid character,
+// as well as invalid characters that follow the last valid character,
+// are dropped completely. All other sequences of invalid characters
+// between two valid characters are replaced with a single dash character '-'.
+//
+// SanitizedAnchorName exposes this functionality, and can be used to
+// create compatible links to the anchor names generated by blackfriday.
+// This algorithm is also implemented in a small standalone package at
+// github.com/shurcooL/sanitized_anchor_name. It can be useful for clients
+// that want a small package and don't need full functionality of blackfriday.
+package blackfriday
+
+// NOTE: Keep Sanitized Anchor Name algorithm in sync with package
+// github.com/shurcooL/sanitized_anchor_name.
+// Otherwise, users of sanitized_anchor_name will get anchor names
+// that are incompatible with those generated by blackfriday.
diff --git a/vendor/github.com/russross/blackfriday/html.go b/vendor/github.com/russross/blackfriday/html.go
new file mode 100644
index 0000000000..fa044ca215
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/html.go
@@ -0,0 +1,945 @@
+//
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross .
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+//
+
+//
+//
+// HTML rendering backend
+//
+//
+
+package blackfriday
+
+import (
+	"bytes"
+	"fmt"
+	"regexp"
+	"strconv"
+	"strings"
+)
+
+// Html renderer configuration options.
+const (
+	HTML_SKIP_HTML = 1 << iota // skip preformatted HTML blocks
+	HTML_SKIP_STYLE            // skip embedded
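// A minimal usage sketch of the Sanitized Anchor Name algorithm documented in
// doc.go above; the standalone main package and the sample inputs here are
// illustrative assumptions, but the expected outputs follow directly from the
// algorithm: letters and digits are lowercased and kept, every other run of
// characters between them collapses to a single '-', and leading or trailing
// invalid runs are dropped.
package main

import (
	"fmt"

	"github.com/russross/blackfriday"
)

func main() {
	fmt.Println(blackfriday.SanitizedAnchorName("Hello, World!"))        // hello-world
	fmt.Println(blackfriday.SanitizedAnchorName(" 3. Setup & Install ")) // 3-setup-install
}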