From cf3915fbd8e5437782b407580397f5fff50bf299 Mon Sep 17 00:00:00 2001 From: Lucas Ramage Date: Sun, 3 May 2020 11:41:34 -0400 Subject: [PATCH 01/28] Document working with Ansible (#306) Bug: https://github.com/StackExchange/blackbox/issues/295 See: https://docs.ansible.com/ansible/latest/user_guide/vault.html#providing-vault-passwords See: https://docs.ansible.com/ansible/latest/reference_appendices/config.html#default-vault-password-file --- README.md | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 0045998b..3aaa5f2e 100644 --- a/README.md +++ b/README.md @@ -19,9 +19,11 @@ Table of Contents - [Compatibility](#compatibility) - [How is the encryption done?](#how-is-the-encryption-done) - [What does this look like to the typical user?](#what-does-this-look-like-to-the-typical-user) -- [How to use the secrets with Puppet?](#how-to-use-the-secrets-with-puppet) - - [Entire files](#entire-files) - - [Small strings](#small-strings) +- Configuration Management + - [How to use the secrets with Ansible?](#how-to-use-the-secrets-with-ansible) + - [How to use the secrets with Puppet?](#how-to-use-the-secrets-with-puppet) + - [Entire files](#entire-files) + - [Small strings](#small-strings) - File Management - [How to enroll a new file into the system?](#how-to-enroll-a-new-file-into-the-system) - [How to remove a file from the system?](#how-to-remove-a-file-from-the-system) @@ -233,6 +235,22 @@ What does this look like to the typical user? Wait... it can be even easier than that! Run `blackbox_edit FILENAME`, and it'll decrypt the file in a temp file and call `$EDITOR` on it, re-encrypting again after the editor is closed. +How to use the secrets with Ansible? +=================================== + +Ansible Vault provides functionality for encrypting both entire files and strings stored within files; however, +keeping track of the password(s) required for decryption is not handled by this module. + +Instead one must specify a password file when running the playbook. + +Ansible example for password file: `my_secret_password.txt.gpg` + +``` +ansible-playbook --vault-password-file my_secret_password.txt site.yml +``` + +Alternatively, one can specify this in the `ANSIBLE_VAULT_PASSWORD_FILE` environment variable. + How to use the secrets with Puppet? =================================== From 6761bfc356e437ad68840a650ed0ce9f263756c9 Mon Sep 17 00:00:00 2001 From: James Ottaway Date: Thu, 30 Apr 2020 21:31:44 +1000 Subject: [PATCH 02/28] Add a test for `blackbox_cat` --- tools/confidence_test.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tools/confidence_test.sh b/tools/confidence_test.sh index c71d2d33..4f748672 100755 --- a/tools/confidence_test.sh +++ b/tools/confidence_test.sh @@ -161,6 +161,11 @@ assert_file_missing secret.txt assert_file_exists secret.txt.gpg assert_line_exists '/secret.txt' .gitignore +PHASE 'She cats secrets.txt.gpg' +make_self_deleting_tempfile catsecret +blackbox_cat secret.txt.gpg > $catsecret +assert_line_exists 'this is my secret' $catsecret + PHASE 'She decrypts secrets.txt.' 
blackbox_edit_start secret.txt assert_file_exists secret.txt From 72253818be9fd306badf7da686290fc7e84611b9 Mon Sep 17 00:00:00 2001 From: James Ottaway Date: Thu, 30 Apr 2020 22:26:46 +1000 Subject: [PATCH 03/28] Call `vcs_ignore` bwfore the file is shredded --- bin/blackbox_register_new_file | 2 +- tools/confidence_test.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/blackbox_register_new_file b/bin/blackbox_register_new_file index 0a49aad6..3020f779 100755 --- a/bin/blackbox_register_new_file +++ b/bin/blackbox_register_new_file @@ -29,6 +29,7 @@ function register_new_file() { prepare_keychain encrypt_file "$unencrypted_file" "$encrypted_file" add_filename_to_cryptlist "$unencrypted_file" + vcs_ignore "$unencrypted_file" # Is the unencrypted file already in HG? (ie. are we correcting a bad situation) SECRETSEXPOSED=$(is_in_vcs "${unencrypted_file}") @@ -41,7 +42,6 @@ function register_new_file() { vcs_add "$encrypted_file" fi - vcs_ignore "$unencrypted_file" echo 'NOTE: "already tracked!" messages are safe to ignore.' vcs_add "$BB_FILES" "$encrypted_file" vcs_commit "registered in blackbox: ${unencrypted_file}" "$BB_FILES" "$encrypted_file" "$(vcs_ignore_file_path)" diff --git a/tools/confidence_test.sh b/tools/confidence_test.sh index 4f748672..aa8412ef 100755 --- a/tools/confidence_test.sh +++ b/tools/confidence_test.sh @@ -161,7 +161,7 @@ assert_file_missing secret.txt assert_file_exists secret.txt.gpg assert_line_exists '/secret.txt' .gitignore -PHASE 'She cats secrets.txt.gpg' +PHASE 'She cats secrets.txt.gpg.' make_self_deleting_tempfile catsecret blackbox_cat secret.txt.gpg > $catsecret assert_line_exists 'this is my secret' $catsecret From a7fd51456949a0fcd30c0074cb61a2f5aad947e9 Mon Sep 17 00:00:00 2001 From: James Ottaway Date: Thu, 30 Apr 2020 22:30:03 +1000 Subject: [PATCH 04/28] [wip] Fix how `vcs_relative_path` first resolves an absolute path --- bin/_blackbox_common.sh | 19 ++++++++++++++----- tools/confidence_test.sh | 1 + 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/bin/_blackbox_common.sh b/bin/_blackbox_common.sh index b935f5d8..a3939cb2 100755 --- a/bin/_blackbox_common.sh +++ b/bin/_blackbox_common.sh @@ -366,13 +366,22 @@ function enumerate_blackbox_repos() { done } +# Resolve the absolute path of a relative one +# Adapted from https://unix.stackexchange.com/a/483514 +function abs() { + local _pwd bn + [ -d "${1}" ] && _pwd="${1}" + [ -f "${1}" ] && { _pwd=$(dirname "${1}") ; bn=/$(basename "${1}") ;} + pushd "$_pwd" >/dev/null || exit + echo "$(pwd)${bn}" + popd >/dev/null || exit +} + # Output the path of a file relative to the repo base function vcs_relative_path() { - # Usage: vcs_relative_path file - local name="$1" - #python -c 'import os ; print(os.path.relpath("'"$(pwd -P)"'/'"$name"'", "'"$REPOBASE"'"))' - local p=$( printf "%s" "$( pwd -P )/${1}" | sed 's#//*#/#g' ) - local name="${p#$REPOBASE}" + local name + name=$(abs "$1") + name="${name#$REPOBASE}" name=$( printf "%s" "$name" | sed 's#^/##g' | sed 's#/$##g' ) printf "%s" "$name" } diff --git a/tools/confidence_test.sh b/tools/confidence_test.sh index aa8412ef..eaeef233 100755 --- a/tools/confidence_test.sh +++ b/tools/confidence_test.sh @@ -160,6 +160,7 @@ blackbox_register_new_file secret.txt assert_file_missing secret.txt assert_file_exists secret.txt.gpg assert_line_exists '/secret.txt' .gitignore +assert_line_exists 'secret.txt' keyrings/live/blackbox-files.txt PHASE 'She cats secrets.txt.gpg.' 
make_self_deleting_tempfile catsecret From 6ae742aa7ad741a75c991c8d9bc00d7112e95526 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Tue, 26 May 2020 19:31:24 +0000 Subject: [PATCH 05/28] Make the /etc/profile.d script executable --- tools/mk_rpm_fpmdir.stack_blackbox.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/mk_rpm_fpmdir.stack_blackbox.txt b/tools/mk_rpm_fpmdir.stack_blackbox.txt index 99e2f4b9..55170566 100644 --- a/tools/mk_rpm_fpmdir.stack_blackbox.txt +++ b/tools/mk_rpm_fpmdir.stack_blackbox.txt @@ -1,5 +1,5 @@ # Update tools/mk_rpm_fpmdir.stack_blackbox.txt. Other files generate from it. -read /etc/profile.d/usrblackbox.sh profile.d-usrblackbox.sh +exec /etc/profile.d/usrblackbox.sh profile.d-usrblackbox.sh exec /usr/blackbox/bin/_blackbox_common.sh ../bin/_blackbox_common.sh exec /usr/blackbox/bin/_stack_lib.sh ../bin/_stack_lib.sh exec /usr/blackbox/bin/blackbox_addadmin ../bin/blackbox_addadmin From 5ce3c9370f425bee63b8e0f93ddbff56c9d4e0e3 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Wed, 27 May 2020 08:03:12 -0400 Subject: [PATCH 06/28] Revert "[wip] Fix how `vcs_relative_path` first resolves an absolute path" This reverts commit a7fd51456949a0fcd30c0074cb61a2f5aad947e9. Sadly this doesn't work in all cases (if the file doesn't exist) and fails on MacOS (I think). --- bin/_blackbox_common.sh | 19 +++++-------------- tools/confidence_test.sh | 1 - 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/bin/_blackbox_common.sh b/bin/_blackbox_common.sh index a3939cb2..b935f5d8 100755 --- a/bin/_blackbox_common.sh +++ b/bin/_blackbox_common.sh @@ -366,22 +366,13 @@ function enumerate_blackbox_repos() { done } -# Resolve the absolute path of a relative one -# Adapted from https://unix.stackexchange.com/a/483514 -function abs() { - local _pwd bn - [ -d "${1}" ] && _pwd="${1}" - [ -f "${1}" ] && { _pwd=$(dirname "${1}") ; bn=/$(basename "${1}") ;} - pushd "$_pwd" >/dev/null || exit - echo "$(pwd)${bn}" - popd >/dev/null || exit -} - # Output the path of a file relative to the repo base function vcs_relative_path() { - local name - name=$(abs "$1") - name="${name#$REPOBASE}" + # Usage: vcs_relative_path file + local name="$1" + #python -c 'import os ; print(os.path.relpath("'"$(pwd -P)"'/'"$name"'", "'"$REPOBASE"'"))' + local p=$( printf "%s" "$( pwd -P )/${1}" | sed 's#//*#/#g' ) + local name="${p#$REPOBASE}" name=$( printf "%s" "$name" | sed 's#^/##g' | sed 's#/$##g' ) printf "%s" "$name" } diff --git a/tools/confidence_test.sh b/tools/confidence_test.sh index eaeef233..aa8412ef 100755 --- a/tools/confidence_test.sh +++ b/tools/confidence_test.sh @@ -160,7 +160,6 @@ blackbox_register_new_file secret.txt assert_file_missing secret.txt assert_file_exists secret.txt.gpg assert_line_exists '/secret.txt' .gitignore -assert_line_exists 'secret.txt' keyrings/live/blackbox-files.txt PHASE 'She cats secrets.txt.gpg.' make_self_deleting_tempfile catsecret From e049c02655d2b0f506fa1a98727c347745c116bd Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Wed, 27 May 2020 08:53:17 -0400 Subject: [PATCH 07/28] More thoughts on v2 --- Version2-Ideas.md | 110 ++++++++++++++++++++++++++++++++++------------ 1 file changed, 82 insertions(+), 28 deletions(-) diff --git a/Version2-Ideas.md b/Version2-Ideas.md index 57b50b51..25e1b69a 100644 --- a/Version2-Ideas.md +++ b/Version2-Ideas.md @@ -19,40 +19,93 @@ These are the things I'd like to change someday. 
There should be one program, with subcommands that have names that make more sense: * `blackbox init` -* `blackbox register <...>` -* `blackbox deregister <...>` -* `blackbox edit <...>` -* `blackbox decrypt <...>` -* `blackbox encrypt <...>` +* `blackbox admin add ` +* `blackbox admin remove ` +* `blackbox admin list` +* `blackbox files add` +* `blackbox files list` +* `blackbox files remove` +* `blackbox encrypt ...` +* `blackbox decrypt ...` * `blackbox decrypt_all` -* `blackbox addadmin ` -* `blackbox removeadmin ` -* `blackbox cat <...>` -* `blackbox diff <...>` -* `blackbox list_files` -* `blackbox list_admins` +* `blackbox cat ...` +* `blackbox edit ...` +* `blackbox reencrypt_all` * `blackbox shred_all` -* `blackbox update_all` -* `blackbox whatsnew` +* `blackbox diff ...` +* `blackbox files unchanged` +* `blackbox files changed` -Backwards compatibility: The old commands would simply call the new commands. +Flags where appropriate + +* -verbose -v +* -noshred +* -debug + +Backwards compatibility: The old scripts will be rewritten to use the new commands. ## Change the "keyrings" directory -The name "keyrings" was unfortunate. First, it should probably begin with a ".". Second, it stores more than just keyrings. Lastly, I'm finding that in most cases we want many repos to refer to the same keyring, which is not supported very well. +The name `keyrings` was unfortunate. First, it should probably begin with a `.`. Second, it stores more than just keyrings. Lastly, I'm finding that in most cases we want many repos to refer to the same keyring, which is not supported very well. A better system would be: 1. If `$BLACKBOX_CONFIG` is set, use that directory. -2. If the repo base directory has a file called ".blackbox_external", read that file as if you are reading `$BLACKBOX_CONFIG` -3. If the repo base directory has a "keyrings" directory, use that. -4. If the repo base directory has a ".blackboxconfig" directory, use that. +2. If the repo base directory has a file called `.blackbox_external`, read that file as if you are reading `$BLACKBOX_CONFIG` +3. If the repo base directory has a `keyrings` directory, use that. +4. If the repo base directory has a `.blackbox` directory, use that. + +Some thoughts on `.blackbox_external`: +I'm not sure what the format should be, but I want it to be simple and expandable. It should support support `../../dir/name` and `/long/path`. However some day we may want to include a Git URL and have the system automatically get the keychain from it. That means the format has to be something like directory:../dir/name so that later we can add git:the-url. + +NOTE: Maybe `.blackbox_external` should be `.blackbox/BLACKBOX_CONFIG`? + +Backwards compatibility: `keyrings` would be checked before `.blackbox`. + +## System Test + +There needs to be a very complete system test. The `make test` we +have now is great for something written in bash. + +It should be easy to make tests. Perhaps a directory of files, each +specifying a test. We could make a little language for writing tests. -Some thoughts on .blackbox_external: -I'm not sure what the format should be, but I want it to be simple and expandable. It should support support "../../dir/name" and "/long/path". However some day we may want to include a Git URL and have the system automatically get the keychain from it. That means the format has to be something like directory:../dir/name so that later we can add git:the_url. 
+ # This test becomes the user "alice" and verifies that she + # can encrypt a file, and decrypt it, with full fidelity. + BECOME alice a + BASH echo "foo contents" >foo.txt + SHOULD_NOT_EXIST foo.txt.gpg + BASH blackbox encrypt foo.txt + SHOULD_NOT_EXIST foo.txt + SHOULD_EXIST foo.txt.gpg + BASH_WITH_PASSWORD a blackbox decrypt foo.txt + SHOULD_EXIST foo.txt.gpg + SHOULD_EXIST foo.txt + SHOULD_CONTAIN foo.txt "foo contents\n" +## Plug-in support + +There should plug-ins support for: + +Repo type: + +* Git -- Using /usr/bin/git or git.exe +* Subversion +* Mercurial +* None (repoless) +* Autodetect + +Encryption software: + +* GnuPG -- using /usr/bin/gpg{,2} or gpg.exe +* golang.org/x/crypto/openpgp + +## JSON or .txt + +The files in .blackbox are mostly .txt files. Instead we should +define a .json format, and only read the .txt file is the .json file +doesn't exist. -Backwards compatibility: "keyrings" would be checked before .blackbox ## Repo-less mode @@ -62,33 +115,34 @@ I prefer the file commits to be automatic because when they were manual, people That said, I'm willing to have a "repo-less" mode. -When this mode is triggered, no add/commit/ignore tasks are done. The search for the keyrings directory still uses `$BLACKBOX_CONFIG` but if that is unset it looks for .blackbox_config in the current directory, then recursively ".." until we hit "/". +When this mode is triggered, no add/commit/ignore tasks are done. The search for the keyrings directory still uses `$BLACKBOX_CONFIG` but if that is unset it looks for `.blackbox_config` in the current directory, then recursively `..` until we hit `/`. I think (but I'm not sure) this would benefit the entire system because it would force us to re-think what VCS actions are done when. -I think (but I'm not sure) that a simple way to implement this would be to add an environment variable that overrides the automatic VCS detection. When set to "none", all VCS operations would basically become no-ops. (This could be done by writing a plug-in that does nothing for all the vcs_* calls) +I think (but I'm not sure) that a simple way to implement this would be to add an environment variable that overrides the automatic VCS detection. When set to "none", all VCS operations would basically become no-ops. (This could be done by writing a plug-in that does nothing for all the `vcs_*` calls) + -Backwards compatibility: This would add a "none" VCS, not remove any existing functionality. +Backwards compatibility: This would add a `none` VCS, not remove any existing functionality. ## Is "bash" the right language? -`bash` is fairly universal. It even exists on Windows. However it is not the right language for large systems. Writing the acceptance tests is quite a bear. Managing ".gitignore" files in bash is impossible and the current implementation fails in many cases. +`bash` is fairly universal. It even exists on Windows. However it is not the right language for large systems. Writing the acceptance tests is quite a bear. Managing `.gitignore` files in bash is impossible and the current implementation fails in many cases. `python` is my second favorite language. It would make the code cleaner and more testable. However it is not installed everywhere. I would also want to write it in Python3 (why start a new project in Python2?) but sadly Python3 is less common. It is a chicken vs. egg situation. -`go` is my favorite language. I could probably rewrite this in go in a weekend. However, now the code is compiled, not interpreted. 
Therefore we lose the ability to just "git clone" and have the tools you want. Not everyone has a Go compiler installed on every machine. +`go` is my favorite language. I could probably rewrite this in go in a weekend. However, now the code is compiled, not interpreted. Therefore we lose the ability to just `git clone` and have the tools you want. Not everyone has a Go compiler installed on every machine. The system is basically unusable on Windows without Cygwin or MINGW. A rewrite in python or go would make it work better on Windows, which currently requires Cygwin or MinGW (which is a bigger investment than installing Python). On the other hand, maybe Ubuntu-on-Windows makes that a non-issue. As long as the code is in `bash` the configuration files like `blackbox-files.txt` and `blackbox-admins.txt` have problems. Filenames with carriage returns aren't supported. If this was in Python/Go/etc. those files could be json or some format with decent quoting and we could handle funny file names better. On the other hand, maybe it is best that we don't support funny filenames... we shouldn't enable bad behavior. -How important is itto blackbox users that the system is written in "bash"? +How important is itto blackbox users that the system is written in `bash`? ## Ditch the project and use git-crypt -People tell me that git-crypt is better because, as a plug-in, automagically supports "git diff", "git log" and "git blame". +People tell me that git-crypt is better because, as a plug-in, automagically supports `git diff`, `git log` and `git blame`. However, I've never used it so I don't have any idea whether git-crypt is any better than blackbox. From 1c77c87555e18393423311857bac1d47d01f98a7 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Fri, 24 Jul 2020 14:21:33 -0400 Subject: [PATCH 08/28] Implement blackbox in Golang (#250) * Initial release --- .gitignore | 10 +- DESIGN.md | 74 ++ Version2-Ideas.md | 31 +- binv2/blackbox_addadmin | 2 + binv2/blackbox_cat | 2 + binv2/blackbox_decrypt_all_files | 2 + binv2/blackbox_decrypt_file | 2 + binv2/blackbox_deregister_file | 2 + binv2/blackbox_diff | 2 + binv2/blackbox_edit | 2 + binv2/blackbox_edit_end | 2 + binv2/blackbox_edit_start | 2 + binv2/blackbox_initialize | 2 + binv2/blackbox_list_admins | 2 + binv2/blackbox_list_files | 2 + binv2/blackbox_listadmins | 2 + binv2/blackbox_postdeploy | 2 + binv2/blackbox_register_new_file | 2 + binv2/blackbox_removeadmin | 2 + binv2/blackbox_shred_all_files | 2 + binv2/blackbox_update_all_files | 2 + binv2/blackbox_view | 2 + binv2/blackbox_whatsnew | 51 ++ cmd/blackbox/blackbox.go | 22 + cmd/blackbox/cli.go | 226 +++++++ cmd/blackbox/drive.go | 296 ++++++++ docs/README.md | 61 ++ docs/admin-ops.md | 148 ++++ docs/advanced.md | 46 ++ docs/alternatives.md | 14 + docs/backwards-compatibility.md | 55 ++ docs/compatibility.md | 78 +++ docs/dev-code-overview.md | 36 + docs/dev.md | 36 + docs/enable-repo.md | 58 ++ docs/encryption.md | 54 ++ docs/expired-keys.md | 62 ++ docs/file-ops.md | 55 ++ docs/full-command-list.md | 34 + docs/git-tips.md | 22 + docs/gnupg-tips.md | 31 + docs/installation.md | 17 + docs/role-accounts.md | 158 +++++ docs/subversion-tips.md | 21 + docs/support.md | 16 + docs/user-overview.md | 124 ++++ docs/why-is-this-important.md | 17 + docs/with-ansible.md | 18 + docs/with-puppet.md | 68 ++ go.mod | 11 + go.sum | 39 ++ integrationTest/NOTES.txt | 91 +++ integrationTest/README.txt | 55 ++ integrationTest/asserts.go | 68 ++ integrationTest/integration_test.go | 343 ++++++++++ 
integrationTest/ithelpers.go | 617 +++++++++++++++++ integrationTest/test_data/000-admin-list.txt | 2 + integrationTest/test_data/000-file-list.txt | 2 + integrationTest/test_data/000-status.txt | 9 + integrationTest/test_data/alice-cat-plain.txt | 1 + integrationTest/test_data/basic-status.txt | 5 + integrationTest/test_data/reencrypt-plain.txt | 1 + integrationTest/test_data/status-noreg.txt | 6 + models/crypters.go | 15 + models/vcs.go | 30 + pkg/bblog/bblog.go | 48 ++ pkg/bbutil/filestats.go | 130 ++++ pkg/bbutil/rbio_test.go | 21 + pkg/bbutil/runbash.go | 77 +++ pkg/bbutil/shred.go | 109 +++ pkg/bbutil/sortedfile_test.go | 66 ++ pkg/box/box.go | 233 +++++++ pkg/box/boxutils.go | 224 +++++++ pkg/box/pretty_test.go | 35 + pkg/box/verbs.go | 633 ++++++++++++++++++ pkg/commitlater/commitlater.go | 84 +++ pkg/crypters/_all/all.go | 5 + pkg/crypters/crypters.go | 58 ++ pkg/crypters/gnupg/gnupg.go | 180 +++++ pkg/crypters/gnupg/keychain.go | 107 +++ pkg/makesafe/makesafe.go | 285 ++++++++ pkg/makesafe/makesafe_test.go | 136 ++++ pkg/vcs/_all/all.go | 6 + pkg/vcs/git/git.go | 226 +++++++ pkg/vcs/none/none.go | 79 +++ pkg/vcs/vcs.go | 82 +++ 86 files changed, 6074 insertions(+), 22 deletions(-) create mode 100644 DESIGN.md create mode 100755 binv2/blackbox_addadmin create mode 100755 binv2/blackbox_cat create mode 100755 binv2/blackbox_decrypt_all_files create mode 100755 binv2/blackbox_decrypt_file create mode 100755 binv2/blackbox_deregister_file create mode 100755 binv2/blackbox_diff create mode 100755 binv2/blackbox_edit create mode 100755 binv2/blackbox_edit_end create mode 100755 binv2/blackbox_edit_start create mode 100755 binv2/blackbox_initialize create mode 100755 binv2/blackbox_list_admins create mode 100755 binv2/blackbox_list_files create mode 100755 binv2/blackbox_listadmins create mode 100755 binv2/blackbox_postdeploy create mode 100755 binv2/blackbox_register_new_file create mode 100755 binv2/blackbox_removeadmin create mode 100755 binv2/blackbox_shred_all_files create mode 100755 binv2/blackbox_update_all_files create mode 100755 binv2/blackbox_view create mode 100755 binv2/blackbox_whatsnew create mode 100644 cmd/blackbox/blackbox.go create mode 100644 cmd/blackbox/cli.go create mode 100644 cmd/blackbox/drive.go create mode 100644 docs/README.md create mode 100644 docs/admin-ops.md create mode 100644 docs/advanced.md create mode 100644 docs/alternatives.md create mode 100644 docs/backwards-compatibility.md create mode 100644 docs/compatibility.md create mode 100644 docs/dev-code-overview.md create mode 100644 docs/dev.md create mode 100644 docs/enable-repo.md create mode 100644 docs/encryption.md create mode 100644 docs/expired-keys.md create mode 100644 docs/file-ops.md create mode 100644 docs/full-command-list.md create mode 100644 docs/git-tips.md create mode 100644 docs/gnupg-tips.md create mode 100644 docs/installation.md create mode 100644 docs/role-accounts.md create mode 100644 docs/subversion-tips.md create mode 100644 docs/support.md create mode 100644 docs/user-overview.md create mode 100644 docs/why-is-this-important.md create mode 100644 docs/with-ansible.md create mode 100644 docs/with-puppet.md create mode 100644 go.mod create mode 100644 go.sum create mode 100644 integrationTest/NOTES.txt create mode 100644 integrationTest/README.txt create mode 100644 integrationTest/asserts.go create mode 100644 integrationTest/integration_test.go create mode 100644 integrationTest/ithelpers.go create mode 100644 integrationTest/test_data/000-admin-list.txt create mode 
100644 integrationTest/test_data/000-file-list.txt create mode 100644 integrationTest/test_data/000-status.txt create mode 100644 integrationTest/test_data/alice-cat-plain.txt create mode 100644 integrationTest/test_data/basic-status.txt create mode 100644 integrationTest/test_data/reencrypt-plain.txt create mode 100644 integrationTest/test_data/status-noreg.txt create mode 100644 models/crypters.go create mode 100644 models/vcs.go create mode 100644 pkg/bblog/bblog.go create mode 100644 pkg/bbutil/filestats.go create mode 100644 pkg/bbutil/rbio_test.go create mode 100644 pkg/bbutil/runbash.go create mode 100644 pkg/bbutil/shred.go create mode 100644 pkg/bbutil/sortedfile_test.go create mode 100644 pkg/box/box.go create mode 100644 pkg/box/boxutils.go create mode 100644 pkg/box/pretty_test.go create mode 100644 pkg/box/verbs.go create mode 100644 pkg/commitlater/commitlater.go create mode 100644 pkg/crypters/_all/all.go create mode 100644 pkg/crypters/crypters.go create mode 100644 pkg/crypters/gnupg/gnupg.go create mode 100644 pkg/crypters/gnupg/keychain.go create mode 100644 pkg/makesafe/makesafe.go create mode 100644 pkg/makesafe/makesafe_test.go create mode 100644 pkg/vcs/_all/all.go create mode 100644 pkg/vcs/git/git.go create mode 100644 pkg/vcs/none/none.go create mode 100644 pkg/vcs/vcs.go diff --git a/.gitignore b/.gitignore index f2d9497a..21f230d0 100644 --- a/.gitignore +++ b/.gitignore @@ -5,9 +5,6 @@ __pycache__/ # C extensions *.so -# backup shell files -*~ - # Distribution / packaging .Python env/ @@ -54,3 +51,10 @@ coverage.xml # Sphinx documentation docs/_build/ +# macOS +.DS_Store + +# Blackbox +bbintegration +.*.swp +/integrationTest/.blackbox diff --git a/DESIGN.md b/DESIGN.md new file mode 100644 index 00000000..28ca459f --- /dev/null +++ b/DESIGN.md @@ -0,0 +1,74 @@ +BlackBox Internals +================== + +The goal of the Go rewrite is to improve the usability and +maintainability of Blackbox, meanwhile make it easier to implement new + +The system is built in distinct layers: view, controller, model. + +Suppose there is a subcommand "`foo`". `blackbox.go` parses the +user's command line args and calls `cmdFoo()`, which is given +everything it needs to do the operation. For example, it is given the +filenames the user specified exactly; even if an empty list means "all +files", at this layer the empty list is passed to the function. + +`cmdFoo()` contains the business logic of how the operation should be +done: usually iterating over filenames and calling verb(s) for each +one. For example if an empty file list means "all files", this is the +layer that enumerates the files. + +`cmdFoo()` is implemented in the file `cmd_foo.go`. The caller of +`cmdFoo()` should provide all data it needs to get the job done. +`cmdFoo()` doesn't refer to global flags, they are passed to the +function as parameters. Therefore the function has zero side-effects +(except possibly logging) and can be called as library functions by +other systems. This is the external (binary) API which should be +relatively stable. + +`cmdFoo()` calls verbs that are in `bbutil/`. Some of those verbs are +actually interfaces. For example, any VCS-related verbs are actually a +Go interface which might be implemented one of many ways (Git, +Subversion, Mercurial), GPG-functions may be implemented by shelling +out to `gpg.exe` or by using Go's gpg library. + +They layers look like this: + +| View | `blackbox.go` | Parses User Commands, calls controller | +| Controller | `cmd_*.go` | The business logic. 
Iterates and calls verbs | +| Model | `pkg/bbutil` | Verbs | +| Interfaces | `pkg/*` | Interfaces and their implementations | + +At least that's the goal. We'll see how well we can achieve this. + + +Version 2.0 +=========== + +Software architecture. + +We try to keep the command-line parsing separate from the business +logic and all plug-ins. This keeps things clean and easy to refactor. +In fact layer 2 could be used as a stand-alone module for projects +that want to embed blackbox actions. + +Layer 1: The command itself + + * cmd/blackbox/blackbox.go -- main() not much more + * cmd/blackbox/cli.go -- Set up and call the ufave/cli flag parser + * cmd/blackbox/drive.go -- Check # of arguments, conflicting flags, and then call the businss logic layer + +Layer 2: The business logic + + * pkg/box/box.go -- The interface to accessing .blackbox (admins, files, etc.) + * pkg/box/verbs.go -- Verbs called by Layer 1. Just the verbs + * pkg/box/boxutils.go -- Functions needed by the verbs + +Layer 3: The plug-ins + + * pkg/vcs/... -- Plug-ins for Git, (Mercurial, Subversion, Perforce,) and None + * pkg/crypters/... -- Plug-ins for PGP access: GnuPG, (go-openpgp, others in the future) + +Layer 4: Support functions for use by Layer 3 + + * pkg/bbutil/filestats.go -- File manipulations + * pkg/bbutil/runbash.go -- Safely run external Linux commands diff --git a/Version2-Ideas.md b/Version2-Ideas.md index 25e1b69a..1cbe8df9 100644 --- a/Version2-Ideas.md +++ b/Version2-Ideas.md @@ -18,29 +18,22 @@ These are the things I'd like to change someday. There should be one program, with subcommands that have names that make more sense: -* `blackbox init` * `blackbox admin add ` -* `blackbox admin remove ` * `blackbox admin list` -* `blackbox files add` -* `blackbox files list` -* `blackbox files remove` -* `blackbox encrypt ...` -* `blackbox decrypt ...` -* `blackbox decrypt_all` +* `blackbox admin remove ` * `blackbox cat ...` -* `blackbox edit ...` -* `blackbox reencrypt_all` -* `blackbox shred_all` +* `blackbox decrypt ...` * `blackbox diff ...` -* `blackbox files unchanged` -* `blackbox files changed` - -Flags where appropriate - -* -verbose -v -* -noshred -* -debug +* `blackbox edit ...` +* `blackbox encrypt ...` +* `blackbox file add ...` +* `blackbox file list` +* `blackbox file remove ...` +* `blackbox info` +* `blackbox init` +* `blackbox reencrypt` +* `blackbox shred --all| ...` +* `blackbox status --all| ...` Backwards compatibility: The old scripts will be rewritten to use the new commands. 
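The binv2/ shims in the next diff implement exactly this compatibility layer: each v1 command name becomes a two-line script that execs the corresponding v2 subcommand. A minimal sketch of the pattern (illustrative only; it mirrors the real `binv2/blackbox_addadmin` shim that follows):

```
#!/usr/bin/env bash
# v1 compatibility shim: the old command name simply delegates to the new CLI.
exec blackbox admin add "$@"
```

Existing automation that calls the old script names keeps working while the implementation moves into the single `blackbox` binary.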
diff --git a/binv2/blackbox_addadmin b/binv2/blackbox_addadmin new file mode 100755 index 00000000..78242c3f --- /dev/null +++ b/binv2/blackbox_addadmin @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox admin add "$@" diff --git a/binv2/blackbox_cat b/binv2/blackbox_cat new file mode 100755 index 00000000..bde064f3 --- /dev/null +++ b/binv2/blackbox_cat @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox cat "$@" diff --git a/binv2/blackbox_decrypt_all_files b/binv2/blackbox_decrypt_all_files new file mode 100755 index 00000000..19f1f9dd --- /dev/null +++ b/binv2/blackbox_decrypt_all_files @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox decrypt --all --agentcheck=true --overwrite "@" diff --git a/binv2/blackbox_decrypt_file b/binv2/blackbox_decrypt_file new file mode 100755 index 00000000..ca1f46aa --- /dev/null +++ b/binv2/blackbox_decrypt_file @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox decrypt --overwrite "$@" diff --git a/binv2/blackbox_deregister_file b/binv2/blackbox_deregister_file new file mode 100755 index 00000000..18ade4a4 --- /dev/null +++ b/binv2/blackbox_deregister_file @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox file remove --safe "$@" diff --git a/binv2/blackbox_diff b/binv2/blackbox_diff new file mode 100755 index 00000000..82be48aa --- /dev/null +++ b/binv2/blackbox_diff @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox diff --diff "$@" diff --git a/binv2/blackbox_edit b/binv2/blackbox_edit new file mode 100755 index 00000000..f06cfa55 --- /dev/null +++ b/binv2/blackbox_edit @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox edit "$@" diff --git a/binv2/blackbox_edit_end b/binv2/blackbox_edit_end new file mode 100755 index 00000000..3f44f72e --- /dev/null +++ b/binv2/blackbox_edit_end @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox encrypt --shred "$@" diff --git a/binv2/blackbox_edit_start b/binv2/blackbox_edit_start new file mode 100755 index 00000000..449ac3a6 --- /dev/null +++ b/binv2/blackbox_edit_start @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox decrypt "$@" diff --git a/binv2/blackbox_initialize b/binv2/blackbox_initialize new file mode 100755 index 00000000..0845a202 --- /dev/null +++ b/binv2/blackbox_initialize @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox init "$@" diff --git a/binv2/blackbox_list_admins b/binv2/blackbox_list_admins new file mode 100755 index 00000000..d971bb12 --- /dev/null +++ b/binv2/blackbox_list_admins @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox admin list diff --git a/binv2/blackbox_list_files b/binv2/blackbox_list_files new file mode 100755 index 00000000..bd4fae3d --- /dev/null +++ b/binv2/blackbox_list_files @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox file list diff --git a/binv2/blackbox_listadmins b/binv2/blackbox_listadmins new file mode 100755 index 00000000..d971bb12 --- /dev/null +++ b/binv2/blackbox_listadmins @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox admin list diff --git a/binv2/blackbox_postdeploy b/binv2/blackbox_postdeploy new file mode 100755 index 00000000..ef5a142e --- /dev/null +++ b/binv2/blackbox_postdeploy @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +blackbox decrypt --all --overwrite --group "$1" diff --git a/binv2/blackbox_register_new_file b/binv2/blackbox_register_new_file new file mode 100755 index 00000000..76797931 --- /dev/null +++ b/binv2/blackbox_register_new_file @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox file add --shred "$@" diff --git a/binv2/blackbox_removeadmin b/binv2/blackbox_removeadmin new file mode 100755 index 
00000000..ede86d19 --- /dev/null +++ b/binv2/blackbox_removeadmin @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox admin remove "$@" diff --git a/binv2/blackbox_shred_all_files b/binv2/blackbox_shred_all_files new file mode 100755 index 00000000..4237cc8b --- /dev/null +++ b/binv2/blackbox_shred_all_files @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox shred --all diff --git a/binv2/blackbox_update_all_files b/binv2/blackbox_update_all_files new file mode 100755 index 00000000..00caf9b6 --- /dev/null +++ b/binv2/blackbox_update_all_files @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec blackbox reencrypt --all --agentcheck diff --git a/binv2/blackbox_view b/binv2/blackbox_view new file mode 100755 index 00000000..04b0060f --- /dev/null +++ b/binv2/blackbox_view @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +blackbox cat "$@" | ${PAGER:-less} diff --git a/binv2/blackbox_whatsnew b/binv2/blackbox_whatsnew new file mode 100755 index 00000000..996b0b91 --- /dev/null +++ b/binv2/blackbox_whatsnew @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# +# blackbox_whatsnew - show what has changed in the last commit for a given file +# +exec blackbox whatsnew "$@" +exit 0 + +set -e +source "${0%/*}/_blackbox_common.sh" + +if [[ $# -ne 1 ]] +then + echo "Pass only 1 file at a time" + exit 1 +fi + +fail_if_not_in_repo +gpg_agent_notice + +COLUMNS=`tput cols` +FILE=$1 +GIT="git log --abbrev-commit --pretty=oneline" +CURR_COMMIT=`$GIT $FILE | head -1 | awk '{print $1}'` +PREV_COMMIT=`$GIT ${CURR_COMMIT}~1 $FILE | head -1 | awk '{print $1}'` +# Use colordiff if available +if which colordiff > /dev/null 2>&1 + then DIFF="colordiff" + else DIFF="diff" +fi + +cat_commit() +{ + COMMIT=$1 + git checkout $COMMIT $FILE + echo "[$COMMIT] $FILE" + echo "---------------------" + "${BLACKBOX_HOME}/blackbox_cat" $FILE | sed '/========== PLAINFILE/,/========== EXTRACTING/d' +} + +CURR_CONTENT=`cat_commit $CURR_COMMIT` +PREV_CONTENT=`cat_commit $PREV_COMMIT` +clear + +# For some unknown reason this command executes fine but return exit code 1 +$DIFF -y --width $COLUMNS \ + <(echo "CURRENT" "$CURR_CONTENT" | fold -w $(( $COLUMNS / 2 - 4 )) ) \ + <(echo "PREVIOUS" "$PREV_CONTENT" | fold -w $(( $COLUMNS / 2 - 4 )) ) + +git checkout $CURR_COMMIT $FILE +echo diff --git a/cmd/blackbox/blackbox.go b/cmd/blackbox/blackbox.go new file mode 100644 index 00000000..28ce017d --- /dev/null +++ b/cmd/blackbox/blackbox.go @@ -0,0 +1,22 @@ +package main + +import ( + "fmt" + "os" + + _ "github.com/StackExchange/blackbox/v2/pkg/crypters" + _ "github.com/StackExchange/blackbox/v2/pkg/crypters/_all" + _ "github.com/StackExchange/blackbox/v2/pkg/vcs" + _ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all" +) + +var dryRun bool + +func main() { + app := flags() + err := app.Run(os.Args) + if err != nil { + fmt.Fprintf(os.Stderr, "ERROR: %s\n", err) + os.Exit(1) + } +} diff --git a/cmd/blackbox/cli.go b/cmd/blackbox/cli.go new file mode 100644 index 00000000..74a252f4 --- /dev/null +++ b/cmd/blackbox/cli.go @@ -0,0 +1,226 @@ +package main + +// cli.go -- Create urfave/cli datastructures and apply them. 
+ +import ( + "fmt" + "syscall" + + "github.com/urfave/cli/v2" +) + +func flags() *cli.App { + app := cli.NewApp() + app.Version = "2.0.0" + app.Usage = "Maintain encrypted files in a VCS (Git, Hg, Svn)" + + defUmask := syscall.Umask(0) + syscall.Umask(defUmask) + defUmaskS := fmt.Sprintf("%04o", defUmask) + + app.Flags = []cli.Flag{ + // &cli.BoolFlag{ + // Name: "dry-run", + // Aliases: []string{"n"}, + // Usage: "show what would have been done", + // Destination: &dryRun, + // }, + &cli.StringFlag{ + Name: "vcs", + Usage: "Use this VCS (GIT, NONE) rather than autodetect", + EnvVars: []string{"BLACKBOX_VCS"}, + }, + &cli.StringFlag{ + Name: "crypto", + Usage: "Crypto back-end plugin", + Value: "GnuPG", + EnvVars: []string{"BLACKBOX_CRYPTO"}, + }, + &cli.StringFlag{ + Name: "config", + Usage: "Path to config", + //Value: ".blackbox", + EnvVars: []string{"BLACKBOX_CONFIGDIR", "BLACKBOXDATA"}, + }, + &cli.StringFlag{ + Name: "team", + Usage: "Use .blackbox-$TEAM as the configdir", + EnvVars: []string{"BLACKBOX_TEAM"}, + }, + &cli.StringFlag{ + Name: "editor", + Usage: "editor to use", + Value: "vi", + EnvVars: []string{"EDITOR", "BLACKBOX_EDITOR"}, + }, + &cli.StringFlag{ + Name: "umask", + Usage: "umask to set when decrypting", + Value: defUmaskS, + EnvVars: []string{"BLACKBOX_UMASK", "DECRYPT_UMASK"}, + }, + &cli.BoolFlag{ + Name: "debug", + Usage: "Show debug output", + EnvVars: []string{"BLACKBOX_DEBUG"}, + }, + } + + app.Commands = []*cli.Command{ + + // List items in the order they appear in the help menu. + + { + Name: "decrypt", + Aliases: []string{"de", "start"}, + Usage: "Decrypt file(s)", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "all", Usage: "All registered files"}, + &cli.BoolFlag{Name: "agentcheck", Usage: "Do not check for gpg-agent when using --all"}, + &cli.StringFlag{Name: "group", Usage: "Set group ownership"}, + &cli.BoolFlag{Name: "overwrite", Usage: "Overwrite plaintext if it exists"}, + }, + Action: func(c *cli.Context) error { return cmdDecrypt(c) }, + }, + + { + Name: "encrypt", + Aliases: []string{"en", "end"}, + Usage: "Encrypts file(s)", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "shred", Usage: "Remove plaintext afterwords"}, + }, + Action: func(c *cli.Context) error { return cmdEncrypt(c) }, + }, + + { + Name: "edit", + Aliases: []string{"vi"}, + Usage: "Runs $EDITOR on file(s) (decrypt if needed)", + Action: func(c *cli.Context) error { return cmdEdit(c) }, + }, + + { + Name: "cat", + Usage: "Output plaintext to stderr (decrypt if needed)", + Action: func(c *cli.Context) error { return cmdCat(c) }, + }, + + { + Name: "diff", + Usage: "Diffs against encrypted version", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "all", Usage: "all files"}, + }, + Action: func(c *cli.Context) error { return cmdDiff(c) }, + }, + + { + Name: "init", + Category: "ADMINISTRATIVE", + Usage: "Initialized blackbox for this repo", + Action: func(c *cli.Context) error { return cmdInit(c) }, + }, + + { + Name: "admin", + Category: "ADMINISTRATIVE", + Usage: "Add/list/remove administrators", + Subcommands: []*cli.Command{ + { + Name: "add", + Usage: "Adds admin(s)", + Action: func(c *cli.Context) error { return cmdAdminAdd(c) }, + }, + { + Name: "list", + Usage: "Lists admins", + Action: func(c *cli.Context) error { return cmdAdminList(c) }, + }, + { + Name: "remove", + Usage: "Remove admin(s)", + Action: func(c *cli.Context) error { return cmdAdminRemove(c) }, + }, + }, + }, + + { + Name: "file", + Category: "ADMINISTRATIVE", + Usage: "Add/list/remove files from the registry", + 
Subcommands: []*cli.Command{ + { + Name: "add", + Usage: "Registers file with the system", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "shred", Usage: "Remove plaintext afterwords"}, + }, + Action: func(c *cli.Context) error { return cmdFileAdd(c) }, + }, + { + Name: "list", + Usage: "Lists the registered files", + Action: func(c *cli.Context) error { return cmdFileList(c) }, + }, + { + Name: "remove", + Usage: "Deregister file from the system", + Action: func(c *cli.Context) error { return cmdFileRemove(c) }, + }, + }, + }, + + { + Name: "info", + Category: "DEBUG", + Usage: "Report what we know about this repo", + Action: func(c *cli.Context) error { return cmdInfo(c) }, + }, + + { + Name: "shred", + Usage: "Shred files, or --all for all registered files", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "all", Usage: "All registered files"}, + }, + Action: func(c *cli.Context) error { return cmdShred(c) }, + }, + + { + Name: "status", + Category: "ADMINISTRATIVE", + Usage: "Print status of files", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "name-only", Usage: "Show only names of the files"}, + &cli.BoolFlag{Name: "all", Usage: "All registered files"}, + &cli.StringFlag{Name: "type", Usage: "only list if status matching this string"}, + }, + Action: func(c *cli.Context) error { return cmdStatus(c) }, + }, + + { + Name: "reencrypt", + Usage: "Decrypt then re-encrypt files (erases any plaintext)", + Category: "ADMINISTRATIVE", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "all", Usage: "All registered files"}, + &cli.BoolFlag{Name: "overwrite", Usage: "Overwrite plaintext if it exists"}, + &cli.BoolFlag{Name: "agentcheck", Usage: "Do not check for gpg-agent when using --all"}, + }, + Action: func(c *cli.Context) error { return cmdReencrypt(c) }, + }, + + { + Name: "testing_init", + Usage: "For use with integration test", + Category: "INTEGRATION TEST", + Action: func(c *cli.Context) error { return testingInit(c) }, + }, + + // + + } + + return app +} diff --git a/cmd/blackbox/drive.go b/cmd/blackbox/drive.go new file mode 100644 index 00000000..6e827ccc --- /dev/null +++ b/cmd/blackbox/drive.go @@ -0,0 +1,296 @@ +package main + +// Now that cli.go has processed the flags, validate there are no +// conflicts and drive to the business logic. + +import ( + "fmt" + "log" + "os" + + "github.com/StackExchange/blackbox/v2/pkg/bblog" + "github.com/StackExchange/blackbox/v2/pkg/box" + "github.com/urfave/cli/v2" +) + +var logErr *log.Logger + +func init() { + if logErr == nil { + logErr = log.New(os.Stderr, "", 0) + } +} + +func allOrSomeFiles(c *cli.Context) error { + if c.Bool("all") && c.Args().Present() { + return fmt.Errorf("Can not specify filenames and --all") + } + if (!c.Args().Present()) && (!c.Bool("all")) { + return fmt.Errorf("Must specify at least one file name or --all") + } + return nil +} + +const roError = `This command is disabled due to --config flag being used. +We can not determine if the flag's value is in or out of the repo, and +Blackbox can only work on one repo at a time. If the value is inside the +repo, and you'd like to suggest an algorithm that would let us determine that +automatically, please file a bug. We'd love to have this work better. In the +meanwhile, run this command without the --config flag, perhaps after cd'ing +to the base of the repo.` + +// Keep these functions in alphabetical order. + +func cmdAdminAdd(c *cli.Context) error { + if c.NArg() == 0 || c.NArg() > 2 { + return fmt.Errorf( + "Must specify one admin's GnuPG user-id (i.e. 
email address) and optionally the directory of the pubkey data (default ~/.GnuPG)") + } + bx := box.NewFromFlags(c) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.AdminAdd(c.Args().Get(0), c.Args().Get(1)) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdAdminList(c *cli.Context) error { + if c.Args().Present() { + return fmt.Errorf("This command takes zero arguments") + } + bx := box.NewFromFlags(c) + err := bx.AdminList() + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdAdminRemove(c *cli.Context) error { + if !c.Args().Present() { + return fmt.Errorf("Must specify at least one admin's GnuPG user-id (i.e. email address)") + } + bx := box.NewFromFlags(c) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.AdminRemove(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdCat(c *cli.Context) error { + if !c.Args().Present() { + return fmt.Errorf("Must specify at least one file name") + } + bx := box.NewFromFlags(c) + err := bx.Cat(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdDecrypt(c *cli.Context) error { + if err := allOrSomeFiles(c); err != nil { + return err + } + + // The default for --agentcheck is off normally, and on when using --all. + pauseNeeded := c.Bool("all") + // If the user used the flag, abide by it. + if c.IsSet("agentcheck") { + pauseNeeded = c.Bool("agentcheck") + } + + bx := box.NewFromFlags(c) + err := bx.Decrypt(c.Args().Slice(), + c.Bool("overwrite"), + pauseNeeded, + c.String("group"), + ) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdDiff(c *cli.Context) error { + if err := allOrSomeFiles(c); err != nil { + return err + } + bx := box.NewFromFlags(c) + err := bx.Diff(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdEdit(c *cli.Context) error { + if !c.Args().Present() { + return fmt.Errorf("Must specify at least one file name") + } + bx := box.NewFromFlags(c) + err := bx.Edit(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdEncrypt(c *cli.Context) error { + if err := allOrSomeFiles(c); err != nil { + return err + } + bx := box.NewFromFlags(c) + err := bx.Encrypt(c.Args().Slice(), c.Bool("shred")) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdFileAdd(c *cli.Context) error { + if !c.Args().Present() { + return fmt.Errorf("Must specify at least one file name") + } + bx := box.NewFromFlags(c) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.FileAdd(c.Args().Slice(), c.Bool("shred")) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdFileList(c *cli.Context) error { + if c.Args().Present() { + return fmt.Errorf("This command takes zero arguments") + } + bx := box.NewFromFlags(c) + err := bx.FileList() + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdFileRemove(c *cli.Context) error { + if !c.Args().Present() { + return fmt.Errorf("Must specify at least one file name") + } + bx := box.NewFromFlags(c) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.FileRemove(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdInfo(c *cli.Context) error { + if c.Args().Present() { + return fmt.Errorf("This command takes zero arguments") + } + bx := box.NewFromFlags(c) + err := bx.Info() + if err != nil { + 
return err + } + return bx.Vcs.FlushCommits() +} + +func cmdInit(c *cli.Context) error { + if c.Args().Len() > 1 { + return fmt.Errorf("This command takes one or two arguments") + } + bx := box.NewUninitialized(c) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.Init(c.Args().First(), c.String("vcs")) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdReencrypt(c *cli.Context) error { + if err := allOrSomeFiles(c); err != nil { + return err + } + + // The default for --agentcheck is off normally, and on when using --all. + pauseNeeded := c.Bool("all") + // If the user used the flag, abide by it. + if c.IsSet("agentcheck") { + pauseNeeded = c.Bool("agentcheck") + } + + bx := box.NewFromFlags(c) + err := bx.Reencrypt(c.Args().Slice(), + c.Bool("overwrite"), + pauseNeeded, + ) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdShred(c *cli.Context) error { + if err := allOrSomeFiles(c); err != nil { + return err + } + bx := box.NewFromFlags(c) + err := bx.Shred(c.Args().Slice()) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +func cmdStatus(c *cli.Context) error { + if c.Bool("all") && c.Args().Present() { + return fmt.Errorf("Can not specify filenames and --all") + } + bx := box.NewFromFlags(c) + err := bx.Status(c.Args().Slice(), c.Bool("name-only"), c.String("type")) + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} + +// These are "secret" commands used by the integration tests. + +func testingInit(c *cli.Context) error { + if c.Args().Present() { + return fmt.Errorf("No args required") + } + + logDebug := bblog.GetDebug(c.Bool("debug")) + logDebug.Printf( + "c.String(vcs) reports %q\n", + c.String("vcs"), + ) + bx := box.NewForTestingInit(c.String("vcs")) + if bx.ConfigRO { + return fmt.Errorf(roError) + } + err := bx.TestingInitRepo() + if err != nil { + return err + } + return bx.Vcs.FlushCommits() +} diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..b3b76032 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,61 @@ +BlackBox +======== + +Blackbox is an open source tool that enables you to safe store sensitive information in +Git (or other) repos by encrypting them with GPG. Only the encrypted +version of the file is available. You can be free to provide access +to the repo, as but only people with the right GPG keys can access the +encrypted data. + +Things you should **never** store in a repo without encryption: + +* TLS (SSL) certificates +* Passwords +* API keys +* And more! 
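As a quick illustration of the basic workflow (a sketch only, using the v2 subcommands defined in `cmd/blackbox/cli.go`; the file name and email address here are just examples):

```
blackbox init                       # enable blackbox in this repo
blackbox admin add tal@example.com  # grant a GnuPG key access
blackbox file add secrets.yml       # register and encrypt secrets.yml
blackbox cat secrets.yml            # print the decrypted contents
blackbox edit secrets.yml           # run $EDITOR on the decrypted file
```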
+ +Project Info: + +* [Overview](user-overview.md) +* [Why is this important?](why-is-this-important.md) +* [Support/Community](support.md) +* [How BB encrypts](encryption.md) +* [OS Compatibility](compatibility.md) +* [Installation Instructions](installation.md) +* [Alternatives](alternatives.md) + +User Info: + +* [Enabling Blackbox on a Repo](enable-repo.md) +* [Enroll a file](enable-repo.md) +* [Full Command List](full-command-list.md) +* [Add/Remove users](admin-ops.md) +* [Add/Remove files](file-ops.md) +* [Advanced techiques](advanced.md) +* [Use with Role Accounts](role-accounts.md) +* [Backwards Compatibility](backwards-compatibility.md) +* [Replacing expired keys](expired-keys.md) +* [Git Tips](git-tips.md) +* [SubVersion Tips](subversion-tips.md) +* [GnuPG tips](gnupg-tips.md) +* [Use with Ansible](with-ansible.md) +* [Use with Puppet](with-puppet.md) + +For contributors: + +* [Developer Info](dev.md) +* [Code overview](dev-code-overview.md) +* [HOWTO: Add new OS support](dev-add-os-support.md) +* [HOWTO: Add new VCS support](dev-add-vcs-support.md) + + +A slide presentation about an older release [is on SlideShare](http://www.slideshare.net/TomLimoncelli/the-blackbox-project-sfae). + +Join our mailing list: [https://groups.google.com/d/forum/blackbox-project](https://groups.google.com/d/forum/blackbox-project) + + +License +======= + +This content is released under the MIT License. +See the [LICENSE.txt](LICENSE.txt) file. diff --git a/docs/admin-ops.md b/docs/admin-ops.md new file mode 100644 index 00000000..7767154b --- /dev/null +++ b/docs/admin-ops.md @@ -0,0 +1,148 @@ +User Management +=============== + + +# Who are the current admins? + +``` +blackbox admin list +``` + + +# Add a new user (admin) + +FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?" + +`.blackbox/blackbox-admins.txt` is a file that lists which users are able to decrypt files. (More pedantically, it is a list of the GnuPG key names that the file is encrypted for.) + +To join the list of people that can edit the file requires three steps; You create a GPG key and add it to the key ring. Then, someone that already has access adds you to the system. Lastly, you should test your access. + +## Step 1: NEWPERSON creates a GPG key pair on a secure machine and add to public keychain. + +If you don't already have a GPG key, here's how to generate one: + +``` +gpg --gen-key +``` + +WARNING: New versions of GPG generate keys which are not understood by +old versions of GPG. If you generate a key with a new version of GPG, +this will cause problems for users of older versions of GPG. +Therefore it is recommended that you either assure that everyone using +Blackbox have the exact same version of GPG, or generate GPG keys +using a version of GPG as old as the oldest version of GPG used by +everyone using Blackbox. + +Pick defaults for encryption settings, 0 expiration. Pick a VERY GOOD +passphrase. Store a backup of the private key someplace secure. For +example, keep the backup copy on a USB drive that is locked in safe. +Or, at least put it on a secure machine with little or no internet +access, full-disk-encryption, etc. Your employer probably has rules +about how to store such things. + +FYI: If generating the key is slow, this is usually because the system +isn't generating enough entropy. 
Tip: Open another window on that +machine and run this command: `ls -R /` + +Now that you have a GPG key, add yourself as an admin: + +``` +blackbox admin add KEYNAME +``` + +...where "KEYNAME" is the email address listed in the gpg key you created previously. For example: + +``` +blackbox admin add tal@example.com +``` + +When the command completes successfully, instructions on how to commit these changes will be output. Run the command as given to commit the changes. It will look like this: + +``` +git commit -m'NEW ADMIN: tal@example.com' .blackbox/pubring.gpg .blackbox/trustdb.gpg .blackbox/blackbox-admins.txt +``` + + +Then push it to the repo: + +``` +git push + +or + +ht push + +(or whatever is appropriate) +``` + +NOTE: Creating a Role Account? If you are adding the pubring.gpg of a role account, you can specify the directory where the pubring.gpg file can be found as a 2nd parameter: `blackbox admin add puppetmaster@puppet-master-1.example.com /path/to/the/dir` + +## Step 2: AN EXISTING ADMIN accepts you into the system. + +Ask someone that already has access to re-encrypt the data files. This +gives you access. They simply decrypt and re-encrypt the data without +making any changes. + +Pre-check: Verify the new keys look good. + +``` +git pull # Or whatever is required for your system +gpg --homedir=.blackbox --list-keys +``` + +For example, examine the key name (email address) to make sure it conforms to corporate standards. + +Import the keychain into your personal keychain and reencrypt: + +``` +gpg --import .blackbox/pubring.gpg +blackbox reencrypt --all shred +``` + +Push the re-encrypted files: + +``` +git commit -a +git push + +or + +hg commit +hg push +``` + +### Step 3: NEWPERSON tests. + +Make sure you can decrypt a file. (Suggestion: Keep a dummy file in +VCS just for new people to practice on.) + + +# Remove a user + +Simply run `blackbox admin remove` with their keyname then re-encrypt: + +Example: + +``` +blackbox admin remove olduser@example.com +blackbox reencrypt --all shred +``` + +When the command completes, you will be given a reminder to check in the change and push it. + +Note that their keys will still be in the key ring, but they will go unused. If you'd like to clean up the keyring, use the normal GPG commands and check in the file. + +FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?" + +``` +gpg --homedir=.blackbox --list-keys +gpg --homedir=.blackbox --delete-key olduser@example.com +git commit -m'Cleaned olduser@example.com from keyring' .blackbox/* +``` + +FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?" + +The key ring only has public keys. There are no secret keys to delete. + +Remember that this person did have access to all the secrets at one time. They could have made a copy. Therefore, to be completely secure, you should change all passwords, generate new SSL keys, and so on just like when anyone that had privileged access leaves an organization. + diff --git a/docs/advanced.md b/docs/advanced.md new file mode 100644 index 00000000..77062826 --- /dev/null +++ b/docs/advanced.md @@ -0,0 +1,46 @@ +Advanced Techniques +=================== + + +# Using Blackbox without a repo + +If the files are copied out of a repo they can still be decrypted and +edited. Obviously edits, changes to keys, and such will be lost if +they are made outside the repo. Also note that commands are most +likely to only work if run from the base directory (i.e. 
the parent to
+the .blackbox directory).
+
+Without a repo, all commands must be run from the same directory
+as the ".blackbox" directory. It might work otherwise, but no
+promises.
+
+
+# Mixing gpg 1.x/2.0 and 2.2
+
+WARNING: Each version of GnuPG uses a different, and incompatible,
+binary format to store the keychain. When Blackbox was originally
+created, I didn't know this. Things are mostly upwards compatible.
+That said, if you have some admins with GnuPG 1.x and others with GnuPG 2.2,
+you may corrupt the keychain.
+
+A future version will store the keychain in a GnuPG-approved
+version-neutral format.
+
+
+# Having gpg and gpg2 on the same machine
+
+NOTE: This is not implemented at this time. TODO(tlim) Use GPG to find
+the binary.
+
+In some situations, team members or automated roles need to install gpg
+2.x alongside the system gpg version 1.x to catch up with the team's gpg
+version. On Ubuntu 16, you can `apt-get install gnupg2`, which
+installs the binary gpg2. If you want to use this gpg2 binary, run every
+blackbox command with GPG=gpg2.
+
+For example:
+
+```
+GPG=gpg2 blackbox_postdeploy
+```
+
diff --git a/docs/alternatives.md b/docs/alternatives.md
new file mode 100644
index 00000000..01d52ea0
--- /dev/null
+++ b/docs/alternatives.md
@@ -0,0 +1,14 @@
+Alternatives
+============
+
+Here are other open source packages that do something similar to
+BlackBox. If you like them better than BlackBox, please use them.
+
+- [git-crypt](https://www.agwa.name/projects/git-crypt/)
+- [Pass](http://www.zx2c4.com/projects/password-store/)
+- [Transcrypt](https://github.com/elasticdog/transcrypt)
+- [Keyringer](https://keyringer.pw/)
+- [git-secret](https://github.com/sobolevn/git-secret)
+
+git-crypt has the best git integration. Once set up, it is nearly
+transparent to the users. However, it only works with git.
diff --git a/docs/backwards-compatibility.md b/docs/backwards-compatibility.md
new file mode 100644
index 00000000..a2790ae0
--- /dev/null
+++ b/docs/backwards-compatibility.md
@@ -0,0 +1,55 @@
+Backwards Compatibility
+=======================
+
+# Where is the configuration stored? .blackbox vs. keyrings/live
+
+Blackbox stores its configuration data in the `.blackbox` subdirectory. Older
+repos use `keyrings/live`. For backwards compatibility, either will work.
+
+All documentation refers to `.blackbox`.
+
+You can convert an old repo by simply renaming the directory:
+
+```
+mv keyrings/live .blackbox
+rmdir keyrings
+```
+
+There is no technical reason to convert old repos except that it is less
+confusing to users.
+
+This change was made in commit 60e782a0, release v1.20180615.
+
+
+# How blackbox finds the config directory:
+
+## Creating the repo:
+
+`blackbox init` creates the config directory in the root
+of the repo. Here's how it picks the name:
+
+- If `$BLACKBOX_TEAM` is set, `.blackbox-$BLACKBOX_TEAM` is used.
+- If the `--team TEAMNAME` flag is set, it uses `.blackbox-TEAMNAME`.
+- Otherwise, it uses `.blackbox`.
+
+When searching for the configuration directory, the following
+locations are checked. First match wins.
+
+- `.blackbox-$BLACKBOX_TEAM` (only if `$BLACKBOX_TEAM` is set)
+- The value of the `--config` flag (if the flag is set)
+- `$BLACKBOX_CONFIGDIR` (the preferred env. variable to use)
+- `$BLACKBOXDATA` (for backwards compatibility with v1)
+- `.blackbox`
+- `keyrings/live` (for backwards compatibility)
+
+NOTE: The env variables and `--config` should be set to the full path
+to the config directory (i.e.: `/Users/tom/gitstuff/myrepo/.blackbox`).
+If it is set to a relative directory (i.e. `.blackbox` or
+`../myrepo/.blackbox`), most commands will break.
+
+NOTE: Why the change from `$BLACKBOXDATA` to `$BLACKBOX_CONFIGDIR`? We want
+all the env. variables to begin with the prefix `BLACKBOX_`. If v1
+supported another name, that is still supported. If you are starting
+with v2 and have no other users using v1, please use the `BLACKBOX_`
+prefix.
+
diff --git a/docs/compatibility.md b/docs/compatibility.md
new file mode 100644
index 00000000..0ae48658
--- /dev/null
+++ b/docs/compatibility.md
@@ -0,0 +1,78 @@
+Compatibility
+=============
+
+# Compatibility with Blackbox v1
+
+The command names all changed from v1 to v2. The `binv2` directory
+includes shell scripts that provide full backwards compatibility.
+
+# Supported Architectures
+
+Blackbox supports a plug-in architecture to easily support multiple VCS
+systems. Current support is for:
+
+## Supported VCS/DVCS systems
+
+* git
+* "none" (repo-less use is supported)
+* WOULD LOVE VOLUNTEERS TO HELP ADD SUPPORT FOR: hg, svn, p4
+
+## Supported GPG versions
+
+* GnuPG 1.x and 2.0
+* GnuPG 2.2 and higher
+* WOULD LOVE VOLUNTEERS TO HELP ADD SUPPORT FOR:
+  golang.org/x/crypto/openpgp (this would make the code have no
+  external dependencies)
+
+## Supported Operating systems
+
+Blackbox should work on any Linux system with GnuPG installed.
+Blackbox simply looks for `gpg` in `$PATH`.
+
+Windows: It should work (but has not been extensively tested) on
+Windows WSL2.
+
+# Automated testing
+
+While many combinations work, we do automated tests
+on these combinations. If any of these fail, it blocks the release:
+
+* macOS: GnuPG 2.2 executables from https://gpgtools.org/
+* CentOS: GnuPG 2.0.x executables from the "base" or "updates" repo.
+
+Windows native: VOLUNTEER NEEDED to make a native Windows version
+(should be rather simple as Go does most of the work)
+
+NOTE: Version 1 worked on CentOS/RedHat, macOS, Cygwin, MinGW, NetBSD,
+and SmartOS. Hopefully we can achieve that broad level of support in
+the future. Any system that is supported by the Go language and
+has GnuPG 2.0.x or higher binaries available should be easy to achieve.
+We'd also like to have automated testing for the same.
+
+# Windows Support
+
+BlackBox assumes that `blackbox-admins.txt` and `blackbox-files.txt` will have
+LF line endings. Windows users should be careful to configure Git or other systems
+to not convert or "fix" those files.
+
+If you use Git, add the following lines to your `.gitattributes` file:
+
+    **/blackbox-admins.txt text eol=lf
+    **/blackbox-files.txt text eol=lf
+
+The `blackbox init` command (and newer versions of `blackbox_initialize`)
+will create an appropriate `.gitattributes` file for you.
+
+# Cygwin
+
+TODO: List what packages are required for building the software.
+
+TODO: List what packages are required for running the software.
+
+
+# MinGW
+
+MinGW (comes with Git for Windows) support requires the following:
+
+TODO: FILL IN any requirements
diff --git a/docs/dev-code-overview.md b/docs/dev-code-overview.md
new file mode 100644
index 00000000..319a0e5f
--- /dev/null
+++ b/docs/dev-code-overview.md
@@ -0,0 +1,36 @@
+Code Overview
+=============
+
+Here is how the code is laid out.
+
+TODO(tlim): Add a diagram of the layers
+
+```
+cmd/blackbox/     The command line tool.
+    blackbox.go   main()
+    cli.go        Definition of all subcommands and flags
+    drive.go      Processes flags and calls functions in verbs.go
+                  NOTE: These are the only files that are aware of the
+                  flags.
Everything else gets the flag data passed to it + as a parameter. This way the remaining system can be + used as a module. + +pkg/box/ High-level functions related to "the black box". + verbs.go One function per subcommand. + box.go Functions for manipulating the files in .blackbox + boxutils.go Helper functions for the above. + +pkg/bblog/ Module that provides logging facilities. +pkg/bbutil/ Functions that are useful to box, plug-ins, etc. +pkg/tainedname/ Module for printing filenames escaped for Bash. + +models/vcs.go The interface that defines a VCS plug-in. +models/crypters.go The interface that defines a GPG plug-in. + +pkg/crypters/ Plug-ins for GPG functionality. +pkg/crypters/gnupg Plug-in that runs an external gpg binary (found via $PATH) + +pkg/vcs/ Plug-ins for VCS functionality. +pkg/vcs/none Repo-less mode. +pkg/vcs/git Git mode. +``` diff --git a/docs/dev.md b/docs/dev.md new file mode 100644 index 00000000..931407d8 --- /dev/null +++ b/docs/dev.md @@ -0,0 +1,36 @@ +Developer Info +============== + +Code submissions are gladly welcomed! The code is fairly easy to read. + +Get the code: + +``` +git clone git@github.com:StackExchange/blackbox.git +``` + +Test your changes: + +``` +go test ./... +``` + +This runs through a number of system tests. It creates a repo, +encrypts files, decrypts files, and so on. You can run these tests to +verify that the changes you made didn't break anything. You can also +use these tests to verify that the system works with a new operating +system. + +Please submit tests with code changes: + +The best way to change BlackBox is via Test Driven Development. First +add a test to `tools/confidence.sh`. This test should fail, and +demonstrate the need for the change you are about to make. Then fix +the bug or add the feature you want. When you are done, `make +confidence` should pass all tests. The PR you submit should include +your code as well as the new test. This way the confidence tests +accumulate as the system grows as we know future changes don't break +old features. + +Note: More info about compatibility are on the [Compatibility Page](compatibility.md) + diff --git a/docs/enable-repo.md b/docs/enable-repo.md new file mode 100644 index 00000000..24b55ddb --- /dev/null +++ b/docs/enable-repo.md @@ -0,0 +1,58 @@ +Enabling Blackbox on a Repo +=========================== + +Overview: +1. Run the initialization command +2. Add at least one admin. +3. Add files. (don't add files before the admins) + +The long version: + +1. If you don't have a GPG key, set it up using instructions such as: +[Set up GPG key](https://help.github.com/articles/generating-a-new-gpg-key/). \ +Now you are ready to go. + +1. `cd` into a Git, Mercurial, Subversion or Perforce repository and run `blackbox init`. + +1. Add yourself with `blackbox admin add YOUR@EMAIL` + +1. Commit the files as directed. + +That's it! + +At this point you should encrypt a file and make sure you can decrypt +it. This verifies that everything is working as expected. + + +1. Pick a file to be encrypted. Since this is a test, you might want + to create a test file. Call it `secret.txt` and edit the file + so that it includes your mother's maiden name. Just kidding! + Store this sentence: `This is my test file.` + +2. Run `blackbox file add secret.txt` + +3. Decode the encrypted version: `blackbox cat secret.txt` + +The "cat" subcommand only accesses the encrypted (`.gpg`) file and is +a good way to see that the file was encrypted properly. 
You should +see `This is my test file.` + +4 Verify that editing the file works. + +To view and/or edit a file, run `blackbox edit --shred secret.txt` + +Now encrypt it and shred the original: + +``` +blackbox encrypt --shred secret.txt +``` + +Now make sure you can decrypt the new file: + +``` +blackbox cat secret.txt +``` + +You should see the changed text. + +Now commit and push `secret.txt.gpg` and you are done! diff --git a/docs/encryption.md b/docs/encryption.md new file mode 100644 index 00000000..6a0ad577 --- /dev/null +++ b/docs/encryption.md @@ -0,0 +1,54 @@ +How is the encryption done? +=========================== + +GPG has many different ways to encrypt a file. BlackBox uses the mode +that lets you specify a list of keys that can decrypt the message. + +If you have 5 people ("admins") that should be able to access the +secrets, each creates a GPG key and adds their public key to the +keychain. The GPG command used to encrypt the file lists all 5 key +names, and therefore any 1 key can decrypt the file. + +Blackbox stores a copy of the public keys of all admins. It never +stores the private keys. + +To remove someone's access, remove that admin's key name (i.e. email +address) from the list of admins and re-encrypt all the files. They +can still read the .gpg file (assuming they have access to the +repository) but they can't decrypt it any more. + +*What if they kept a copy of the old repo before you removed access?* +Yes, they can decrypt old versions of the file. This is why when an +admin leaves the team, you should change all your passwords, SSL +certs, and so on. You should have been doing that before BlackBox, +right? + +*Why don't you use symmetric keys?* In other words, why mess with all +this GPG key stuff and instead why don't we just encrypt all the files +with a single passphrase. Yes, GPG supports that, but then we are +managing a shared password, which is fraught with problems. If someone +"leaves the team" we would have to communicate to everyone a new +password. Now we just have to remove their key. This scales better. + +*How do automated processes decrypt without asking for a password?* +GPG requires a passphrase on a private key. However, it permits the +creation of subkeys that have no passphrase. For automated processes, +create a subkey that is only stored on the machine that needs to +decrypt the files. For example, at Stack Exchange, when our Continuous +Integration (CI) system pushes a code change to our Puppet masters, +they run `blackbox decrypt --all --overwrite` to decrypt all the files. +The user that +runs this code has a subkey that doesn't require a passphrase. Since +we have many masters, each has its own key. And, yes, this means our +Puppet Masters have to be very secure. However, they were already +secure because, like, dude... if you can break into someone's puppet +master you own their network. + +*If you use Puppet, why didn't you just use hiera-eyaml?* There are 4 +reasons: + +1. This works with any Git or Mercurial repo, even if you aren't using Puppet. +2. hiera-eyaml decrypts "on demand" which means your Puppet Master now uses a lot of CPU to decrypt keys every time it is contacted. It slows down your master, which, in my case, is already slow enough. +3. This works with binary files, without having to ASCIIify them and paste them into a YAML file. Have you tried to do this with a cert that is 10K long and changes every few weeks? Ick. +4. hiera-eyaml didn't exist when I wrote this. (That's the real reason.) 
+
diff --git a/docs/expired-keys.md b/docs/expired-keys.md
new file mode 100644
index 00000000..7f7756a6
--- /dev/null
+++ b/docs/expired-keys.md
@@ -0,0 +1,62 @@
+Replacing expired keys
+======================
+
+If someone's key has already expired, blackbox will stop
+encrypting. You will see this error:
+
+```
+$ blackbox_edit_end modified_file.txt
+--> Error: can't re-encrypt because a key has expired.
+```
+
+FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
+
+You can also detect keys that are about to expire by issuing this command and manually reviewing the "expired:" dates:
+
+    gpg --homedir=.blackbox --list-keys
+
+or... list UIDs that will expire within 1 month from today (warning: this also lists keys without an expiration date):
+
+    gpg --homedir=.blackbox --list-keys --with-colons --fixed-list-mode | grep ^uid | awk -F: '$6 < '$(( $(date +%s) + 2592000))
+
+Here's how to replace the key:
+
+- Step 1. Administrator removes expired user:
+
+Warning: This process will erase any unencrypted files that you were in the process of editing. Copy them elsewhere and restore the changes when done.
+
+```
+blackbox_removeadmin expired_user@example.com
+# This next command overwrites any changed unencrypted files. See warning above.
+blackbox_update_all_files
+git commit -m "Re-encrypt all files"
+gpg --homedir=.blackbox --delete-key expired_user@example.com
+git commit -m 'Cleaned expired_user@example.com from keyring' .blackbox/*
+git push
+```
+
+- Step 2. Expired user adds an updated key:
+
+```
+git pull
+blackbox_addadmin updated_user@example.com
+git commit -m'NEW ADMIN: updated_user@example.com' .blackbox/pubring.gpg .blackbox/trustdb.gpg .blackbox/blackbox-admins.txt
+git push
+```
+
+- Step 3. Administrator re-encrypts all files with the updated key of the expired user:
+
+```
+git pull
+gpg --import .blackbox/pubring.gpg
+blackbox_update_all_files
+git commit -m "Re-encrypt all files"
+git push
+```
+
+- Step 4: Clean up:
+
+Any files that were temporarily copied in the first step so as to not be overwritten can now be copied back and re-encrypted with the `blackbox_edit_end` command.
+
+(Thanks to @chishaku for finding a solution to this problem!)
+
diff --git a/docs/file-ops.md b/docs/file-ops.md
new file mode 100644
index 00000000..3c37b6b9
--- /dev/null
+++ b/docs/file-ops.md
@@ -0,0 +1,55 @@
+How to add/remove a file from the system?
+=========================================
+
+# Adding files:
+
+- If you need to, start the GPG Agent: `eval $(gpg-agent --daemon)`
+- Add the file to the system:
+
+```
+blackbox file add path/to/file.name.key
+
+# If you want to delete the old plaintext:
+blackbox file add --shred path/to/file.name.key
+```
+
+Multiple file names can be specified on the command line.
+
+Example 1: Register 2 files:
+
+```
+blackbox file add --shred file1.txt file2.txt
+```
+
+Example 2: Register all the files in `$DIR`:
+
+```
+find $DIR -type f -not -name '*.gpg' -print0 | xargs -0 blackbox file add
+```
+
+
+# Removing files
+
+This command deregisters a file from the system:
+
+```
+blackbox file remove path/to/file.name.key
+```
+
+TODO(tlim): Add examples.
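+
+Until real examples are written here, a minimal hypothetical invocation
+(the filename is made up) looks like this:
+
+```
+# Stop managing this file with blackbox:
+blackbox file remove path/to/old_secret.key
+```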
+ +# List files + +To see what files are currently enrolled in the system: + +``` +blackbox file list +``` + +You can also see their status: + +``` +blackbox status +blackbox status just_one_file.txt +blackbox status --type ENCRYPTED +``` diff --git a/docs/full-command-list.md b/docs/full-command-list.md new file mode 100644 index 00000000..ae04878f --- /dev/null +++ b/docs/full-command-list.md @@ -0,0 +1,34 @@ +Blackbox Command List +===================== + +## Global Flags +### `--vcs` +### `--crypto` +### `--config` +### `--team` +### `--editor` +### `--umask` +### `--debug` +### `--help` +### `--help` +### `--version` +## User Commands +### `blackbox decrypt` +### `blackbox encrypt` +### `blackbox edit` +### `blackbox cat` +### `blackbox diff` +### `blackbox shred` +### `blackbox help` +## User Commands +### `blackbox init` +### `blackbox admin` +### `blackbox file` +### `blackbox status` +### `blackbox reencrypt` +## Debug +### `blackbox info` +## Integration Test (secret menu) +### `blackbox testing_init` + +TODO(tlim): Can we automatically generate this? The data is all in cli.go diff --git a/docs/git-tips.md b/docs/git-tips.md new file mode 100644 index 00000000..2db72d33 --- /dev/null +++ b/docs/git-tips.md @@ -0,0 +1,22 @@ +GIT tips +======== + + +# Configure git to show diffs in encrypted files + +It's possible to tell Git to decrypt versions of the file before running them through `git diff` or `git log`. To achieve this do: + +- Add the following to `.gitattributes` at the top of the git repository: + +``` +*.gpg diff=blackbox +``` + +- Add the following to `.git/config`: + +``` +[diff "blackbox"] + textconv = gpg --use-agent -q --batch --decrypt +```` + +Commands like `git log -p file.gpg` and `git diff master --` will display as expected. diff --git a/docs/gnupg-tips.md b/docs/gnupg-tips.md new file mode 100644 index 00000000..4c5a42be --- /dev/null +++ b/docs/gnupg-tips.md @@ -0,0 +1,31 @@ +GnuPG tips +========== + +# Common error messages + +* Message: `gpg: filename: skipped: No public key` +* Solution: Usually this means there is an item in + `.blackbox/blackbox-admins.txt` that is not the name of the key. + Either something invalid was inserted (like a filename instead of a + username) or a user has left the organization and their key was + removed from the keychain, but their name wasn't removed from the + blackbox-admins.txt file. + +* Message: `gpg: decryption failed: No secret key` +* Solution: Usually means you forgot to re-encrypt the file with the new key. + +* Message: `Error: can't re-encrypt because a key has expired.` +* Solution: A user's key has expired and can't be used to encrypt any more. Follow the [Replace expired keys](expired-keys.md) page. + +FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?" + +# GnuPG problems + +Blackbox is just a front-end to GPG. If you get into a problem with a +key or file, you'll usually have better luck asking for advice on +the gnupg users mailing list TODO: Get link to this list + + +The author of Blackbox is not a GnuPG expert. He wrote Blackbox +because it was better than trying to remember GPG's horrible flag +names. 
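+
+A quick way to investigate the "No public key" message above is to compare
+the names in the admins file with the keys that are actually in the repo's
+keyring. This is only a sketch; substitute `keyrings/live` for `.blackbox`
+if that is what your repo uses:
+
+```
+# Key names the files are encrypted for:
+cat .blackbox/blackbox-admins.txt
+
+# Keys actually present in the repo's keyring:
+gpg --homedir=.blackbox --list-keys
+```
+
+Any name that appears in the first list but not in the second is a likely
+cause of the error.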
diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 00000000..3777def8 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,17 @@ +Installation Instructions +========================= + +Currently blackbox v2 is installed by compiling the code and +copying the binary someplace: + +TODO: + +``` +git clone FILL IN +``` + +Future: We will have RPM, DEB, Chocolately packages. + + +Next step: [Enable on a repo](enable-repo.md) + diff --git a/docs/role-accounts.md b/docs/role-accounts.md new file mode 100644 index 00000000..25196714 --- /dev/null +++ b/docs/role-accounts.md @@ -0,0 +1,158 @@ +Set up automated users or "role accounts" +========================================= + +TODO(tlim): I think this is overly complex. With GnuPG 2.2 and later, +you can use `--password '' --quick-generate-key userid` and you are +done. No need for subkeys. Maybe rework this? + +With role accounts, you have an automated system that needs to be able +to decrypt secrets without a password. This means the security of your +repo is based on how locked down the automation system is. This +is risky, so be careful. + + +i.e. This is how a Puppet Master can have access to the unencrypted data. + +FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?" + +An automated user (a "role account") is one that that must be able to decrypt without a passphrase. In general you'll want to do this for the user that pulls the files from the repo to the master. This may be automated with Jenkins CI or other CI system. + +GPG keys have to have a passphrase. However, passphrases are optional on subkeys. Therefore, we will create a key with a passphrase then create a subkey without a passphrase. Since the subkey is very powerful, it should be created on a very secure machine. + +There's another catch. The role account probably can't check files into Git/Mercurial. It probably only has read-only access to the repo. That's a good security policy. This means that the role account can't be used to upload the subkey public bits into the repo. + +Therefore, we will create the key/subkey on a secure machine as yourself. From there we can commit the public portions into the repo. Also from this account we will export the parts that the role account needs, copy them to where the role account can access them, and import them as the role account. + +ProTip: If asked to generate entropy, consider running this on the same machine in another window: `sudo dd if=/dev/sda of=/dev/null` + +For the rest of this doc, you'll need to make the following substitutions: + +- ROLEUSER: svc_deployacct or whatever your role account's name is. +- NEWMASTER: the machine this role account exists on. +- SECUREHOST: The machine you use to create the keys. + +NOTE: This should be more automated/scripted. Patches welcome. + +On SECUREHOST, create the puppet master's keys: + +``` +$ mkdir /tmp/NEWMASTER +$ cd /tmp/NEWMASTER +$ gpg --homedir . --gen-key +Your selection? + (1) RSA and RSA (default) +What keysize do you want? (2048) DEFAULT +Key is valid for? (0) DEFAULT + +# Real name: Puppet CI Deploy Account +# Email address: svc_deployacct@hostname.domain.name +``` + +NOTE: Rather than a real email address, use the username@FQDN of the host the key will be used on. If you use this role account on many machines, each should have its own key. By using the FQDN of the host, you will be able to know which key is which. 
In this doc, we'll refer to username@FQDN as $KEYNAME + +Save the passphrase somewhere safe! + +Create a sub-key that has no password: + +``` +$ gpg --homedir . --edit-key svc_deployacct +gpg> addkey +(enter passphrase) + Please select what kind of key you want: + (3) DSA (sign only) + (4) RSA (sign only) + (5) Elgamal (encrypt only) + (6) RSA (encrypt only) +Your selection? 6 +What keysize do you want? (2048) +Key is valid for? (0) +Command> key 2 +(the new subkey has a "*" next to it) +Command> passwd +(enter the main key's passphrase) +(enter an empty passphrase for the subkey... confirm you want to do this) +Command> save +``` + +Now securely export this directory to NEWMASTER: + +``` +gpg --homedir . --export -a svc_sadeploy >/tmp/NEWMASTER/pubkey.txt +tar cvf /tmp/keys.tar . +rsync -avP /tmp/keys.tar NEWMASTER:/tmp/. +``` + +On NEWMASTER, receive the new GnuPG config: + +``` +sudo -u svc_deployacct bash +mkdir -m 0700 -p ~/.gnupg +cd ~/.gnupg && tar xpvf /tmp/keys.tar +``` + + + +Back on SECUREHOST, add the new email address to .blackbox/blackbox-admins.txt: + +``` +cd /path/to/the/repo +blackbox_addadmin $KEYNAME /tmp/NEWMASTER +``` + +Verify that secring.gpg is a zero-length file. If it isn't, you have somehow added a private key to the keyring. Start over. + +``` +cd .blackbox +ls -l secring.gpg +``` + +Commit the recent changes: + +``` +cd .blackbox +git commit -m"Adding key for KEYNAME" pubring.gpg trustdb.gpg blackbox-admins.txt +``` + +Regenerate all encrypted files with the new key: + +``` +blackbox_update_all_files +git status +git commit -m"updated encryption" -a +git push +``` + +On NEWMASTER, import the keys and decrypt the files: + +``` +sudo -u svc_sadeploy bash # Become the role account. +gpg --import /etc/puppet/.blackbox/pubring.gpg +export PATH=$PATH:/path/to/blackbox/bin +blackbox_postdeploy +sudo -u puppet cat /etc/puppet/hieradata/blackbox.yaml # or any encrypted file. +``` + +ProTip: If you get "gpg: decryption failed: No secret key" then you forgot to re-encrypt blackbox.yaml with the new key. + +On SECUREHOST, securely delete your files: + +``` +cd /tmp/NEWMASTER +# On machines with the "shred" command: +shred -u /tmp/keys.tar +find . -type f -print0 | xargs -0 shred -u +# All else: +rm -rf /tmp/NEWMASTER +``` + +Also shred any other temporary files you may have made. + + + diff --git a/docs/subversion-tips.md b/docs/subversion-tips.md new file mode 100644 index 00000000..6b6991c4 --- /dev/null +++ b/docs/subversion-tips.md @@ -0,0 +1,21 @@ +Subversion Tips +=============== + +NOTE: This is from v1. Can someone that uses Subversion check +this and update it? + + +The current implementation will store the blackbox in `/keyrings` at +the root of the entire repo. This will create an issue between +environments that have different roots (i.e. checking out `/` on +development vs `/releases/foo` in production). To get around this, you +can `export BLACKBOX_REPOBASE=/path/to/repo` and set a specific base +for your repo. + +This was originally written for git and supports a two-phase commit, +in which `commit` is a local commit and "push" sends the change +upstream to the version control server when something is registered or +deregistered with the system. The current implementation will +immediately `commit` a file (to the upstream subversion server) when +you execute a `blackbox_*` command. 
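+
+For example, to use the `BLACKBOX_REPOBASE` override mentioned above on a
+production checkout rooted at `/releases/foo` (a made-up path), you might
+run something like:
+
+```
+export BLACKBOX_REPOBASE=/releases/foo
+blackbox_postdeploy
+```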
+ diff --git a/docs/support.md b/docs/support.md new file mode 100644 index 00000000..72e04662 --- /dev/null +++ b/docs/support.md @@ -0,0 +1,16 @@ +Support +======= + +# Join our community! + +Join the [blackbox-project mailing list](https://groups.google.com/d/forum/blackbox-project)! + + + +# How to submit bugs or ask questions? + +We welcome questions, bug reports and feedback! + +The best place to start is to join the [blackbox-project mailing list](https://groups.google.com/d/forum/blackbox-project) and ask there. + +Bugs are tracked here in Github. Please feel free to [report bugs](https://github.com/StackExchange/blackbox/issues) yourself. diff --git a/docs/user-overview.md b/docs/user-overview.md new file mode 100644 index 00000000..66bd594a --- /dev/null +++ b/docs/user-overview.md @@ -0,0 +1,124 @@ +User Guide +========== + +# Overview + +Suppose you have a VCS repository (i.e. a Git or Mercurial repo) and +certain files contain secrets such as passwords or SSL private keys. +Often people just store such files "and hope that nobody finds them in +the repo". That's not safe. Hope is not a strategy. + +With BlackBox, those files are stored encrypted using GPG. Access to +the repo without also having the right GPG keys makes those files as worthless +as random bits. As long as you keep your GPG keys safe, you don't +have to worry about storing your VCS repo on an untrusted server or +letting anyone clone the repo. + +Heck, even if you trust your server, now you don't have to trust the +people that do backups of that server! + +Each person ("admin") of the system can decrypt all the files using +their GPG key, which has its own passphrase. The authorized GPG keys +can decrypt any file. This is better than systems that use one +GPG key (and passphrase) that must be shared among a group of people. +It is much better than having one passphrase for each file (I don't +think anyone actually does that). + +Since any admin's GPG key can decrypt the files, if one person leaves +the company, you don't have to communicate a new passphrase to everyone. +Simply disable the one key that should no longer have access. +The process for doing this is as easy as running 2 commands (1 to +disable their key, 1 to re-encrypt all files.) Obviously if they kept +a copy of the repo (and their own passphrase) before leaving the +company, they have access to the secrets. However, you should rotate +those secrets anyway. ("rotate secrets" means changing the passwords, +regenerating TLS certs, and so on). + +# Sample session: + +First we are going to list the files currently in the blackbox. In +this case, it is an SSH private key. + +``` +$ blackbox file list +modules/log_management/files/id_rsa +``` + +Excellent! Our coworkers have already registered a file with the +system. Let's decrypt it, edit it, and re-encrypt it. + +``` +$ blackbox decrypt modules/log_management/files/id_rsa +========== DECRYPTING "modules/log_management/files/id_rsa" +$ vi modules/log_management/files/id_rsa +``` + +That was easy so far! + +When we encrypt it, Blackbox will not commit the changes, but it +will give a hint that you should. It spells out the exact command you +need to type and even proposes a commit message. 
+ +``` +$ blackbox encrypt modules/log_management/files/id_rsa +========== ENCRYPTING "modules/log_management/files/id_rsa" + +NEXT STEP: You need to manually check these in: + git commit -m"ENCRYPTED modules/log_management/files/id_rsa" modules/log_management/files/id_rsa.gpg +``` + +You can also use `blackbox edit ` to decrypt a file, edit it +(it will call `$EDITOR`) and re-encrypt it. + + +Now let's register a new file with the blackbox system. +`data/pass.yaml` is a small file that stores a very important +password. In this example, we had just stored the unecrypted +password in our repo. That's bad. Let's encrypt it. + +``` +$ blackbox file add data/pass.yaml +========== SHREDDING ("/bin/rm", "-f"): "data/pass.yaml" + +NEXT STEP: You need to manually check these in: + git commit -m"NEW FILES: data/pass.yaml" .gitignore keyrings/live/blackbox-files.txt modules/stacklb/pass.yaml modules/stacklb/pass.yaml.gpg +``` + +Before we commit the change, let's do a `git status` to see what else +has changed. + +``` +$ git status +On branch master +Changes to be committed: + (use "git restore --staged ..." to unstage) + modified: .gitignore + modified: keyrings/live/blackbox-files.txt + deleted: modules/stacklb/pass.yaml + new file: modules/stacklb/pass.yaml.gpg + +``` + +Notice that a number of files were modified: + +* `.gitignore`: This file is updated to include the plaintext + filename, so that you don't accidentally add it to the repo in the + future. +* `.blackbox/blackbox-files.txt`: The list of files that are registered with the system. +* `data/pass.yaml`: The file we encrypted is deleted from the repo. +* `data/pass.yaml.gpg`: The encrypted file is added to the repo. + +Even though pass.yaml was deleted from the repo, it is still in the +repo's history. Anyone with an old copy of the repo, or a new copy +that knows how to view the repo's history, can see the secret +password. For that reason, you should change the password and +re-encrypt the file. This is an important point. Blackbox is not +magic and it doesn't have a "Men In Black"-style neuralizer that +can make people forget the past. If someone leaves a project, you +have to change the old passwords, etc. + +Those are the basics. Your next step might be: + +* TODO: How to enable Blackbox for a repo. +* TODO: How to add yourself as an admin to a repo. +* TODO: Complete list of [all blackbox commands](all-commands) diff --git a/docs/why-is-this-important.md b/docs/why-is-this-important.md new file mode 100644 index 00000000..eba9cf26 --- /dev/null +++ b/docs/why-is-this-important.md @@ -0,0 +1,17 @@ +Why encrypt your secrets? +========================= + +OBVIOUSLY we don't want secret things like SSL private keys and +passwords to be leaked. + +NOT SO OBVIOUSLY when we store "secrets" in a VCS repo like Git or +Mercurial, suddenly we are less able to share our code with other +people. Communication between subteams of an organization is hurt. You +can't collaborate as well. Either you find yourself emailing +individual files around (yuck!), making a special repo with just the +files needed by your collaborators (yuck!!), or just deciding that +collaboration isn't worth all that effort (yuck!!!). + +The ability to be open and transparent about our code, with the +exception of a few specific files, is key to the kind of collaboration +that DevOps and modern IT practitioners need to do. 
diff --git a/docs/with-ansible.md b/docs/with-ansible.md new file mode 100644 index 00000000..cc88ab0c --- /dev/null +++ b/docs/with-ansible.md @@ -0,0 +1,18 @@ +How to use the secrets with Ansible? +=================================== + +Ansible Vault provides functionality for encrypting both entire files +and strings stored within files; however, keeping track of the +password(s) required for decryption is not handled by this module. + +Instead one must specify a password file when running the playbook. + +Ansible example for password file: `my_secret_password.txt.gpg` + +``` +ansible-playbook --vault-password-file my_secret_password.txt site.yml +``` + +Alternatively, one can specify this in the +`ANSIBLE_VAULT_PASSWORD_FILE` environment variable. + diff --git a/docs/with-puppet.md b/docs/with-puppet.md new file mode 100644 index 00000000..255a077b --- /dev/null +++ b/docs/with-puppet.md @@ -0,0 +1,68 @@ +How to use the secrets with Puppet? +=================================== + +# Entire files: + +Entire files, such as SSL certs and private keys, are treated just +like regular files. You decrypt them any time you push a new release +to the puppet master. + +Example of an encrypted file named `secret_file.key.gpg` + +* Plaintext file is: `modules/${module_name}/files/secret_file.key` +* Encrypted file is: `modules/${module_name}/files/secret_file.key.gpg` +* Puppet sees it as: `puppet:///modules/${module_name}/secret_file.key` + +Puppet code that stores `secret_file.key` in `/etc/my_little_secret.key`: + +``` +file { '/etc/my_little_secret.key': + ensure => 'file', + owner => 'root', + group => 'puppet', + mode => '0760', + source => "puppet:///modules/${module_name}/secret_file.key", # No ".gpg" +} +``` + +# Small strings: + +For small strings such as passwords and API keys, it makes sense +to store them in an (encrypted) YAML file which is then made +available via hiera. + +For example, we use a file called `blackbox.yaml`. You can access the +data in it using the hiera() function. + +*Setup:* + +Edit `hiera.yaml` to include "blackbox" to the search hierarchy: + +``` +:hierarchy: + - ... + - blackbox + - ... +``` + +In blackbox.yaml specify: + +``` +--- +module::test_password: "my secret password" +``` + +In your Puppet Code, access the password as you would any hiera data: + +``` +$the_password = hiera('module::test_password', 'fail') + +file {'/tmp/debug-blackbox.txt': + content => $the_password, + owner => 'root', + group => 'root', + mode => '0600', +} +``` + +The variable `$the_password` will contain "my secret password" and can be used anywhere strings are used. 
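+
+For completeness, here is a sketch of the decrypt step that makes these
+files available on the puppet master at deploy time. The command comes from
+the encryption overview; the checkout path is only an example:
+
+```
+cd /etc/puppet        # wherever the repo is checked out on the master
+blackbox decrypt --all --overwrite
+```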
diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..7f43a864 --- /dev/null +++ b/go.mod @@ -0,0 +1,11 @@ +module github.com/StackExchange/blackbox/v2 + +go 1.14 + +require ( + github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 + github.com/mattn/go-runewidth v0.0.9 // indirect + github.com/olekukonko/tablewriter v0.0.4 + github.com/sergi/go-diff v1.1.0 // indirect + github.com/urfave/cli/v2 v2.2.0 +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..99dfded7 --- /dev/null +++ b/go.sum @@ -0,0 +1,39 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/StackExchange/blackbox v0.0.0-20200527125317-e049c02655d2 h1:iihTTtoGPPfol75KUvwzSe+dlrVlAA78Ky2r+PA/tI0= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54= +github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8= +github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/urfave/cli/v2 v2.2.0 h1:JTTnM6wKzdA0Jqodd966MVj4vWbbquZykeX1sKbe2C4= +github.com/urfave/cli/v2 v2.2.0/go.mod 
h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/integrationTest/NOTES.txt b/integrationTest/NOTES.txt new file mode 100644 index 00000000..d482c7d2 --- /dev/null +++ b/integrationTest/NOTES.txt @@ -0,0 +1,91 @@ + +This should accept VCS-type and --crypto flags. +Then a shell script should run various combinations of VCS and crypters. + +# Startup +* Create a repo (git, none) + +# Test basic operations: +* As Alice: + * initialize blackbox, add her keys to it, see that the usual files + exist. See her name in bb-admins.txt + * encrypt a file, see that the plaintext is deleted, see the file in bb-files.txt + * decrypt the file, see the original plaintext is recovered. + * Encrypt a file --noshred. + * Decrypt the file, it should fail as the plaintext exists. + * Remove the plaintext. + * Decrypt the file, it should fail as the plaintext exists. + +# Test hand-off from Alice to Bob. +* As Bob + * add himself to the admins. +* As Alice + * Update-all-files + * Create a new file. Encrypt it. +* As Bob + * Decrypt both files + * Verify contents of the new file, and the file from previous. + * Create a new file. Encrypt it. +* As Alice: + * Decrypt all files. + * Verify contents of the 3 plaintext files. + +# Test a git-less directory +* Copy the old repo somewhere. Remove the .git directory. +* As Alice: + * Decrypt all + * Verify plaintext contents + +# Test post-deploy with/without GID +* Back at the original repo: +* Shred all +* Run post-deploy. Verify. +* Shred all +* Run post-deploy with a custom GID. Verify. + +# Test removing an admin +* As Bob: + * removes Alice. (Verify) + * Re-encrypt + * Decrypt all & verify. +* As alice + * Decrypting should fail. 
+ +# Test funny names and paths + * my/path/to/relsecrets.txt + * cwd=other/place ../../my/path/to/relsecrets.txt + * !important!.txt + * #andpounds.txt + * stars*bars?.txt + * space space.txt +* Do add/encrypt/decrypt +* Do blackbox_update_all_files +* Do remove them all + +# When people start asking for commands to work with relative paths +# Test from outside the repo +* mkdir ../other/place +* cd ../other/place +* decrypt ../../secret1.txt +* encrypt ../../secret1.txt + +# Test specific commands: +# blackbox admins list +# blackbox file list +# blackbox status --name-only (create 1 of each "type") +# blackbox status --type=FOO + +# These should all fail: +# blackbox file list --all +# blackbox file list blah +# blackbox shred list --all +# blackbox shred list blah + + + +rm -rf /tmp/bbhome-* && BLACKBOX_DEBUG=true go test -verbose -long -nocleanup +rm -rf /tmp/bbhome-* && go test -long -nocleanup + +( gbb && cd cmd/blackbox && go install ) && blackbox + +cd /tmp && rm -rf /tmp/bbhome-* ; mkdir /tmp/bbhome-1 ; cd /tmp/bbhome-1 && git init ; gitmeWork ; ( gbb && cd cmd/blackbox && go install ) && blackbox init yes && gitmeWork ; git commit -mm -a ; blackbox admin add tlimoncelli ; git commit -mnewadmin -a ; echo secrt > secret.txt ; blackbox file add secret.txt diff --git a/integrationTest/README.txt b/integrationTest/README.txt new file mode 100644 index 00000000..7c81bb14 --- /dev/null +++ b/integrationTest/README.txt @@ -0,0 +1,55 @@ + +Each test does the following: +1. Copy the files from testdata/NNNN +2. Run the command in test_NNNN.sh +3. + + +TEST ENROLLMENT: + +PHASE 'Alice creates a repo. She creates secret.txt.' +PHASE 'Alice wants to be part of the secret system.' +PHASE 'She creates a GPG key...' +PHASE 'Initializes BB...' +PHASE 'and adds herself as an admin.' +PHASE 'Bob arrives.' +PHASE 'Bob creates a gpg key.' +PHASE 'Alice does the second part to enroll bob.' +PHASE 'She enrolls bob.' +PHASE 'She enrolls secrets.txt.' +PHASE 'She decrypts secrets.txt.' +PHASE 'She edits secrets.txt.' +PHASE 'Alice copies files to a non-repo directory. (NO REPO)' +PHASE 'Alice shreds these non-repo files. (NO REPO)' +PHASE 'Alice decrypts secrets.txt (NO REPO).' +PHASE 'Alice edits secrets.txt. (NO REPO EDIT)' +PHASE 'Alice decrypts secrets.txt (NO REPO EDIT).' +PHASE 'appears.' +#PHASE 'Bob makes sure he has all new keys.' + +TEST INDIVIDUAL COMMANDS: + +PHASE 'Bob postdeploys... default.' +PHASE 'Bob postdeploys... with a GID.' +PHASE 'Bob cleans up the secret.' +PHASE 'Bob removes Alice.' +PHASE 'Bob reencrypts files so alice can not access them.' +PHASE 'Bob decrypts secrets.txt.' +PHASE 'Bob edits secrets.txt.' +PHASE 'Bob decrypts secrets.txt VERSION 3.' +PHASE 'Bob exposes a secret in the repo.' +PHASE 'Bob corrects it by registering it.' +PHASE 'Bob enrolls my/path/to/relsecrets.txt.' +PHASE 'Bob decrypts relsecrets.txt.' +PHASE 'Bob enrolls !important!.txt' +PHASE 'Bob enrolls #andpounds.txt' +PHASE 'Bob enrolls stars*bars?.txt' +PHASE 'Bob enrolls space space.txt' +PHASE 'Bob checks out stars*bars?.txt.' +PHASE 'Bob checks out space space.txt.' +PHASE 'Bob shreds all exposed files.' +PHASE 'Bob updates all files.' +PHASE 'Bob DEregisters mistake.txt' +PHASE 'Bob enrolls multiple files: multi1.txt and multi2.txt' +PHASE 'Alice returns. She should be locked out' +PHASE 'Alice tries to decrypt secret.txt. Is blocked.' 
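+
+To run the suite locally, something like the following should work (a sketch
+based on the flags defined in integration_test.go and the command lines at
+the bottom of NOTES.txt):
+
+    cd integrationTest
+    rm -rf /tmp/bbhome-*     # clear leftovers from earlier -nocleanup runs
+    go test -long -testvcs GIT -verbose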
diff --git a/integrationTest/asserts.go b/integrationTest/asserts.go new file mode 100644 index 00000000..91f5871c --- /dev/null +++ b/integrationTest/asserts.go @@ -0,0 +1,68 @@ +package main + +import ( + "io/ioutil" + "os" + "testing" + + "github.com/andreyvit/diff" +) + +func assertFileMissing(t *testing.T, name string) { + t.Helper() + _, err := os.Stat(name) + if err != nil && os.IsNotExist(err) { + return + } + if err == nil { + t.Fatalf("assertFileMissing failed: %v exists", name) + } + t.Fatalf("assertFileMissing: %q: %v", name, err) +} + +func assertFileExists(t *testing.T, name string) { + t.Helper() + _, err := os.Stat(name) + if err == nil { + return + } + if os.IsNotExist(err) { + t.Fatalf("assertFileExists failed: %v not exist", name) + } + t.Fatalf("assertFileExists: file can't be accessed: %v: %v", name, err) +} + +func assertFileEmpty(t *testing.T, name string) { + t.Helper() + c, err := ioutil.ReadFile(name) + if err != nil { + t.Fatal(err) + } + if len(c) != 0 { + t.Fatalf("got=%v want=%v: %v", len(c), 0, name) + } +} + +func assertFileContents(t *testing.T, name string, contents string) { + t.Helper() + c, err := ioutil.ReadFile(name) + if err != nil { + t.Fatal(err) + } + + if w, g := contents, string(c); w != g { + t.Errorf("assertFileContents(%q) mismatch (-got +want):\n%s", + name, diff.LineDiff(g, w)) + } +} + +func assertFilePerms(t *testing.T, name string, perms os.FileMode) { + t.Helper() + s, err := os.Stat(name) + if err != nil { + t.Fatal(err) + } + if s.Mode() != perms { + t.Fatalf("got=%#o want=%#o: %v", s.Mode(), perms, name) + } +} diff --git a/integrationTest/integration_test.go b/integrationTest/integration_test.go new file mode 100644 index 00000000..a96517b8 --- /dev/null +++ b/integrationTest/integration_test.go @@ -0,0 +1,343 @@ +package main + +import ( + "flag" + "fmt" + "os" + "os/exec" + "path/filepath" + "testing" + + "github.com/StackExchange/blackbox/v2/pkg/bblog" + _ "github.com/StackExchange/blackbox/v2/pkg/bblog" + _ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all" +) + +var vcsToTest = flag.String("testvcs", "GIT", "VCS to test") +var longTests = flag.Bool("long", false, "Run long version of tests") + +//var crypterToTest = flag.String("crypter", "GnuPG", "crypter to test") + +func init() { + testing.Init() + flag.Parse() + + op, err := os.Getwd() + if err != nil { + panic(err) + } + originPath = op +} + +func compile(t *testing.T) { + if PathToBlackBox() != "" { + // It's been compiled already. + return + } + // Make sure we have the latest binary + fmt.Println("========== Compiling") + cmd := exec.Command("go", "build", "-o", "../bbintegration", "../cmd/blackbox") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Run() + if err != nil { + t.Fatalf("setup_compile: %v", err) + } + cwd, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + SetPathToBlackBox(filepath.Join(cwd, "../bbintegration")) +} + +func setup(t *testing.T) { + logDebug := bblog.GetDebug(*verbose) + + logDebug.Printf("flag.testvcs is %v", *vcsToTest) + vh := getVcs(t, *vcsToTest) + logDebug.Printf("Using BLACKBOX_VCS=%v", vh.Name()) + os.Setenv("BLACKBOX_VCS", vh.Name()) + +} + +func TestInit(t *testing.T) { + if !*longTests { + return + } + compile(t) + makeHomeDir(t, "init") + + // Only zero or one args are permitted. 
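+	// invalidArgs runs the command and fails the test unless it exits
+	// with an error, so both of these extra-argument forms must be rejected.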
+ invalidArgs(t, "init", "one", "two") + invalidArgs(t, "init", "one", "two", "three") + + runBB(t, "init", "yes") + assertFileEmpty(t, ".blackbox/blackbox-admins.txt") + assertFileEmpty(t, ".blackbox/blackbox-files.txt") + assertFilePerms(t, ".blackbox/blackbox-admins.txt", 0o640) + assertFilePerms(t, ".blackbox/blackbox-files.txt", 0o640) +} + +func TestList(t *testing.T) { + if !*longTests { + return + } + compile(t) + makeHomeDir(t, "init") + + runBB(t, "init", "yes") + createDummyFilesAdmin(t) + checkOutput("000-admin-list.txt", t, "admin", "list") + checkOutput("000-file-list.txt", t, "file", "list") + + invalidArgs(t, "file", "list", "extra") + invalidArgs(t, "admin", "list", "extra") +} + +func TestStatus(t *testing.T) { + if !*longTests { + return + } + compile(t) + makeHomeDir(t, "init") + + runBB(t, "init", "yes") + createFilesStatus(t) + checkOutput("000-status.txt", t, "status") +} + +func TestShred(t *testing.T) { + if !*longTests { + return + } + compile(t) + makeHomeDir(t, "shred") + runBB(t, "init", "yes") + + makeFile(t, "shredme.txt", "File with SHREDME in it.\n") + assertFileExists(t, "shredme.txt") + runBB(t, "shred", "shredme.txt") + assertFileMissing(t, "shredme.txt") +} + +func TestStatus_notreg(t *testing.T) { + if !*longTests { + return + } + compile(t) + makeHomeDir(t, "init") + + runBB(t, "init", "yes") + createFilesStatus(t) + checkOutput("status-noreg.txt", t, "status", "status-ENCRYPTED.txt", "blah.txt") +} + +// TestHard tests the functions using a fake homedir and repo. +func TestHard(t *testing.T) { + if !*longTests { + return + } + // These are basic tests that work on a fake repo. + // The repo has mostly real data, except any .gpg file + // is just garbage. + compile(t) + setup(t) + + for _, cx := range []struct{ subname, prefix string }{ + //{subname: ".", prefix: "."}, + {subname: "mysub", prefix: ".."}, + } { + subname := cx.subname + prefix := cx.prefix + _ = prefix + + phase("========== SUBDIR = " + subname + " ==========") + + makeHomeDir(t, "BasicAlice") + + plaintextFoo := "I am the foo.txt file!\n" + plainAltered := "I am the altered file!\n" + + runBB(t, "testing_init") // Runs "git init" or equiv + assertFileExists(t, ".git") + runBB(t, "init", "yes") // Creates .blackbox or equiv + + if subname != "." { + err := os.Mkdir(subname, 0770) + if err != nil { + t.Fatal(fmt.Errorf("hard-mk-home %q: %v", subname, err)) + } + } + olddir, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + + os.Chdir(subname) + os.Chdir(olddir) + + phase("Alice creates a GPG key") + gpgdir := makeAdmin(t, "alice", "Alice Example", "alice@example.com") + become(t, "alice") + + phase("Alice enrolls as an admin") + //os.Chdir(subname) + runBB(t, "admin", "add", "alice@example.com", gpgdir) + //os.Chdir(olddir) + + // encrypt + phase("Alice registers foo.txt") + makeFile(t, "foo.txt", plaintextFoo) + //os.Chdir(subname) + //runBB(t, "file", "add", "--shred", filepath.Join(prefix, "foo.txt")) + runBB(t, "file", "add", "--shred", "foo.txt") + //os.Chdir(olddir) + // "file add" encrypts the file. + // We shred the plaintext so that we are sure that when Decrypt runs, + // we can verify the contents wasn't just sitting there all the time. 
+ assertFileMissing(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + + phase("Alice decrypts foo.txt") + // decrypt + //os.Chdir(subname) + runBB(t, "decrypt", "foo.txt") + //runBB(t, "decrypt", filepath.Join(prefix, "foo.txt")) + //os.Chdir(olddir) + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + assertFileContents(t, "foo.txt", plaintextFoo) + + // encrypts (without shredding) + phase("Alice encrypts foo.txt (again)") + runBB(t, "encrypt", "foo.txt") + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + assertFileContents(t, "foo.txt", plaintextFoo) + + // reencrypt + phase("Alice reencrypts") + checkOutput("basic-status.txt", t, "status") + runBB(t, "reencrypt", "--overwrite", "foo.txt") + + // Test variations of cat + + // foo.txt=plain result=plain + phase("Alice cats plain:plain") + makeFile(t, "foo.txt", plaintextFoo) + assertFileExists(t, "foo.txt") + runBB(t, "encrypt", "foo.txt") + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt") + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + + // foo.txt=altered result=plain + phase("Alice cats altered:plain") + makeFile(t, "foo.txt", plainAltered) + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt") + assertFileExists(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + + // foo.txt=missing result=plain + phase("Alice cats missing:plain") + removeFile(t, "foo.txt") + assertFileMissing(t, "foo.txt") + assertFileMissing(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt") + assertFileMissing(t, "foo.txt") + assertFileExists(t, "foo.txt.gpg") + + // Chapter 2: Bob + // Alice adds Bob. + // Bob encrypts a file. + // Bob makes sure he can decrypt alice's file. + // Bob removes Alice. + // Alice verifies she CAN'T decrypt files. + // Bob adds Alice back. + // Alice verifies she CAN decrypt files. + // Bob adds an encrypted file by mistake, "bb add" and fixes it. + // Bob corrupts the blackbox-admins.txt file, verifies that commands fail. + + } + +} + +// TestEvilFilenames verifies commands work with "difficult" file names +func TestEvilFilenames(t *testing.T) { + if !*longTests { + return + } + compile(t) + setup(t) + makeHomeDir(t, "Mallory") + + runBB(t, "testing_init") // Runs "git init" or equiv + assertFileExists(t, ".git") + runBB(t, "init", "yes") // Creates .blackbox or equiv + + phase("Malory creates a GPG key") + gpgdir := makeAdmin(t, "mallory", "Mallory Evil", "mallory@example.com") + become(t, "mallory") + + phase("Mallory enrolls as an admin") + runBB(t, "admin", "add", "mallory@example.com", gpgdir) + + _ = os.MkdirAll("my/path/to", 0o770) + _ = os.Mkdir("other", 0o770) + + for i, name := range []string{ + "!important!.txt", + "#andpounds.txt", + "stars*bars?.txt", + "space space.txt", + "tab\ttab.txt", + "ret\rret.txt", + "smile😁eyes", + "¡que!", + "thé", + "pound£", + "*.go", + "rm -f erase ; echo done", + `smile☺`, + `dub𝓦`, + "my/path/to/relsecrets.txt", + //"my/../my/path/../path/to/myother.txt", // Not permitted yet + //"other/../my//path/../path/to/otherother.txt", // Not permitted yet + //"new\nnew.txt", // \n not permitted + //"two\n", // \n not permitted (yet) + //"four\U0010FFFF", // Illegal byte sequence. git won't accept. + } { + phase(fmt.Sprintf("Mallory tries %02d: %q", i, name)) + contents := "the name of this file is the talking heads... 
i mean, " + name + makeFile(t, name, contents) + assertFileExists(t, name) + assertFileMissing(t, name+".gpg") + assertFileContents(t, name, contents) + + runBB(t, "file", "add", name) + assertFileMissing(t, name) + assertFileExists(t, name+".gpg") + + runBB(t, "decrypt", name) + assertFileExists(t, name) + assertFileExists(t, name+".gpg") + assertFileContents(t, name, contents) + + runBB(t, "encrypt", name) + assertFileExists(t, name) + assertFileExists(t, name+".gpg") + assertFileContents(t, name, contents) + + runBB(t, "shred", name) + assertFileMissing(t, name) + assertFileExists(t, name+".gpg") + } +} + +// More tests to implement. +// 1. Verify that the --gid works (blackbox decrypt --gid) diff --git a/integrationTest/ithelpers.go b/integrationTest/ithelpers.go new file mode 100644 index 00000000..5a49dcca --- /dev/null +++ b/integrationTest/ithelpers.go @@ -0,0 +1,617 @@ +package main + +import ( + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" + "time" + + "github.com/StackExchange/blackbox/v2/pkg/bblog" + "github.com/StackExchange/blackbox/v2/pkg/bbutil" + "github.com/StackExchange/blackbox/v2/pkg/vcs" + _ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all" + + "github.com/andreyvit/diff" +) + +var verbose = flag.Bool("verbose", false, "reveal stderr") +var nocleanup = flag.Bool("nocleanup", false, "do not delete the tmp directory") + +type userinfo struct { + name string + dir string // .gnupg-$name + agentInfo string // GPG_AGENT_INFO + email string + fullname string +} + +var users = map[string]*userinfo{} + +func init() { + testing.Init() + flag.Parse() +} + +var logErr *log.Logger +var logDebug *log.Logger + +func init() { + logErr = bblog.GetErr() + logDebug = bblog.GetDebug(*verbose) +} + +func getVcs(t *testing.T, name string) vcs.Vcs { + t.Helper() + // Set up the vcs + for _, v := range vcs.Catalog { + logDebug.Printf("Testing vcs: %v == %v", name, v.Name) + if strings.ToLower(v.Name) == strings.ToLower(name) { + h, err := v.New() + if err != nil { + return nil // No idea how that would happen. + } + return h + } + logDebug.Println("...Nope.") + + } + return nil +} + +// TestBasicCommands's helpers + +func makeHomeDir(t *testing.T, testname string) { + t.Helper() + var homedir string + var err error + + if *nocleanup { + // Make a predictable location; don't deleted. + homedir = "/tmp/bbhome-" + testname + os.RemoveAll(homedir) + err = os.Mkdir(homedir, 0770) + if err != nil { + t.Fatal(fmt.Errorf("mk-home %q: %v", homedir, err)) + } + } else { + // Make a random location that is deleted automatically + homedir, err = ioutil.TempDir("", filepath.Join("bbhome-"+testname)) + defer os.RemoveAll(homedir) // clean up + if err != nil { + t.Fatal(err) + } + } + + err = os.Setenv("HOME", homedir) + if err != nil { + t.Fatal(err) + } + logDebug.Printf("TESTING DIR HOME: cd %v\n", homedir) + + repodir := filepath.Join(homedir, "repo") + err = os.Mkdir(repodir, 0770) + if err != nil { + t.Fatal(fmt.Errorf("mk-repo %q: %v", repodir, err)) + } + err = os.Chdir(repodir) + if err != nil { + t.Fatal(err) + } +} + +func createDummyFilesAdmin(t *testing.T) { + // This creates a repo with real data, except any .gpg file + // is just garbage. 
+ addLineSorted(t, ".blackbox/blackbox-admins.txt", "user1@example.com") + addLineSorted(t, ".blackbox/blackbox-admins.txt", "user2@example.com") + addLineSorted(t, ".blackbox/blackbox-files.txt", "foo.txt") + addLineSorted(t, ".blackbox/blackbox-files.txt", "bar.txt") + makeFile(t, "foo.txt", "I am the foo.txt file!") + makeFile(t, "bar.txt", "I am the foo.txt file!") + makeFile(t, "foo.txt.gpg", "V nz gur sbb.gkg svyr!") + makeFile(t, "bar.txt.gpg", "V nz gur one.gkg svyr!") +} + +func createFilesStatus(t *testing.T) { + // This creates a few files with real plaintext but fake cyphertext. + // There are a variety of timestamps to enable many statuses. + t.Helper() + + // DECRYPTED: File is decrypted and ready to edit (unknown if it has been edited). + // ENCRYPTED: GPG file is newer than plaintext. Indicates recented edited then encrypted. + // SHREDDED: Plaintext is missing. + // GPGMISSING: The .gpg file is missing. Oops? + // PLAINERROR: Can't access the plaintext file to determine status. + // GPGERROR: Can't access .gpg file to determine status. + + addLineSorted(t, ".blackbox/blackbox-files.txt", "status-DECRYPTED.txt") + addLineSorted(t, ".blackbox/blackbox-files.txt", "status-ENCRYPTED.txt") + addLineSorted(t, ".blackbox/blackbox-files.txt", "status-SHREDDED.txt") + addLineSorted(t, ".blackbox/blackbox-files.txt", "status-GPGMISSING.txt") + // addLineSorted(t, ".blackbox/blackbox-files.txt", "status-PLAINERROR.txt") + // addLineSorted(t, ".blackbox/blackbox-files.txt", "status-GPGERROR.txt") + addLineSorted(t, ".blackbox/blackbox-files.txt", "status-BOTHMISSING.txt") + + // Combination of age difference either missing, file error, both missing. + makeFile(t, "status-DECRYPTED.txt", "File with DECRYPTED in it.") + makeFile(t, "status-DECRYPTED.txt.gpg", "Svyr jvgu QRPELCGRQ va vg.") + + makeFile(t, "status-ENCRYPTED.txt", "File with ENCRYPTED in it.") + makeFile(t, "status-ENCRYPTED.txt.gpg", "Svyr jvgu RAPELCGRQ va vg.") + + // Plaintext intentionally missing. + makeFile(t, "status-SHREDDED.txt.gpg", "Svyr jvgu FUERQQRQ va vg.") + + makeFile(t, "status-GPGMISSING.txt", "File with GPGMISSING in it.") + // gpg file intentionally missing. + + // Plaintext intentionally missing. ("status-BOTHMISSING.txt") + // gpg file intentionally missing. ("status-BOTHMISSING.txt.gpg") + + // NB(tlim): commented out. I can't think of an error I can reproduce. + // makeFile(t, "status-PLAINERROR.txt", "File with PLAINERROR in it.") + // makeFile(t, "status-PLAINERROR.txt.gpg", "Svyr jvgu CYNVAREEBE va vg.") + // setFilePerms(t, "status-PLAINERROR.txt", 0000) + + // NB(tlim): commented out. I can't think of an error I can reproduce. 
+ // makeFile(t, "status-GPGERROR.txt", "File with GPGERROR in it.") + // makeFile(t, "status-GPGERROR.txt.gpg", "Svyr jvgu TCTREEBE va vg.") + // setFilePerms(t, "status-GPGERROR.txt.gpg", 0000) + + time.Sleep(200 * time.Millisecond) + + if err := bbutil.Touch("status-DECRYPTED.txt"); err != nil { + t.Fatal(err) + } + if err := bbutil.Touch("status-ENCRYPTED.txt.gpg"); err != nil { + t.Fatal(err) + } +} + +func addLineSorted(t *testing.T, filename, line string) { + err := bbutil.AddLinesToSortedFile(filename, line) + if err != nil { + t.Fatalf("addLineSorted failed: %v", err) + } +} + +func removeFile(t *testing.T, name string) { + os.RemoveAll(name) +} + +func makeFile(t *testing.T, name string, content string) { + t.Helper() + + err := ioutil.WriteFile(name, []byte(content), 0666) + if err != nil { + t.Fatalf("makeFile can't create %q: %v", name, err) + } +} + +func setFilePerms(t *testing.T, name string, perms int) { + t.Helper() + + err := os.Chmod(name, os.FileMode(perms)) + if err != nil { + t.Fatalf("setFilePerms can't chmod %q: %v", name, err) + } +} + +var originPath string // CWD when program started. + +// checkOutput runs blackbox with args, the last arg is the filename +// of the expected output. Error if output is not expected. +func checkOutput(name string, t *testing.T, args ...string) { + t.Helper() + + cmd := exec.Command(PathToBlackBox(), args...) + cmd.Stdin = nil + cmd.Stdout = nil + cmd.Stderr = os.Stderr + var gb []byte + gb, err := cmd.Output() + if err != nil { + t.Fatal(fmt.Errorf("checkOutput(%q): %w", args, err)) + } + got := string(gb) + + wb, err := ioutil.ReadFile(filepath.Join(originPath, "test_data", name)) + if err != nil { + t.Fatalf("checkOutput can't read %v: %v", name, err) + } + want := string(wb) + + //fmt.Printf("CHECKOUTPUT g: %v\n", got) + //fmt.Printf("CHECKOUTPUT w: %v\n", want) + + if g, w := got, want; g != w { + t.Errorf("checkOutput(%q) mismatch (-got +want):\n%s", + args, diff.LineDiff(g, w)) + } + +} + +func invalidArgs(t *testing.T, args ...string) { + t.Helper() + + logDebug.Printf("invalidArgs(%q): \n", args) + cmd := exec.Command(PathToBlackBox(), args...) + cmd.Stdin = nil + if *verbose { + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + } + err := cmd.Run() + if err == nil { + logDebug.Println("BAD") + t.Fatal(fmt.Errorf("invalidArgs(%q): wanted failure but got success", args)) + } + logDebug.Printf("^^^^ (correct error received): err=%q\n", err) +} + +// TestAliceAndBob's helpers. + +func setupUser(t *testing.T, user, passphrase string) { + t.Helper() + logDebug.Printf("DEBUG: setupUser %q %q\n", user, passphrase) +} + +var pathToBlackBox string + +// PathToBlackBox returns the path to the executable we compile for integration testing. +func PathToBlackBox() string { return pathToBlackBox } + +// SetPathToBlackBox sets the path. +func SetPathToBlackBox(n string) { + logDebug.Printf("PathToBlackBox=%q\n", n) + pathToBlackBox = n +} + +func runBB(t *testing.T, args ...string) { + t.Helper() + + logDebug.Printf("runBB(%q)\n", args) + cmd := exec.Command(PathToBlackBox(), args...) 
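+	// Attach the subprocess to our stdout/stderr so blackbox's output is visible when the tests run.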
+ cmd.Stdin = nil + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Run() + if err != nil { + t.Fatal(fmt.Errorf("runBB(%q): %w", args, err)) + } +} + +func phase(msg string) { + logDebug.Println("********************") + logDebug.Println("********************") + logDebug.Printf("********* %v\n", msg) + logDebug.Println("********************") + logDebug.Println("********************") +} + +func makeAdmin(t *testing.T, name, fullname, email string) string { + testing.Init() + + dir, err := filepath.Abs(filepath.Join(os.Getenv("HOME"), ".gnupg-"+name)) + if err != nil { + t.Fatal(err) + } + os.Mkdir(dir, 0700) + + u := &userinfo{ + name: name, + dir: dir, + fullname: fullname, + email: email, + } + users[name] = u + + // GNUPGHOME=u.dir + // echo 'pinentry-program' "$(which pinentry-tty)" >> "$GNUPGHOME/gpg-agent.conf" + os.Setenv("GNUPGHOME", u.dir) + if runtime.GOOS != "darwin" { + ai, err := bbutil.RunBashOutput("gpg-agent", "--homedir", u.dir, "--daemon") + // NB(tlim): It should return something like: + // `GPG_AGENT_INFO=/home/tlimoncelli/.gnupg/S.gpg-agent:18548:1; export GPG_AGENT_INFO;` + if err != nil { + //t.Fatal(err) + } + if !strings.HasPrefix(ai, "GPG_AGENT_INFO=") { + fmt.Println("WARNING: gpg-agent didn't output what we expected. Assumed dead.") + } else { + u.agentInfo = ai[15:strings.Index(ai, ";")] + os.Setenv("GPG_AGENT_INFO", u.agentInfo) + fmt.Printf("GPG_AGENT_INFO=%q (was %q)\n", ai, u.agentInfo) + } + } + + os.Setenv("GNUPGHOME", u.dir) + // Generate key: + if hasQuick(t) { + fmt.Println("DISCOVERED: NEW GPG") + fmt.Printf("Generating %q using --qgk\n", u.email) + bbutil.RunBash("gpg", + "--homedir", u.dir, + "--batch", + "--passphrase", "", + "--quick-generate-key", u.email, + ) + if err != nil { + t.Fatal(err) + } + + } else { + + fmt.Println("DISCOVERED: OLD GPG") + fmt.Println("MAKING KEY") + + tmpfile, err := ioutil.TempFile("", "example") + if err != nil { + log.Fatal(err) + } + defer os.Remove(tmpfile.Name()) // clean up + + batch := `%echo Generating a basic OpenPGP key +Key-Type: RSA +Key-Length: 2048 +Subkey-Type: RSA +Subkey-Length: 2048 +Name-Real: ` + u.fullname + ` +Name-Comment: Not for actual use +Name-Email: ` + u.email + ` +Expire-Date: 0 +%pubring ` + filepath.Join(u.dir, `pubring.gpg`) + ` +%secring ` + filepath.Join(u.dir, `secring.gpg`) + ` +# Do a commit here, so that we can later print "done" +%commit +%echo done` + //fmt.Printf("BATCH START\n%s\nBATCH END\n", batch) + fmt.Fprintln(tmpfile, batch) + + // FIXME(tlim): The batch file should include a password, but then + // we need to figure out how to get "blackbox encrypt" and other + // commands to input a password in an automated way. + // To experiment with this, add after "Expire-Date:" a line like: + // Passphrase: kljfhslfjkhsaljkhsdflgjkhsd + // Current status: without that line GPG keys have no passphrase + // and none is requested. 
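+		// Feed the batch file above to gpg in unattended (--batch) mode to generate the key pair.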
+ + bbutil.RunBash("gpg", + "--homedir", u.dir, + "--verbose", + "--batch", + "--gen-key", + tmpfile.Name(), + ) + if err != nil { + t.Fatal(err) + } + if err := tmpfile.Close(); err != nil { + log.Fatal(err) + } + + // We do this just to for gpg to create trustdb.gpg + bbutil.RunBash("gpg", + "--homedir", u.dir, + "--list-keys", + ) + if err != nil { + t.Fatal(err) + } + + bbutil.RunBash("gpg", + "--homedir", u.dir, + "--list-secret-keys", + ) + if err != nil { + t.Fatal(err) + } + + } + + return u.dir +} + +func hasQuick(t *testing.T) bool { + testing.Init() + fmt.Println("========== Do we have --quick-generate-key?") + err := bbutil.RunBash("gpg2", + "--dry-run", + "--quick-generate-key", + "--batch", + "--passphrase", "", + "foo", "rsa", "encr") + fmt.Println("========== Done") + if err == nil { + return true + } + //fmt.Printf("DISCOVER GPG: %d", err.ExitCode()) + if exitError, ok := err.(*exec.ExitError); ok { + if exitError.ExitCode() == 0 { + return true + } + } + return false +} + +func become(t *testing.T, name string) { + testing.Init() + u := users[name] + + os.Setenv("GNUPGHOME", u.dir) + os.Setenv("GPG_AGENT_INFO", u.agentInfo) + bbutil.RunBash("git", "config", "user.name", u.name) + bbutil.RunBash("git", "config", "user.email", u.fullname) +} + +// // Get fingerprint: +// // Retrieve fingerprint of generated key. +// // Use it to extract the secret/public keys. +// // (stolen from https://raymii.org/s/articles/GPG_noninteractive_batch_sign_trust_and_send_gnupg_keys.html) +// +// // fpr=`gpg --homedir /tmp/blackbox_createrole --fingerprint --with-colons "$ROLE_NAME" | awk -F: '/fpr:/ {print $10}' | head -n 1` +// var fpr string +// bbutil.RunBashOutput("gpg", +// "--homedir", "/tmp/blackbox_createrole", +// "--fingerprint", +// "--with-colons", +// u.email, +// ) +// for i, l := range string.Split(out, "\n") { +// if string.HasPrefix(l, "fpr:") { +// fpr = strings.Split(l, ":")[9] +// } +// break +// } +// +// // Create key key: +// // gpg --homedir "$gpghomedir" --batch --passphrase '' --quick-add-key "$fpr" rsa encr +// bbutil.RunBash("gpg", +// "--homedir", u.dir, +// "--batch", +// "--passphrase", "", +// "--quick-add-key", fpr, +// "rsa", "encr", +// ) + +// function md5sum_file() { +// # Portably generate the MD5 hash of file $1. +// case $(uname -s) in +// Darwin | FreeBSD ) +// md5 -r "$1" | awk '{ print $1 }' +// ;; +// NetBSD ) +// md5 -q "$1" +// ;; +// SunOS ) +// digest -a md5 "$1" +// ;; +// Linux ) +// md5sum "$1" | awk '{ print $1 }' +// ;; +// CYGWIN* ) +// md5sum "$1" | awk '{ print $1 }' +// ;; +// * ) +// echo 'ERROR: Unknown OS. Exiting.' +// exit 1 +// ;; +// esac +// } +// +// function assert_file_missing() { +// if [[ -e "$1" ]]; then +// echo "ASSERT FAILED: ${1} should not exist." +// exit 1 +// fi +// } +// +// function assert_file_exists() { +// if [[ ! -e "$1" ]]; then +// echo "ASSERT FAILED: ${1} should exist." 
+// echo "PWD=$(/usr/bin/env pwd -P)" +// #echo "LS START" +// #ls -la +// #echo "LS END" +// exit 1 +// fi +// } +// function assert_file_md5hash() { +// local file="$1" +// local wanted="$2" +// assert_file_exists "$file" +// local found +// found=$(md5sum_file "$file") +// if [[ "$wanted" != "$found" ]]; then +// echo "ASSERT FAILED: $file hash wanted=$wanted found=$found" +// exit 1 +// fi +// } +// function assert_file_group() { +// local file="$1" +// local wanted="$2" +// local found +// assert_file_exists "$file" +// +// case $(uname -s) in +// Darwin | FreeBSD | NetBSD ) +// found=$(stat -f '%Dg' "$file") +// ;; +// Linux | SunOS ) +// found=$(stat -c '%g' "$file") +// ;; +// CYGWIN* ) +// echo "ASSERT_FILE_GROUP: Running on Cygwin. Not being tested." +// return 0 +// ;; +// * ) +// echo 'ERROR: Unknown OS. Exiting.' +// exit 1 +// ;; +// esac +// +// echo "DEBUG: assert_file_group X${wanted}X vs. X${found}X" +// echo "DEBUG:" $(which stat) +// if [[ "$wanted" != "$found" ]]; then +// echo "ASSERT FAILED: $file chgrp group wanted=$wanted found=$found" +// exit 1 +// fi +// } +// function assert_file_perm() { +// local wanted="$1" +// local file="$2" +// local found +// assert_file_exists "$file" +// +// case $(uname -s) in +// Darwin | FreeBSD | NetBSD ) +// found=$(stat -f '%Sp' "$file") +// ;; +// # NB(tlim): CYGWIN hasn't been tested. It might be more like Darwin. +// Linux | CYGWIN* | SunOS ) +// found=$(stat -c '%A' "$file") +// ;; +// * ) +// echo 'ERROR: Unknown OS. Exiting.' +// exit 1 +// ;; +// esac +// +// echo "DEBUG: assert_file_perm X${wanted}X vs. X${found}X" +// echo "DEBUG:" $(which stat) +// if [[ "$wanted" != "$found" ]]; then +// echo "ASSERT FAILED: $file chgrp perm wanted=$wanted found=$found" +// exit 1 +// fi +// } +// function assert_line_not_exists() { +// local target="$1" +// local file="$2" +// assert_file_exists "$file" +// if grep -F -x -s -q >/dev/null "$target" "$file" ; then +// echo "ASSERT FAILED: line '$target' should not exist in file $file" +// echo "==== file contents: START $file" +// cat "$file" +// echo "==== file contents: END $file" +// exit 1 +// fi +// } +// function assert_line_exists() { +// local target="$1" +// local file="$2" +// assert_file_exists "$file" +// if ! 
grep -F -x -s -q >/dev/null "$target" "$file" ; then +// echo "ASSERT FAILED: line '$target' should exist in file $file" +// echo "==== file contents: START $file" +// cat "$file" +// echo "==== file contents: END $file" +// exit 1 +// fi +// } diff --git a/integrationTest/test_data/000-admin-list.txt b/integrationTest/test_data/000-admin-list.txt new file mode 100644 index 00000000..5dc1ca0e --- /dev/null +++ b/integrationTest/test_data/000-admin-list.txt @@ -0,0 +1,2 @@ +user1@example.com +user2@example.com diff --git a/integrationTest/test_data/000-file-list.txt b/integrationTest/test_data/000-file-list.txt new file mode 100644 index 00000000..ee1e7122 --- /dev/null +++ b/integrationTest/test_data/000-file-list.txt @@ -0,0 +1,2 @@ +bar.txt +foo.txt diff --git a/integrationTest/test_data/000-status.txt b/integrationTest/test_data/000-status.txt new file mode 100644 index 00000000..7ecf5510 --- /dev/null +++ b/integrationTest/test_data/000-status.txt @@ -0,0 +1,9 @@ ++-------------+------------------------+ +| STATUS | NAME | ++-------------+------------------------+ +| BOTHMISSING | status-BOTHMISSING.txt | +| DECRYPTED | status-DECRYPTED.txt | +| ENCRYPTED | status-ENCRYPTED.txt | +| GPGMISSING | status-GPGMISSING.txt | +| SHREDDED | status-SHREDDED.txt | ++-------------+------------------------+ diff --git a/integrationTest/test_data/alice-cat-plain.txt b/integrationTest/test_data/alice-cat-plain.txt new file mode 100644 index 00000000..dcb5c6c4 --- /dev/null +++ b/integrationTest/test_data/alice-cat-plain.txt @@ -0,0 +1 @@ +I am the foo.txt file! diff --git a/integrationTest/test_data/basic-status.txt b/integrationTest/test_data/basic-status.txt new file mode 100644 index 00000000..134241a3 --- /dev/null +++ b/integrationTest/test_data/basic-status.txt @@ -0,0 +1,5 @@ ++-----------+---------+ +| STATUS | NAME | ++-----------+---------+ +| ENCRYPTED | foo.txt | ++-----------+---------+ diff --git a/integrationTest/test_data/reencrypt-plain.txt b/integrationTest/test_data/reencrypt-plain.txt new file mode 100644 index 00000000..dcb5c6c4 --- /dev/null +++ b/integrationTest/test_data/reencrypt-plain.txt @@ -0,0 +1 @@ +I am the foo.txt file! diff --git a/integrationTest/test_data/status-noreg.txt b/integrationTest/test_data/status-noreg.txt new file mode 100644 index 00000000..9b12e46e --- /dev/null +++ b/integrationTest/test_data/status-noreg.txt @@ -0,0 +1,6 @@ ++-----------+----------------------+ +| STATUS | NAME | ++-----------+----------------------+ +| ENCRYPTED | status-ENCRYPTED.txt | +| NOTREG | blah.txt | ++-----------+----------------------+ diff --git a/models/crypters.go b/models/crypters.go new file mode 100644 index 00000000..e861fd00 --- /dev/null +++ b/models/crypters.go @@ -0,0 +1,15 @@ +package models + +// Crypter is gpg binaries, go-opengpg, etc. +type Crypter interface { + // Name returns the plug-in's canonical name. + Name() string + // Decrypt name+".gpg", possibly overwriting name. + Decrypt(filename string, umask int, overwrite bool) error + // Encrypt name, overwriting name+".gpg" + Encrypt(filename string, umask int, receivers []string) (string, error) + // Cat outputs a file, unencrypting if needed. + Cat(filename string) ([]byte, error) + // AddNewKey extracts keyname from sourcedir's GnuPG chain to destdir keychain. 
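+	// It returns the paths of the keychain files it changed so the caller can commit them.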
+ AddNewKey(keyname, repobasename, sourcedir, destdir string) ([]string, error) +} diff --git a/models/vcs.go b/models/vcs.go new file mode 100644 index 00000000..fb1ffb4c --- /dev/null +++ b/models/vcs.go @@ -0,0 +1,30 @@ +package models + +import "github.com/StackExchange/blackbox/v2/pkg/commitlater" + +// Vcs is git/hg/etc. +type Vcs interface { + // Name returns the plug-in's canonical name. + Name() string + // Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know). + Discover() (bool, string) + + // SetFileTypeUnix informs the VCS that files should maintain unix-style line endings. + SetFileTypeUnix(repobasedir string, files ...string) error + // IgnoreAnywhere tells the VCS to ignore these files anywhere in the repo. + IgnoreAnywhere(repobasedir string, files []string) error + // IgnoreAnywhere tells the VCS to ignore these files, rooted in the base of the repo. + IgnoreFiles(repobasedir string, files []string) error + + // CommitTitle sets the title of the next commit. + CommitTitle(title string) + // NeedsCommit queues up commits for later execution. + NeedsCommit(message string, repobasedir string, names []string) + // DebugCommits dumps a list of future commits. + DebugCommits() commitlater.List + // FlushCommits informs the VCS to do queued up commits. + FlushCommits() error + + // TestingInitRepo initializes a repo of this type (for use by integration tests) + TestingInitRepo() error +} diff --git a/pkg/bblog/bblog.go b/pkg/bblog/bblog.go new file mode 100644 index 00000000..d55ccf6d --- /dev/null +++ b/pkg/bblog/bblog.go @@ -0,0 +1,48 @@ +package bblog + +import ( + "io/ioutil" + "log" + "os" +) + +/* + +To use this, include the following lines in your .go file. + +var logErr *log.Logger +var logDebug *log.Logger +func init() { + logErr = bblog.GetErr() + logDebug = bblog.GetDebug(debug) +} + +Or in a function: + + logErr := bblog.GetErr() + logDebug := bblog.GetDebug(debug) + logDebug.Printf("whatever: %v", err) + +*/ + +var logErr *log.Logger +var logDebug *log.Logger + +// GetErr returns a logger handle used for errors +func GetErr() *log.Logger { + if logErr == nil { + logErr = log.New(os.Stderr, "", 0) + } + return logErr +} + +// GetDebug returns a Logger handle used for debug info (output is discarded if viable=false) +func GetDebug(visible bool) *log.Logger { + if visible { + logDebug = log.New(os.Stderr, "", 0) + } else { + // Invisible mode (i.e. display nothing) + logDebug = log.New(ioutil.Discard, "", 0) + } + return logDebug +} diff --git a/pkg/bbutil/filestats.go b/pkg/bbutil/filestats.go new file mode 100644 index 00000000..6daed66d --- /dev/null +++ b/pkg/bbutil/filestats.go @@ -0,0 +1,130 @@ +package bbutil + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strings" + "time" +) + +// DirExists returns true if directory exists. +func DirExists(path string) (bool, error) { + stat, err := os.Stat(path) + if err == nil { + return stat.IsDir(), nil + } + if os.IsNotExist(err) { + return false, nil + } + return true, err +} + +// FileExistsOrProblem returns true if the file exists or if we can't determine its existence. +func FileExistsOrProblem(path string) bool { + _, err := os.Stat(path) + if err == nil { + return true + } + if os.IsNotExist(err) { + return false + } + return true +} + +// Touch updates the timestamp of a file. 
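+// If the file does not exist it is created first, like the touch(1) command.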
+func Touch(name string) error { + var err error + _, err = os.Stat(name) + if os.IsNotExist(err) { + file, err := os.Create(name) + if err != nil { + return fmt.Errorf("TouchFile failed: %w", err) + } + file.Close() + } + + currentTime := time.Now().Local() + return os.Chtimes(name, currentTime, currentTime) +} + +// ReadFileLines is like ioutil.ReadFile() but returns an []string. +func ReadFileLines(filename string) ([]string, error) { + b, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + s := string(b) + s = strings.TrimSuffix(s, "\n") + if s == "" { + return []string{}, nil + } + l := strings.Split(s, "\n") + return l, nil +} + +// AddLinesToSortedFile adds a line to a sorted file. +func AddLinesToSortedFile(filename string, newlines ...string) error { + lines, err := ReadFileLines(filename) + //fmt.Printf("DEBUG: read=%q\n", lines) + if err != nil { + return fmt.Errorf("AddLinesToSortedFile can't read %q: %w", filename, err) + } + if !sort.StringsAreSorted(lines) { + return fmt.Errorf("AddLinesToSortedFile: file wasn't sorted: %v", filename) + } + lines = append(lines, newlines...) + sort.Strings(lines) + contents := strings.Join(lines, "\n") + "\n" + //fmt.Printf("DEBUG: write=%q\n", contents) + err = ioutil.WriteFile(filename, []byte(contents), 0o660) + if err != nil { + return fmt.Errorf("AddLinesToSortedFile can't write %q: %w", filename, err) + } + return nil +} + +// AddLinesToFile adds lines to the end of a file. +func AddLinesToFile(filename string, newlines ...string) error { + lines, err := ReadFileLines(filename) + if err != nil { + return fmt.Errorf("AddLinesToFile can't read %q: %w", filename, err) + } + lines = append(lines, newlines...) + contents := strings.Join(lines, "\n") + "\n" + err = ioutil.WriteFile(filename, []byte(contents), 0o660) + if err != nil { + return fmt.Errorf("AddLinesToFile can't write %q: %w", filename, err) + } + return nil +} + +// FindDirInParent looks for target in CWD, or .., or ../.., etc. +func FindDirInParent(target string) (string, error) { + // Prevent an infinite loop by only doing "cd .." this many times + maxDirLevels := 30 + relpath := "." + for i := 0; i < maxDirLevels; i++ { + // Does relpath contain our target? + t := filepath.Join(relpath, target) + //logDebug.Printf("Trying %q\n", t) + _, err := os.Stat(t) + if err == nil { + return t, nil + } + if !os.IsNotExist(err) { + return "", fmt.Errorf("stat failed FindDirInParent (%q): %w", t, err) + } + // Ok, it really wasn't found. + + // If we are at the root, stop. + if abs, err := filepath.Abs(relpath); err == nil && abs == "/" { + break + } + // Try one directory up + relpath = filepath.Join("..", relpath) + } + return "", fmt.Errorf("Not found") +} diff --git a/pkg/bbutil/rbio_test.go b/pkg/bbutil/rbio_test.go new file mode 100644 index 00000000..6757c391 --- /dev/null +++ b/pkg/bbutil/rbio_test.go @@ -0,0 +1,21 @@ +package bbutil + +import ( + "testing" +) + +func TestRunBashInputOutput(t *testing.T) { + + in := "This is a test of the RBIO system.\n" + bin := []byte(in) + + out, err := RunBashInputOutput(bin, "cat") + sout := string(out) + if err != nil { + t.Error(err) + } + + if in != sout { + t.Errorf("not equal %q %q", in, out) + } +} diff --git a/pkg/bbutil/runbash.go b/pkg/bbutil/runbash.go new file mode 100644 index 00000000..bff442ce --- /dev/null +++ b/pkg/bbutil/runbash.go @@ -0,0 +1,77 @@ +package bbutil + +import ( + "bytes" + "fmt" + "log" + "os" + "os/exec" +) + +// RunBash runs a Bash command. 
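+// The child process inherits this process's stdin, stdout, and stderr;
+// a non-zero exit status is returned as a wrapped error.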
+func RunBash(command string, args ...string) error { + cmd := exec.Command(command, args...) + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Start() + if err != nil { + log.Fatal(err) + } + err = cmd.Wait() + if err != nil { + return fmt.Errorf("RunBash cmd=%q err=%w", command, err) + } + return nil +} + +// RunBashOutput runs a Bash command, captures output. +func RunBashOutput(command string, args ...string) (string, error) { + cmd := exec.Command(command, args...) + cmd.Stdin = os.Stdin + cmd.Stderr = os.Stderr + out, err := cmd.Output() + if err != nil { + return "", fmt.Errorf("RunBashOutput err=%w", err) + } + return string(out), err +} + +// RunBashOutputSilent runs a Bash command, captures output, discards stderr. +func RunBashOutputSilent(command string, args ...string) (string, error) { + cmd := exec.Command(command, args...) + cmd.Stdin = os.Stdin + // Leave cmd.Stderr unmodified and stderr is discarded. + out, err := cmd.Output() + if err != nil { + return "", fmt.Errorf("RunBashOutputSilent err=%w", err) + } + return string(out), err +} + +// RunBashInput runs a Bash command, sends input on stdin. +func RunBashInput(input string, command string, args ...string) error { + + cmd := exec.Command(command, args...) + cmd.Stdin = bytes.NewBuffer([]byte(input)) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Run() + if err != nil { + return fmt.Errorf("RunBashInput err=%w", err) + } + return nil +} + +// RunBashInputOutput runs a Bash command, sends input on stdin. +func RunBashInputOutput(input []byte, command string, args ...string) ([]byte, error) { + + cmd := exec.Command(command, args...) + cmd.Stdin = bytes.NewBuffer(input) + cmd.Stderr = os.Stderr + out, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("RunBashInputOutput err=%w", err) + } + return out, nil +} diff --git a/pkg/bbutil/shred.go b/pkg/bbutil/shred.go new file mode 100644 index 00000000..329c45c6 --- /dev/null +++ b/pkg/bbutil/shred.go @@ -0,0 +1,109 @@ +package bbutil + +// Pick an appropriate secure erase command for this operating system +// or just delete the file with os.Remove(). + +// Code rewritten based https://codereview.stackexchange.com/questions/245072 + +import ( + "fmt" + "io/ioutil" + "os" + "os/exec" +) + +var shredCmds = []struct { + name, opts string +}{ + {"sdelete", "-a"}, + {"shred", "-u"}, + {"srm", "-f"}, + {"rm", "-Pf"}, +} + +func shredTemp(path, opts string) error { + file, err := ioutil.TempFile("", "shredTemp.") + if err != nil { + return err + } + filename := file.Name() + defer os.Remove(filename) + defer file.Close() + + err = file.Close() + if err != nil { + return err + } + err = RunBash(path, opts, filename) + if err != nil { + return err + } + return nil +} + +var shredPath, shredOpts = func() (string, string) { + for _, cmd := range shredCmds { + path, err := exec.LookPath(cmd.name) + if err != nil { + continue + } + err = shredTemp(path, cmd.opts) + if err == nil { + return path, cmd.opts + } + } + return "", "" +}() + +// ShredInfo reveals the shred command and flags (for "blackbox info") +func ShredInfo() string { + return shredPath + " " + shredOpts +} + +// shredFile shreds one file. +func shredFile(filename string) error { + fi, err := os.Stat(filename) + if err != nil { + return err + } + if !fi.Mode().IsRegular() { + err := fmt.Errorf("filename is not mode regular") + return err + } + + if shredPath == "" { + // No secure erase command found. Default to a normal file delete. 
+ // TODO(tlim): Print a warning? Have a flag that causes this to be an error? + return os.Remove(filename) + } + + err = RunBash(shredPath, shredOpts, filename) + if err != nil { + return err + } + return nil +} + +// ShredFiles securely erases a list of files. +func ShredFiles(names []string) error { + + // TODO(tlim) DO the shredding in parallel like in v1. + + var eerr error + for _, n := range names { + _, err := os.Stat(n) + if err != nil { + if os.IsNotExist(err) { + fmt.Printf("======= already gone: %q\n", n) + continue + } + } + fmt.Printf("========== SHREDDING: %q\n", n) + e := shredFile(n) + if e != nil { + eerr = e + fmt.Printf("ERROR: %v\n", e) + } + } + return eerr +} diff --git a/pkg/bbutil/sortedfile_test.go b/pkg/bbutil/sortedfile_test.go new file mode 100644 index 00000000..6a5e2a7b --- /dev/null +++ b/pkg/bbutil/sortedfile_test.go @@ -0,0 +1,66 @@ +package bbutil + +import ( + "io/ioutil" + "os" + "testing" +) + +func TestAddLinesToSortedFile(t *testing.T) { + + var tests = []struct { + start string + add []string + expected string + }{ + { + "", + []string{"one"}, + "one\n", + }, + { + "begin\ntwo\n", + []string{"at top"}, + "at top\nbegin\ntwo\n", + }, + { + "begin\ntwo\n", + []string{"zbottom"}, + "begin\ntwo\nzbottom\n", + }, + { + "begin\ntwo\n", + []string{"middle"}, + "begin\nmiddle\ntwo\n", + }, + } + + for i, test := range tests { + content := []byte(test.start) + tmpfile, err := ioutil.TempFile("", "example") + if err != nil { + t.Fatal(err) + } + tmpfilename := tmpfile.Name() + defer os.Remove(tmpfilename) + + if _, err := tmpfile.Write(content); err != nil { + t.Fatal(err) + } + if err := tmpfile.Close(); err != nil { + t.Fatal(err) + } + AddLinesToSortedFile(tmpfilename, test.add...) + expected := test.expected + + got, err := ioutil.ReadFile(tmpfilename) + if err != nil { + t.Fatal(err) + } + if expected != string(got) { + t.Errorf("test %v: contents wrong:\nexpected: %q\n got: %q", i, expected, got) + } + os.Remove(tmpfilename) + } + +} diff --git a/pkg/box/box.go b/pkg/box/box.go new file mode 100644 index 00000000..44882d16 --- /dev/null +++ b/pkg/box/box.go @@ -0,0 +1,233 @@ +package box + +// box implements the box model. + +import ( + "fmt" + "log" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/StackExchange/blackbox/v2/pkg/bblog" + "github.com/StackExchange/blackbox/v2/pkg/bbutil" + "github.com/StackExchange/blackbox/v2/pkg/crypters" + "github.com/StackExchange/blackbox/v2/pkg/vcs" + "github.com/urfave/cli/v2" +) + +var logErr *log.Logger +var logDebug *log.Logger + +// Box describes what we know about a box. +type Box struct { + // Paths: + Team string // Name of the team (i.e. .blackbox-$TEAM) + RepoBaseDir string // Rel path to the VCS repo. + ConfigPath string // Abs or Rel path to the .blackbox (or whatever) directory. + ConfigRO bool // True if we should not try to change files in ConfigPath. + // Settings: + Umask int // umask to set when decrypting + Editor string // Editor to call + Debug bool // Are we in debug logging mode? + // Cache of data gathered from .blackbox: + Admins []string // If non-empty, the list of admins. + Files []string // If non-empty, the list of files. + FilesSet map[string]bool // If non-nil, a set of Files. + // Handles to interfaces: + Vcs vcs.Vcs // Interface access to the VCS. + Crypter crypters.Crypter // Inteface access to GPG. + logErr *log.Logger + logDebug *log.Logger +} + +// StatusMode is a type of query. 
+type StatusMode int + +const ( + // Itemized is blah + Itemized StatusMode = iota // Individual files by name + // All files is blah + All + // Unchanged is blah + Unchanged + // Changed is blah + Changed +) + +// NewFromFlags creates a box using items from flags. Nearly all subcommands use this. +func NewFromFlags(c *cli.Context) *Box { + + // The goal of this is to create a fully-populated box (and box.Vcs) + // so that all subcommands have all the fields and interfaces they need + // to do their job. + + logErr = bblog.GetErr() + logDebug = bblog.GetDebug(c.Bool("debug")) + + bx := &Box{ + Umask: c.Int("umask"), + Editor: c.String("editor"), + Team: c.String("team"), + logErr: bblog.GetErr(), + logDebug: bblog.GetDebug(c.Bool("debug")), + Debug: c.Bool("debug"), + } + + // Discover which kind of VCS is in use, and the repo root. + bx.Vcs, bx.RepoBaseDir = vcs.Discover() + + // Discover the crypto backend (GnuPG, go-openpgp, etc.) + bx.Crypter = crypters.SearchByName(c.String("crypto"), c.Bool("debug")) + if bx.Crypter == nil { + fmt.Printf("ERROR! No CRYPTER found! Please set --crypto correctly or use the damn default\n") + os.Exit(1) + } + + // Find the .blackbox (or equiv.) directory. + var err error + configFlag := c.String("config") + if configFlag != "" { + // Flag is set. Better make sure it is valid. + if !filepath.IsAbs(configFlag) { + fmt.Printf("config flag value is a relative path. Too risky. Exiting.\n") + os.Exit(1) + // NB(tlim): We could return filepath.Abs(config) or maybe it just + // works as is. I don't know, and until we have a use case to prove + // it out, it's best to just not implement this. + } + bx.ConfigPath = configFlag + bx.ConfigRO = true // External configs treated as read-only. + // TODO(tlim): We could get fancy here and set ConfigReadOnly=true only + // if we are sure configFlag is not within bx.RepoBaseDir. Again, I'd + // like to see a use-case before we implement this. + return bx + + } + // Normal path. Flag not set, so we discover the path. + bx.ConfigPath, err = FindConfigDir(bx.RepoBaseDir, c.String("team")) + if err != nil && c.Command.Name != "info" { + fmt.Printf("Can't find .blackbox or equiv. Have you run init?\n") + os.Exit(1) + } + return bx +} + +// NewUninitialized creates a box in a pre-init situation. +func NewUninitialized(c *cli.Context) *Box { + /* + This is for "blackbox init" (used before ".blackbox*" exists) + + Init needs: How we populate it: + bx.Vcs: Discovered by calling each plug-in until succeeds. + bx.ConfigDir: Generated algorithmically (it doesn't exist yet). + */ + bx := &Box{ + Umask: c.Int("umask"), + Editor: c.String("editor"), + Team: c.String("team"), + logErr: bblog.GetErr(), + logDebug: bblog.GetDebug(c.Bool("debug")), + Debug: c.Bool("debug"), + } + bx.Vcs, bx.RepoBaseDir = vcs.Discover() + if c.String("configdir") == "" { + rel := ".blackbox" + if bx.Team != "" { + rel = ".blackbox-" + bx.Team + } + bx.ConfigPath = filepath.Join(bx.RepoBaseDir, rel) + } else { + // Wait. The user is using the --config flag on a repo that + // hasn't been created yet? I hope this works! + fmt.Printf("ERROR: You can not set --config when initializing a new repo. Please run this command from within a repo, with no --config flag. Or, file a bug explaining your use caseyour use-case. Exiting!\n") + os.Exit(1) + // TODO(tlim): We could get fancy here and query the Vcs to see if the + // path would fall within the repo, figure out the relative path, and + // use that value. (and error if configflag is not within the repo). 
+ // That would be error prone and would only help the zero users that + // ever see the above error message. + } + return bx +} + +// NewForTestingInit creates a box in a bare environment. +func NewForTestingInit(vcsname string) *Box { + /* + + This is for "blackbox test_init" (secret command used in integration tests; when nothing exists) + TestingInitRepo only uses bx.Vcs, so that's all we set. + Populates bx.Vcs by finding the provider named vcsname. + */ + bx := &Box{} + + // Find the + var vh vcs.Vcs + var err error + vcsname = strings.ToLower(vcsname) + for _, v := range vcs.Catalog { + if strings.ToLower(v.Name) == vcsname { + vh, err = v.New() + if err != nil { + return nil // No idea how that would happen. + } + } + } + bx.Vcs = vh + + return bx +} + +func (bx *Box) getAdmins() error { + // Memoized + if len(bx.Admins) != 0 { + return nil + } + + // TODO(tlim): Try the json file. + + // Try the legacy file: + fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt") + bx.logDebug.Printf("Admins file: %q", fn) + a, err := bbutil.ReadFileLines(fn) + if err != nil { + return fmt.Errorf("getAdmins can't load %q: %v", fn, err) + } + if !sort.StringsAreSorted(a) { + return fmt.Errorf("file corrupt. Lines not sorted: %v", fn) + } + bx.Admins = a + + return nil +} + +// getFiles populates Files and FileMap. +func (bx *Box) getFiles() error { + if len(bx.Files) != 0 { + return nil + } + + // TODO(tlim): Try the json file. + + // Try the legacy file: + fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt") + bx.logDebug.Printf("Files file: %q", fn) + a, err := bbutil.ReadFileLines(fn) + if err != nil { + return fmt.Errorf("getFiles can't load %q: %v", fn, err) + } + if !sort.StringsAreSorted(a) { + return fmt.Errorf("file corrupt. Lines not sorted: %v", fn) + } + for _, n := range a { + bx.Files = append(bx.Files, filepath.Join(bx.RepoBaseDir, n)) + } + + bx.FilesSet = make(map[string]bool, len(bx.Files)) + for _, s := range bx.Files { + bx.FilesSet[s] = true + } + + return nil +} diff --git a/pkg/box/boxutils.go b/pkg/box/boxutils.go new file mode 100644 index 00000000..6497292f --- /dev/null +++ b/pkg/box/boxutils.go @@ -0,0 +1,224 @@ +package box + +import ( + "bufio" + "fmt" + "os" + "os/user" + "path/filepath" + "runtime" + "strconv" + "strings" + + "github.com/StackExchange/blackbox/v2/pkg/makesafe" +) + +// FileStatus returns the status of a file. +func FileStatus(name string) (string, error) { + /* + DECRYPTED: File is decrypted and ready to edit (unknown if it has been edited). + ENCRYPTED: GPG file is newer than plaintext. Indicates recented edited then encrypted. + SHREDDED: Plaintext is missing. + GPGMISSING: The .gpg file is missing. Oops? + PLAINERROR: Can't access the plaintext file to determine status. + GPGERROR: Can't access .gpg file to determine status. + */ + + p := name + e := p + ".gpg" + ps, perr := os.Stat(p) + es, eerr := os.Stat(e) + if perr == nil && eerr == nil { + if ps.ModTime().Before(es.ModTime()) { + return "ENCRYPTED", nil + } + return "DECRYPTED", nil + } + + if os.IsNotExist(perr) && os.IsNotExist(eerr) { + return "BOTHMISSING", nil + } + + if eerr != nil { + if os.IsNotExist(eerr) { + return "GPGMISSING", nil + } + return "GPGERROR", eerr + } + + if perr != nil { + if os.IsNotExist(perr) { + return "SHREDDED", nil + } + } + return "PLAINERROR", perr +} + +func anyGpg(names []string) error { + for _, name := range names { + if strings.HasSuffix(name, ".gpg") { + return fmt.Errorf( + "no not specify .gpg files. 
Specify %q not %q", + strings.TrimSuffix(name, ".gpg"), name) + } + } + return nil +} + +// func isChanged(pname string) (bool, error) { +// // if .gpg exists but not plainfile: unchanged +// // if plaintext exists but not .gpg: changed +// // if plainfile < .gpg: unchanged +// // if plainfile > .gpg: don't know, need to try diff + +// // Gather info about the files: + +// pstat, perr := os.Stat(pname) +// if perr != nil && (!os.IsNotExist(perr)) { +// return false, fmt.Errorf("isChanged(%q) returned error: %w", pname, perr) +// } +// gname := pname + ".gpg" +// gstat, gerr := os.Stat(gname) +// if gerr != nil && (!os.IsNotExist(perr)) { +// return false, fmt.Errorf("isChanged(%q) returned error: %w", gname, gerr) +// } + +// pexists := perr == nil +// gexists := gerr == nil + +// // Use the above rules: + +// // if .gpg exists but not plainfile: unchanged +// if gexists && !pexists { +// return false, nil +// } + +// // if plaintext exists but not .gpg: changed +// if pexists && !gexists { +// return true, nil +// } + +// // At this point we can conclude that both p and g exist. +// // Can't hurt to test that assertion. +// if (!pexists) && (!gexists) { +// return false, fmt.Errorf("Assertion failed. p and g should exist: pn=%q", pname) +// } + +// pmodtime := pstat.ModTime() +// gmodtime := gstat.ModTime() +// // if plainfile < .gpg: unchanged +// if pmodtime.Before(gmodtime) { +// return false, nil +// } +// // if plainfile > .gpg: don't know, need to try diff +// return false, fmt.Errorf("Can not know for sure. Try git diff?") +// } + +func parseGroup(userinput string) (int, error) { + if userinput == "" { + return -1, fmt.Errorf("group spec is empty string") + } + + // If it is a valid number, use it. + i, err := strconv.Atoi(userinput) + if err == nil { + return i, nil + } + + // If not a number, look it up by name. + g, err := user.LookupGroup(userinput) + if err == nil { + i, err = strconv.Atoi(g.Gid) + return i, nil + } + + // Give up. + return -1, err +} + +// FindConfigDir tests various places until it finds the config dir. +// If we can't determine the relative path, "" is returned. +func FindConfigDir(reporoot, team string) (string, error) { + + candidates := []string{} + if team != "" { + candidates = append(candidates, ".blackbox-"+team) + } + candidates = append(candidates, ".blackbox") + candidates = append(candidates, "keyrings/live") + logDebug.Printf("DEBUG: candidates = %q\n", candidates) + + maxDirLevels := 30 // Prevent an infinite loop + relpath := "." + for i := 0; i < maxDirLevels; i++ { + // Does relpath contain any of our directory names? + for _, c := range candidates { + t := filepath.Join(relpath, c) + logDebug.Printf("Trying %q\n", t) + fi, err := os.Stat(t) + if err == nil && fi.IsDir() { + return t, nil + } + if err == nil { + return "", fmt.Errorf("path %q is not a directory: %w", t, err) + } + if !os.IsNotExist(err) { + return "", fmt.Errorf("dirExists access error: %w", err) + } + } + + // If we are at the root, stop. + if abs, _ := filepath.Abs(relpath); abs == "/" { + break + } + // Try one directory up + relpath = filepath.Join("..", relpath) + } + + return "", fmt.Errorf("No .blackbox (or equiv) directory found") +} + +func gpgAgentNotice() { + // Is gpg-agent configured? + if os.Getenv("GPG_AGENT_INFO") != "" { + return + } + // Are we on macOS? + if runtime.GOOS == "darwin" { + // We assume the use of https://gpgtools.org, which + // uses the keychain. 
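+		// The keychain handles passphrase prompts, so the gpg-agent warning below is skipped.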
+ return + } + + // TODO(tlim): v1 verifies that "gpg-agent --version" outputs a version + // string that is 2.1.0 or higher. It seems that 1.x is incompatible. + + fmt.Println("WARNING: You probably want to run gpg-agent as") + fmt.Println("you will be asked for your passphrase many times.") + fmt.Println("Example: $ eval $(gpg-agent --daemon)") + fmt.Print("Press CTRL-C now to stop. ENTER to continue: ") + input := bufio.NewScanner(os.Stdin) + input.Scan() +} + +func shouldWeOverwrite() { + fmt.Println() + fmt.Println("WARNING: This will overwrite any unencrypted files laying about.") + fmt.Print("Press CTRL-C now to stop. ENTER to continue: ") + input := bufio.NewScanner(os.Stdin) + input.Scan() +} + +// PrettyCommitMessage generates a pretty commit message. +func PrettyCommitMessage(verb string, files []string) string { + if len(files) == 0 { + // This use-case should probably be an error. + return verb + " (no files)" + } + rfiles := makesafe.RedactMany(files) + m, truncated := makesafe.FirstFewFlag(rfiles) + if truncated { + return verb + ": " + m + } + return verb + ": " + m +} diff --git a/pkg/box/pretty_test.go b/pkg/box/pretty_test.go new file mode 100644 index 00000000..a7cb42f9 --- /dev/null +++ b/pkg/box/pretty_test.go @@ -0,0 +1,35 @@ +package box + +import "testing" + +func TestPrettyCommitMessage(t *testing.T) { + long := "aVeryVeryLongLongLongStringStringString" + for i, test := range []struct { + data []string + expected string + }{ + {[]string{}, `HEADING (no files)`}, + {[]string{"one"}, `HEADING: one`}, + {[]string{"one", "two"}, `HEADING: one two`}, + {[]string{"one", "two", "three"}, `HEADING: one two three`}, + {[]string{"one", "two", "three", "four"}, + `HEADING: one two three four`}, + {[]string{"one", "two", "three", "four", "five"}, + `HEADING: one two three four five`}, + {[]string{"has spaces.txt"}, `HEADING: "has spaces.txt"`}, + {[]string{"two\n"}, `HEADING: "twoX"(redacted)`}, + {[]string{"smile😁eyes"}, `HEADING: smile😁eyes`}, + {[]string{"tab\ttab", "two very long strings.txt"}, + `HEADING: "tabXtab"(redacted) "two very long strings.txt"`}, + {[]string{long, long, long, long}, + "HEADING: " + long + " " + long + " " + long + " " + long + " ... " + long + "\n " + long + "\n " + long + "\n " + long + "\n"}, + } { + g := PrettyCommitMessage("HEADING", test.data) + if g == test.expected { + //t.Logf("%03d: PASSED files=%q\n", i, test.data) + t.Logf("%03d: PASSED", i) + } else { + t.Errorf("%03d: FAILED files==%q got=(%q) wanted=(%q)\n", i, test.data, g, test.expected) + } + } +} diff --git a/pkg/box/verbs.go b/pkg/box/verbs.go new file mode 100644 index 00000000..62a5938e --- /dev/null +++ b/pkg/box/verbs.go @@ -0,0 +1,633 @@ +package box + +// This file implements the business logic related to a black box. +// These functions are usually called from cmd/blackbox/drive.go or +// external sytems that use box as a module. +import ( + "bufio" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + + "github.com/StackExchange/blackbox/v2/pkg/bbutil" + "github.com/StackExchange/blackbox/v2/pkg/makesafe" + "github.com/olekukonko/tablewriter" +) + +// AdminAdd adds admins. +func (bx *Box) AdminAdd(nom string, sdir string) error { + err := bx.getAdmins() + if err != nil { + return err + } + + //fmt.Printf("ADMINS=%q\n", bx.Admins) + + // Check for duplicates. 
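+	// bx.Admins is kept sorted (getAdmins verifies this), so a binary search finds any existing entry.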
+ if i := sort.SearchStrings(bx.Admins, nom); i < len(bx.Admins) && bx.Admins[i] == nom { + return fmt.Errorf("Admin %v already an admin", nom) + } + + bx.logDebug.Printf("ADMIN ADD rbd=%q\n", bx.RepoBaseDir) + changedFiles, err := bx.Crypter.AddNewKey(nom, bx.RepoBaseDir, sdir, bx.ConfigPath) + if err != nil { + return fmt.Errorf("AdminAdd failed AddNewKey: %v", err) + } + + // TODO(tlim): Try the json file. + + // Try the legacy file: + fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt") + bx.logDebug.Printf("Admins file: %q", fn) + err = bbutil.AddLinesToSortedFile(fn, nom) + if err != nil { + return fmt.Errorf("could not update file (%q,%q): %v", fn, nom, err) + } + changedFiles = append([]string{fn}, changedFiles...) + + bx.Vcs.NeedsCommit("NEW ADMIN: "+nom, bx.RepoBaseDir, changedFiles) + return nil +} + +// AdminList lists the admin id's. +func (bx *Box) AdminList() error { + err := bx.getAdmins() + if err != nil { + return err + } + + for _, v := range bx.Admins { + fmt.Println(v) + } + return nil +} + +// AdminRemove removes an id from the admin list. +func (bx *Box) AdminRemove([]string) error { + return fmt.Errorf("NOT IMPLEMENTED: AdminRemove") +} + +// Cat outputs a file, unencrypting if needed. +func (bx *Box) Cat(names []string) error { + if err := anyGpg(names); err != nil { + return fmt.Errorf("cat: %w", err) + } + + err := bx.getFiles() + if err != nil { + return err + } + + for _, name := range names { + var out []byte + var err error + if _, ok := bx.FilesSet[name]; ok { + out, err = bx.Crypter.Cat(name) + } else { + out, err = ioutil.ReadFile(name) + } + if err != nil { + bx.logErr.Printf("BX_CRY3\n") + return fmt.Errorf("cat: %w", err) + } + fmt.Print(string(out)) + } + return nil +} + +// Decrypt decrypts a file. +func (bx *Box) Decrypt(names []string, overwrite bool, bulkpause bool, setgroup string) error { + var err error + + if err := anyGpg(names); err != nil { + return err + } + + err = bx.getFiles() + if err != nil { + return err + } + + if bulkpause { + gpgAgentNotice() + } + + groupchange := false + gid := -1 + if setgroup != "" { + gid, err = parseGroup(setgroup) + if err != nil { + return fmt.Errorf("Invalid group name or gid: %w", err) + } + groupchange = true + } + bx.logDebug.Printf("DECRYPT GROUP %q %v,%v\n", setgroup, groupchange, gid) + + if len(names) == 0 { + names = bx.Files + } + return decryptMany(bx, names, overwrite, groupchange, gid) +} + +func decryptMany(bx *Box, names []string, overwrite bool, groupchange bool, gid int) error { + + // TODO(tlim): If we want to decrypt them in parallel, go has a helper function + // called "sync.WaitGroup()"" which would be useful here. We would probably + // want to add a flag on the command line (stored in a field such as bx.ParallelMax) + // that limits the amount of parallelism. The default for the flag should + // probably be runtime.NumCPU(). + + for _, name := range names { + fmt.Printf("========== DECRYPTING %q\n", name) + if !bx.FilesSet[name] { + bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name) + continue + } + if (!overwrite) && bbutil.FileExistsOrProblem(name) { + bx.logErr.Printf("Skipping %q: Will not overwrite existing file", name) + continue + } + + // TODO(tlim) v1 detects zero-length files and removes them, even + // if overwrite is disabled. I don't think anyone has ever used that + // feature. That said, if we want to do that, we would implement it here. 
+ + // TODO(tlim) v1 takes the md5 hash of the plaintext before it decrypts, + // then compares the new plaintext's md5. It prints "EXTRACTED" if + // there is a change. + + err := bx.Crypter.Decrypt(name, bx.Umask, overwrite) + if err != nil { + bx.logErr.Printf("%q: %v", name, err) + continue + } + + // FIXME(tlim): Clone the file perms from the .gpg file to the plaintext file. + + if groupchange { + // FIXME(tlim): Also "chmod g+r" the file. + os.Chown(name, -1, gid) + } + } + return nil +} + +// Diff ... +func (bx *Box) Diff([]string) error { + return fmt.Errorf("NOT IMPLEMENTED: Diff") +} + +// Edit unencrypts, calls editor, calls encrypt. +func (bx *Box) Edit(names []string) error { + + if err := anyGpg(names); err != nil { + return err + } + + err := bx.getFiles() + if err != nil { + return err + } + + for _, name := range names { + if _, ok := bx.FilesSet[name]; ok { + if !bbutil.FileExistsOrProblem(name) { + err := bx.Crypter.Decrypt(name, bx.Umask, false) + if err != nil { + return fmt.Errorf("edit failed %q: %w", name, err) + } + } + } + err := bbutil.RunBash(bx.Editor, name) + if err != nil { + return err + } + } + return nil +} + +// Encrypt encrypts a file. +func (bx *Box) Encrypt(names []string, shred bool) error { + var err error + + if err = anyGpg(names); err != nil { + return err + } + + err = bx.getAdmins() + if err != nil { + return err + } + + err = bx.getFiles() + if err != nil { + return err + } + if len(names) == 0 { + names = bx.Files + } + + enames, err := encryptMany(bx, names, shred) + + bx.Vcs.NeedsCommit( + PrettyCommitMessage("ENCRYPTED", names), + bx.RepoBaseDir, + enames, + ) + + return err +} + +func encryptMany(bx *Box, names []string, shred bool) ([]string, error) { + var enames []string + for _, name := range names { + fmt.Printf("========== ENCRYPTING %q\n", name) + if !bx.FilesSet[name] { + bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name) + continue + } + if !bbutil.FileExistsOrProblem(name) { + bx.logErr.Printf("Skipping. Plaintext does not exist: %q", name) + continue + } + ename, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins) + if err != nil { + bx.logErr.Printf("Failed to encrypt %q: %v", name, err) + continue + } + enames = append(enames, ename) + if shred { + bx.Shred([]string{name}) + } + } + + return enames, nil +} + +// FileAdd enrolls files. +func (bx *Box) FileAdd(names []string, shred bool) error { + bx.logDebug.Printf("FileAdd(shred=%v, %v)", shred, names) + + // Check for dups. + // Encrypt them all. + // If that succeeds, add to the blackbox-files.txt file. + // (optionally) shred the plaintext. + + // FIXME(tlim): Check if the plaintext is in GIT. If it is, + // remove it from Git and print a warning that they should + // eliminate the history or rotate any secrets. + + if err := anyGpg(names); err != nil { + return err + } + + err := bx.getAdmins() + if err != nil { + return err + } + err = bx.getFiles() + if err != nil { + return err + } + if err := anyGpg(names); err != nil { + return err + } + + // Check for newlines + for _, n := range names { + if strings.ContainsAny(n, "\n") { + return fmt.Errorf("file %q contains a newlineregistered", n) + } + } + + // Check for duplicates. 
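+	// Reject any name that is already listed in blackbox-files.txt (bx.Files is sorted, so binary search suffices).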
+ for _, n := range names { + if i := sort.SearchStrings(bx.Files, n); i < len(bx.Files) && bx.Files[i] == n { + return fmt.Errorf("file %q already registered", n) + } + } + + // Encrypt + var needsCommit []string + for _, name := range names { + s, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins) + if err != nil { + return fmt.Errorf("AdminAdd failed AddNewKey: %v", err) + } + needsCommit = append(needsCommit, s) + } + + // TODO(tlim): Try the json file. + + // Try the legacy file: + fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt") + bx.logDebug.Printf("Files file: %q", fn) + err = bbutil.AddLinesToSortedFile(fn, names...) + if err != nil { + return fmt.Errorf("could not update file (%q,%q): %v", fn, names, err) + } + + err = bx.Shred(names) + if err != nil { + bx.logErr.Printf("Error while shredding: %v", err) + } + + bx.Vcs.CommitTitle("BLACKBOX ADD FILE: " + makesafe.FirstFew(makesafe.ShellMany(names))) + + bx.Vcs.IgnoreFiles(bx.RepoBaseDir, names) + + bx.Vcs.NeedsCommit( + PrettyCommitMessage("blackbox-files.txt add", names), + bx.RepoBaseDir, + append([]string{filepath.Join(bx.ConfigPath, "blackbox-files.txt")}, needsCommit...), + ) + return nil +} + +// FileList lists the files. +func (bx *Box) FileList() error { + err := bx.getFiles() + if err != nil { + return err + } + for _, v := range bx.Files { + fmt.Println(v) + } + return nil +} + +// FileRemove de-enrolls files. +func (bx *Box) FileRemove(names []string) error { + return fmt.Errorf("NOT IMPLEMENTED: FileRemove") +} + +// Info prints debugging info. +func (bx *Box) Info() error { + + err := bx.getFiles() + if err != nil { + bx.logErr.Printf("Info getFiles: %v", err) + } + + err = bx.getAdmins() + if err != nil { + bx.logErr.Printf("Info getAdmins: %v", err) + } + + fmt.Println("BLACKBOX:") + fmt.Printf(" Debug: %v\n", bx.Debug) + fmt.Printf(" Team: %q\n", bx.Team) + fmt.Printf(" RepoBaseDir: %q\n", bx.RepoBaseDir) + fmt.Printf(" ConfigPath: %q\n", bx.ConfigPath) + fmt.Printf(" Umask: %04o\n", bx.Umask) + fmt.Printf(" Editor: %v\n", bx.Editor) + fmt.Printf(" Shredder: %v\n", bbutil.ShredInfo()) + fmt.Printf(" Admins: count=%v\n", len(bx.Admins)) + fmt.Printf(" Files: count=%v\n", len(bx.Files)) + fmt.Printf(" FilesSet: count=%v\n", len(bx.FilesSet)) + fmt.Printf(" Vcs: %v\n", bx.Vcs) + fmt.Printf(" VcsName: %q\n", bx.Vcs.Name()) + fmt.Printf(" Crypter: %v\n", bx.Crypter) + fmt.Printf(" CrypterName: %q\n", bx.Crypter.Name()) + + return nil +} + +// Init initializes a repo. +func (bx *Box) Init(yes, vcsname string) error { + fmt.Printf("VCS root is: %q\n", bx.RepoBaseDir) + + fmt.Printf("team is: %q\n", bx.Team) + fmt.Printf("configdir will be: %q\n", bx.ConfigPath) + + if yes != "yes" { + fmt.Printf("Enable blackbox for this %v repo? (yes/no)? ", bx.Vcs.Name()) + input := bufio.NewScanner(os.Stdin) + input.Scan() + ans := input.Text() + b, err := strconv.ParseBool(ans) + if err != nil { + b = false + if len(ans) > 0 { + if ans[0] == 'y' || ans[0] == 'Y' { + b = true + } + } + } + if !b { + fmt.Println("Ok. 
Maybe some other time.") + return nil + } + } + + err := os.Mkdir(bx.ConfigPath, 0o750) + if err != nil { + return err + } + + ba := filepath.Join(bx.ConfigPath, "blackbox-admins.txt") + bf := filepath.Join(bx.ConfigPath, "blackbox-files.txt") + bbutil.Touch(ba) + bbutil.Touch(bf) + bx.Vcs.SetFileTypeUnix(bx.RepoBaseDir, ba, bf) + + bx.Vcs.IgnoreAnywhere(bx.RepoBaseDir, []string{ + "pubring.gpg~", + "pubring.kbx~", + "secring.gpg", + }) + + fs := []string{ba, bf} + bx.Vcs.NeedsCommit( + "NEW: "+strings.Join(makesafe.RedactMany(fs), " "), + bx.RepoBaseDir, + fs, + ) + + bx.Vcs.CommitTitle("INITIALIZE BLACKBOX") + return nil +} + +// Reencrypt decrypts and reencrypts files. +func (bx *Box) Reencrypt(names []string, overwrite bool, bulkpause bool) error { + + allFiles := false + + if err := anyGpg(names); err != nil { + return err + } + if err := bx.getAdmins(); err != nil { + return err + } + if err := bx.getFiles(); err != nil { + return err + } + if len(names) == 0 { + names = bx.Files + allFiles = true + } + + if bulkpause { + gpgAgentNotice() + } + + fmt.Println("========== blackbox administrators are:") + bx.AdminList() + fmt.Println("========== (the above people will be able to access the file)") + + if overwrite { + bbutil.ShredFiles(names) + } else { + warned := false + for _, n := range names { + if bbutil.FileExistsOrProblem(n) { + if !warned { + fmt.Printf("========== Shred these files?\n") + warned = true + } + fmt.Println("SHRED?", n) + } + } + if warned { + shouldWeOverwrite() + } + } + + // Decrypt + if err := decryptMany(bx, names, overwrite, false, 0); err != nil { + return fmt.Errorf("reencrypt failed decrypt: %w", err) + } + enames, err := encryptMany(bx, names, false) + if err != nil { + return fmt.Errorf("reencrypt failed encrypt: %w", err) + } + if err := bbutil.ShredFiles(names); err != nil { + return fmt.Errorf("reencrypt failed shred: %w", err) + } + + if allFiles { + // If the "--all" flag was used, don't try to list all the files. + bx.Vcs.NeedsCommit( + "REENCRYPT all files", + bx.RepoBaseDir, + enames, + ) + } else { + bx.Vcs.NeedsCommit( + PrettyCommitMessage("REENCRYPT", names), + bx.RepoBaseDir, + enames, + ) + + } + + return nil +} + +// Shred shreds files. +func (bx *Box) Shred(names []string) error { + + if err := anyGpg(names); err != nil { + return err + } + + err := bx.getFiles() + // Calling getFiles() has the benefit of making sure we are in a repo. + if err != nil { + return err + } + + if len(names) == 0 { + names = bx.Files + } + + return bbutil.ShredFiles(names) +} + +// Status prints the status of files. 
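+// When nameOnly is false the result is rendered as a table, for example:
+//
+//	+-----------+---------+
+//	| STATUS    | NAME    |
+//	+-----------+---------+
+//	| ENCRYPTED | foo.txt |
+//	+-----------+---------+
+//
+// The match argument, if non-empty, limits the output to files with that status.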
+func (bx *Box) Status(names []string, nameOnly bool, match string) error { + + err := bx.getFiles() + if err != nil { + return err + } + + var flist []string + if len(names) == 0 { + flist = bx.Files + } else { + flist = names + } + + var data [][]string + var onlylist []string + thirdColumn := false + var tcData bool + + for _, name := range flist { + var stat string + var err error + if _, ok := bx.FilesSet[name]; ok { + stat, err = FileStatus(name) + } else { + stat, err = "NOTREG", nil + } + if (match == "") || (stat == match) { + if err == nil { + data = append(data, []string{stat, name}) + onlylist = append(onlylist, name) + } else { + thirdColumn = tcData + data = append(data, []string{stat, name, fmt.Sprintf("%v", err)}) + onlylist = append(onlylist, fmt.Sprintf("%v: %v", name, err)) + } + } + } + + if nameOnly { + fmt.Println(strings.Join(onlylist, "\n")) + return nil + } + + table := tablewriter.NewWriter(os.Stdout) + table.SetAutoWrapText(false) + if thirdColumn { + table.SetHeader([]string{"Status", "Name", "Error"}) + } else { + table.SetHeader([]string{"Status", "Name"}) + } + for _, v := range data { + table.Append(v) + } + table.Render() // Send output + + return nil +} + +// TestingInitRepo initializes a repo. +// Uses bx.Vcs to create ".git" or whatever. +// Uses bx.Vcs to discover what was created, testing its work. +func (bx *Box) TestingInitRepo() error { + + if bx.Vcs == nil { + fmt.Println("bx.Vcs is nil") + fmt.Printf("BLACKBOX_VCS=%q\n", os.Getenv("BLACKBOX_VCS")) + os.Exit(1) + } + fmt.Printf("ABOUT TO CALL TestingInitRepo\n") + fmt.Printf("vcs = %v\n", bx.Vcs.Name()) + err := bx.Vcs.TestingInitRepo() + fmt.Printf("RETURNED from TestingInitRepo: %v\n", err) + fmt.Println(os.Getwd()) + if err != nil { + return fmt.Errorf("TestingInitRepo returned: %w", err) + } + if b, _ := bx.Vcs.Discover(); !b { + return fmt.Errorf("TestingInitRepo failed Discovery") + } + return nil +} diff --git a/pkg/commitlater/commitlater.go b/pkg/commitlater/commitlater.go new file mode 100644 index 00000000..80efadf0 --- /dev/null +++ b/pkg/commitlater/commitlater.go @@ -0,0 +1,84 @@ +package commitlater + +import ( + "fmt" +) + +type future struct { + message string // Message that describes this transaction. + dir string // Basedir of the files + files []string // Names of the files + display []string // Names as to be displayed to the user +} + +// List of futures to be done in the future. +type List struct { + items []*future +} + +// Add queues up a future commit. +func (list *List) Add(message string, repobasedir string, files []string) { + item := &future{ + message: message, + dir: repobasedir, + files: files, + } + list.items = append(list.items, item) +} + +func sameDirs(l *List) bool { + if len(l.items) <= 1 { + return true + } + for _, k := range l.items[1:] { + if k.dir != l.items[0].dir { + return false + } + } + return true +} + +// Flush executes queued commits. +func (list *List) Flush( + title string, + fadd func([]string) error, + fcommit func([]string, string, []string) error, +) error { + + // Just list the individual commit commands. + if title == "" || len(list.items) < 2 || !sameDirs(list) { + for _, fut := range list.items { + err := fadd(fut.files) + if err != nil { + return fmt.Errorf("add files1 (%q) failed: %w", fut.files, err) + } + err = fcommit([]string{fut.message}, fut.dir, fut.files) + if err != nil { + return fmt.Errorf("commit files (%q) failed: %w", fut.files, err) + } + } + return nil + } + + // Create a long commit message. 
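+	// Every queued entry is added, then all messages are folded into a
+	// single commit: the title first, followed by one " * " bullet per entry.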
+ var m []string + var f []string + for _, fut := range list.items { + err := fadd(fut.files) + if err != nil { + return fmt.Errorf("add files2 (%q) failed: %w", fut.files, err) + } + m = append(m, fut.message) + f = append(f, fut.files...) + } + msg := []string{title} + for _, mm := range m { + msg = append(msg, " * "+mm) + } + err := fcommit(msg, list.items[0].dir, f) + if err != nil { + return fmt.Errorf("commit files (%q) failed: %w", f, err) + } + + return nil +} diff --git a/pkg/crypters/_all/all.go b/pkg/crypters/_all/all.go new file mode 100644 index 00000000..bcde761b --- /dev/null +++ b/pkg/crypters/_all/all.go @@ -0,0 +1,5 @@ +package all + +import ( + _ "github.com/StackExchange/blackbox/v2/pkg/crypters/gnupg" +) diff --git a/pkg/crypters/crypters.go b/pkg/crypters/crypters.go new file mode 100644 index 00000000..8e83e9b8 --- /dev/null +++ b/pkg/crypters/crypters.go @@ -0,0 +1,58 @@ +package crypters + +import ( + "sort" + "strings" + + "github.com/StackExchange/blackbox/v2/models" +) + +// Crypter is the handle +type Crypter interface { + models.Crypter +} + +// NewFnSig function signature needed by reg. +type NewFnSig func(debug bool) (Crypter, error) + +// Item stores one item +type Item struct { + Name string + New NewFnSig + Priority int +} + +// Catalog is the list of registered vcs's. +var Catalog []*Item + +// SearchByName returns a Crypter handle for name. +// The search is case insensitive. +func SearchByName(name string, debug bool) Crypter { + name = strings.ToLower(name) + for _, v := range Catalog { + //fmt.Printf("Trying %v %v\n", v.Name) + if strings.ToLower(v.Name) == name { + chandle, err := v.New(debug) + if err != nil { + return nil // No idea how that would happen. + } + //fmt.Printf("USING! %v\n", v.Name) + return chandle + } + } + return nil +} + +// Register a new VCS. +func Register(name string, priority int, newfn NewFnSig) { + //fmt.Printf("CRYPTER registered: %v\n", name) + item := &Item{ + Name: name, + New: newfn, + Priority: priority, + } + Catalog = append(Catalog, item) + + // Keep the list sorted. + sort.Slice(Catalog, func(i, j int) bool { return Catalog[j].Priority < Catalog[i].Priority }) +} diff --git a/pkg/crypters/gnupg/gnupg.go b/pkg/crypters/gnupg/gnupg.go new file mode 100644 index 00000000..d2c961ee --- /dev/null +++ b/pkg/crypters/gnupg/gnupg.go @@ -0,0 +1,180 @@ +package gnupg + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "syscall" + + "github.com/StackExchange/blackbox/v2/pkg/bblog" + "github.com/StackExchange/blackbox/v2/pkg/bbutil" + "github.com/StackExchange/blackbox/v2/pkg/crypters" +) + +var pluginName = "GnuPG" + +func init() { + crypters.Register(pluginName, 100, registerNew) +} + +// CrypterHandle is the handle +type CrypterHandle struct { + GPGCmd string // "gpg2" or "gpg" + logErr *log.Logger + logDebug *log.Logger +} + +func registerNew(debug bool) (crypters.Crypter, error) { + + crypt := &CrypterHandle{ + logErr: bblog.GetErr(), + logDebug: bblog.GetDebug(debug), + } + + // Which binary to use? + path, err := exec.LookPath("gpg2") + if err != nil { + path, err = exec.LookPath("gpg") + if err != nil { + path = "gpg2" + } + } + crypt.GPGCmd = path + + return crypt, nil +} + +// Name returns my name. +func (crypt CrypterHandle) Name() string { + return pluginName +} + +// Decrypt name+".gpg", possibly overwriting name. 
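+// The plaintext is written with the supplied umask; when overwrite is true,
+// gpg is invoked with --yes so an existing plaintext file is replaced.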
+func (crypt CrypterHandle) Decrypt(filename string, umask int, overwrite bool) error { + + a := []string{ + "--use-agent", + "-q", + "--decrypt", + "-o", filename, + } + if overwrite { + a = append(a, "--yes") + } + a = append(a, filename+".gpg") + + oldumask := syscall.Umask(umask) + err := bbutil.RunBash(crypt.GPGCmd, a...) + syscall.Umask(oldumask) + return err +} + +// Cat returns the plaintext or, if it is missing, the decrypted cyphertext. +func (crypt CrypterHandle) Cat(filename string) ([]byte, error) { + + a := []string{ + "--use-agent", + "-q", + "--decrypt", + } + + // TODO(tlim): This assumes the entire gpg file fits in memory. If + // this becomes a problem, re-implement this using exec Cmd.StdinPipe() + // and feed the input in chunks. + in, err := ioutil.ReadFile(filename + ".gpg") + if err != nil { + + if os.IsNotExist(err) { + // Encrypted file doesn't exit? Return the plaintext. + return ioutil.ReadFile(filename) + } + + return nil, err + } + + return bbutil.RunBashInputOutput(in, crypt.GPGCmd, a...) +} + +// Encrypt name, overwriting name+".gpg" +func (crypt CrypterHandle) Encrypt(filename string, umask int, receivers []string) (string, error) { + var err error + + crypt.logDebug.Printf("Encrypt(%q, %d, %q)", filename, umask, receivers) + encrypted := filename + ".gpg" + a := []string{ + "--use-agent", + "--yes", + "--trust-model=always", + "--encrypt", + "-o", encrypted, + } + for _, f := range receivers { + a = append(a, "-r", f) + } + a = append(a, "--encrypt") + a = append(a, filename) + //err = bbutil.RunBash("ls", "-la") + + oldumask := syscall.Umask(umask) + crypt.logDebug.Printf("Args = %q", a) + err = bbutil.RunBash(crypt.GPGCmd, a...) + syscall.Umask(oldumask) + + return encrypted, err +} + +// AddNewKey extracts keyname from sourcedir's GnuPG chain to destdir keychain. +// It returns a list of files that may have changed. +func (crypt CrypterHandle) AddNewKey(keyname, repobasedir, sourcedir, destdir string) ([]string, error) { + + // $GPG --homedir="$2" --export -a "$KEYNAME" >"$pubkeyfile" + args := []string{ + "--export", + "-a", + } + if sourcedir != "" { + args = append(args, "--homedir", sourcedir) + } + args = append(args, keyname) + crypt.logDebug.Printf("ADDNEWKEY: Extracting key=%v: gpg, %v\n", keyname, args) + pubkey, err := bbutil.RunBashOutput("gpg", args...) + if err != nil { + return nil, err + } + if len(pubkey) == 0 { + return nil, fmt.Errorf("Nothing found when %q exported from %q", keyname, sourcedir) + } + + // $GPG --no-permission-warning --homedir="$KEYRINGDIR" --import "$pubkeyfile" + args = []string{ + "--no-permission-warning", + "--homedir", destdir, + "--import", + } + crypt.logDebug.Printf("ADDNEWKEY: Importing: gpg %v\n", args) + // fmt.Printf("DEBUG: crypter ADD %q", args) + err = bbutil.RunBashInput(pubkey, "gpg", args...) + if err != nil { + return nil, fmt.Errorf("AddNewKey failed: %w", err) + } + + // Suggest: ${pubring_path} trustdb.gpg blackbox-admins.txt + var changed []string + + // Prefix each file with the relative path to it. 
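+	// (e.g. ".blackbox/pubring.kbx" when destdir is the repo's config directory)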
+ prefix, err := filepath.Rel(repobasedir, destdir) + if err != nil { + //fmt.Printf("FAIL (%v) (%v) (%v)\n", repobasedir, destdir, err) + prefix = destdir + } + for _, file := range []string{"pubring.gpg", "pubring.kbx", "trustdb.gpg"} { + path := filepath.Join(destdir, file) + if bbutil.FileExistsOrProblem(path) { + changed = append(changed, filepath.Join(prefix, file)) + } + } + return changed, nil +} diff --git a/pkg/crypters/gnupg/keychain.go b/pkg/crypters/gnupg/keychain.go new file mode 100644 index 00000000..e666e2a7 --- /dev/null +++ b/pkg/crypters/gnupg/keychain.go @@ -0,0 +1,107 @@ +package gnupg + +/* + +# How does Blackbox manage key rings? + +Blackbox uses the user's .gnupg directory for most actions, such as decrypting data. +Decrypting requires the user's private key, which is stored by the user in their +home directory (and up to them to store safely). +Black box does not store the user's private key in the repo. + +When encrypting data, black needs the public key of all the admins, not just the users. +To assure that the user's `.gnupg` has all these public keys, prior to +encrypting data the public keys are imported from .blackbox, which stores +a keychain that stores the public (not private!) keys of all the admins. + +FYI: v1 does this import before decrypting, because I didn't know any better. + +# Binary compatibility: + +When writing v1, we didn't realize that the pubkey.gpg file is a binary format +that is not intended to be portable. In fact, it is intentionally not portable. +This means that all admins must use the exact same version of GnuPG +or the files (pubring.gpg or pubring.kbx) may get corrupted. + +In v2, we store the public keys in the portable ascii format +in a file called `.blackbox/public-keys-db.asc`. +It will also update the binary files if they exist. +If `.blackbox/public-keys-db.asc` doesn't exist, it will be created. + +Eventually we will stop updating the binary files. + +# Importing public keys to the user + +How to import the public keys to the user's GPG system: + +If pubkeyring-ascii.txt exists: + gpg --import pubkeyring-ascii.asc +Else if pubring.kbx + gpg --import pubring.kbx +Else if pubring.gpg + gpg --import pubring.gpg + +This is what v1 does: + #if gpg2 is installed next to gpg like on ubuntu 16 + if [[ "$GPG" != "gpg2" ]]; then + $GPG --export --no-default-keyring --keyring "$(get_pubring_path)" >"$keyringasc" + $GPG --import "$keyringasc" 2>&1 | egrep -v 'not changed$' >&2 + Else + $GPG --keyring "$(get_pubring_path)" --export | $GPG --import + fi + +# How to add a key to the keyring? + +Old, binary format: + # Get the key they want to add: + FOO is a user-specified directory, otherwise $HOME/.gnupg: + $GPG --homedir="FOO" --export -a "$KEYNAME" >TEMPFILE + # Import into the binary files: + KEYRINGDIR is .blackbox + $GPG --no-permission-warning --homedir="$KEYRINGDIR" --import TEMPFILE + # Git add any of these files if they exist: + pubring.gpg pubring.kbx trustdb.gpg blackbox-admins.txt + # Tell the user to git commit them. + +New, ascii format: + # Get the key to be added. Write to a TEMPFILE + FOO is a user-specified directory, otherwise $HOME/.gnupg: + $GPG --homedir="FOO" --export -a "$KEYNAME" >TEMPFILE + # Make a tempdir called TEMPDIR + # Import the pubkeyring-ascii.txt to TEMPDIR's keyring. 
(Skip if file not found) + # Import the temp1 data to TEMPDIR + # Export the TEMPDIR to create a new .blackbox/pubkeyring-ascii.txt + PATH_TO_BINARY is the path to .blackbox/pubring.gpg; if that's not found then pubring.kbx + $GPG --keyring PATH_TO_BINARY --export -a --output .blackbox/pubkeyring-ascii.txt + # Git add .blackbox/pubkeyring-ascii.txt and .blackbox/blackbox-admins.txt + # Tell the user to git commit them. + # Delete TEMPDIR + +# How to remove a key from the keyring? + +Old, binary format: + # Remove key from the binary file + $GPG --no-permission-warning --homedir="$KEYRINGDIR" --batch --yes --delete-key "$KEYNAME" || true + # Git add any of these files if they exist: + pubring.gpg pubring.kbx trustdb.gpg blackbox-admins.txt + # Tell the user to git commit them. + +New, ascii format: + # Make a tempdir called TEMPDIR + # Import the pubkeyring-ascii.txt to TEMPDIR's keyring. (Skip if file not found) + # Remove key from the ring file + $GPG --no-permission-warning --homedir="$KEYRINGDIR" --batch --yes --delete-key "$KEYNAME" || true + # Export the TEMPDIR to create a new .blackbox/pubkeyring-ascii.txt + PATH_TO_BINARY is the path to .blackbox/pubring.gpg; if that's not found then pubring.kbx + $GPG --keyring PATH_TO_BINARY --export -a --output .blackbox/pubkeyring-ascii.txt + # Git add .blackbox/pubkeyring-ascii.txt and .blackbox/blackbox-admins.txt + # Update the .blackbox copy of pubring.gpg, pubring.kbx, or trustdb.gpg (if they exist) + # with copies from TEMPDIR (if they exist). Git add any files that are updated. + # Tell the user to git commit them. + # Delete TEMPDIR + +*/ + +//func prepareUserKeychain() error { +// return nil +//} diff --git a/pkg/makesafe/makesafe.go b/pkg/makesafe/makesafe.go new file mode 100644 index 00000000..0830950b --- /dev/null +++ b/pkg/makesafe/makesafe.go @@ -0,0 +1,285 @@ +package makesafe + +// untaint -- A string with a Stringer that is shell safe. + +// This goes to great lengths to make sure the String() is pastable. +// Whitespace and shell "special chars" are handled as expected. + +// However to be extra paranoid, unicode is turned into backtick +// printf statements. I don't know anyone that puts unicode in their +// filenames, but I hope they appreciate this. + +// Most people would just use strconv.QuoteToGraphic() but I'm a +// control freak. + +import ( + "fmt" + "strings" + "unicode" +) + +type protection int + +const ( + // Unknown indicates we don't know if it is safe. + Unknown protection = iota + // None requires no special escaping. + None // Nothing special + // SingleQuote is unsafe in bash and requires a single quote. + SingleQuote // Requires at least a single quote + // DoubleQuote is unsafe in bash and requires escaping or other double-quote features. + DoubleQuote // Can only be in a double-quoted string +) + +const ( + // IsAQuote is either a `'` or `"` + IsAQuote = None + // IsSpace is ascii 32 + IsSpace = SingleQuote + // ShellUnsafe is ()!$ or other bash special char + ShellUnsafe = SingleQuote + // GlobUnsafe means could be a glob char (* or ?) 
+ GlobUnsafe = SingleQuote + // InterpolationUnsafe used in bash string interpolation ($) + InterpolationUnsafe = SingleQuote + // HasBackslash things like \n \t \r \000 \xFF + HasBackslash = DoubleQuote +) + +func max(i, j protection) protection { + if i > j { + return i + } + return j + +} + +type tabEntry struct { + level protection + fn func(s rune) string +} + +var tab [128]tabEntry + +func init() { + + for i := 0; i <= 31; i++ { // Control chars + tab[i] = tabEntry{HasBackslash, oct()} + } + tab['\t'] = tabEntry{HasBackslash, literal(`\t`)} // Override + tab['\n'] = tabEntry{HasBackslash, literal(`\n`)} // Override + tab['\r'] = tabEntry{HasBackslash, literal(`\r`)} // Override + tab[' '] = tabEntry{IsSpace, same()} + tab['!'] = tabEntry{ShellUnsafe, same()} + tab['"'] = tabEntry{IsAQuote, same()} + tab['#'] = tabEntry{ShellUnsafe, same()} + tab['@'] = tabEntry{InterpolationUnsafe, same()} + tab['$'] = tabEntry{InterpolationUnsafe, same()} + tab['%'] = tabEntry{InterpolationUnsafe, same()} + tab['&'] = tabEntry{ShellUnsafe, same()} + tab['\''] = tabEntry{IsAQuote, same()} + tab['('] = tabEntry{ShellUnsafe, same()} + tab[')'] = tabEntry{ShellUnsafe, same()} + tab['*'] = tabEntry{GlobUnsafe, same()} + tab['+'] = tabEntry{GlobUnsafe, same()} + tab[','] = tabEntry{None, same()} + tab['-'] = tabEntry{None, same()} + tab['.'] = tabEntry{None, same()} + tab['/'] = tabEntry{None, same()} + for i := '0'; i <= '9'; i++ { + tab[i] = tabEntry{None, same()} + } + tab[':'] = tabEntry{InterpolationUnsafe, same()} // ${foo:=default} + tab[';'] = tabEntry{ShellUnsafe, same()} + tab['<'] = tabEntry{ShellUnsafe, same()} + tab['='] = tabEntry{InterpolationUnsafe, same()} // ${foo:=default} + tab['>'] = tabEntry{ShellUnsafe, same()} + tab['?'] = tabEntry{GlobUnsafe, same()} + tab['@'] = tabEntry{InterpolationUnsafe, same()} // ${myarray[@]}; + for i := 'A'; i <= 'Z'; i++ { + tab[i] = tabEntry{None, same()} + } + tab['['] = tabEntry{ShellUnsafe, same()} + tab['\\'] = tabEntry{ShellUnsafe, same()} + tab[']'] = tabEntry{GlobUnsafe, same()} + tab['^'] = tabEntry{GlobUnsafe, same()} + tab['_'] = tabEntry{None, same()} + tab['`'] = tabEntry{ShellUnsafe, same()} + for i := 'a'; i <= 'z'; i++ { + tab[i] = tabEntry{None, same()} + } + tab['{'] = tabEntry{ShellUnsafe, same()} + tab['|'] = tabEntry{ShellUnsafe, same()} + tab['}'] = tabEntry{ShellUnsafe, same()} + tab['~'] = tabEntry{ShellUnsafe, same()} + tab[127] = tabEntry{HasBackslash, oct()} + + // Check our work. All indexes should have been set. + for i, e := range tab { + if e.level == 0 || e.fn == nil { + panic(fmt.Sprintf("tabEntry %d not set!", i)) + } + } + +} + +// literal return this exact string. +func literal(s string) func(s rune) string { + return func(rune) string { return s } +} + +// same converts the rune to a string. +func same() func(r rune) string { + return func(r rune) string { return string(r) } +} + +// oct returns the octal representing the value. +func oct() func(r rune) string { + return func(r rune) string { return fmt.Sprintf(`\%03o`, r) } +} + +// Redact returns a string that can be used in a shell single-quoted +// string. It may not be an exact representation, but it is safe +// to include on a command line. +// +// Redacted chars are changed to "X". +// If anything is redacted, the string is surrounded by double quotes +// ("air quotes") and the string "(redacted)" is added to the end. +// If nothing is redacted, but it contains spaces, it is surrounded +// by double quotes. 
+// +// Example: `s` -> `s` +// Example: `space cadet.txt` -> `"space cadet.txt"` +// Example: `drink a \t soda` -> `"drink a X soda"(redacted)` +// Example: `smile☺` -> `"smile☺` +func Redact(tainted string) string { + + if tainted == "" { + return `""` + } + + var b strings.Builder + b.Grow(len(tainted) + 10) + + redacted := false + needsQuote := false + + for _, r := range tainted { + if r == ' ' { + b.WriteRune(r) + needsQuote = true + } else if r == '\'' { + b.WriteRune('X') + redacted = true + } else if r == '"' { + b.WriteRune('\\') + b.WriteRune(r) + needsQuote = true + } else if unicode.IsPrint(r) { + b.WriteRune(r) + } else { + b.WriteRune('X') + redacted = true + } + } + + if redacted { + return `"` + b.String() + `"(redacted)` + } + if needsQuote { + return `"` + b.String() + `"` + } + return tainted +} + +// RedactMany returns the list after processing each element with Redact(). +func RedactMany(items []string) []string { + var r []string + for _, n := range items { + r = append(r, Redact(n)) + } + return r +} + +// Shell returns the string formatted so that it is safe to be pasted +// into a command line to produce the desired filename as an argument +// to the command. +func Shell(tainted string) string { + if tainted == "" { + return `""` + } + + var b strings.Builder + b.Grow(len(tainted) + 10) + + level := Unknown + for _, r := range tainted { + if r < 128 { + level = max(level, tab[r].level) + b.WriteString(tab[r].fn(r)) + } else { + level = max(level, DoubleQuote) + b.WriteString(escapeRune(r)) + } + } + s := b.String() + + if level == None { + return tainted + } else if level == SingleQuote { + // A single quoted string accepts all chars except the single + // quote itself, which must be replaced with: '"'"' + return "'" + strings.Join(strings.Split(s, "'"), `'"'"'`) + "'" + } else if level == DoubleQuote { + // A double-quoted string may include \xxx escapes and other + // things. Sadly bash doesn't interpret those, but printf will! + return `$(printf '%q' '` + s + `')` + } + // should not happen + return fmt.Sprintf("%q", s) +} + +// escapeRune returns a string of octal escapes that represent the rune. +func escapeRune(r rune) string { + b := []byte(string(rune(r))) // Convert to the indivdual bytes, utf8-encoded. + // fmt.Printf("rune: len=%d %s %v\n", len(s), s, []byte(s)) + switch len(b) { + case 1: + return fmt.Sprintf(`\%03o`, b[0]) + case 2: + return fmt.Sprintf(`\%03o\%03o`, b[0], b[1]) + case 3: + return fmt.Sprintf(`\%03o\%03o\%03o`, b[0], b[1], b[2]) + case 4: + return fmt.Sprintf(`\%03o\%03o\%03o\%03o`, b[0], b[1], b[2], b[3]) + default: + return string(rune(r)) + } +} + +// ShellMany returns the list after processing each element with Shell(). +func ShellMany(items []string) []string { + var r []string + for _, n := range items { + r = append(r, Redact(n)) + } + return r +} + +// FirstFew returns the first few names. If any are truncated, it is +// noted by appending "...". The exact definition of "few" may change +// over time, and may be based on the number of chars not the list +func FirstFew(sl []string) string { + s, _ := FirstFewFlag(sl) + return s +} + +// FirstFewFlag is like FirstFew but returns true if truncation done. 
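+// Truncation only happens when there are at least two names and their
+// joined length reaches 70 characters.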
+func FirstFewFlag(sl []string) (string, bool) { + const maxitems = 2 + const maxlen = 70 + if len(sl) < maxitems || len(strings.Join(sl, " ")) < maxlen { + return strings.Join(sl, " "), false + } + return strings.Join(sl[:maxitems], " ") + " (and others)", true +} diff --git a/pkg/makesafe/makesafe_test.go b/pkg/makesafe/makesafe_test.go new file mode 100644 index 00000000..f037d213 --- /dev/null +++ b/pkg/makesafe/makesafe_test.go @@ -0,0 +1,136 @@ +package makesafe + +import ( + "testing" +) + +func TestRedact(t *testing.T) { + for i, test := range []struct{ data, expected string }{ + {"", `""`}, + {"one", "one"}, + {"has space.txt", `"has space.txt"`}, + {"has\ttab.txt", `"hasXtab.txt"(redacted)`}, + {"has\nnl.txt", `"hasXnl.txt"(redacted)`}, + {"has\rret.txt", `"hasXret.txt"(redacted)`}, + {"¡que!", `¡que!`}, + {"thé", `thé`}, + {"pound£", `pound£`}, + {"*.go", `*.go`}, + {"rm -rf / ; echo done", `"rm -rf / ; echo done"`}, + {"smile\u263a", `smile☺`}, + {"dub\U0001D4E6", `dub𝓦`}, + {"four\U0010FFFF", `"fourX"(redacted)`}, + } { + g := Redact(test.data) + if g == test.expected { + t.Logf("%03d: PASSED", i) + } else { + t.Errorf("%03d: FAILED data=%q got=(%s) wanted=(%s)", i, test.data, g, test.expected) + } + } +} + +func TestRedactMany(t *testing.T) { + data := []string{ + "", + "one", + "has space.txt", + "has\ttab.txt", + } + g := RedactMany(data) + if len(g) != 4 || g[0] != `""` || g[1] != `"has space.txt"` || g[2] != `"hasXtab.txt"(redacted)` { + t.Logf("PASSED") + } else { + t.Errorf("FAILED got=(%q)", g) + } +} + +func TestShell(t *testing.T) { + for i, test := range []struct{ data, expected string }{ + {"", `""`}, + {"one", "one"}, + {"two\n", `$(printf '%q' 'two\n')`}, + {"ta tab", `$(printf '%q' 'ta\ttab')`}, + {"tab\ttab", `$(printf '%q' 'tab\ttab')`}, + {"new\nline", `$(printf '%q' 'new\nline')`}, + {"¡que!", `$(printf '%q' '\302\241que!')`}, + {"thé", `$(printf '%q' 'th\303\251')`}, + {"pound£", `$(printf '%q' 'pound\302\243')`}, + {"*.go", `'*.go'`}, + {"rm -rf / ; echo done", `'rm -rf / ; echo done'`}, + {"smile\u263a", `$(printf '%q' 'smile\342\230\272')`}, + {"dub\U0001D4E6", `$(printf '%q' 'dub\360\235\223\246')`}, + {"four\U0010FFFF", `$(printf '%q' 'four\364\217\277\277')`}, + } { + g := Shell(test.data) + if g == test.expected { + t.Logf("%03d: PASSED", i) + //t.Logf("%03d: PASSED go(%q) bash: %s", i, test.data, test.expected) + } else { + t.Errorf("%03d: FAILED data=%q got=`%s` wanted=`%s`", i, test.data, g, test.expected) + } + } +} + +func TestEscapeRune(t *testing.T) { + for i, test := range []struct { + data rune + expected string + }{ + {'a', `\141`}, + {'é', `\303\251`}, + {'☺', `\342\230\272`}, + {'글', `\352\270\200`}, + {'𩸽', `\360\251\270\275`}, + //{"\U0010FEDC", `"'\U0010fedc'"`}, + } { + g := escapeRune(test.data) + if g == test.expected { + t.Logf("%03d: PASSED go=(%q) bash=(%s)", i, test.data, test.expected) + } else { + t.Errorf("%03d: FAILED data=%q got=(%s) wanted=(%s)", i, test.data, g, test.expected) + } + } +} + +func TestShellMany(t *testing.T) { + data := []string{ + "", + "one", + "has space.txt", + "¡que!", + } + g := ShellMany(data) + if len(g) != 4 || g[0] != `""` || g[1] != "one" || g[2] != `"has space.txt"` || g[3] != `$(printf '%q' '\302\241que!')` { + t.Logf("PASSED") + } else { + t.Errorf("FAILED got=(%q)", g) + } +} + +func TestFirstFewFlag(t *testing.T) { + for i, test := range []struct { + data []string + expectedFlag bool + expectedString string + }{ + {[]string{"", "one"}, false, ` one`}, + {[]string{"one"}, false, `one`}, + 
{[]string{"one", "two", "three", "longlonglong", "longlonglonglong", "manylonglonglog", "morelongonglonglong"}, true, ``}, + } { + gs, gf := FirstFewFlag(test.data) + if test.expectedFlag { + if gf == test.expectedFlag { + t.Logf("%03d: PASSED", i) + } else { + t.Errorf("%03d: FAILED data=%q got=(%q) wanted=(%q)", i, test.data, gs, test.expectedString) + } + } else { + if gf == test.expectedFlag && gs == test.expectedString { + t.Logf("%03d: PASSED", i) + } else { + t.Errorf("%03d: FAILED data=%q got=(%q) wanted=(%q)", i, test.data, gs, test.expectedString) + } + } + } +} diff --git a/pkg/vcs/_all/all.go b/pkg/vcs/_all/all.go new file mode 100644 index 00000000..2ad31ed0 --- /dev/null +++ b/pkg/vcs/_all/all.go @@ -0,0 +1,6 @@ +package all + +import ( + _ "github.com/StackExchange/blackbox/v2/pkg/vcs/git" + _ "github.com/StackExchange/blackbox/v2/pkg/vcs/none" +) diff --git a/pkg/vcs/git/git.go b/pkg/vcs/git/git.go new file mode 100644 index 00000000..9e73be2a --- /dev/null +++ b/pkg/vcs/git/git.go @@ -0,0 +1,226 @@ +package git + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/StackExchange/blackbox/v2/pkg/bbutil" + "github.com/StackExchange/blackbox/v2/pkg/commitlater" + "github.com/StackExchange/blackbox/v2/pkg/makesafe" + "github.com/StackExchange/blackbox/v2/pkg/vcs" +) + +var pluginName = "GIT" + +func init() { + vcs.Register(pluginName, 100, newGit) +} + +// VcsHandle is the handle +type VcsHandle struct { + commitTitle string + commitHeaderPrinted bool // Has the "NEXT STEPS" header been printed? + toCommit *commitlater.List // List of future commits +} + +func newGit() (vcs.Vcs, error) { + l := &commitlater.List{} + return &VcsHandle{toCommit: l}, nil +} + +// Name returns my name. +func (v VcsHandle) Name() string { + return pluginName +} + +func ultimate(s string) int { return len(s) - 1 } + +// Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know). +func (v VcsHandle) Discover() (bool, string) { + out, err := bbutil.RunBashOutputSilent("git", "rev-parse", "--show-toplevel") + if err != nil { + return false, "" + } + if out == "" { + fmt.Printf("WARNING: git rev-parse --show-toplevel has NO output??. Seems broken.") + return false, "" + } + if out[ultimate(out)] == '\n' { + out = out[0:ultimate(out)] + } + return err == nil, out +} + +// SetFileTypeUnix informs the VCS that files should maintain unix-style line endings. +func (v VcsHandle) SetFileTypeUnix(repobasedir string, files ...string) error { + seen := make(map[string]bool) + + // Add to the .gitattributes in the same directory as the file. + for _, file := range files { + d, n := filepath.Split(file) + af := filepath.Join(repobasedir, d, ".gitattributes") + err := bbutil.Touch(af) + if err != nil { + return err + } + err = bbutil.AddLinesToFile(af, fmt.Sprintf("%q text eol=lf", n)) + if err != nil { + return err + } + seen[af] = true + } + + var changedfiles []string + for k := range seen { + changedfiles = append(changedfiles, k) + } + + v.NeedsCommit( + "set gitattr=UNIX "+strings.Join(makesafe.RedactMany(files), " "), + repobasedir, + changedfiles, + ) + + return nil +} + +// IgnoreAnywhere tells the VCS to ignore these files anywhere rin the repo. +func (v VcsHandle) IgnoreAnywhere(repobasedir string, files []string) error { + // Add to the .gitignore file in the repobasedir. 
+ ignore := filepath.Join(repobasedir, ".gitignore") + err := bbutil.Touch(ignore) + if err != nil { + return err + } + + err = bbutil.AddLinesToFile(ignore, files...) + if err != nil { + return err + } + + v.NeedsCommit( + "gitignore "+strings.Join(makesafe.RedactMany(files), " "), + repobasedir, + []string{".gitignore"}, + ) + return nil +} + +func gitSafeFilename(name string) string { + // TODO(tlim): Add unit tests. + // TODO(tlim): Confirm that *?[] escaping works. + if name == "" { + return "ERROR" + } + var b strings.Builder + b.Grow(len(name) + 2) + for _, r := range name { + if r == ' ' || r == '*' || r == '?' || r == '[' || r == ']' { + b.WriteRune('\\') + b.WriteRune(r) + } else { + b.WriteRune(r) + } + } + if name[0] == '!' || name[0] == '#' { + return `\` + b.String() + } + return b.String() +} + +// IgnoreFiles tells the VCS to ignore these files, specified relative to RepoBaseDir. +func (v VcsHandle) IgnoreFiles(repobasedir string, files []string) error { + + var lines []string + for _, f := range files { + lines = append(lines, "/"+gitSafeFilename(f)) + } + + // Add to the .gitignore file in the repobasedir. + ignore := filepath.Join(repobasedir, ".gitignore") + err := bbutil.Touch(ignore) + if err != nil { + return err + } + err = bbutil.AddLinesToFile(ignore, lines...) + if err != nil { + return err + } + + v.NeedsCommit( + "gitignore "+strings.Join(makesafe.RedactMany(files), " "), + repobasedir, + []string{".gitignore"}, + ) + return nil +} + +// Add makes a file visible to the VCS (like "git add"). +func (v VcsHandle) Add(repobasedir string, files []string) error { + + if len(files) == 0 { + return nil + } + + // TODO(tlim): Make sure that files are within repobasedir. + + var gpgnames []string + for _, n := range files { + gpgnames = append(gpgnames, n+".gpg") + } + return bbutil.RunBash("git", append([]string{"add"}, gpgnames...)...) +} + +// CommitTitle indicates what the next commit title will be. +// This is used if a group of commits are merged into one. +func (v *VcsHandle) CommitTitle(title string) { + v.commitTitle = title +} + +// NeedsCommit queues up commits for later execution. +func (v *VcsHandle) NeedsCommit(message string, repobasedir string, names []string) { + v.toCommit.Add(message, repobasedir, names) +} + +// DebugCommits dumps the list of future commits. +func (v VcsHandle) DebugCommits() commitlater.List { + return *v.toCommit +} + +// FlushCommits informs the VCS to do queued up commits. +func (v VcsHandle) FlushCommits() error { + return v.toCommit.Flush( + v.commitTitle, + func(files []string) error { + return bbutil.RunBash("git", append([]string{"add"}, files...)...) + }, + v.suggestCommit, + ) + // TODO(tlim): Some day we can add a command line flag that indicates that commits are + // to be done for real, not just suggested to the user. At that point, this function + // can call v.toCommit.Flush() with a function that actually does the commits instead + // of suggesting them. Flag could be called --commit=auto vs --commit=suggest. +} + +// suggestCommit tells the user what commits are needed. 
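+// Rather than committing, it prints a ready-to-paste `git commit` command,
+// emitting the "NEXT STEP" header only once per run.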
+func (v *VcsHandle) suggestCommit(messages []string, repobasedir string, files []string) error { + if !v.commitHeaderPrinted { + fmt.Printf("NEXT STEP: You need to manually check these in:\n") + } + v.commitHeaderPrinted = true + + fmt.Print(` git commit -m'`, strings.Join(messages, `' -m'`)+`'`) + fmt.Print(" ") + fmt.Print(strings.Join(makesafe.ShellMany(files), " ")) + fmt.Println() + return nil +} + +// The following are "secret" functions only used by the integration testing system. + +// TestingInitRepo initializes a repo. +func (v VcsHandle) TestingInitRepo() error { + return bbutil.RunBash("git", "init") + +} diff --git a/pkg/vcs/none/none.go b/pkg/vcs/none/none.go new file mode 100644 index 00000000..da4f1078 --- /dev/null +++ b/pkg/vcs/none/none.go @@ -0,0 +1,79 @@ +package none + +import ( + "fmt" + + "github.com/StackExchange/blackbox/v2/pkg/commitlater" + "github.com/StackExchange/blackbox/v2/pkg/vcs" +) + +var pluginName = "NONE" + +func init() { + vcs.Register(pluginName, 0, newNone) +} + +// VcsHandle is +type VcsHandle struct { + repoRoot string +} + +func newNone() (vcs.Vcs, error) { + return &VcsHandle{}, nil +} + +// Name returns my name. +func (v VcsHandle) Name() string { + return pluginName +} + +// Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know). +func (v VcsHandle) Discover() (bool, string) { + return true, "" // We don't know the root. +} + +//// SetRepoRoot informs the Vcs of the VCS root. +//func (v *VcsHandle) SetRepoRoot(dir string) { +// v.repoRoot = dir +//} + +// SetFileTypeUnix informs the VCS that files should maintain unix-style line endings. +func (v VcsHandle) SetFileTypeUnix(repobasedir string, files ...string) error { + return nil +} + +// IgnoreAnywhere tells the VCS to ignore these files anywhere in the repo. +func (v VcsHandle) IgnoreAnywhere(repobasedir string, files []string) error { + return nil +} + +// IgnoreFiles tells the VCS to ignore these files anywhere in the repo. +func (v VcsHandle) IgnoreFiles(repobasedir string, files []string) error { + return nil +} + +// CommitTitle sets the title of the next commit. +func (v VcsHandle) CommitTitle(title string) {} + +// NeedsCommit queues up commits for later execution. +func (v VcsHandle) NeedsCommit(message string, repobasedir string, names []string) { + return +} + +// DebugCommits dumps a list of future commits. +func (v VcsHandle) DebugCommits() commitlater.List { + return commitlater.List{} +} + +// FlushCommits informs the VCS to do queued up commits. +func (v VcsHandle) FlushCommits() error { + return nil +} + +// The following are "secret" functions only used by the integration testing system. + +// TestingInitRepo initializes a repo. +func (v VcsHandle) TestingInitRepo() error { + fmt.Println("VCS=none, TestingInitRepo") + return nil +} diff --git a/pkg/vcs/vcs.go b/pkg/vcs/vcs.go new file mode 100644 index 00000000..022699f3 --- /dev/null +++ b/pkg/vcs/vcs.go @@ -0,0 +1,82 @@ +package vcs + +import ( + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/StackExchange/blackbox/v2/models" +) + +// Vcs is the handle +type Vcs interface { + models.Vcs +} + +// NewFnSig function signature needed by reg. +type NewFnSig func() (Vcs, error) + +// Item stores one item +type Item struct { + Name string + New NewFnSig + Priority int +} + +// Catalog is the list of registered vcs's. +var Catalog []*Item + +// Discover polls the VCS plug-ins to determine the VCS of directory. 
+// The first to succeed is returned. +// It never returns nil, since "NONE" is always valid. +func Discover() (Vcs, string) { + for _, v := range Catalog { + h, err := v.New() + if err != nil { + return nil, "" // No idea how that would happen. + } + if b, repodir := h.Discover(); b { + + // Try to find the rel path from CWD to RepoBase + wd, err := os.Getwd() + if err != nil { + fmt.Printf("ERROR: Can not determine cwd! Failing!\n") + os.Exit(1) + } + //fmt.Printf("DISCCOVER: WD=%q REPO=%q\n", wd, repodir) + if repodir != wd && strings.HasSuffix(repodir, wd) { + // This is a terrible hack. We're basically guessing + // at the filesystem layout. That said, it works on macOS. + // TODO(tlim): Abstract this out into a separate function + // so we can do integration tests on it (to know if it fails on + // a particular operating system.) + repodir = wd + } + r, err := filepath.Rel(wd, repodir) + if err != nil { + // Wait, we're not relative to each other? Give up and + // just return the abs repodir. + return h, repodir + } + return h, r + } + } + // This can't happen. If it does, we'll panic and that's ok. + return nil, "" +} + +// Register a new VCS. +func Register(name string, priority int, newfn NewFnSig) { + //fmt.Printf("VCS registered: %v\n", name) + item := &Item{ + Name: name, + New: newfn, + Priority: priority, + } + Catalog = append(Catalog, item) + + // Keep the list sorted. + sort.Slice(Catalog, func(i, j int) bool { return Catalog[j].Priority < Catalog[i].Priority }) +} From b07793faf8c4e656c30254317614c0dfa2e36f24 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Fri, 24 Jul 2020 14:25:17 -0400 Subject: [PATCH 09/28] Golanggithub (#314) * Add Github Actions --- .github/workflows/build-and-test.yml | 16 +++++++ .github/workflows/release.yml | 35 +++++++++++++++ README-v2.md | 65 ++++++++++++++++++++++++++++ 3 files changed, 116 insertions(+) create mode 100644 .github/workflows/build-and-test.yml create mode 100644 .github/workflows/release.yml create mode 100644 README-v2.md diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml new file mode 100644 index 00000000..a81f9bc0 --- /dev/null +++ b/.github/workflows/build-and-test.yml @@ -0,0 +1,16 @@ +name: build-and-test + +ono: [pull_request, push] # TODO remove "push". 
Add it just for debugging + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + users: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Build Docker image + users: docker/build-push-action@v1 + with: + push: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..55ed3d2b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,35 @@ +name: Create draft release + +jobs: + build: + name: Create draft release + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Build project (dummy) + run: zip my-artifact README.md # TODO - actually create all the binaries, artifacts etc + + - name: Create Release + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: Release ${{ github.ref }} + body: Please add your release note here + draft: true + prerelease: false + + - name: Upload binaries + id: upload_binaries + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: ./my-artifact.zip + asset_name: my-artifact.zip + asset_content_type: application/zip diff --git a/README-v2.md b/README-v2.md new file mode 100644 index 00000000..d495c92d --- /dev/null +++ b/README-v2.md @@ -0,0 +1,65 @@ +BlackBox v2 +=========== + +WARNING: v2 is still experimental. It is in the same git repo as v1 +because the filenames do not overlap. Please do not mix the two. v1 +is in `bin`. v2 is in `cmd/blackbox` and `binv2`. + +Blackbox is an open source tool that enables you to safe store sensitive information in +Git (or other) repos by encrypting them with GPG. Only the encrypted +version of the file is available. You can be free to provide access +to the repo, as but only people with the right GPG keys can access the +encrypted data. + +Things you should **never** store in a repo without encryption: + +* TLS (SSL) certificates +* Passwords +* API keys +* And more! + +Project Info: + +* [Overview](user-overview.md) +* [Why is this important?](why-is-this-important.md) +* [Support/Community](support.md) +* [How BB encrypts](encryption.md) +* [OS Compatibility](compatibility.md) +* [Installation Instructions](installation.md) +* [Alternatives](alternatives.md) + +User Info: + +* [Enabling Blackbox on a Repo](enable-repo.md) +* [Enroll a file](enable-repo.md) +* [Full Command List](full-command-list.md) +* [Add/Remove users](admin-ops.md) +* [Add/Remove files](file-ops.md) +* [Advanced techiques](advanced.md) +* [Use with Role Accounts](role-accounts.md) +* [Backwards Compatibility](backwards-compatibility.md) +* [Replacing expired keys](expired-keys.md) +* [Git Tips](git-tips.md) +* [SubVersion Tips](subversion-tips.md) +* [GnuPG tips](gnupg-tips.md) +* [Use with Ansible](with-ansible.md) +* [Use with Puppet](with-puppet.md) + +For contributors: + +* [Developer Info](dev.md) +* [Code overview](dev-code-overview.md) +* [HOWTO: Add new OS support](dev-add-os-support.md) +* [HOWTO: Add new VCS support](dev-add-vcs-support.md) + + +A slide presentation about an older release [is on SlideShare](http://www.slideshare.net/TomLimoncelli/the-blackbox-project-sfae). + +Join our mailing list: [https://groups.google.com/d/forum/blackbox-project](https://groups.google.com/d/forum/blackbox-project) + + +License +======= + +This content is released under the MIT License. 
+See the [LICENSE.txt](LICENSE.txt) file. From 6a34987ba03e6f1a78e314c5ab1e9d453ffe31a3 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Fri, 24 Jul 2020 15:59:33 -0400 Subject: [PATCH 10/28] disable demo zip stuff --- .github/workflows/release.yml | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 55ed3d2b..7cf67c37 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,8 +8,14 @@ jobs: - name: Checkout code uses: actions/checkout@v2 - - name: Build project (dummy) - run: zip my-artifact README.md # TODO - actually create all the binaries, artifacts etc +# - name: Build project (dummy) +# run: zip my-artifact README.md # TODO - actually create all the binaries, artifacts etc + + - name: Package RPM + uses: bpicode/github-action-fpm@master + with: + fpm_args: './build' + fpm_opts: '--debug -n mypackage -t deb -s dir' - name: Create Release id: create_release @@ -23,13 +29,13 @@ jobs: draft: true prerelease: false - - name: Upload binaries - id: upload_binaries - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: ./my-artifact.zip - asset_name: my-artifact.zip - asset_content_type: application/zip +# - name: Upload binaries +# id: upload_binaries +# uses: actions/upload-release-asset@v1 +# env: +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# with: +# upload_url: ${{ steps.create_release.outputs.upload_url }} +# asset_path: ./my-artifact.zip +# asset_name: my-artifact.zip +# asset_content_type: application/zip From 2eafe8429a27a6022726578402cc1656cd3fe5e6 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Wed, 19 Dec 2018 14:20:05 -0500 Subject: [PATCH 11/28] Update CHANGELOG.md --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 478373f7..820a7bf0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,12 @@ have access to. * Commit changes to gitignore when deregistering (#282) * Add support for NetBSD and SunOS (SmartOS) * Defend against ShellShock + + +Release v1.20181219 + +* New OS support: Add support for NetBSD and SunOS (SmartOS) +* Testing: Improve confidence test. * .blackbox is now the default config directory for new repos. (#272) * Add blackbox_decrypt_file (#270) * Improved compatibility: change"/bin/[x]" to "/usr/bin/env [x]" (#265) From 513e9fc55c7882b4c6c36a399da68be4e8b48bf9 Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Wed, 19 Dec 2018 14:20:19 -0500 Subject: [PATCH 12/28] Update RELEASE_ENGINEERING.md --- RELEASE_ENGINEERING.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/RELEASE_ENGINEERING.md b/RELEASE_ENGINEERING.md index 9e5c7ba9..f128de5a 100644 --- a/RELEASE_ENGINEERING.md +++ b/RELEASE_ENGINEERING.md @@ -45,6 +45,17 @@ Build Tasks Stable Releases =============== +Step 0. Test the software + +Run this command to run the unit and system tests: + +``` +make test +``` + +NOTE: The tests require pinentry-tty. On macOS with NIX this +can be installed via: `nix-env -i pinentry` + Marking the software to be "stable": Step 1. 
Update CHANGELOG.md From 05edef2e88953d88ae2cbdf95ee17f10426340e7 Mon Sep 17 00:00:00 2001 From: Max Horstmann Date: Sun, 8 Nov 2020 18:33:14 -0500 Subject: [PATCH 13/28] clean up outdated workflows (#316) --- .github/workflows/build-and-test.yml | 16 ----------- .github/workflows/release.yml | 41 ---------------------------- 2 files changed, 57 deletions(-) delete mode 100644 .github/workflows/build-and-test.yml delete mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml deleted file mode 100644 index a81f9bc0..00000000 --- a/.github/workflows/build-and-test.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: build-and-test - -ono: [pull_request, push] # TODO remove "push". Add it just for debugging - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout repo - users: actions/checkout@v2 - with: - fetch-depth: 0 - - name: Build Docker image - users: docker/build-push-action@v1 - with: - push: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 7cf67c37..00000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Create draft release - -jobs: - build: - name: Create draft release - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - -# - name: Build project (dummy) -# run: zip my-artifact README.md # TODO - actually create all the binaries, artifacts etc - - - name: Package RPM - uses: bpicode/github-action-fpm@master - with: - fpm_args: './build' - fpm_opts: '--debug -n mypackage -t deb -s dir' - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ github.ref }} - release_name: Release ${{ github.ref }} - body: Please add your release note here - draft: true - prerelease: false - -# - name: Upload binaries -# id: upload_binaries -# uses: actions/upload-release-asset@v1 -# env: -# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -# with: -# upload_url: ${{ steps.create_release.outputs.upload_url }} -# asset_path: ./my-artifact.zip -# asset_name: my-artifact.zip -# asset_content_type: application/zip From 4b1e090446d461b21154769e21db2d9ab802917d Mon Sep 17 00:00:00 2001 From: Tom Limoncelli Date: Thu, 12 Nov 2020 02:14:07 -0500 Subject: [PATCH 14/28] Fix broken test --- pkg/box/pretty_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/box/pretty_test.go b/pkg/box/pretty_test.go index a7cb42f9..ee2471eb 100644 --- a/pkg/box/pretty_test.go +++ b/pkg/box/pretty_test.go @@ -22,7 +22,7 @@ func TestPrettyCommitMessage(t *testing.T) { {[]string{"tab\ttab", "two very long strings.txt"}, `HEADING: "tabXtab"(redacted) "two very long strings.txt"`}, {[]string{long, long, long, long}, - "HEADING: " + long + " " + long + " " + long + " " + long + " ... 
" + long + "\n " + long + "\n " + long + "\n " + long + "\n"}, + "HEADING: " + long + " " + long + " (and others)"}, } { g := PrettyCommitMessage("HEADING", test.data) if g == test.expected { From 2c0c8cefa4d3a0fb53798695af518e0d21c4a3aa Mon Sep 17 00:00:00 2001 From: Max Horstmann Date: Fri, 13 Nov 2020 10:01:22 -0500 Subject: [PATCH 15/28] Go version: run build checks & tests (#317) --- .github/workflows/build.yml | 29 +++++++++++++++++++++++++++++ README.md | 3 ++- 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..73e1a635 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,29 @@ +name: build + +on: + pull_request: + branches: [ master ] + push: + branches: [ master ] + +jobs: + + build: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: ^1.15 + - name: Build binaries + working-directory: cmd/blackbox + run: go build + - name: Run unit tests + run: go test ./... + - name: Run integration tests + working-directory: integrationTest + run: umask 0027 ; rm -rf /tmp/bbhome-* && go test -long -nocleanup diff --git a/README.md b/README.md index 3aaa5f2e..2a63755e 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ -BlackBox [![CircleCI](https://circleci.com/gh/StackExchange/blackbox.svg?style=shield)](https://circleci.com/gh/StackExchange/workflows/blackbox) +BlackBox [![CircleCI](https://circleci.com/gh/StackExchange/blackbox.svg?style=shield)](https://circleci.com/gh/StackExchange/workflows/blackbox) [![Build Status](https://github.com/StackExchange/blackbox/workflows/build/badge.svg)](https://github.com/StackExchange/blackbox/actions?query=workflow%3Abuild+branch%3Amaster) + ======== Safely store secrets in a VCS repo (i.e. Git, Mercurial, Subversion or Perforce). These commands make it easy for you to Gnu Privacy Guard (GPG) encrypt specific files in a repo so they are "encrypted at rest" in your repository. However, the scripts make it easy to decrypt them when you need to view or edit them, and decrypt them for use in production. Originally written for Puppet, BlackBox now works with any Git or Mercurial repository. From 412af48ab567f60597714796caf814b7e21a0705 Mon Sep 17 00:00:00 2001 From: Max Horstmann Date: Tue, 17 Nov 2020 09:29:41 -0500 Subject: [PATCH 16/28] Remove v2 from go.mod- doesn't match latest tag ye --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 7f43a864..162b613e 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/StackExchange/blackbox/v2 +module github.com/StackExchange/blackbox go 1.14 From 4807dc527c4990e856c0729817ce0c86ce95bb1f Mon Sep 17 00:00:00 2001 From: Max Horstmann Date: Tue, 17 Nov 2020 09:47:39 -0500 Subject: [PATCH 17/28] Revert "Remove v2 from go.mod- doesn't match latest tag ye" This reverts commit 412af48ab567f60597714796caf814b7e21a0705. 
--- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 162b613e..7f43a864 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/StackExchange/blackbox +module github.com/StackExchange/blackbox/v2 go 1.14 From b71378db826d3ce61cccf6038507d9efc69f1739 Mon Sep 17 00:00:00 2001 From: Max Horstmann Date: Wed, 18 Nov 2020 10:42:08 -0500 Subject: [PATCH 18/28] (Go version) Multi platform build (#319) --- .github/workflows/build.yml | 3 +- .gitignore | 1 - build/build.go | 75 +++++++++++++++++++++++++++++++++++++ cmd/blackbox/cli.go | 7 ++-- pkg/bbutil/umask_posix.go | 11 ++++++ pkg/bbutil/umask_windows.go | 9 +++++ pkg/crypters/gnupg/gnupg.go | 9 ++--- 7 files changed, 104 insertions(+), 11 deletions(-) create mode 100644 build/build.go create mode 100644 pkg/bbutil/umask_posix.go create mode 100644 pkg/bbutil/umask_windows.go diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 73e1a635..79cb7399 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,8 +20,7 @@ jobs: with: go-version: ^1.15 - name: Build binaries - working-directory: cmd/blackbox - run: go build + run: go run build/build.go - name: Run unit tests run: go test ./... - name: Run integration tests diff --git a/.gitignore b/.gitignore index 21f230d0..21403281 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # Distribution / packaging .Python env/ -build/ develop-eggs/ dist/ eggs/ diff --git a/build/build.go b/build/build.go new file mode 100644 index 00000000..062168b6 --- /dev/null +++ b/build/build.go @@ -0,0 +1,75 @@ +package main + +import ( + "flag" + "fmt" + "log" + "os" + "os/exec" + "strings" + "time" +) + +var sha = flag.String("sha", "", "SHA of current commit") + +var goos = flag.String("os", "", "OS to build (linux, windows, or darwin) Defaults to all.") + +func main() { + flag.Parse() + flags := fmt.Sprintf(`-s -w -X main.SHA="%s" -X main.BuildTime=%d`, getVersion(), time.Now().Unix()) + pkg := "github.com/StackExchange/blackbox/v2/cmd/blackbox" + + build := func(out, goos string) { + log.Printf("Building %s", out) + cmd := exec.Command("go", "build", "-o", out, "-ldflags", flags, pkg) + os.Setenv("GOOS", goos) + os.Setenv("GO111MODULE", "on") + cmd.Stderr = os.Stderr + cmd.Stdout = os.Stdout + err := cmd.Run() + if err != nil { + log.Fatal(err) + } + } + + for _, env := range []struct { + binary, goos string + }{ + {"blackbox-Linux", "linux"}, + {"blackbox.exe", "windows"}, + {"blackbox-Darwin", "darwin"}, + } { + if *goos == "" || *goos == env.goos { + build(env.binary, env.goos) + } + } +} + +func getVersion() string { + if *sha != "" { + return *sha + } + // check teamcity build version + if v := os.Getenv("BUILD_VCS_NUMBER"); v != "" { + return v + } + // check git + cmd := exec.Command("git", "rev-parse", "HEAD") + v, err := cmd.CombinedOutput() + if err != nil { + return "" + } + ver := strings.TrimSpace(string(v)) + // see if dirty + cmd = exec.Command("git", "diff-index", "--quiet", "HEAD", "--") + err = cmd.Run() + // exit status 1 indicates dirty tree + if err != nil { + if err.Error() == "exit status 1" { + ver += "[dirty]" + } else { + log.Printf("!%s!", err.Error()) + } + } + return ver +} diff --git a/cmd/blackbox/cli.go b/cmd/blackbox/cli.go index 74a252f4..50f90acd 100644 --- a/cmd/blackbox/cli.go +++ b/cmd/blackbox/cli.go @@ -4,9 +4,10 @@ package main import ( "fmt" - "syscall" "github.com/urfave/cli/v2" + + "github.com/StackExchange/blackbox/v2/pkg/bbutil" ) func flags() 
*cli.App { @@ -14,8 +15,8 @@ func flags() *cli.App { app.Version = "2.0.0" app.Usage = "Maintain encrypted files in a VCS (Git, Hg, Svn)" - defUmask := syscall.Umask(0) - syscall.Umask(defUmask) + defUmask := bbutil.Umask(0) + bbutil.Umask(defUmask) defUmaskS := fmt.Sprintf("%04o", defUmask) app.Flags = []cli.Flag{ diff --git a/pkg/bbutil/umask_posix.go b/pkg/bbutil/umask_posix.go new file mode 100644 index 00000000..13bddd20 --- /dev/null +++ b/pkg/bbutil/umask_posix.go @@ -0,0 +1,11 @@ +// +build !windows + +package bbutil + +import "syscall" + +// Umask is a no-op on Windows, and calls syscall.Umask on all other +// systems. On Windows it returns 0, which is a decoy. +func Umask(mask int) int { + return syscall.Umask(mask) +} diff --git a/pkg/bbutil/umask_windows.go b/pkg/bbutil/umask_windows.go new file mode 100644 index 00000000..1188a1bb --- /dev/null +++ b/pkg/bbutil/umask_windows.go @@ -0,0 +1,9 @@ +// +build windows + +package bbutil + +// Umask is a no-op on Windows, and calls syscall.Umask on all other +// systems. On Windows it returns 0, which is a decoy. +func Umask(mask int) int { + return 0o000 +} diff --git a/pkg/crypters/gnupg/gnupg.go b/pkg/crypters/gnupg/gnupg.go index d2c961ee..b29f4b5b 100644 --- a/pkg/crypters/gnupg/gnupg.go +++ b/pkg/crypters/gnupg/gnupg.go @@ -7,7 +7,6 @@ import ( "os" "os/exec" "path/filepath" - "syscall" "github.com/StackExchange/blackbox/v2/pkg/bblog" "github.com/StackExchange/blackbox/v2/pkg/bbutil" @@ -66,9 +65,9 @@ func (crypt CrypterHandle) Decrypt(filename string, umask int, overwrite bool) e } a = append(a, filename+".gpg") - oldumask := syscall.Umask(umask) + oldumask := bbutil.Umask(umask) err := bbutil.RunBash(crypt.GPGCmd, a...) - syscall.Umask(oldumask) + bbutil.Umask(oldumask) return err } @@ -118,10 +117,10 @@ func (crypt CrypterHandle) Encrypt(filename string, umask int, receivers []strin a = append(a, filename) //err = bbutil.RunBash("ls", "-la") - oldumask := syscall.Umask(umask) + oldumask := bbutil.Umask(umask) crypt.logDebug.Printf("Args = %q", a) err = bbutil.RunBash(crypt.GPGCmd, a...) 
-	syscall.Umask(oldumask)
+	bbutil.Umask(oldumask)
 
 	return encrypted, err
 }

From d45564db11a6d518f08e1fcf3db2bb4687aa496c Mon Sep 17 00:00:00 2001
From: Max Horstmann
Date: Thu, 19 Nov 2020 08:42:22 -0500
Subject: [PATCH 19/28] (Go version) Add release workflow (#318)

---
 .github/workflows/release.yml | 59 +++++++++++++++++++++++++++++++++++
 1 file changed, 59 insertions(+)
 create mode 100644 .github/workflows/release.yml

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..9904aa39
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,59 @@
+on:
+  release:
+    types: [published]
+
+name: release
+jobs:
+  release:
+    name: release
+    runs-on: ubuntu-latest
+    steps:
+
+    - name: Get release
+      id: get_release
+      uses: bruceadams/get-release@v1.2.2
+      env:
+        GITHUB_TOKEN: ${{ github.token }}
+
+    - name: Checkout repo
+      uses: actions/checkout@v2
+      with:
+        fetch-depth: 0
+
+    - name: Set up Go
+      uses: actions/setup-go@v2
+      with:
+        go-version: ^1.15
+
+    - name: Build binaries
+      run: go run build/build.go
+
+    - name: Upload blackbox-Darwin
+      uses: actions/upload-release-asset@v1
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      with:
+        upload_url: ${{ steps.get_release.outputs.upload_url }}
+        asset_path: ./blackbox-Darwin
+        asset_name: blackbox-Darwin
+        asset_content_type: application/octet-stream
+
+    - name: Upload blackbox-Linux
+      uses: actions/upload-release-asset@v1
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      with:
+        upload_url: ${{ steps.get_release.outputs.upload_url }}
+        asset_path: ./blackbox-Linux
+        asset_name: blackbox-Linux
+        asset_content_type: application/octet-stream
+
+    - name: Upload blackbox.exe
+      uses: actions/upload-release-asset@v1
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      with:
+        upload_url: ${{ steps.get_release.outputs.upload_url }}
+        asset_path: ./blackbox.exe
+        asset_name: blackbox.exe
+        asset_content_type: application/octet-stream

From c1a2b3e88ac33a5c6ed6e753f3e991ae9fafd2e7 Mon Sep 17 00:00:00 2001
From: Fabien Villepinte
Date: Mon, 11 Jan 2021 22:27:58 +0100
Subject: [PATCH 20/28] Fix formatting in README (#322)

---
 README.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/README.md b/README.md
index 2a63755e..5bb6d0a5 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
 BlackBox [![CircleCI](https://circleci.com/gh/StackExchange/blackbox.svg?style=shield)](https://circleci.com/gh/StackExchange/workflows/blackbox) [![Build Status](https://github.com/StackExchange/blackbox/workflows/build/badge.svg)](https://github.com/StackExchange/blackbox/actions?query=workflow%3Abuild+branch%3Amaster)
-
 ========
 
 Safely store secrets in a VCS repo (i.e. Git, Mercurial, Subversion or Perforce). These commands make it easy for you to Gnu Privacy Guard (GPG) encrypt specific files in a repo so they are "encrypted at rest" in your repository. However, the scripts make it easy to decrypt them when you need to view or edit them, and decrypt them for use in production. Originally written for Puppet, BlackBox now works with any Git or Mercurial repository.

From 9031973d389dc9815bcc16651ad00a52fec42f77 Mon Sep 17 00:00:00 2001
From: Brandon Johnson
Date: Mon, 29 Mar 2021 13:01:52 -0700
Subject: [PATCH 21/28] Update some terms in README to get past some confusion

---
 README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 5bb6d0a5..ca47d7ba 100644
--- a/README.md
+++ b/README.md
@@ -347,7 +347,7 @@ FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the
 
 To join the list of people that can edit the file requires three steps; You create a GPG key and add it to the key ring. Then, someone that already has access adds you to the system. Lastly, you should test your access.
 
-### Step 1: YOU create a GPG key pair on a secure machine and add to public keychain.
+### Step 1: NEW USER creates a GPG key pair on a secure machine and adds to public keychain.
 
 If you don't already have a GPG key, here's how to generate one:
 
@@ -401,7 +401,7 @@ ht push
 
 NOTE: Creating a Role Account? If you are adding the pubring.gpg of a role account, you can specify the directory where the pubring.gpg file can be found as a 2nd parameter: `blackbox_addadmin puppetmaster@puppet-master-1.example.com /path/to/the/dir`
 
-### Step 2: SOMEONE ELSE adds you to the system.
+### Step 2: EXISTING ADMIN adds new user to the system.
 
 Ask someone that already has access to re-encrypt the data files. This gives you access. They simply decrypt and re-encrypt the data without making any changes.
 
@@ -433,7 +433,7 @@ hg commit
 hg push
 ```
 
-### Step 3: YOU test.
+### Step 3: NEW USER tests.
 
 Make sure you can decrypt a file. (Suggestion: Keep a dummy file in VCS just for new people to practice on.)
 

From a6d8f3d981fdfb0b271d9c82b3c431dbbdf2e02a Mon Sep 17 00:00:00 2001
From: Humz
Date: Sat, 8 May 2021 10:23:09 +0100
Subject: [PATCH 22/28] Update LICENSE.txt

---
 LICENSE.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/LICENSE.txt b/LICENSE.txt
index 972c1c4d..53deba65 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2014-2019 Stack Exchange, Inc.
+Copyright (c) 2014-2021 Stack Exchange, Inc.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

From a03b6bf84a52867786a97f42860aafc04ee50f0b Mon Sep 17 00:00:00 2001
From: Arnout Engelen
Date: Wed, 12 Jan 2022 18:04:15 +0100
Subject: [PATCH 23/28] recommend nix-shell over nix-env as it's more idiomatic nix

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index ca47d7ba..5b5b9644 100644
--- a/README.md
+++ b/README.md
@@ -99,7 +99,7 @@ Installation Instructions
 - *The Debian/Ubuntu way*: Check out the repo and make a DEB via `make packages-deb`; now you can distribute the DEB via local methods. (Requires [fpm](https://github.com/jordansissel/fpm).)
 - *The Antigen Way*: Add `antigen bundle StackExchange/blackbox` to your .zshrc
 - *The Zgen Way*: Add `zgen load StackExchange/blackbox` to your .zshrc where you're loading your other plugins.
-- *The Nix Way*: `nix-env -i blackbox`
+- *The Nix Way*: `nix-shell -p blackbox`
 - *The Pkgsrc Way*: `pkgin in scm-blackbox`
 
 Commands

From dc01038efe4b43a3c459a32ffff00e3b9c9ef02f Mon Sep 17 00:00:00 2001
From: Jesus Galvan <2798097+jsgv@users.noreply.github.com>
Date: Wed, 16 Mar 2022 18:15:18 +0100
Subject: [PATCH 24/28] Fix 'chmod' for macOS Monterey 12.3 (#347)

---
 bin/_blackbox_common.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bin/_blackbox_common.sh b/bin/_blackbox_common.sh
index b935f5d8..70959758 100755
--- a/bin/_blackbox_common.sh
+++ b/bin/_blackbox_common.sh
@@ -432,7 +432,7 @@ function cp_permissions() {
   # Copy the perms of $1 onto $2 .. end.
   case $(uname -s) in
     Darwin )
-      chmod $( stat -f '%p' "$1" ) "${@:2}"
+      chmod $( stat -f '%Lp' "$1" ) "${@:2}"
       ;;
     FreeBSD | NetBSD )
       chmod $( stat -f '%p' "$1" | sed -e "s/^100//" ) "${@:2}"

From 80914679524688e664293549543421f4d2f75616 Mon Sep 17 00:00:00 2001
From: Tom Limoncelli
Date: Wed, 16 Mar 2022 13:29:31 -0400
Subject: [PATCH 25/28] Improve test data generation (#348)

* Improve test data generation

* Add homebrew to "make test" path
---
 Makefile                 | 2 +-
 tools/confidence_test.sh | 5 +++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/Makefile b/Makefile
index f8b6985f..168c4c9c 100644
--- a/Makefile
+++ b/Makefile
@@ -142,6 +142,6 @@ test: confidence
 confidence:
 	@if [ -e ~/.gnupg ]; then echo ERROR: '~/.gnupg should not exist. If it does, bugs may polute your .gnupg configuration. If the code has no bugs everything will be fine. Do you feel lucky?'; false ; fi
 	@if which >/dev/null gpg-agent ; then pkill gpg-agent ; rm -rf /tmp/tmp.* ; fi
-	@export PATH="$(PWD)/bin:$(PREFIX)/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:$(PATH)" ; tools/auto_system_test
+	@export PATH="$(PWD)/bin:$(PREFIX)/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:/usr/local/MacGPG2/bin:/opt/homebrew/bin:$(PATH)" ; tools/auto_system_test
 	@if which >/dev/null gpg-agent ; then pkill gpg-agent ; fi
 	@if [ -e ~/.gnupg ]; then echo ERROR: '~/.gnupg was created which means the scripts might be poluting GnuPG configuration. Fix this bug.'; false ; fi
diff --git a/tools/confidence_test.sh b/tools/confidence_test.sh
index aa8412ef..533e3469 100755
--- a/tools/confidence_test.sh
+++ b/tools/confidence_test.sh
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
 blackbox_home=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../bin
-export PATH="${blackbox_home}:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:/usr/pkg/bin:/usr/pkg/gnu/bin:${blackbox_home}"
+export PATH="${blackbox_home}:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:/usr/pkg/bin:/usr/pkg/gnu/bin:/usr/local/MacGPG2/bin:/opt/homebrew/bin:${blackbox_home}"
 export LANG=C.UTF-8 # Required ro "gpg --export" to work properly.
@@ -233,7 +233,8 @@ become_bob
 # This users's default group:
 DEFAULT_GID_NUM=$(id -g)
 # Pick a group that is not the default group:
-TEST_GID_NUM=$(grep -v "$DEFAULT_GID_NUM" /etc/group | cut -d: -f3 | sort -rn | head -1)
+#TEST_GID_NUM=$(grep -v "$DEFAULT_GID_NUM" /etc/group | cut -d: -f3 | sort -rn | head -1)
+TEST_GID_NUM=$(id -G | tr ' ' "\n" | sort -nr | grep -x -v "$DEFAULT_GID_NUM" | head -1)
 echo "DEFAULT_GID_NUM=$DEFAULT_GID_NUM"
 echo "TEST_GID_NUM=$TEST_GID_NUM"
 

From 7715254169c62267313d163c48f5c9d290370c65 Mon Sep 17 00:00:00 2001
From: Joe Block
Date: Thu, 17 Mar 2022 13:33:53 -0600
Subject: [PATCH 26/28] Update zgen reference to zgenom (#350)

zgen hasn't had an update in three years. zgenom is a fork that is
actively maintained. Update the zgen reference to use zgenom instead.

Signed-off-by: Joe Block
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 5b5b9644..a93fbf59 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ Installation Instructions
 - *The RPM way*: Check out the repo and make an RPM via `make packages-rpm`; now you can distribute the RPM via local methods. (Requires [fpm](https://github.com/jordansissel/fpm).)
 - *The Debian/Ubuntu way*: Check out the repo and make a DEB via `make packages-deb`; now you can distribute the DEB via local methods. (Requires [fpm](https://github.com/jordansissel/fpm).)
 - *The Antigen Way*: Add `antigen bundle StackExchange/blackbox` to your .zshrc
-- *The Zgen Way*: Add `zgen load StackExchange/blackbox` to your .zshrc where you're loading your other plugins.
+- *The Zgenom Way*: Add `zgenom load StackExchange/blackbox` to your .zshrc where you're loading your other plugins.
 - *The Nix Way*: `nix-shell -p blackbox`
 - *The Pkgsrc Way*: `pkgin in scm-blackbox`
 

From d84e904973ab4c313eb457fabb482242d2100dfb Mon Sep 17 00:00:00 2001
From: Joe Block
Date: Thu, 31 Mar 2022 13:34:00 -0600
Subject: [PATCH 27/28] Add .gitattributes during repo initialization (#352)

Make `blackbox_initialize` include `.blackbox/.gitattributes` when
creating a new repository.

Closes #351

Signed-off-by: Joe Block
---
 bin/blackbox_initialize | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bin/blackbox_initialize b/bin/blackbox_initialize
index 5d77b694..2bef6f1c 100755
--- a/bin/blackbox_initialize
+++ b/bin/blackbox_initialize
@@ -44,7 +44,7 @@ if [[ $VCS_TYPE = "git" ]]; then
   grep -qF "$LINE" "$FILE" || echo "$LINE" >> "$FILE"
   LINE='blackbox-files.txt text eol=lf'
   grep -qF "$LINE" "$FILE" || echo "$LINE" >> "$FILE"
-
+  vcs_add "$FILE"
 fi
 
 if [[ $VCS_TYPE = "svn" ]]; then

From 66b65751c41b891a694de5a623243cbebbeed88c Mon Sep 17 00:00:00 2001
From: Tom Limoncelli
Date: Fri, 10 Jun 2022 07:26:26 -0400
Subject: [PATCH 28/28] Update CHANGELOG.md (#356)

---
 CHANGELOG.md | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 820a7bf0..b780951f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,16 @@
+Release v1.20220610
+
+NOTE: I don't have a lot of time to commit to this project. I'd gladly accept help, especially
+with improving the testing on various operating systems.
+
+Major feature: macOS users rejoice! Incompatibility with macOS Monterey 12.3 is fixed! (#347)
+
+* Add .gitattributes during repo initialization (#352)
+* Update zgen reference to zgenom (#350)
+* Improve test data generation (#348)
+* Fix 'chmod' for macOS Monterey 12.3 (#347)
+
+
 Release v1.20200429
 
 NOTE: While there is now support for NetBSD and SunOS/SmartOS, the