From 053b40101e13086585d10475c5041d733224156e Mon Sep 17 00:00:00 2001 From: apollo Date: Mon, 11 Feb 2019 21:48:24 +0000 Subject: [PATCH] the safe credentials manager lands on GitHub --- .gitignore | 8 + .yardopts | 3 + Gemfile | 10 + README.md | 793 ++++++++++ Rakefile | 16 + bin/safedb | 5 + lib/configs/README.md | 58 + lib/extension/array.rb | 162 ++ lib/extension/dir.rb | 35 + lib/extension/file.rb | 123 ++ lib/extension/hash.rb | 33 + lib/extension/string.rb | 572 +++++++ lib/factbase/facts.safedb.net.ini | 38 + lib/interprete.rb | 462 ++++++ .../PRODUCE_RAND_SEQ_USING_DEV_URANDOM.txt | Bin 0 -> 2098 bytes lib/keytools/kdf.api.rb | 243 +++ lib/keytools/kdf.bcrypt.rb | 265 ++++ lib/keytools/kdf.pbkdf2.rb | 262 ++++ lib/keytools/kdf.scrypt.rb | 190 +++ lib/keytools/key.64.rb | 326 ++++ lib/keytools/key.algo.rb | 109 ++ lib/keytools/key.api.rb | 1391 +++++++++++++++++ lib/keytools/key.db.rb | 330 ++++ lib/keytools/key.docs.rb | 195 +++ lib/keytools/key.error.rb | 110 ++ lib/keytools/key.id.rb | 271 ++++ lib/keytools/key.ident.rb | 243 +++ lib/keytools/key.iv.rb | 107 ++ lib/keytools/key.local.rb | 259 +++ lib/keytools/key.now.rb | 402 +++++ lib/keytools/key.pair.rb | 259 +++ lib/keytools/key.pass.rb | 120 ++ lib/keytools/key.rb | 585 +++++++ lib/logging/gem.logging.rb | 132 ++ lib/modules/README.md | 43 + lib/modules/cryptology/aes-256.rb | 154 ++ lib/modules/cryptology/amalgam.rb | 70 + lib/modules/cryptology/blowfish.rb | 130 ++ lib/modules/cryptology/cipher.rb | 207 +++ lib/modules/cryptology/collect.rb | 138 ++ lib/modules/cryptology/crypt.io.rb | 225 +++ lib/modules/cryptology/engineer.rb | 99 ++ lib/modules/mappers/dictionary.rb | 288 ++++ lib/modules/storage/coldstore.rb | 186 +++ lib/modules/storage/git.store.rb | 399 +++++ lib/session/fact.finder.rb | 334 ++++ lib/session/require.gem.rb | 112 ++ lib/session/time.stamp.rb | 340 ++++ lib/session/user.home.rb | 49 + lib/usecase/cmd.rb | 490 ++++++ lib/usecase/config/README.md | 57 + 
lib/usecase/docker/README.md | 146 ++ lib/usecase/docker/docker.rb | 49 + lib/usecase/edit/README.md | 43 + lib/usecase/edit/delete.rb | 46 + lib/usecase/export.rb | 40 + lib/usecase/files/README.md | 37 + lib/usecase/files/eject.rb | 56 + lib/usecase/files/file_me.rb | 78 + lib/usecase/files/read.rb | 169 ++ lib/usecase/files/write.rb | 89 ++ lib/usecase/goto.rb | 57 + lib/usecase/id.rb | 36 + lib/usecase/import.rb | 157 ++ lib/usecase/init.rb | 63 + lib/usecase/jenkins/README.md | 146 ++ lib/usecase/jenkins/jenkins.rb | 208 +++ lib/usecase/login.rb | 71 + lib/usecase/logout.rb | 28 + lib/usecase/open.rb | 71 + lib/usecase/print.rb | 40 + lib/usecase/put.rb | 81 + lib/usecase/set.rb | 44 + lib/usecase/show.rb | 138 ++ lib/usecase/terraform/README.md | 91 ++ lib/usecase/terraform/terraform.rb | 121 ++ lib/usecase/token.rb | 35 + lib/usecase/update/README.md | 55 + lib/usecase/update/rename.rb | 180 +++ lib/usecase/use.rb | 41 + lib/usecase/verse.rb | 20 + lib/usecase/view.rb | 71 + lib/usecase/vpn/README.md | 150 ++ lib/usecase/vpn/vpn.ini | 31 + lib/usecase/vpn/vpn.rb | 54 + lib/version.rb | 3 + safedb.gemspec | 34 + 87 files changed, 14217 insertions(+) create mode 100644 .gitignore create mode 100644 .yardopts create mode 100644 Gemfile create mode 100644 README.md create mode 100644 Rakefile create mode 100755 bin/safedb create mode 100644 lib/configs/README.md create mode 100644 lib/extension/array.rb create mode 100644 lib/extension/dir.rb create mode 100644 lib/extension/file.rb create mode 100644 lib/extension/hash.rb create mode 100644 lib/extension/string.rb create mode 100644 lib/factbase/facts.safedb.net.ini create mode 100644 lib/interprete.rb create mode 100644 lib/keytools/PRODUCE_RAND_SEQ_USING_DEV_URANDOM.txt create mode 100644 lib/keytools/kdf.api.rb create mode 100644 lib/keytools/kdf.bcrypt.rb create mode 100644 lib/keytools/kdf.pbkdf2.rb create mode 100644 lib/keytools/kdf.scrypt.rb create mode 100644 lib/keytools/key.64.rb create mode 100644 
lib/keytools/key.algo.rb create mode 100644 lib/keytools/key.api.rb create mode 100644 lib/keytools/key.db.rb create mode 100644 lib/keytools/key.docs.rb create mode 100644 lib/keytools/key.error.rb create mode 100644 lib/keytools/key.id.rb create mode 100644 lib/keytools/key.ident.rb create mode 100644 lib/keytools/key.iv.rb create mode 100644 lib/keytools/key.local.rb create mode 100644 lib/keytools/key.now.rb create mode 100644 lib/keytools/key.pair.rb create mode 100644 lib/keytools/key.pass.rb create mode 100644 lib/keytools/key.rb create mode 100644 lib/logging/gem.logging.rb create mode 100644 lib/modules/README.md create mode 100644 lib/modules/cryptology/aes-256.rb create mode 100644 lib/modules/cryptology/amalgam.rb create mode 100644 lib/modules/cryptology/blowfish.rb create mode 100644 lib/modules/cryptology/cipher.rb create mode 100644 lib/modules/cryptology/collect.rb create mode 100644 lib/modules/cryptology/crypt.io.rb create mode 100644 lib/modules/cryptology/engineer.rb create mode 100644 lib/modules/mappers/dictionary.rb create mode 100644 lib/modules/storage/coldstore.rb create mode 100644 lib/modules/storage/git.store.rb create mode 100644 lib/session/fact.finder.rb create mode 100644 lib/session/require.gem.rb create mode 100644 lib/session/time.stamp.rb create mode 100644 lib/session/user.home.rb create mode 100644 lib/usecase/cmd.rb create mode 100644 lib/usecase/config/README.md create mode 100644 lib/usecase/docker/README.md create mode 100644 lib/usecase/docker/docker.rb create mode 100644 lib/usecase/edit/README.md create mode 100644 lib/usecase/edit/delete.rb create mode 100644 lib/usecase/export.rb create mode 100644 lib/usecase/files/README.md create mode 100644 lib/usecase/files/eject.rb create mode 100644 lib/usecase/files/file_me.rb create mode 100644 lib/usecase/files/read.rb create mode 100644 lib/usecase/files/write.rb create mode 100644 lib/usecase/goto.rb create mode 100644 lib/usecase/id.rb create mode 100644 
lib/usecase/import.rb create mode 100644 lib/usecase/init.rb create mode 100644 lib/usecase/jenkins/README.md create mode 100644 lib/usecase/jenkins/jenkins.rb create mode 100644 lib/usecase/login.rb create mode 100644 lib/usecase/logout.rb create mode 100644 lib/usecase/open.rb create mode 100644 lib/usecase/print.rb create mode 100644 lib/usecase/put.rb create mode 100644 lib/usecase/set.rb create mode 100644 lib/usecase/show.rb create mode 100644 lib/usecase/terraform/README.md create mode 100644 lib/usecase/terraform/terraform.rb create mode 100644 lib/usecase/token.rb create mode 100644 lib/usecase/update/README.md create mode 100644 lib/usecase/update/rename.rb create mode 100644 lib/usecase/use.rb create mode 100644 lib/usecase/verse.rb create mode 100644 lib/usecase/view.rb create mode 100644 lib/usecase/vpn/README.md create mode 100644 lib/usecase/vpn/vpn.ini create mode 100644 lib/usecase/vpn/vpn.rb create mode 100644 lib/version.rb create mode 100644 safedb.gemspec diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9106b2a --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +/.bundle/ +/.yardoc +/_yardoc/ +/coverage/ +/doc/ +/pkg/ +/spec/reports/ +/tmp/ diff --git a/.yardopts b/.yardopts new file mode 100644 index 0000000..ec2060b --- /dev/null +++ b/.yardopts @@ -0,0 +1,3 @@ +--title "opensecret credentials manager" +lib/**/*.rb - +lib/**/*.ini diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..d6c7f16 --- /dev/null +++ b/Gemfile @@ -0,0 +1,10 @@ +## ============================================ +## Try removing this file +## See what happens +## ============================================ +source "https://rubygems.org" + +git_source(:github) {|repo_name| "https://github.com/#{repo_name}" } + +# Specify your gem's dependencies in safedb.gemspec +gemspec diff --git a/README.md b/README.md new file mode 100644 index 0000000..24474f6 --- /dev/null +++ b/README.md @@ -0,0 +1,793 @@ +safe [![Build 
Status](https://secure.travis-ci.org/TwP/inifile.png)](http://travis-ci.org/TwP/inifile) +========== + +safe database introduction +----------- +**A safe database contains books that you login to.** A book contains **`chapters`** and chapters contain **`verses`**. Each verse has a number of lines which are just key/value pairs. + +## Joe Bloggs Social Media Accounts + +Joe Bloggs wants to safely store his social media account credentials. His creates a book called **`joe.bloggs`**, a chapter called **`social`** and verses called **facebook**, **twitter**, **instagram** and **snapchat**. These verses will hold key value pairs like username, @password and signin.url (aka lines). + +``` +safe init joe.bloggs /path/to/dir # create a book called joe.bloggs +safe login joe.bloggs # login to the book +``` + +## create facebook credentials + +The joe.bloggs book has been created. Now create the **social chapter** and **facebook verse**. + +``` +safe open social facebook # open chapter social and verse facebook +safe put username joeybloggs9 # create a username (key/value) line +safe put @password s3cr3t # create a password (key/value) line +safe put signin.url https://xxx # create a signin url (key/value) line +``` + +## create twitter credentials + +Now that facebook is done - Joe **creates another verse called twitter** under the social chapter. + +``` +safe open social twitter # open chapter social and verse twitter +safe put username joebloggs4 # create a username (key/value) line +safe put @password secret12 # create a password (key/value) line +safe put signin.url https://yyy # create a signin url (key/value) line +``` + +**`safe open`** creates a new chapter verse or goes to one if it exists. Commands like **`safe put`**, **`safe show`** and **`safe delete`** all work on the currently opened chapter and verse. + + +## keep it safe + +You use **`safe`** to put and retrieve credentials into an uncrackable encrypted "safe" on your filesystem or USB key. + +
+ +You interact with safe on the command line, or through DevOps scripts and pipelines. safe will soon **integrate** with storage solutions like S3, Git, SSH, Docker, Redis, the AWS key manager (KMS), Google Drive, Kubernetes Secrets, Git Secrets, OAuth2, KeePass, LastPass and the Ansible / HashiCorp vaults.
+
+ +safe is **simple**, intuitive and highly secure. It never accesses the cloud. The crypt files it writes are precious to you but worthless to everyone else. + +safe | Install and Configure +----------- + +## install safe + + $ gem install safedb + $ export SAFE_TTY_TOKEN=`safe token` # setup a shell session variable + $ safe init joe@abc /home/joe/credentials # initialize a secrets domain + $ safe login joe@abc # login to the new domain + +You initialize then login to a **domain** like **joe@abc**. In the init command we specify where the encrypted material will be stored. Best use a USB key or phone to use your secrets on any drive or computer. + +You only need to run init once on a computer for each domain - after that you simply login. + +More information will be provided on installing and using safe via a gem install, Ubuntu's apt-get, yum, a docker container, a development install, a unit test install and a software development kit (SDK) install. + +## Create Alias for Export Safe Terminal Token + +It's tiresome to manually create the **SAFE_TTY_TOKEN environment variable** that is required by safe. + +So create an **alias safetty (export token)** command like this noting the escaped back-ticks surrounding the safe token call. + + $ echo "alias safetty='export SAFE_TTY_TOKEN=\`safe token\`'" >> ~/.bash_aliases + $ cat ~/.bash_aliases # Check the alias has been added to ~/.bash_aliases + $ source ~/.bash_aliases # Use source to avoid grabbing a new shell this time + +Now before using safe simply call safetty. 
+ + $ safetty # safe terminal token + $ printenv | grep SAFE_TTY_TOKEN # check it was created + $ safe login joe@abc # login to a book + $ safe view # chapters and verses + +There are other ways to initialize the shell token including + +- via a Docker run ENV parameter +- inside a Vagrantfile (vagrant up) + +Do not add it to the bash profile script because safe uses the parent process id and bash profile will in effect use safe's grandparent's process id. + + +## Remove Token | Environment Variable + +When the shell closes the shell token will disappear which is good. You can clear it immediately with these commands. + + $ unset SAFE_TTY_TOKEN # Delete the shell session token + $ env | grep SAFE_TTY_TOKEN # Check SAFE_TTY_TOKEN is deleted + $ env -i bash # Delete every env var created by shell + + +## Chapter and Verse | It's a Book + +Visualize your safe **as a book** (like the Bible or the Oxford English Dictionary). + +You **open the book at a chapter and verse** then read, write and update a **key/value dictionary**. + +- **joe.credentials** is the **book** we login to. +- **email.accounts** is the **chapter** we open +- **joe@gmail.com** is the **verse** we open + +Now we can **put** and **read** key/value entries at the chapter and verse we opened. + +- **safe open email.accounts joe@gmail.com** +- **safe put username joe** +- **safe input password** +- **safe put question "Mothers Maiden Name"** +- **safe put answer "Rumpelstiltskin"** +- **safe tell** + +**What happened?** Look in the configured folder and you'll see some breadcrumbs and your first envelope. What happened was + +- the "email.accounts" envelope is created for joe@gmail.com +- the username and a memorable question are put in +- **safe input password** securely collects the password +- **safe tell** outputs all the data at the opened path + +Let's put data for the next email account into the same "email.accounts" envelope. 
+ +- **safe open email.accounts joe@yahoo.com** +- **safe put username joey** +- **safe input secret** +- **safe tell** + + +## emacs passwords | safe login + +Emacs tries to detect a password prompt by examining the prompt text. These will match. + +- Password: +- Enter new password: + +Use **`Alt-x send-invisible`** or **`M-x send-invisible`** if emacs gets it wrong. + +In emacs, passwords entered in the special minibuffer + +- are not displayed +- nor are they entered into any history list + +There are ways to help Emacs recognize password prompts using regular expressions and lisp lists but this complexity is rarely warranted. + +## Keeping Files Secret + +**Whole files can be secured in the safe - not just a sequence of characters.** + +### A single file + +**This is legacy functionality and will soon be refactored using the multi-file embedded map approach.** + +You can pull in (and spit out) a file into the dictionary at the opened chapter and verse using **`safe read`** and **`safe write`** + + $ safetty # alias command puts token in an environment variable + $ safe login <> # login to a book + $ safe open <> <> # go to the dictionary at the opened chapter/verse + $ safe show # look at the key/value pairs in the dictionary + $ safe read ~/creds/my-key.pem # an encrypted file is added to the safe directory + $ safe write # the file is decrypted and faithfully returned + +With read/write only one file can be added to a dictionary. If you **safe read** the second time the safe file is effectively overwritten and unretrievable. Note that **safe write** creates a backup if the file exists at the filepath before overwriting it. + +But can we put more than one file into a dictionary? + +### Putting Many Files into a Dictionary + +**These commands may be refactored into read and write.** +Suppose you have 4 openvpn (ovpn) files and you want them encrypted in just one dictionary. 
You can do it with **safe inter** and **safe exhume** + + $ safe inter production.vpn ~/tmp-vpn-files/prod.ovpn + $ safe inter development.vpn ~/tmp-vpn-files/dev.ovpn + $ safe inter canary.vpn ~/tmp-vpn-files/canary.ovpn + $ safe inter staging.vpn ~/tmp-vpn-files/stage.ovpn + $ safe show + +Against the @production.vpn key exists a sub-dictionary holding key-value pairs like in.url, out.url, permissions, is_zip, use_sudo, date_created, date_modified and most importantly **content**. + +The actual file content is converted into a url safe base64 format (resulting in a sequence of characters) and then put into the dictionary with keys named production.vpn, canary.vpn and so on. + + $ safe exhume + +This powerful command **excavates all files** thus reconstituting them into their configured plaintext destinations. + + $ safe exhume production.vpn # dig out just the one file + $ safe exhume 'production.vpn,canary.vpn' # dig out every file in the list + $ safe exhume production.vpn ~/new/live.ovpn # dig out file to specified path + + +In keeping with the safe tradition of zero parameter commands whenever and wherever possible the **safe inter** command will now reread all the files again because safe knows where they should be. + + $ safe inter + +### Passing Files in through Standard In + +**@Yet to be implemented. Above inter/exhume should be read/write and the below should be the real inter/exhume** +File content can be presented at standard in (stdin) and ejected to (stdout) in keeping with unix command tradition. + + $ cat ~/.ssh/repo-private-key.pem | safe inter repo.key + $ safe exhume repo.key > /media/usb/repository-key.pem + +Internally and therefore private - inter converts the multiline text into urlsafe base 64 on the way (std)in and exhume does the opposite on the way (std)out. + +## Scripts can Read Safe's Credentials + +Within a DevOps script, you can read from a safe and write to it without the credentials touching the ground (disk) and/or sides. 
+ +DevOps engineers often comment that this is the safe's most attractive feature. All you have to do is to tell safe that it is being called from within a script. This an example of connecting to a database maybe to create some space. + + $ safetty + $ safe login joe@bloggs.com + $ safe open mysql production + + $ python db-create-space.py + +You've logged into a safe book and opened a chapter and verse. Then you call a script - **look no parameters!** + +(Improve by using actual python commands). + +Now within the script could be lines like this. + + db_url = %x[safe print db.url --script] + db_usr = %x[safe print db.usr --script] + db_pass = %x[safe print db.pass --script] + + db_conn = Connection.new( db_url, db_usr, db_pass ) + +Notice the credentials have not touched the disk. The decrypted form was only used in memory to connect. + +The switch **--script** tells safe that it is being called from within a script. Safe won't give out credentials if the script in turn calls another script and that calls safe - it only obliges when you have run the command yourself. + +This gives you peace of mind that sub-processes two or more levels deep will not be able to access your credentials. + +You can also limit the credentials in a book. Scripts can only access credentials in books that you have logged into. Credentials in other books within your safe are out of scope. + + +## Scripts can Write Credentials into your Safe + +Many DevOps scripts source credentials that then need to be stored. Scripts can use Safe's configurable random generators to produce passwords, public/private keypairs and AES keys. Or the credentials are sourced externally and the scripts then place them into the safe. 
+ + +## safe | The Commands + + $ safe login <> # login to one of the books in the safe + $ safe use <> # switch to this or that book (if logged in) + $ safe open <> <> # open email accounts chapter at this verse (specific account) + $ safe view # contents page of chapters and verses in this book + $ safe goto <> # shortcut for open command (pick number from the viewed list + + $ safe put <> <> # put in a non-sensitive key-value pair + $ safe put @<> <> # put in a non-sensitive key-value pair + + $ safe show # show the key/value dictionary at chapter and verse + + +## Chapter and Verse | Types + +What types can safe store. Remember the +- book +- chapter +- verse + +You login to a book and then "open" it up at a chapter and verse. + +At that point you get a dictionary with string keys. The value types can be + +- strings +- integers +- booleans +- lists +- dictionaries +- another book, chapter and verse +- files (plain, binary, zip) + +## Concepts Yet to be Documented + +We need to fix the login bug which we now workaround by init(ing) every time. +On top of that we must document the behaviour for + +- list management (create read add remove eject) - remove is given a value while eject is given an index +- crud operations on books, chapters, verses and key/value entries +- password changing +- hardening configuration using Hexadecimal characters + +## How to configure safe's behaviour + +We can configure safe's behaviour + +- globally for all books on a given workstation +- locally for activities pertaining to the current book + + +## Exporting Credentials in Different Formats + +Once credentials are in safe they can be exported in different formats. +Also you can start a shell, login, open a chapter and verse and then give safe the command to run. 
+ +It can then export out selected (key/value) dictionaries at the opened chapter and verse as + +- **environment variables** +- **Kubernetes Secrets formatted files** +- **AWS IAM user environment variables or files** +- **RubyGem credentials (consumable by rake)** +- **rclone credentials for accessing GoogleDrive, Rackspace** +- **openvpn (ovpn) files (with keys/certs) for VPN tunnels** +- **ubuntu network manager configurations fir VPN and wireless** +- **certificates RubyGem credentials (consumable by rake)** +- **git credentials for pushing (or cloning) a git repo** + +In effect, safe can start VPNs, wireless connections, launch Firefox with certificates installed, run Ansible and Terraform suppling vital credentials - all this **without the credentials ever touching the ground (filesystem)**. + +## Generating Credentials + +The following can be generated from a single command + +- password strings configurable by length, set of printable characters and encoding +- private / public key pairs with bit length configurable (up to 8192 bits) - also format configurable +- AWS SSH keypairs +- certifcates including signed (root) certificates + +## Allowing Credentials Access + +Once the above are locked inside your safe - you + +## Did you know? + +Did you know that +- plaintext credentials are written by git config credential.helper store +- plaintext credentials are written (out of home directory) by ubuntu network manager +- plaintext credentials live under an AWS config directory. + + +## Configure Length of Generated Password + + +Visit the below - has perfect parameters for configuring the output of a generating credential. + +https://www.terraform.io/docs/providers/random/r/string.html + +Maybe find the Go software or Ruby alternatives. + +The following arguments are supported: + +- length - (Required) The length of the string desired +- upper - (Optional) (default true) Include uppercase alphabet characters in random string. 
+- min_upper - (Optional) (default 0) Minimum number of uppercase alphabet characters in random string. +- lower - (Optional) (default true) Include lowercase alphabet characters in random string. +- min_lower - (Optional) (default 0) Minimum number of lowercase alphabet characters in random string. +- number - (Optional) (default true) Include numeric characters in random string. +- min_numeric - (Optional) (default 0) Minimum number of numeric characters in random string. +- special - (Optional) (default true) Include special characters in random string. These are '!@#$%&*()-_=+[]{}<>:?' +- min_special - (Optional) (default 0) Minimum number of special characters in random string. +- override_special - (Optional) Supply your own list of special characters to use for string generation. This overrides characters list in the special argument. The special argument must still be set to true for any overwritten characters to be used in generation. +- keepers - (Optional) Arbitrary map of values that, when changed, will trigger a new id to be generated. See the main provider documentation for more information. + + + $ safe password length <> + +The length of randomly generated passwords (secret strings) can be weighted from 1 to 32. The generated +password length can still vary but is guaranteed to be one of 7 possible lengths as shown below. 
+ + | ---------------------- | -------------------- | + | | Expected Char Count | + | ---------------------- | -------------------- | + | Password Length Weight | Min | Median | Max | + | ---------------------- | -------------------- | + | 1 | 8 | 11 | 14 | + | 2 | 9 | 12 | 15 | + | 3 | 10 | 13 | 16 | + | 4 | 11 | 14 | 17 | + | 5 | 12 | 15 | 18 | + | 6 | 13 | 16 | 19 | + | 7 | 14 | 17 | 20 | + | 8 | 15 | 18 | 21 | + | 9 | 16 | 19 | 22 | + | 10 | 17 | 20 | 23 | + | 11 | 18 | 21 | 24 | + | 12 (default) | 19 | 22 | 25 | + | 13 | 20 | 23 | 26 | + | 14 | 21 | 24 | 27 | + | 15 | 22 | 25 | 28 | + | 16 | 23 | 26 | 29 | + | 17 | 24 | 27 | 30 | + | 18 | 25 | 28 | 31 | + | 19 | 26 | 29 | 32 | + | 20 | 27 | 30 | 33 | + | 21 | 28 | 31 | 34 | + | 22 | 29 | 32 | 35 | + | 23 | 30 | 33 | 36 | + | 24 | 31 | 34 | 37 | + | 25 | 32 | 35 | 38 | + | 26 | 33 | 36 | 39 | + | 27 | 34 | 37 | 40 | + | 28 | 35 | 38 | 41 | + | 29 | 36 | 39 | 42 | + | 30 | 37 | 40 | 43 | + | 31 | 38 | 41 | 44 | + | 32 | 39 | 42 | 45 | + | ---------------------- | -------------------- | + +The lowest 1 setting will produce a 8, 9, 10, 11, 12, 13 or 14 character password. + +The default password hovers in the low to mid twenties whilst the hardest 32 setting will generate a +length 42 password string (give or take 3 characters on either side). + +No extra benefit is derived from generating passwords with lengths in excess of 42 characters. + +Don't forget that the above has **nothing** to do with the password you choose to protect your safe safe. +This only applies to (securely) randomly generated character sequences used to create passwords for external +applications and systems. + +### Configure Makeup of Password | Printable Characters + +Some systems reject certain characters. Lloyds Bank for example will only accept alpha-numerics. + +In these cases we need to configure the set of characters that sources the actual sequence of password characters. 
+ +Again you can configure 1 to 32 which guarantees that the generated password sequence will be locked down to +(possibly) include a character and all those that come before it. + +There are 62 alpha-numerics which is the starting point and smallest source pool of usable choosable characters for a printable character sequence. + + - ---------------------- | -------------------- - --------- - + | Password Makeup Weight | # | Char Name | Character | + | ---------------------- | -----| ------------- | --------- | + | 1 | 62 | alpha-nums | A-Za-z0-9 | + | 2 | 63 | underscore | _ | + | 3 | 64 | period | . | + | 4 | 65 | hyphen | - | + | 5 | 66 | at symbol | @ | + | 6 | 67 | squiggle | ~ | + | 7 | 68 | hyphen | - | + | 8 | 69 | plus sign | + | + | 9 | 70 | percent | % | + | 10 | 71 | equals | = | + | 11 | 72 | SPACE | | + | 12 | 73 | fwd slash | / | + | 13 | 74 | hat symbol | ^ | + | 14 | 75 | soft open | ( | + | 15 | 76 | soft close | ) | + | 16 | 77 | square open | [ | + | 17 | 78 | square close | ] | + | 18 | 79 | curly open | { | + | 19 | 80 | curly close | } | + | 20 | 81 | angle open | < | + | 21 | 82 | angle close | > | + | 22 | 83 | pipe symbol | | | + | 23 | 84 | hash symbol | # | + | 24 | 85 | question mark | ? | + | 25 | 86 | colon | : | + | 26 | 87 | semi-colon | ; | + | 27 | 88 | comma | , | + | 28 | 89 | asterix | * | + | 29 | 90 | ampersand | & | + | 30 | 91 | exclamation | ! | + | 31 | 92 | dollar sign | $ | + | 32 | 93 | back tick | ` | + | ---------------------- | -----| ------------- | --------- | + +Use the full set of **93 printable characters** when protecting high value assets like databases. + +### Binary Data + +Some more advanced cryptography leaning services can handle binary streams (usually encoded) - safe can produce these at the drop of a hat. + +### Kubernetes Secrets + +safe can transfer a verse (or even the whole chapter) into a Kubernetes Secrets compatible format. 
+ +Kubernetes Secrets (through the kubectl interface) require that hexadecimal (base64) encoding be applied to secrets coming in through the letterbox. + +safe can output dictionary (key/value pair) configurations in a format consumable by Kubernetes secrets. + +### Encoding Character Sequences + + + + + +### safe | All Done! + +Cracking safe is infeasible for anyone other than the rightful owner. Only OpenSSL implemented tried and tested cryptographic algorithms are used. Both PBKDF2 and BCrypt are used for expensive key derivation. The content is encrypted with AES (Advanced Encryption Standard) and 48 byte random keys are employed along with initialization vectors. + +Even with all this crypt technology it is **important** that you + +- choose a robust password of between 10 and 32 characters +- align the number of salt derivation iteratios to your machine's resources +- backup the domain folders in case you lose your USB drive or phone + +Your ability to access your own secrets (even after disaster scenarios) is as important as preventing the secrets being accessed. + +safe | moving computer +----------- + +We travel between laptops, desktops, virtual machines and even docker containers. Always run init the first time you use a domain on a different computer. + + $ gem install safe + $ export SAFE_TTY_TOKEN=`safe token` # setup a shell session variable + $ safe init joe@abc /home/joe/credentials # initialize a secrets domain + $ safe login joe@abc # login to the new domain + +Run all four commands the first time. Then simply run the second and fourth commands whenever you open a new shell to interact with safe. + +## the no-go no-clouds mantra + +safe is designed to operate in highly secure locked down environments in which external access is not just constrained - **it is non-existent**. + +safe does not contact nor talk to anything external. 
It never asks (nor needs to know) the credentials for accessing your stores - this means it compliments your storage security be it S3, Google Drive, Redis, Git and even email/pop3 solutions. + +## the encrypted at rest mantra + +The ability to read data from drives (after the fact and) after deletion means **nothing unencrypted** should be put on any drive (including usb keys). + + +safe configuration +------------------------ + +Aside from your private keys, safe keeps a small amount of configuration within the .safe folder off your home directory. A typically safe.ini file within that folder looks like + + [joebloggs@example.com] + type = user + id = joe + keydir = /media/joe/usb_drive + domains = [ lecturers@harvard ] + default = true + printx = asdfasdfas65as87d76fa97ds6f57as6d5f87a + printy = asdfasdfas65as87d76fbbbasdfas0asd09080 + printz = adsfasdflkajhsdfasdf87987987asd9f87987 + + [lecturers@harvard] + type = domain + store = git + url = https://www.eco-platform.co.uk/crypt/lecturers.git + + + +Backend Storage Options +----------------------- + +The planned list of backend storage systems (each onlined with a plugin), is + +- Git (including GitHub, GitLab, BitBucket, OpenGit and private Git installations). +- S3 Buckets from the Amazon Web Services (AWS) cloud. +- SSH, SCP, SFTP connected file-systems +- network storage including Samba, NFS, VMWare vSAN and +- GoogleDrive (only Windows has suitable synchronized support). + +Access management is configured EXTERNAL to safe. SafeDb simply piggybacks the network transport if authorization is granted. + + +## safe | Summary + +You can use safe alone or you can use it to share secrets with colleagues, friends and family, even machines. + +safe is simple and holistically secure. *Simple* means less mistakes, less confusion and more peer reviews from internet security experts. + +Every domain is tied to backend storage which is accessible by you and others in your domain. 
You can use Git, S3, a networked filesystem or shared drive, a SSH accessible filesystem and soon, free storage from safe.io + + +## How to Use SafeDb as an SDK | Require it from another Ruby program + +You can require safe (as an SDK) and interact with it directly from any other Ruby program without wrappers. + + $ gem install safe + $ irb + $ > require "safe" + $ > SafeDb::Interprete.version() + +The above should return the **installed version** of SafeDb. + +If you get a **LoadError (cannot load such file -- safe)** then try the below. + + $ irb + $ > $LOAD_PATH + +[ + "/usr/share/rubygems-integration/all/gems/did_you_mean-1.2.0/lib", + "/usr/local/lib/site_ruby/2.5.0", + "/usr/local/lib/x86_64-linux-gnu/site_ruby", + "/usr/local/lib/site_ruby", + "/usr/lib/ruby/vendor_ruby/2.5.0", + "/usr/lib/x86_64-linux-gnu/ruby/vendor_ruby/2.5.0", + "/usr/lib/ruby/vendor_ruby", + "/usr/lib/ruby/2.5.0", + "/usr/lib/x86_64-linux-gnu/ruby/2.5.0" +] + + + + +## SAFE PROPOSED FUNCTIONALITY DOCUMENTATION + + +Before we can move to siloed safe workspaces and RELEASE the software into the public domain we must refactor file handling and implement vital methodologies for evolving the software. + +## File Storage Methodology + +- delete the concepts of content.id, content.iv and content.key in the context of files. +- add one more key to file verse @file.content and store the urlsafe base64 contents of the file there + +@@@@@@@@@@@@ change +@@@@@@@@@@@@ change ==> maybe better to create a sub dictionary (map) for the file so will be +@@@@@@@@@@@@ change ==> key value pairs. Keys could be permissions - 755 | @content - BASE64 file representation | read.url - http://shareprices/mcdonalds.yaml | write.url $HOME/shares/mcds.yaml | type - binary +@@@@@@@@@@@@ change + +This move means that if we wish to export and import we do not need to fiddle with chapter files vs file files. 
+ +--- + +## Advanced | Sub Lists and Sub Dictionaries + +### Introduce Concept of Lists, Sets and Dictionaries within the Verse Mini Dictionary + +This concept will come with more commands - like so + +safe add favfoods rice +safe insert favfoods |5| potato ## Note first index is 0 -> Also -2 is 2nd last | default is -1 (append at the end) +safe remove favfoods chicken +safe pop favfoods |3| +safe place cityfacts {} +safe place cityfacts { "london" => "6,200,000", "beijing" => "20,500,000", "new york" => "9,300,000" } +safe get cityfacts beijing +safe remove cityfacts "new york" + +Also you can now print in many formats including --hex, --json, --base64, --xml, --ini, --yaml + + +--- + +## Import Export Methodology + +Now build export to simply spit out everything into plain text chapter files (within safe workspace - export section). +Then the json chapter files are tarred and compressed. +Build import to uncompress then unzip then use the JSON to re-create the database + +--- + +## Upgrade methodology + +This move opens the door to safe's beautifully simple upgrade methodology. To upgrade safe to a major or minor version you + +- use the outgoing version to export all books +- then we upgrade safe +- then we use the new safe software to import and you are done. + +--- + +Now we have cleared the path for a SIMPLE Backup and Restore method. + +## Backup Restore Methodology + +The backup/restore MUST BE VERSION AGNOSTIC (in as far as is human and machinely possible. +Employ the export first giving us first zip file. +Then add a backup meta-data file with details like who when why which tag which version and most IMPORTANTLY the random IV and SALT for the key that locks the exported content file. + +The backup method retars up compresse both the metadata and the locked file. The new filename is like this. + + safe.backup.<>.<>.<>.tar.gz + +It adds it to the local safe backup workspace. It can only be done when logged in. 
+ + safe restore /path/to/backup/file.tar.gz + +A restore will override the current in-place repository (after creating a backup of it) and user given option to rollback the restore. + +This method (theoretially) allows a version 3.428.24952 to restore an export of version 1.823.03497 + +--- + +## Safe's Concurrency Methodology + +A safe repository (book) can be changed by one session but read concurrently by multiple sessions. + +Directory Links are NOT PORTABLE to use to point to the active workspace especially if we the safe root folder is on a USB key. +A GOOD engough concurrency technique is a lock file in the BOOK's root folder that is named `safe.concurrency.lockfile.<>` + +The contents of the file will hold the relative directory name (session ID based) that has the lock and the session ID that had it before that (if not first). + +The . is used to when the first read/write login session occurs. Subsequent logins for a read/write session will then have 2 choices in this shell. + +- safe login ali.baba --steal # take over the primary read/write session +- safe login ali.baba --branch # leave primary session but open one that will not change the price of sugar +- safe login ali.baba --branch=master +- safe login ali.baba --branch=experimental +- safe login ali.baba -b experimental + +safe login --steal + +A third choice arises if we visit the shell holding the directory pointer and logout. + +### safe logout command + +Logout NEVER TOUCHES the lock file (it could have moved on multiple times so only login can act on it). + +However logout DELETES the cipher.file intra-sessionary ciphertext that can be unlocked by session key to retrieve the content key. This action renders it impossible to read or write any data from logged in book. + +A subsequent login can again re-instate this privilege. 
+ +## safe login command + +At the very beginning a repository can come into being through either + +- an init +- or a clone (from git,s3,ssh,local filesystem, http) + +The first repo holds the live link. + +Subsequent logins must perform two checks + +- IS MY DIRECTORY (session) noted as the latest in the lock file (possible if you've logged out of the same shell) +- (if other directory) - Does the intra-sessionary key within that directory's cipher file have a value + +The popup asking the user to STEAL or go READONLY is triggered if the answers above are NO then YES. + +### Safe steal | HowTo + +If intra key has no value then stealing is not necessary so the existence of the --steal flag does not change the price of sugar. + +The Stealing flow of events is to + + - copy the directory into a new one for this session named `<>.<>.<>` + - validate the directory for data consistency (nice to have functionality) + - collect the password and if invalid stop now + - grab the lock file and write it to point it to our directory (we are it) + - create our own intra-sessionary key and write it in within our folder + +### Safe branch | HowTo + +Starting a BRANCH allows you to read and write to a copied branched repository but this branch does not change the price of sugar. + +In the future MERGE functionality may be implemented so that the database branch can be merged back into the master line. + +May a safe overthrow command can be crudely done which rudely overthrows the main (government) line and installs this dictatorish branch as the leader - possibly trashing any changes that the master line may have since the branch occured. + + +## safe gc (garbage collector) | safe workspace prune + +The prune command can delete workspaces if +- they are not the master branch AND +- they have not been changed in this bootup (or a logout has been issued againt them). + +## safe WORO policy + +chapter files can only be written once but can be read often. 
+This policy may make merging and diffs between branches easier in the future. + + + + + + + +### Development + +After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. + +To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org). + +### Contributing + +Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/safe. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct. + +License +------- + +MIT License +Copyright (c) 2006 - 2014 + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Rakefile b/Rakefile new file mode 100644 index 0000000..7f2c280 --- /dev/null +++ b/Rakefile @@ -0,0 +1,16 @@ +require "bundler/gem_tasks" +require "rake/testtask" + +# - +# - This configuration allows us to run "rake test" +# - and invoke minitest to execute all files in the +# - test directory with names ending in "_test.rb". +# - +Rake::TestTask.new(:test) do |t| + t.libs << "test" + t.libs << "lib" + t.test_files = FileList["test/**/*_test.rb"] +end + +task :default => :test + diff --git a/bin/safedb b/bin/safedb new file mode 100755 index 0000000..573ec5f --- /dev/null +++ b/bin/safedb @@ -0,0 +1,5 @@ +#!/usr/bin/env ruby + +require 'interprete' + +Interprete.start(ARGV) diff --git a/lib/configs/README.md b/lib/configs/README.md new file mode 100644 index 0000000..fbd80cc --- /dev/null +++ b/lib/configs/README.md @@ -0,0 +1,58 @@ + +# Modifying Safe's Behaviour | 4 Configuration Scopes + +Safe's behaviour can (by default) be modified in a manner that is scoped in 4 ways. Configuration directives can alter behaviour within + +1. a **book global** scope +2. a **machine local** scope +3. a **shell session** scope and +4. a **machine global** scope + +The scoping concept is similar to Git's --local and --global but it works in a different way. + + +## 1. Book Global Scope + +Directives issued against a safe book **"feel local"** but are global in that the behaviour persists on every machine that works with the book. + +Git's --local is different because cloning the repository on another machine wipe's out the directives. With safe the directives continue to alter behaviour even when the book is cloned and/or used on another machine. + + +## 2. 
Machine Local Scope + +This is similar to Git's --global directive which affects all repositories owned by a user on a given machine. + +Directives with a machine local scope **can influence the behaviour** of every Safe book one logs into on a machine. Move to another machine and the behaviour becomes unstuck. + +== Configuration Directive Precedence + +Note the sentence **can influence behaviour** as opposed to **will influence behaviour**. + +If a directive with a book global scope says "Yes" and the same directive exists but says "No" with machine local scope the "Yes" wins out. + +A book global directive overrides its machine local twin. + + +## 3. Shell Session Scope + +The self explanatory **shell session scoped** directives override their siblings be they book global or machine local. + +Alas, their elevated privileges are countered by relatively short lifespans. Shell session directives only last until either a logout is issued or the shell session comes to an end. + + +## 4. Default | Machine Global Scope + +Did you notice only **one (1) user** is affected by directives with a machine local scope as long as it isn't overriden. + +Directives with a **machine global scope** are the **default** and are set during an install or upgrade. + +They can potentially affect **every user and every safe book**. Even though their longevity is undisputed, their precedence is the lowest when going head to head with their 3 siblings. + +## The Naked Eye + +Directives with a book global scope **aren't visible to the naked eye**. They are encrypted within the master safe database and thus protected from prying eyes. 
+ +The other 3 directive types exist in plain text + +- either where the gem is **installed** (machine global scope) +- or in the INI file in **.safe** off the user's home directory diff --git a/lib/extension/array.rb b/lib/extension/array.rb new file mode 100644 index 0000000..0d34af1 --- /dev/null +++ b/lib/extension/array.rb @@ -0,0 +1,162 @@ +#!/usr/bin/ruby + +# +# Reopen the core ruby Array class and add the below methods to it. +# +# Case Sensitivity rules for [ALL] the below methods that are +# added to the core Ruby string class. +# +# For case insensitive behaviour make sure you downcase both the +# string object and the parameter strings (or strings within +# other parameter objects, like arrays and hashes). +class Array + + + # The returned string is a result of a union (join) of all the + # (expected) string array elements followed by the deletion + # of all non alphanumeric characters. + # + # Disambiguating the String for Cross Platform Use + # + # This behaviour is typically used for transforming text that is + # about to be signed or digested (hashed). Removing all the non + # alpha-numeric characters disambiguates the string. + # + # An example is the exclusion of line ending characters which in + # Windows are different from Linux. + # + # This disambiguation means that signing functions will return the + # same result on widely variant platfoms like Windows vs CoreOS. + # + # @return [String] + # Returns the alphanumeric union of the strings within this array. + # + # @raise [ArgumentError] + # if the array is nil or empty. Also an error will be thrown if + # the array contains objects that cannot be naturally converted + # to a string. + def alphanumeric_union + raise ArgumentError, "Cannot do alphanumeric union on an empty array." if self.empty? + return self.join.to_alphanumeric + end + + + # Log the array using our logging mixin by printing every array + # item into its own log line. 
In most cases we (the array) are + # a list of strings, however if not, each item's to_string method + # is invoked and the result printed using one log line. + # + # The INFO log level is used to log the lines - if this is not + # appropriate create a (level) parameterized log lines method. + def log_lines + + self.each do |line| + clean_line = line.to_s.chomp.gsub("\\n","") + log.info(x) { line } if clean_line.length > 0 + end + + end + + + # Get the text [in between] this and that delimeter [exclusively]. + # Exclusively means the returned text [does not] include either of + # the matched delimeters (although an unmatched instance of [this] + # delimeter may appear in the in-between text). + # + # -------------------- + # Multiple Delimiters + # -------------------- + # + # When multiple delimiters exist, the text returned is in between the + # + # [a] - first occurrence of [this] delimeter AND the + # [b] - 1st occurrence of [that] delimeter [AFTER] the 1st delimiter + # + # Instances of [that] delimiter occurring before [this] are ignored. + # The text could contain [this] delimeter instances but is guaranteed + # not to contain a [that] delimeter. 
+ # + # ----------- + # Parameters + # ----------- + # + # this_delimiter : begin delimeter (not included in returned string) + # that_delimiter : end delimeter (not included in returned string) + # + # ----------- + # Exceptions + # ----------- + # + # An exception (error) will be thrown if + # + # => any nil (or empties) exist in the input parameters + # => [this] delimeter does not appear in the in_string + # => [that] delimeter does not appear after [this] one + # + def before_and_after begin_delimeter, end_delimeter + + Throw.if_nil_or_empty_strings [ self, begin_delimeter, end_delimeter ] + + before_after_lines = [] + in_middle_bit = false + + self.each do |candidate_line| + + is_middle_boundary = !in_middle_bit && candidate_line.downcase.include?(begin_delimeter.downcase) + if is_middle_boundary + in_middle_bit = true + next + end + + unless in_middle_bit + before_after_lines.push candidate_line + next + end + + #-- + #-- Now we are definitely in the middle bit. + #-- Let's check for the middle end delimeter + #-- + if candidate_line.downcase.include? end_delimeter.downcase + in_middle_bit = false + end + + end + + return before_after_lines + + end + + + def middlle_bit begin_delimeter, end_delimeter + + Throw.if_nil_or_empty_strings [ self, begin_delimeter, end_delimeter ] + + middle_lines = [] + in_middle_bit = false + + self.each do |candidate_line| + + is_middle_boundary = !in_middle_bit && candidate_line.downcase.include?(begin_delimeter.downcase) + if is_middle_boundary + in_middle_bit = true + next + end + + end_of_middle = in_middle_bit && candidate_line.downcase.include?(end_delimeter.downcase) + return middle_lines if end_of_middle + + #-- + #-- We are definitely in the middle bit. + #-- + middle_lines.push(candidate_line) if in_middle_bit + + end + + unreachable_str = "This point should be unreachable unless facts are ended." 
+ raise RuntimeError.new unreachable_str + + end + + +end diff --git a/lib/extension/dir.rb b/lib/extension/dir.rb new file mode 100644 index 0000000..cd0f0cb --- /dev/null +++ b/lib/extension/dir.rb @@ -0,0 +1,35 @@ +#!/usr/bin/ruby + +# -- +# -- Reopen the core ruby Dirctory class and add the below methods to it. +# -- +class Dir + + # -- + # -- Put all the files starting with the given string in + # -- alphabetical ascending order and then return the file + # -- that comes last. + # -- + # -- Throw an exception if no file in this folder starts + # -- with the given string + # -- + def ascii_order_file_starting_with starts_with_string + + recently_added_file = nil + filepath_leadstr = File.join self.path, starts_with_string + Dir.glob("#{filepath_leadstr}*").sort.each do |candidate_file| + + next if File.directory? candidate_file + recently_added_file = candidate_file + + end + + Throw.if_nil recently_added_file + Throw.if_not_exists recently_added_file + return recently_added_file + + end + + + +end diff --git a/lib/extension/file.rb b/lib/extension/file.rb new file mode 100644 index 0000000..fc2dd1d --- /dev/null +++ b/lib/extension/file.rb @@ -0,0 +1,123 @@ +#!/usr/bin/ruby + +# Reopen the core ruby File class and add the below methods to it. +class File + + # Get the full filepath of a sister file that potentially lives + # in the same directory that the leaf class is executing from and + # has the same name as the leaf class but a different extension. + # + # == Usage + # + # If class OpenFoo:Bar extends class OpenFoo:Baz and we are looking + # for an INI file in the folder that OpenFoo:Bar lives in we can + # call this method within OpenFoo:Baz like this. 
+ # + # ini_filepath = sister_filepath( "ini", :execute ) + # # => /var/lib/gems/2.5.0/gems/fooey-0.2.99/lib/barry/bazzy/bar.ini + # + # == Common Implementation + # + # Object orientation scuppers the commonly used technique which + # derives the path from __FILE__ + # + # class_directory = File.dirname( __FILE__ ) + # leaf_class_name = self.class.name.split(":").last.downcase + # sister_filepath = File.join ( class_directory, "#{leaf_class_name}.#{extension}" ) + # + # With object orientation - running the above code within the + # abstracted (parent) class would produce a resultant filepath + # based on the folder the parent class is in rather than the + # extended "concrete" class. + # + # == Value Proposition + # + # You can call this method from the parent (abstract) class and it + # will still correctly return the path to the potential sister file + # living in the directory that the leaf class sits in. + # + # Put differently - this extension method allows code executing in + # the parent class to correctly pinpoint a file in the directory of + # the leaf class be it in the same or a different folder. + # + # @param caller + # the calling class object usually passed in using self + # + # @param extension + # the extension of a sister file that carries the same simple + # (downcased) name of the leaf class of this method's caller. + # + # Omit the (segregating) period character when providing this + # extension parameter. + # + # @param method_symbol + # the method name in symbolic form of any method defined in + # the leaf class even if the method overrides one of the same + # name in the parent class. + # + # @return the filepath of a potential sister file living in the same + # directory as the class, bearing the same (downcased) name + # as the class with the specified extension. 
+ def self.sister_filepath caller, extension, method_symbol + + leaf_classname = caller.class.name.split(":").last.downcase + execute_method = caller.method( method_symbol ) + leaf_classpath = execute_method.source_location.first + leaf_directory = File.dirname( leaf_classpath ) + lower_filename = "#{leaf_classname}.#{extension}" + return File.join( leaf_directory, lower_filename ) + + end + + + # This method adds (logging its own contents) behaviour to + # the standard library {File} class. If this File points to + # a directory - that folder's single level content files are + # listed inside the logs. + # + # The DEBUG log level is used for logging. To change this + # create a new parameterized method. + # + # @param file_context [String] context denotes the whys and wherefores of this file. + def log_contents file_context + + ## This will fail - add physical raise statement. + Throw.if_not_exists self + + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + log.debug(x) { "# -- The File Path to Log => #{self}" } + + hr_file_size = PrettyPrint.byte_size( File.size(self) ) + dotless_extension = File.extname( self )[1..-1] + parent_dir_name = File.basename( File.dirname( self ) ) + file_name = File.basename self + is_zip = dotless_extension.eql? "zip" + + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + log.debug(x) { "# -- File Name => #{file_name}" } + log.debug(x) { "# -- File Size => #{hr_file_size}" } + log.debug(x) { "# -- File Type => #{file_context}" } + log.debug(x) { "# -- In Folder => #{parent_dir_name}" } + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + + log.debug(x) { "File #{file_name} is a zip (binary) file." 
} if is_zip + return if is_zip + + File.open( self, "r") do | file_obj | + line_no = 1 + file_obj.each_line do | file_line | + line_num = sprintf '%03d', line_no + clean_line = file_line.chomp.strip + log.debug(x) { "# -- [#{line_num}] - #{clean_line}" } + line_no += 1 + end + end + + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + log.debug(x) { "# -- [#{file_context}] End of File [ #{File.basename(self)} ]" } + log.debug(x) { "# -- ------------------------------------------------------------------------ #" } + + end + +end diff --git a/lib/extension/hash.rb b/lib/extension/hash.rb new file mode 100644 index 0000000..9dc42e5 --- /dev/null +++ b/lib/extension/hash.rb @@ -0,0 +1,33 @@ +#!/usr/bin/ruby + +# Reopen the core ruby Hash class and add the below methods to it. +class Hash + + # This method adds (logging its own contents) behaviour to + # the standard library {Hash} class. + # + # @note This behaviour does not consider that SECRETS may be inside + # the key value maps - it logs itself without a care in the world. + # This functionality must be included if this behaviourr is used by + # any cryptography classes. + # + # The DEBUG log level is used for logging. To change this + # create a new parameterized method. + def log_contents + + log.debug(x) { "# --- ----------------------------------------------" } + log.debug(x) { "# --- Map has [#{self.length}] key/value pairs." 
} + log.debug(x) { "# --- ----------------------------------------------" } + + self.each do |the_key, the_value| + + padded_key = sprintf '%-33s', the_key + log.debug(x) { "# --- #{padded_key} => #{the_value}" } + + end + + log.debug(x) { "# --- ----------------------------------------------" } + + end + +end diff --git a/lib/extension/string.rb b/lib/extension/string.rb new file mode 100644 index 0000000..ca6b281 --- /dev/null +++ b/lib/extension/string.rb @@ -0,0 +1,572 @@ +#!/usr/bin/ruby + + +# Reopen the core ruby String class and add the below methods to it. +# +# Case Sensitivity rules for [ALL] the below methods that are +# added to the core Ruby string class. +# +# For case insensitive behaviour make sure you downcase both the +# string object and the parameter strings (or strings within +# other parameter objects, like arrays and hashes). +class String + + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + ## https://www.di-mgt.com.au/cryptokeys.html + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + + # Encrypt this string with the parameter symmetric encryption/decryption key + # and then return the Base64 (block mode) encoded result. + # + # @example + # cipher_text = "Hello crypt world".encrypt_block_encode "ABC123XYZ" + # original_txt = cipher_text.block_decode_decrypt "ABC123XYZ" + # puts original_txt # "Hello crypt world" + # + # @param crypt_key [String] + # a strong long encryption key that is used to encrypt this string before + # applying the Base64 block encoding. 
+ def encrypt_block_encode crypt_key + encrypted_text = SafeDb::ToolBelt::Blowfish.encryptor( self, crypt_key ) + return Base64.encode64( encrypted_text ) + end + + + + # First apply a base64 (block mode) decode to this string and then use the + # parameter symmetric decryption key to decrypt the result. The output is then + # returned within a new string. + # + # @example + # cipher_text = "Hello crypt world".decrypt_block_encode "ABC123XYZ" + # original_txt = cipher_text.block_decode_decrypt "ABC123XYZ" + # puts original_txt # "Hello crypt world" + # + # @param crypt_key [String] + # a strong long decryption key that is used to decrypt this string after + # the Base64 block decoding has been applied. + def block_decode_decrypt crypt_key + the_ciphertxt = Base64.decode64( self ) + return SafeDb::ToolBelt::Blowfish.decryptor( the_ciphertxt, crypt_key ) + end + + + + # Encrypt this string with the parameter symmetric encryption/decryption key + # and then return the Base64 (url safe mode) encoded result. + # + # The output will be a single line and differs from the block mode with + # + # - underscores printed instead of forward slash characters + # - hyphens printed instead of plus characters + # - no (blocked) carriage return or new line characters + # + # Note however that sometimes one or more equals characters will be printed at + # the end of the string by way of padding. In places like environment variables + # that are sensitive to the equals character this can be replaced by an @ + # symbol. + # + # @example + # cipher_text = "Hello @:==:@ world".encrypt_url_encode "ABC123XYZ" + # original_txt = cipher_text.url_decode_decrypt "ABC123XYZ" + # puts original_txt # "Hello @:==:@ world" + # + # @param crypt_key [String] + # a strong long encryption key that is used to encrypt this string before + # applying the Base64 ul safe encoding. 
+ def encrypt_url_encode crypt_key + + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + ## https://www.di-mgt.com.au/cryptokeys.html + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + ## ################################################ + + log.info(x){ "Encrypt Length => [ #{self.length} ]" } + log.info(x){ "The Key Length => [ #{crypt_key.length} ]" } + log.info(x){ "Encrypt String => [ #{self} ]" } + log.info(x){ "Encryption Key => [ #{crypt_key} ]" } + + encrypted_text = SafeDb::ToolBelt::Blowfish.encryptor( self, crypt_key ) + + log.info(x){ "Encrypt Result => [ #{encrypted_text} ]" } + log.info(x){ "Encrypted Text => [ #{Base64.urlsafe_encode64(encrypted_text)} ]" } + + return Base64.urlsafe_encode64(encrypted_text) + + end + + + + # First apply a base64 (url safe mode) decode to this string and then use the + # parameter symmetric decryption key to decrypt the result. The output is then + # returned within a new string. + # + # The input must will be a single line and differs from the block mode with + # + # - underscores printed instead of forward slash characters + # - hyphens printed instead of plus characters + # - no (blocked) carriage return or new line characters + # + # @example + # cipher_text = "Hello @:==:@ world".encrypt_url_encode "ABC123XYZ" + # original_txt = cipher_text.url_decode_decrypt "ABC123XYZ" + # puts original_txt # "Hello @:==:@ world" + # + # @param crypt_key [String] + # a strong long decryption key that is used to decrypt this string after + # the Base64 url safe decoding has been applied. 
+ def url_decode_decrypt crypt_key + the_ciphertxt = Base64.urlsafe_decode64( self ) + return SafeDb::ToolBelt::Blowfish.decryptor( the_ciphertxt, crypt_key ) + end + + + + + # Overtly long file paths (eg in logs) can hamper readability so this + # human readable filepath converter counters the problem by + # returning (only) the 2 immediate ancestors of the filepath. + # + # So this method returns the name of the grandparent folder then parent folder + # and then the most significant file (or folder) name. + # + # When this is not possible due to the filepath being colisively near the + # filesystem's root, it returns the parameter name. + # + # @example + # A really long input like + # => /home/joe/project/degrees/math/2020 + # is reduced to + # => degrees/math/2020 + # + # @return [String] the segmented 3 most significant path name elements. + def hr_path + + object_name = File.basename self + parent_folder = File.dirname self + parent_name = File.basename parent_folder + granny_folder = File.dirname parent_folder + granny_name = File.basename granny_folder + + return [granny_name,parent_name,object_name].join("/") + + end + + + # Return a new string matching this one with every non alpha-numeric + # character removed. This string is left unchanged. + # + # Spaces, hyphens, underscores, periods are all removed. The only + # characters left standing belong to a set of 62 and are + # + # - a to z + # - A to Z + # - 0 to 9 + # + # @return [String] + # Remove any character that is not alphanumeric, a to z, A to Z + # and 0 to 9 and return a new string leaving this one unchanged. + def to_alphanumeric + return self.delete("^A-Za-z0-9") + end + + + # Find the length of this string and return a string that is the + # concatenated union of this string and its integer length. + # If this string is empty a string of length one ie "0" will be + # returned. 
+ # + # @return [String] + # Return this string with a cheeky integer tagged onto the end + # that represents the (pre-concat) length of the string. + def concat_length + return self + "#{self.length}" + end + + + # Get the text [in between] this and that delimiter [exclusively]. + # Exclusively means the returned text [does not] include either of + # the matched delimiters (although an unmatched instance of [this] + # delimiter may appear in the in-between text). + # + # ### Multiple Delimiters + # + # When multiple delimiters exist, the text returned is in between the + # + # - first occurrence of [this] delimiter AND the + # - 1st occurrence of [that] delimiter [AFTER] the 1st delimiter + # + # Instances of [that] delimiter occurring before [this] are ignored. + # The text could contain [this] delimiter instances but is guaranteed + # not to contain a [that] delimiter. + # + # @throw an exception (error) will be thrown if + # + # - any nil (or empties) exist in the input parameters + # - **this** delimiter does not appear in the in_string + # - **that** delimiter does not appear after [this] one + # + # @param this_delimiter [String] begin delimiter (not included in returned string) + # @param that_delimiter [String] end delimiter (not included in returned string) + # + # @return [String] the text in between (excluding) the two parameter delimiters + def in_between this_delimiter, that_delimiter + + raise ArgumentError, "This string is NIL or empty." if self.nil? || self.empty? + raise ArgumentError, "Begin delimiter is NIL or empty." if this_delimiter.nil? || this_delimiter.empty? + raise ArgumentError, "End delimiter is NIL or empty." if that_delimiter.nil? || that_delimiter.empty? 
+ + scanner_1 = StringScanner.new self + scanner_1.scan_until /#{this_delimiter}/ + scanner_2 = StringScanner.new scanner_1.post_match + scanner_2.scan_until /#{that_delimiter}/ + + in_between_text = scanner_2.pre_match.strip + return in_between_text + + end + + + # To hex converts this string to hexadecimal form and returns + # the result leaving this string unchanged. + # @return [String] hexadecimal representation of this string + def to_hex + + return self.unpack("H*").first + + end + + + # From hex converts this (assumed) hexadecimal string back into + # its normal string form and returns the result leaving this string + # unchanged. + # @return [String] string that matches the hexadecimal representation + def from_hex + + return [self].pack("H*") + + end + + + # Flatten (lower) a camel cased string and add periods to + # denote separation where the capital letters used to be. + # + # Example behaviour is illustrated + # + # - in => ObjectOriented + # - out => object.oriented + # + # Even when a capital letter does not lead lowercase characters + # the behaviour should resemble this. + # + # - in => SuperX + # - out => super.x + # + # + # And if every letter is uppercase, each one represents its + # own section like this. + # + # - in => BEAST + # - out => b.e.a.s.t + # + # == Flatten Class Names + # + # If the string comes in as a class name we can expect it to + # contain colons like the below examples. + # This::That + # ::That + # This::That::TheOther + # + # So we find the last index of a colon and then continue as per + # the above with flattening the string. + # + # @return [String] a flatten (period separated) version of this camel cased string + def do_flatten + + to_flatten_str = self + + last_colon_index = to_flatten_str.rindex ":" + ends_with_colon = to_flatten_str[-1].eql? ":" + unless ( last_colon_index.nil? || ends_with_colon ) + to_flatten_str = to_flatten_str[ (last_colon_index+1) .. 
-1 ] + end + + snapped_str = "" + to_flatten_str.each_char do |this_char| + is_lower = "#{this_char}".is_all_lowercase? + snapped_str += "." unless is_lower || snapped_str.empty? + snapped_str += this_char.downcase + end + + return snapped_str + + end + + + + # Return true if every character in this string is lowercase. + # Note that if this string is empty this method returns true. + # + # @return true if every alpha character in this string is lowercase + def is_all_lowercase? + return self.downcase.eql? self + end + + + + # Flatten (lower) a camel cased string and add periods to + # denote separation where the capital letters used to be. + # The inverse operation to [ do_flatten ] which resurrects + # this (expected) period separated string changing it back + # to a camel (mountain) cased string. + # + # Example behaviour is illustrated + # + # - in => object.oriented + # - out => ObjectOriented + # + # Even when a single character exists to the right of the period + # the behaviour should resemble this. + # + # - in => super.x + # - out => SuperX + # + # + # And if every letter is period separated + # + # - in => b.e.a.s.t + # - out => BEAST + # + # @return [String] camel cased version of this flattened (period separated) string + def un_flatten + + segment_array = self.strip.split "." + resurrected_arr = Array.new + + segment_array.each do |seg_word| + resurrected_arr.push seg_word.capitalize + end + + undone_str = resurrected_arr.join + log.info(x){ "unflattening => [#{self}] and resurrecting to => [#{undone_str}]" } + + return undone_str + + end + + + + # -- + # Return true if the [little string] within this + # string object is both + # -- + # a] topped by the parameter prefix AND + # b] tailed by the parameter postfix + # -- + # ----------------------------------------- + # In the below example [true] is returned + # ----------------------------------------- + # -- + # This [String] => "Hey [<-secrets->] are juicy." 
+ # little string => "secrets" + # topped string => "[<-" + # tailed string => "->]" + # -- + # Why true? Because the little string "secret" is + # (wrapped) topped by "[<-" and tailed by "->]" + # -- + # ----------------------------------------- + # Assumptions | Constraints | Boundaries + # ----------------------------------------- + # -- + # - all matches are [case sensitive] + # - this string must contain little_str + # - one strike and its true + # (if little string appears more than once) + # so => "all secrets, most [<-secrets->] r juicy" + # => true as long as (at least) one is wrapped + # -- + # -- + def has_wrapped? little_str, prefix, postfix + + return self.include?( prefix + little_str + postfix ) + + end + + + # Sandwich the first occurrence of a substring in + # this string with the specified pre and postfix. + # + # This string contains the little string and an + # IN-PLACE change is performed with the first + # occurrence of the little string being prefixed + # and postfixed with the 2 parameter strings. + # + # Example of sandwiching [wrapping] + # + # - [String] => "Hey secrets are juicy." + # - [To_Wrap] => "secrets" + # - [Prefix] => "[<-" + # - [Postfix] => "->]" + # + # [String] => "Hey [<-secrets->] are juicy." + # + # This string IS changed in place. + def sandwich_substr to_wrap_str, prefix, postfix + + occurs_index = self.downcase.index to_wrap_str.downcase + self.insert occurs_index, prefix + shifted_index = occurs_index + prefix.length + to_wrap_str.length + self.insert shifted_index, postfix + + end + + + # The parameter is a list of character sequences and TRUE is returned + # if EVERY ONE of the character sequences is always found nestled somewhere + # within this string. The matching is case-sensitive. + # + # The parameter array can be [empty] but not nil. And the harboured + # character sequences can neither be nil nor empty. 
+ # + # @param word_array [Array] array of string words for the inclusivity test + # + # @return [Boolean] true if EVERY ONE of the char sequences appear somewhere in this string + def includes_all? word_array + + raise ArgumentError, "This string is NIL" if self.nil? + raise ArgumentError, "The parameter word array is NIL" if word_array.nil? + + word_array.each do |word| + + raise ArgumentError, "The word array #{word_array} contains a nil value." if word.nil? + return false unless self.include? word + + end + + return true + + end + + + # The parameter is a list of character sequences and TRUE is returned + # if any one of the character sequences can be found nestled somewhere + # within this string. The matching is case-sensitive. + # + # The parameter array can be [empty] but not nil. And the harboured + # character sequences can neither be nil nor empty. + # + # @param word_array [Array] array of string words for the inclusivity test + # + # @return [Boolean] true if string includes ANY one of the character sequences in array + def includes_any? word_array + + raise ArgumentError, "This string is NIL" if self.nil? + raise ArgumentError, "The parameter word array is NIL" if word_array.nil? + + word_array.each do |word| + + raise ArgumentError, "The word array #{word_array} contains a nil value." if word.nil? + return true if self.include? word + + end + + return false + + end + + + # -- + # Encrypt this string with the parameter encryption/decryption key + # and return the encrypted text as a new string. 
+ # -- + # decrypt_key => the key that will decrypt the output string + # -- + # -- + def encrypt decrypt_key + +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting + +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key + + cipher = OpenSSL::Cipher::Cipher.new('DES-EDE3-CBC').encrypt + cipher.key = Digest::SHA1.hexdigest decrypt_key + crypted = cipher.update(self) + cipher.final + encrypted_text = crypted.unpack('H*')[0].upcase + + return encrypted_text + + end + + + # -- + # Decrypt this string with the parameter encryption/decryption key + # and return the decrypted text as a new string. 
+ # -- + # encrypt_key => the key the input string was encrypted with + # -- + # -- + def decrypt encrypt_key + +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting +## ----> Write a RE-CRYPT method that goes through a folder - decrypting and recrypting + +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key +###### ON Linux improve by changing to OpenSSL::Cipher.new('DES-EDE3-CBC').encrypt +###### ON Linux improve by changing to Digest::SHA2.hexdigest decrypt_key + + cipher = OpenSSL::Cipher::Cipher.new('DES-EDE3-CBC').decrypt + cipher.key = Digest::SHA1.hexdigest encrypt_key + uncrypted = [self].pack("H*").unpack("C*").pack("c*") + decrypted_text = cipher.update(uncrypted) + cipher.final + + return decrypted_text + + end + + + # Log the string which is expected to be delineated. + # If the string originated from a file it will be logged + # line by line. If no line delineation the string will be + # dumped just as a blob. + # + # The INFO log level is used to log the lines - if this is not + # appropriate create a (level) parameterized log lines method. 
+ def log_lines + + self.each_line do |line| + clean_line = line.chomp.gsub("\\n","") + log.info(x) { line } if clean_line.length > 0 + end + + end + +end diff --git a/lib/factbase/facts.safedb.net.ini b/lib/factbase/facts.safedb.net.ini new file mode 100644 index 0000000..665eb28 --- /dev/null +++ b/lib/factbase/facts.safedb.net.ini @@ -0,0 +1,38 @@ + +[global] + +name = safe +min.passwd.len = rb>> 6 +root.domain = devopswiki.co.uk +env.var.name = SECRET_MATERIAL +ratio = rb>> 3 +bit.key.size = rb>> 8192 +key.cipher = rb>> OpenSSL::Cipher::AES256.new(:CBC) + +domain.now.id = current.domain +front.path.id = frontend.path +machine.key.id = machine.p4ssk3y +time.stamp.id = domain.stamp +user.secret.id = user.secret + +stamp.14 = rb>> OpenSession::Stamp.yyjjj_hhmm_sst +stamp.23 = rb>> OpenSession::Stamp.yyjjj_hhmm_ss_nanosec + +separator.a = %$os$% + +repo.name = material_data +config.file = ops.workstation.directive.ini +session.file = ops.session.configuration.ini + +prompt.1 = Enter a Robust Password +prompt.2 = Re-enter that Password + + +[open] + +open.name = session +open.idlen = rb>> 10 +open.keylen = rb>> 56 +open.idname = session.id +open.keyname = session.key +open.pathname = session.path diff --git a/lib/interprete.rb b/lib/interprete.rb new file mode 100644 index 0000000..2b68129 --- /dev/null +++ b/lib/interprete.rb @@ -0,0 +1,462 @@ +require "thor" +require "fileutils" + +require "session/time.stamp" +require "logging/gem.logging" +require "session/require.gem" + + +# Include the logger mixins so that every class can enjoy "import free" +# logging through pointers to the (extended) log behaviour. +include OpenLogger + + +# This standard out sync command flushes text destined for STDOUT immediately, +# without waiting either for a full cache or script completion. +$stdout.sync = true + + +# Recursively require all gems that are either in or under the directory +# that this code is executing from. 
Only use this tool if your library is +# relatively small but highly interconnected. In these instances it raises +# productivity and reduces pesky "not found" exceptions. +OpenSession::RecursivelyRequire.now( __FILE__ ) + + +# This command line processor extends the Thor gem CLI tools in order to +# +# - read the posted commands, options and switches +# - maps the incoming string data to objects +# - assert that the mandatory options exist +# - assert the type of each parameter +# - ensure that the parameter values are in range +# - delegate processing to the registered handlers + +class Interprete < Thor + + + log.info(x) { "request to interact with a safe book has been received." } + + + # With this class option every (and especially the log) use case has + # the option of modifying its behaviour based on the presence and state + # of the --debug switch. + class_option :debug, :type => :boolean + + # The script class option is implemented in the parent {SafeDb::UseCase} + # use case enabling behaviour alteration based on the presence and state of + # the --script flag. + class_option :script, :type => :boolean + + + + # Description of the init configuration call. + desc "init ", "initialize the safe book on this device" + + # If confident that command history cannot be exploited to gain the + # human password or if the agent running safe is itself a script, + # the with option can be used to convey the password. + option :with + + # Initialize the credentials manager, collect the human password and + # manufacture the strong asymmetric public / private keypair. + # + # @param domain_name [String] the domain the software operates under + # @param base_path [String] the path to the base operating directory + def init( domain_name, base_path = nil ) + log.info(x) { "initialize the safe book on this device." 
} + init_uc = SafeDb::Init.new + init_uc.master_p4ss = options[:with] if options[:with] + init_uc.domain_name = domain_name + init_uc.base_path = base_path unless base_path.nil? + init_uc.flow_of_events + end + + + + # Description of the login use case command line call. + desc "login ", "login to the book before interacting with it" + + # If confident that command history cannot be exploited to gain the + # human password or if the agent running safe is itself a script, + # the with option can be used to convey the password. + option :with + + # Login in order to securely interact with your data. + # @param domain_name [String] the domain the software operates under + def login( domain_name = nil ) + log.info(x) { "[usecase] ~> login to the book before interacting with it." } + login_uc = SafeDb::Login.new + login_uc.domain_name = domain_name unless domain_name.nil? + login_uc.master_p4ss = options[:with] if options[:with] + login_uc.flow_of_events + end + + + + # Description of the print use case command line call. + desc "print ", "print the key value at the opened chapter and verse" + + # Print the value of the specified key belonging to a dictionary at + # the opened chapter and verse of the currently logged in book. + # + # @param key_name [String] the key whose value is to be printed + def print key_name + log.info(x) { "[usecase] ~> print the key value at the opened chapter and verse." } + print_uc = SafeDb::Print.new + print_uc.key_name = key_name + print_uc.from_script = options[:script].nil? ? false : options[:script] + print_uc.flow_of_events + end + + + + # Description of the verse use case command line call. + desc "verse", "print the verse name at the opened chapter and verse" + + # Print the name of the verse at the opened chapter and verse location. + def verse + log.info(x) { "[usecase] ~> print the verse name at the opened chapter and verse." } + verse_uc = SafeDb::Verse.new + verse_uc.from_script = options[:script].nil? ? 
false : options[:script] + verse_uc.flow_of_events + end + + + + # Description of the safe token use case. + desc "token", "generate and print out an encrypted (shell bound) session token" + + # Thetoken use cases prints out an encrypted session token tied + # to the workstation and shell environment. + def token + log.info(x) { "[usecase] ~> generate and print out an encrypted (shell bound) session token" } + SafeDb::Token.new.flow_of_events + end + + + + # Description of the open use case command. + desc "open ", "open a chapter and verse to read from or write to" + + # Open up a conduit (path) to the place where we can issue read, create, update, + # and destroy commands. + # + # The allowed characters that makeup chapter and verse aside from alphanumerics are + # + # - dollar signs + # - percent signs + # - ampersands + # - hyphens + # - underscores + # - plus signs + # - equal signs + # - @ signs + # - period characters and + # - question marks + # + # Notably whitespace including spaces and tabs are not allowed. + # + # @param chapter [String] + # the chapter of the logged in book to open + # + # @param verse [String] + # the verse of the logged in book and specified chapter to open + def open chapter, verse + log.info(x) { "[usecase] ~> open a chapter and verse to read from or write to." } + open_uc = SafeDb::Open.new + open_uc.env_path = chapter + open_uc.key_path = verse + open_uc.flow_of_events + end + + + + # Description of the export use case command. + desc "export", "exports the book or chapter or the mini dictionary at verse." + + # Export the entire book if no chapter and verse is specified (achieved with a safe close), + # or the chapter if only the chapter is open (safe shut or safe open <>, or the + # mini-dictionary at the verse if both chapter and verse are open. + def export + log.info(x) { "[usecase] ~> export book chapter content or dictionary at verse in JSON format." 
} + SafeDb::Export.new.flow_of_events + end + + + + # Description of the put secret command. + desc "put ", "put key/value pair into dictionary at open chapter and verse" + + # Put a secret with an id like login/username and a value like joebloggs into the + # context (eg work/laptop) that was opened with the open command. + # + # @param secret_id [String] the id of the secret to put into the opened context + # @param secret_value [String] the value of the secret to put into the opened context + def put secret_id, secret_value + log.info(x) { "[usecase] ~> put key/value pair into dictionary at open chapter and verse." } + put_uc = SafeDb::Put.new + put_uc.secret_id = secret_id + put_uc.secret_value = secret_value + put_uc.flow_of_events + end + + + + # Description of the file command. + desc "file ", "ingest a file into the safe from the filesystem (or S3, ssh, Google Drive)" + + # The file use case pulls a read in from either an accessible readsystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + # + # @param file_key [String] keyname representing the file that is being read in + # @param file_url [String] url of file to ingest and assimilate into the safe + def file file_key, file_url + log.info(x) { "[usecase] ~> file read against key [[ #{file_key} ]]" } + log.info(x) { "[usecase] ~> file read from url [[ #{file_url} ]]" } + file_uc = SafeDb::FileMe.new + file_uc.file_key = file_key + file_uc.file_url = file_url + file_uc.flow_of_events + end + + + + # Description of the eject command. + desc "eject ", "write out ingested file at chapter/verse with specified file key" + + # The eject use case writes out a file that was previously ingested + # and coccooned inside the safe typically with the file command. + # + # @param file_key [String] the key that the file was ingested against + def eject file_key + log.info(x) { "[usecase] ~> eject file at chapter/verse against specified key." 
} + eject_uc = SafeDb::Eject.new + eject_uc.file_key = file_key + eject_uc.flow_of_events + end + + + + # Description of the delete command. + desc "delete ", "delete a line (key/value pair), or a verse, chapter and even a book" + + # The delete use case can delete a single line (key/value pair), or + # a verse, chapter and even a book + # + # @param entity_id [String] the ID of the entity to delete (line, verse, chapter or book) + def delete entity_id + log.info(x) { "[usecase] ~> delete a safe entity with a key id [#{entity_id}]." } + delete_uc = SafeDb::DeleteMe.new + delete_uc.entity_id = entity_id + delete_uc.flow_of_events + end + + + + # Description of the read command. + desc "read ", "read (reread) file either locally or via http, git or ssh" + + # The read use case pulls a read in from either an accessible readsystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + # + # This use case expects a @file_url parameter. The actions it takes are to + # + # - register @in.url to mirror @file_url + # - register @out.url to mirror @file_url + # - check the location of @file_url + # - if no file exists it humbly finishes up + # + # @param file_url [String] url of file to ingest and assimilate into the safe + def read file_url + log.info(x) { "[usecase] ~> read (reread) file from optional url [[ #{file_url} ]]" } + read_uc = SafeDb::Read.new + read_uc.file_url = file_url + read_uc.flow_of_events + end + + + + # Description of the write command. + desc "write ", "write out file at chapter/verse to (optional) file url" + + # The write use case writes out a file that was previously ingested + # and coccooned inside the safe. + # + # @param file_url [String] optional file url marking where to write the file + def write( file_url = nil ) + log.info(x) { "[usecase] ~> write out file at chapter/verse to (optional) file url." } + write_uc = SafeDb::Write.new + write_uc.from_script = options[:script].nil? ? 
false : options[:script] + write_uc.file_url = file_url if file_url + write_uc.flow_of_events + end + + + + # Description of the show secret command. + desc "show", "show dictionary at the opened chapter and verse" + + # Show the secrets at the opened path. These secrets + # are simply written out to the shell console. + def show + log.info(x) { "[usecase] ~> show dictionary at the opened chapter and verse." } + SafeDb::Show.new.flow_of_events + end + + + + # Description of the view command. + desc "view", "print list of chapter and verse combos to console" + + # Display a bird's eye view of the domain's database including + # its envelopes, their keys and imported objects such as files. + def view + log.info(x) { "[usecase] ~> print list of chapter and verse combos to console." } + view_uc = SafeDb::View.new + view_uc.flow_of_events + end + + + + # Description of the goto use case command. + desc "goto ", "shortcut that opens chapter and verse at specified index" + + # Goto is a shortcut (or alias even) for the open command that takes an integer + # index that effectively specifies which and to open. + # + # @param index [Number] + # the integer index chosen from the list procured by the view command. + def goto index + log.info(x) { "[usecase] ~> opens the chapter and verse at index [#{index}]." } + goto_uc = SafeDb::Goto.new + goto_uc.index = index + goto_uc.flow_of_events + + end + + + + # Description of the terraform integration use case command. + desc "terraform ", "runs terraform after exporting IAM credentials at opened location" + + # This terraform use case exports the AWS IAM user access key, secret key and region key + # into (very safe) environment variables and then runs terraform plan, apply or destroy. + # + # This is both ultra secure and extremely convenient because the credentials do not leave + # the safe and exist within (environment variable) memory only for the duration of the + # terraform command. 
+ # + # It is safe because you do not need to expose your AWS credentials in plain text. + # It is convenient because switching IAM users and AWS regions is as easy as typing the now + # ubiquitous safe open command. + # + # safe open <> <> + # + # @param command [String] + # the terraform command to run which is currently limited to plan, apply and destroy. + # This parameter is optional and if nothing is given then "apply" is assumed. + def terraform( command = nil ) + log.info(x) { "[usecase] ~> will export IAM credentials then invoke $ terraform #{command}" } + terraform_uc = SafeDb::Terraform.new + terraform_uc.command = command if command + terraform_uc.flow_of_events + end + + + + # Description of the jenkins integration use case command. + desc "jenkins <> <> <>", "sends credentials to the Jenkins 2 CI service." + + # This Jenkins use case injects for example the AWS IAM user access key, secret key and region key + # into a running Jenkins CI (Continuous Integration) service at the specified (url) location. + # + # safe jenkins post aws http://localhost:8080 + # + # @param command [String] + # + # the action to be taken which is currently limited to be [post]. + # + # @param service [String] + # + # Which service do the credentials being posted originate from? The crrent list includes + # + # - aws ( the 3 IAM user credentials ) + # - docker ( the username / password of docker repository ) + # - git ( the username/password of Git repository ) + # - rubygems ( the username / password of RubyGems package manager account ) + # + # @param url [String] + # + # the full url of the jenkins service for example http://localhost:8080 + # which includes the scheme (http|https) the hostname or ip address and + # the port jenkins is listening on (if not the default 80 or 443). 
+ # + def jenkins( command, service, url ) + + log.info(x) { "[usecase] ~> request to #{command} #{service} credentials to Jenkins at #{url}" } + jenkins_uc = SafeDb::Jenkins.new + + jenkins_uc.command = command if command + jenkins_uc.service = service if service + jenkins_uc.url = url if url + + jenkins_uc.flow_of_events + + end + + + + # Description of the docker repository integration use case command. + desc "docker <>", "logs into or out of the dockerhub repository." + + # This docker use case .... + # + # safe docker login + # safe docker logout + # + # @param command [String] + # the action to be taken which is currently limited to either + # login or logout + def docker( command = "login" ) + + log.info(x) { "[usecase] ~> request to #{command} into or out of a docker repository." } + docker_uc = SafeDb::Docker.new + docker_uc.command = command + docker_uc.flow_of_events + + end + + + + # Description of the vpn use case command. + desc "vpn ", "runs vpn command typically safe vpn up or safe vpn down" + + # This VPN use case connects to the VPN whose specifics are recorded within the vpn.ini + # factfile living in the same directory as the vpn.rb usecase class. + # + # @param command [String] + # the vpn command to run which is currently limited to up or down + # This parameter is optional and if nothing is given then "up" is assumed. + def vpn( command = nil ) + log.info(x) { "[usecase] ~> VPN connection command #{command} has been issued." } + vpn_uc = SafeDb::Vpn.new + vpn_uc.command = command if command + vpn_uc.flow_of_events + end + + + + # Description of the identifier command. + desc "id", "prints out the current timestamp identifiers" + + # Put out the multiple formats of the current timestamp. + def id + log.info(x) { "[usecase] ~> prints out the current timestamp identifiers." 
} + id_uc = SafeDb::Id.new + id_uc.flow_of_events + end + + + +end diff --git a/lib/keytools/PRODUCE_RAND_SEQ_USING_DEV_URANDOM.txt b/lib/keytools/PRODUCE_RAND_SEQ_USING_DEV_URANDOM.txt new file mode 100644 index 0000000000000000000000000000000000000000..099b831cb2b0601ab55f2f657cad9160621b3fa9 GIT binary patch literal 2098 zcmcK0c`zG@0tRq1G&_kMN7sF(bXz4Mi7F{Y+|&^jR1(bkh>TCWcnkYaVG_jkc)w-n@U_-#c&r{pOqRH}g?Qcp}(}NV;Q{z`)-o z(qq8)z)S`hN+f_$P#hl0f(I5;*A<&7p& zXo0SN^uN|i@FK@Y#leY@L3gq7?(VTR2){1YMwy*Hw(d}pZLB>4mrZY$2T1yn(fONT`&amsN| zW=2yiZ#l=JDr~ppzmbJJyl(!U2mGxvO!ucCu~Pd!86|yeIm{`^#exf#Fo?gOF|P@p zGLHjbz1g*y0)(-ABoi(L_9GRG2v6pXv!I2aDrO2yHWPo2*c>d{ODiEF*KDkTlgENl ztjW5Kg~HNZlcgc!G;^*_E$n#^zbhCFGKt z6d|@ot44K;6EIW1B|LiG1AtB)>qtwUsLSQd?h$Wp3ERwjd;Czs+;r~AuTJfLckl+3 zcG?J+8NC|yU+o|}$a=}UqvHT5I&&;jkRfB~@y*6GxfkHfX}vOWUrST;d(8)Znz`Y3 z9~CjbGrCa$4E&5aTzmO}5xg=oK(A_@M&_-mqSP8O&%Z>zd!GEO>9Stjiqc}$;}&#`guQ)Krfn=n$s(NUkxr!gnd4@ z*vhJ1h|XUItUbh--OnDiNc$>XFU*y5eSbS=Wm-$Y9j2GfQeW4VhlPj_8YGjLQp1jP zR#vo(n81;Qe_)vc_i$wD8n_0>p}l`huR+`H+h)0+nt3uEbK^}6NG>| zcJ?6(B5%>0?cg8BJWM!^+a6ex#6fE0;S4Gb%`?fVr(6|2STT~Cv z(lDZhS}UXVNI7QIo30S0aB@o8Hj8EKCN4I1zo27V(@o+Dp7nmqhCmcu)OgJ11Nr>B zGsG!VK$%klqy30>FL9!vR^$}Ueh<`d;$NO6{_f!N5+EUIO&)@)R{06&C~afz(Y06M zm%WP3p%HQ1xlh4byGBv3g+QS?sim$^vcF{@HfqpVLJx&tC|~NzI@hJ_>&bCH8xt~$7gIEjvO?w_+LV)%U$o4I(Bb>UKVP*AP;@dXG)je1V}x-zBPdDlWQj& zSj`KH0)Sl&LhVFh;NYQR1ps9L-tk+&%KC+wxUwmzQ{Gn>%QZ1e*gZ2>?3juf`Wuf! 
zli6bdV>Ork8CM^XNK?Nv2Xm|SYtjirW9CjdlWk2r#A&4X{N@lrt)qb6Q!d!+?7iLf3C~Hd%;n<9+2wtv*NI1st2k}vKxg!%>0)o#a*}%riI0`s z1fPv_9Tyi{78F5NcR^WsT`J$B`d$xD-w2TKkht-U(akN1N`}2l_qe0!1V^GZ6;bU- zf!I*8dvJyJ4nIw8S#TPZ31sOxc+=peD;_T9^+M9;$Da3wgDYj_hCNZ`>L(sd-g4-{ z_c3Kv6uy4TRder0`_C!1C-3#8tJ+ghYfp+zwLiO1?3jP^g z#1C4N>&YgoAE0JZV=ESZyuYF1pSbL)L2MIdu)8w`W(D1lU^)BeeZZ3_aF<4TC?e|7 zrJ1}8+PgE?cHn*>-X(Tvx#QVpv&GgJYp3E;Hw4MbYTIYo|8}tgJ>1XNEo!&Tye$%45=M{HDCdsj{N{13^zzOYb~(N`Vk{K=3o01 z`1Wu44@S(*YhQ4Y{rLs!>w+#ws!zMJcv3OlMZX7J-=P}kc9fu1%}xyYP6;zVHZeWC zonbTwG_4;RESTV){!qGqtgu5@3oJRUY|+=-=yjtvqw{v6I(a7-qW$_rrWp!h_P{FY QP1B*L)xd;y^N*l^0nfg`ng9R* literal 0 HcmV?d00001 diff --git a/lib/keytools/kdf.api.rb b/lib/keytools/kdf.api.rb new file mode 100644 index 0000000..b6fa343 --- /dev/null +++ b/lib/keytools/kdf.api.rb @@ -0,0 +1,243 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + # The OpenKey underlying security strategy is to lock a master index file + # with a symmetric encryption key that is based on two randomly generated + # and amalgamated 55 and 45 character keys and then to lock that key + # (and only that key) with a 256 bit symmetric encryption key derived from + # a human password and generated by at least two cryptographic workhorses known + # as key derivation functions. + # + # Random powerful keys are derived are seeded with 55 random bytes and + # then fed through the master key generator and its two key derivation + # functions (BCrypt and PBKDF2). + # + # == What Does the Master Encryption Key Generator Do? + # + # This class sits at the core of implementing that strategy and works to produce + # 256 bit encryption key derived from a human password which is then minced by + # two best of breed key derivation functions (BCrypt and PBKDF2). 
+ # + # BCrypt (Blowfish) and PBKDF2 are the leading key derivation functions + # whose modus operandi is to convert low entropy human generated passwords + # into a high entropy key that is computationally infeasible to acquire via brute + # force. + # + # == How to Create the Encryption Key + # + # To create a high entropy encryption key this method takes the first + # 168 bits from the 186 bit BCrypt key and the first 96 bits from the + # 132 bit PBKDF2 key and amalgamates them to produce a 264 bit key. + # + # The 264 bit key is then digested to produce a 256bit encryption key. + class KdfApi + + + # BCrypt (Blowfish) and PBKDF2 are the leading key derivation functions + # whose modus operandi is to convert low entropy human generated passwords + # into a high entropy key that is computationally infeasible to acquire via brute + # force. + BCRYPT_SALT_KEY_NAME = "bcrypt.salt" + + + # BCrypt (Blowfish) and PBKDF2 are the leading key derivation functions + # whose modus operandi is to convert low entropy human generated passwords + # into a high entropy key that is computationally infeasible to acquire via brute + # force. + PBKDF2_SALT_KEY_NAME = "pbkdf2.salt" + + + # To create a high entropy encryption key we use the full 180 bits + # from the returned 180 bit BCrypt key. + # + # When amalgamated with the 332 bits from the PBKDF2 Key we + # achieve a powerful union key length of 512 bits. + BCRYPT_KEY_CONTRIBUTION_SIZE = 180 + + + # The first 332 bits are used from the 384 bit key returned by the + # PBKDF2 algorithm. + # + # When amalgamated with the 180 bits from the BCrypt Key we + # achieve a powerful union key length of 512 bits. + PBKDF2_KEY_CONTRIBUTION_SIZE = 332 + + + # To create a high entropy encryption key we use the full 180 bits + # from the returned 180 bit BCrypt key and the first 332 bits from + # the 384 bit PBKDF2 key. + # + # On amalgamation, the outcome is a quality union key length + # of 512 bits. 
+ AMALGAM_KEY_RAW_BIT_SIZE = BCRYPT_KEY_CONTRIBUTION_SIZE + PBKDF2_KEY_CONTRIBUTION_SIZE + + + # This method generates a 256 bit symmetric encryption key by passing a + # textual human sourced secret into two key derivation functions, + # namely BCrypt and PBKDF2. BCrypt, PBKDF2 and SCrypt are today's + # in form best of breed cryptographic workhorses for producing a + # high entropy key from possibly weak human sourced secret text. + # + # Example | Derive Key from Password + # + # key_store = KeyPair.new( "/path/to/kdf-salt-data.ini" ) + # key_store.use( "peter-pan" ) + # human_key = KdfApi.generate_from_password( "my_s3cr3t", key_store ) + # + # strong_key = Key.from_random() + # human_key.encrypt_key( strong_key, key_store ) + # + # strong_key.encrypt_file "/path/to/file-to-encrypt.pdf" + # strong_key.encrypt_text "I am the text to encrypt." + # + # --- + # + # Do not use the key derived from a human secret to encrypt anything + # other than a high entropy key randomly sourced from 48 bytes. + # + # Every time the user logs in, generate (recycle), another human key and + # another strong key and discard the previously outputted cipher texts. + # + # == BCrypt and the PBKDF2 Cryptographic Algorithms + # + # BCrypt (Blowfish) and PBKDF2 are the leading key derivation functions + # that exists to convert low entropy human generated passwords into a high + # entropy key that is computationally infeasible to acquire through brute force. + # + # On amalgamation, the outcome is a quality union key length + # of 512 bits. + # + # == Creating a High Entropy Encryption Key + # + # To create a high entropy encryption key this method takes the first + # 168 bits from the 186 bit BCrypt and the first 96 bits from the 132 + # bit PBKDF2 key and amalgamates them to produce a 264 bit key. + # + # Note that all four of the above numbers are divisable by six (6), for + # representation with a 64 character set, and eight (8), for transport + # via the byte (8 bit) protocols. 
+ # + # Size of BCrypt and PBKDF2 Derived Keys + # + # + --------- - --------- + + # + --------- | --------- + + # | Algorithm | Bit Count | + # ----------- | --------- | + # | BCrypt | 180 Bits | + # | Pbkdf2 | 332 Bits | + # ----------- | --------- | + # | Total | 512 Bits | + # + --------- | --------- + + # + --------- - --------- + + # + # 256 Bit Encryption Key | Remove 8 Bits + # + # The manufactured encryption key, an amalgam of the above now has + # 264 bits carried by 44 Base64 characters. + # + # Just before it is used to encrypt vital keys, eight (8) bits are + # removed from the end of the key. The key is then converted into a + # powerful 32 byte (256 bit) encryption agent and is hashed by the + # SHA256 digest and delivered. + # + # @param human_secret [String] + # a robust human generated password with as much entropy as can + # be mustered. Remember that 40 characters spread randomly over + # the key space of about 90 characters and not relating to any + # dictionary word or name is the way to generate a powerful key + # that has embedded a near 100% entropy rating. + # + # @param key_map [KeyPair] + # The KeyPair storage service must have been initialized and a + # section specified using {KeyPair.use} thus allowing this method + # to write key-value pairs representing the BCrypt and + # PBKDF2 salts through the {KeyPair.set} behaviour. + # + # @return [Key] + # the 256 bit symmetric encryption key derived from a human password + # and passed through two cryptographic workhorses. 
+ def self.generate_from_password human_secret, key_map + + bcrypt_salt = KdfBCrypt.generate_bcrypt_salt + pbkdf2_salt = KeyPbkdf2.generate_pbkdf2_salt + + key_map.set( BCRYPT_SALT_KEY_NAME, bcrypt_salt ) + key_map.set( PBKDF2_SALT_KEY_NAME, pbkdf2_salt ) + + return derive_and_amalgamate( human_secret, bcrypt_salt, pbkdf2_salt ) + + end + + + # Regenerate the viciously unretrievable nor reversable key that was + # generated in the past and with the same salts that were used during + # the original key derivation process. + # + # @param key_map [Hash] + # an instantiated and populated hash object containing the salts + # which were created in the past during the generation. These are + # now vital for a successful regeneration. + # + # @return [Key] + # the 256 bit symmetric encryption key that was previously generated + # from the secret and the cryptographic salts within the key_map. + def self.regenerate_from_salts human_secret, key_map + + bcrypt_salt = key_map.get( BCRYPT_SALT_KEY_NAME ) + pbkdf2_salt = key_map.get( PBKDF2_SALT_KEY_NAME ) + + return derive_and_amalgamate( human_secret, bcrypt_salt, pbkdf2_salt ) + + end + + + + private + + + + def self.derive_and_amalgamate( human_secret, bcrypt_salt, pbkdf2_salt ) + + bcrypt_key = KdfBCrypt.generate_key( human_secret, bcrypt_salt ) + pbkdf2_key = KeyPbkdf2.generate_key( human_secret.reverse, pbkdf2_salt ) + + assert_bcrypt_key_bit_length bcrypt_key + assert_pbkdf2_key_bit_length pbkdf2_key + + amalgam_key = Key.new ( bcrypt_key.to_s[ 0 .. (BCRYPT_KEY_CONTRIBUTION_SIZE-1) ] + pbkdf2_key.to_s[ 0 .. (PBKDF2_KEY_CONTRIBUTION_SIZE-1) ] ) + + assert_amalgam_key_bit_length amalgam_key + + return amalgam_key + + end + + + def self.assert_bcrypt_key_bit_length bcrypt_key + bcrypt_key_bit_length = bcrypt_key.to_s.bytesize + bcrypt_keysize_msg = "Expecting #{KdfBCrypt::BCRYPT_KEY_EXPORT_BIT_LENGTH} not #{bcrypt_key_bit_length} bits in bcrypt key." 
+ raise RuntimeError, bcrypt_keysize_msg unless bcrypt_key_bit_length == KdfBCrypt::BCRYPT_KEY_EXPORT_BIT_LENGTH + end + + + def self.assert_pbkdf2_key_bit_length pbkdf2_key + pbkdf2_key_bit_length = pbkdf2_key.to_s.bytesize + pbkdf2_keysize_msg = "Expecting #{KeyPbkdf2::PBKDF2_EXPORT_BIT_LENGTH} not #{pbkdf2_key_bit_length} bits in pbkdf2 key." + raise RuntimeError, pbkdf2_keysize_msg unless pbkdf2_key_bit_length == KeyPbkdf2::PBKDF2_EXPORT_BIT_LENGTH + end + + + def self.assert_amalgam_key_bit_length amalgam_key + + amalgam_key_bit_length = amalgam_key.to_s.bytesize + amalgam_keysize_msg = "Expecting #{AMALGAM_KEY_RAW_BIT_SIZE} not #{amalgam_key_bit_length} bits in amalgam key." + raise RuntimeError, amalgam_keysize_msg unless amalgam_key_bit_length == AMALGAM_KEY_RAW_BIT_SIZE + end + + + end + + +end diff --git a/lib/keytools/kdf.bcrypt.rb b/lib/keytools/kdf.bcrypt.rb new file mode 100644 index 0000000..fa0371a --- /dev/null +++ b/lib/keytools/kdf.bcrypt.rb @@ -0,0 +1,265 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + # BCrypt is a Blowfish based Key Derivation Function (KDF) that exists to + # convert low entropy human created passwords into a high entropy key that + # is computationally infeasible to acquire through brute force. + # + # As human generated passwords have a relatively small key space, key derivation + # functions must be slow to compute with any implementation. + # + # BCrypt offers a cost parameter that determines (via the powers of two) + # the number of iterations performed. + # + # If the cost parameter is 12, then 4096 iterations (two to the power of 12) will + # be enacted. + # + # == A Cost of 16 is 65,536 iterations + # + # The minimum cost is 4 (16 iterations) and the maximum is 31. + # + # A cost of 16 will result in 2^16 = 65,536 iterations and will slow the + # derivation time to about a second on a powerful 2020 laptop. 
+ # + # == BCrypt Cost Iteration Timings on an Intel i-5 Laptop + # + # The benchmark timings were incredibly consistent and + # took almost exactly twice as long for every step. + # + # An IBM ThinkPad was used to generate the timings. + # + # Memory RAM ~> 15GiB + # Processors ~> Intel(R) Core(TM) i5-7200U CPU @ 2.50GHz + # + # The timing results (for 2 steps) multiplied by four (4). + # + # 3.84 seconds for 2^16 (65,536) iterations + # 0.96 seconds for 2^14 (16,384) iterations + # 0.24 seconds for 2^12 ( 4,096) iterations + # 0.06 seconds for 2^10 ( 1,024) iterations + # + # A double digit iteration cost must be provided to avoid + # an in-built failure trap. The default cost is now 10. + class KdfBCrypt + + require "bcrypt" + + # The iteration count is determined using the powers of + # two so if the iteration integer is 12 there will be two + # to the power of 12 ( 2^12 ) giving 4096 iterations. + # The minimum number is 4 (16 iterations) and the max is 31. + # + # @example + # Configuring 16 into this directive results in + # 2^16 = 65,536 iterations + # + # == BCrypt Cost Iteration Timings on an Intel i-5 Laptop + # + # The benchmark timings were incredibly consistent and + # took almost exactly twice as long for every step. + # + # An IBM ThinkPad was used to generate the timings. + # + # Memory RAM ~> 15GiB + # Processors ~> Intel(R) Core(TM) i5-7200U CPU @ 2.50GHz + # + # The timing results (for 2 steps) multiplied by four (4). + # + # 3.84 seconds for 2^16 (65,536) iterations + # 0.96 seconds for 2^14 (16,384) iterations + # 0.24 seconds for 2^12 ( 4,096) iterations + # 0.06 seconds for 2^10 ( 1,024) iterations + # + # A double digit iteration cost must be provided to avoid + # an in-built failure trap. The default cost is now 10. + BCRYPT_ITERATION_INTEGER = 10 + + # The bcrypt algorithm produces a key that is 181 bits in + # length. The algorithm then converts the binary 181 bits + # into a (6-bit) Radix64 character. 
+ # + # 181 / 6 = 30 remainder 1 (so 31 characters are needed). + BCRYPT_KEY_LENGTH = 31 + + # BCrypt key derivation (from text) implementations truncate + # the first 55 characters of the incoming text. + BCRYPT_MAX_IN_TEXT_LENGTH = 55 + + # The BCrypt algorithm produces 181 raw binary bits which is just + # one bit more than a 30 character base64 string. Hence the algorithm + # puts out 31 characters. + # + # We discard the 31st character because 5 of its 6 bits are 100% + # predictable. Thus the returned key will contribute 180 bits. + BCRYPT_KEY_EXPORT_BIT_LENGTH = 180 + + # The BCrypt algorithm salt string should be 22 characters + # and may include forward slashes and periods. + BCRYPT_SALT_LENGTH = 22 + + # BCrypt outputs a single line of text that holds the prefix + # then the Radix64 encoded salt and finally the Radix64 + # encoded hash key. + # + # The prefix consists of two sections sandwiched within + # two dollar $ signs at the extremeties and a third dollar + # separating them. + # + # The two sections are the + # - BCrypt algorithm version number (2a or 2b) and + # - a power of 2 integer defining the no. of interations + BCRYPT_OUTPUT_TEXT_PREFIX = "$2x$#{BCRYPT_ITERATION_INTEGER}$" + + + # Key generators should use this method to create a BCrypt salt + # string and then call the {generate_key} method passing in the + # salt together with a human generated password in order to derive + # a key. + # + # The salt can be persisted and then resubmitted in order to + # regenerate the same key in the future. + # + # For the BCrypt algorithm this method depends on the constant + # {BCRYPT_ITERATION_INTEGER} so that two to the power of the + # integer is the number of iterations. + # + # A generated salt looks like this assuming the algorithm version + # is 2a and the interation integer is 16. 
+ # + # $2a$16$nkyYKCwljFRtcif6FCXn3e + # + # This method removes the $2a$16$ preamble string and stores only + # the actual salt string whose length should be 22 characters. + # + # Why do BCrypt salts always end with zero, e, u or period? + # + # Two (2) leftover bits is the short answer. + # + # This is because the salts are a random 16 bytes and must be + # stored in base64. The 16 bytes equals 128bits which when converted + # to base64 (6bits per character) results in 21 characters and only + # two leftover bits. + # + # BCrypt Salt => t4bDqoJlHbb/k7bkt4/1Ku (22 characters) + # BCrypt Salt => 9BjuJU67IG9Lz5tYUhOqeO (22 characters) + # BCrypt Salt => grz.QREI35585Y3AaCoCTe (22 characters) + # BCrypt Salt => zsxrVW2RGIltSu.AoS4E7e (22 characters) + # BCrypt Salt => dTlRJZ6ijDDVk2cFoCQHPO (22 characters) + # BCrypt Salt => S9B1azH7oD8L3.CQfxxzJO (22 characters) + # BCrypt Salt => LoZh.q3NdnTIuOmR6gHJF. (22 characters) + # BCrypt Salt => y6DKk23SmgNR863pTZ8nYe (22 characters) + # BCrypt Salt => rokdUF6tg6wHV6F0ymKFme (22 characters) + # BCrypt Salt => jrDpNgh.0OEIYaxsR7E7d. (22 characters) + # + # Don't forget BCrypt uses Radix64 (from OpenBSD). So the two (2) + # leftover bits result in 4 possible values which effectively is + # + # a period (.) + # a zero (0) + # an e (e) + # or a u (u) + # + # @return [String] + # the salt in a printable format like base64, hex or a string + # of ones and zeroes. This salt should be submitted in the exact + # same form to the {generate_key} method. + def self.generate_bcrypt_salt + + full_bcrypt_salt = BCrypt::Engine.generate_salt( BCRYPT_ITERATION_INTEGER ) + main_bcrypt_salt = full_bcrypt_salt[ BCRYPT_OUTPUT_TEXT_PREFIX.length .. 
-1 ] + keep_bcrypt_salt = "#{BCRYPT_ITERATION_INTEGER}#{main_bcrypt_salt}" + assert_bcrypt_salt( keep_bcrypt_salt ) + return keep_bcrypt_salt + + end + + + # Key generators should first use the {generate_salt} method to create + # a BCrypt salt string and then submit it to this method together with + # a human generated password in order to derive a key. + # + # The salt can be persisted and then resubmitted again to this method + # in order to regenerate the same key at any time in the future. + # + # Generate a binary key from the bcrypt password derivation function. + # + # This differs from a server side password to hash usage in that we + # are interested in the 186bit key that bcrypt produces. This method + # returns this reproducible key for use during symmetric encryption and + # decryption. + # + # @param human_secret [String] + # a robust human generated password with as much entropy as can + # be mustered. Remember that 40 characters spread randomly over + # the key space of about 90 characters and not relating to any + # dictionary word or name is the way to generate a powerful key + # that has embedded a near 100% entropy rating. + # + # @param bcrypt_salt [String] + # the salt string that has either been recently generated via the + # {generate_salt} method or read from a persistence store and + # resubmitted here (in the future) to regenerate the same key. + # + # @return [Key] + # an {OpenKey::Key} that has been initialized from the 30 RADIX64 + # character output from the BCrypt algorithm. + # + # The BCrypt algorithm produces 181 raw binary bits which is just + # one bit more than a 30 character base64 string. Hence the algorithm + # puts out 31 characters. + # + # We discard the 31st character because 5 of its 6 bits are 100% + # predictable. Thus the returned key will contribute 180 bits. + def self.generate_key human_secret, bcrypt_salt + + iteration_int = bcrypt_salt[ 0 .. 
1 ] + bcrypt_prefix = "$2x$#{iteration_int}$" + full_salt_str = bcrypt_prefix + bcrypt_salt[ 2 .. -1 ] + + assert_bcrypt_salt( bcrypt_salt ) + + hashed_secret = BCrypt::Engine.hash_secret( human_secret, full_salt_str ) + encoded64_key = BCrypt::Password.new( hashed_secret ).to_s + key_begin_index = BCRYPT_OUTPUT_TEXT_PREFIX.length + BCRYPT_SALT_LENGTH + radix64_key_str = encoded64_key[ key_begin_index .. -1 ] + key_length_mesg = "The BCrypt key length should have #{BCRYPT_KEY_LENGTH} characters." + raise RuntimeError, key_length_mesg unless radix64_key_str.length == BCRYPT_KEY_LENGTH + chopped_radix64_key = radix64_key_str.chop() + + return Key.from_radix64( chopped_radix64_key ) + + end + + + private + + + # --- + # --- Timings Code + # --- + # --- chopped_radix64_key = NIL + # --- require 'benchmark' + # --- timings = Benchmark.measure { + # --- + # --- -- wrapped up code block + # --- + # --- } + # --- + # --- log.info(x) { "BCrypt key generation timings ~> #{timings}" } + # --- + + + def self.assert_bcrypt_salt the_salt + raise RuntimeError, "bcrypt salt not expected to be nil." if the_salt.nil? + bcrypt_total_length = 2 + BCRYPT_SALT_LENGTH + salt_length_msg = "BCrypt salt #{the_salt} is #{the_salt.length} and not #{bcrypt_total_length} characters." + raise RuntimeError, salt_length_msg unless the_salt.length == bcrypt_total_length + end + + + end + + +end diff --git a/lib/keytools/kdf.pbkdf2.rb b/lib/keytools/kdf.pbkdf2.rb new file mode 100644 index 0000000..c632dc8 --- /dev/null +++ b/lib/keytools/kdf.pbkdf2.rb @@ -0,0 +1,262 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + + # PBKDF2 is a powerful leading Key Derivation Function (KDF) that exists to + # convert low entropy human created passwords into a high entropy key that + # is computationally infeasible to acquire through brute force. + # + # As human generated passwords have a relatively small key space, key derivation + # functions must be slow to compute with any implementation. 
+ # + # PBKDF2 offers an iteration count that configures the number of iterations + # performed to create the key. + # + # One million (1,000,000) should be the iteration count's lower bound. + # + # == Upgrading the OpenSSL pbkdf2_hmac Behaviour + # + # As soon as the new Ruby and OpenSSL libraries become commonplace this class should + # be upgraded to use the new and improved {OpenSSL::KDF.pbkdf2_hmac} behaviour + # rather than {OpenSSL::PKCS5.pbkdf2_hmac}. + # + # The difficulty is in detecting the operating system's C libraries that are directly + # accessed for OpenSSL functionality. If the distinction can be made accurately, those + # with newer libraries can reap the benefits immediately. + # + # == PBKDF2 Cost Iteration Timings on an Intel i-5 Laptop + # + # An IBM ThinkPad was used to generate the timings. + # + # Memory RAM ~> 15GiB + # Processors ~> Intel(R) Core(TM) i5-7200U CPU @ 2.50GHz + # + # The timing results show that a prudent value is somewhere + # between one hundred thousand and ten million iterations. + # + # 9.6 seconds for 10,000,000 ten million iterations + # 0.96 seconds for 1,000,000 one million iterations + # 0.096 seconds for 100,000 one hundred thousand iterations + # + # Open key sets iteration counts for PBKDF2 in hexadecimal and + # a valid range starts at 1 and counts up in chunks of a hundred + # thousand (100,000). + # + # 1 ~> 100,000 + # 5 ~> 500,000 + # 10 ~> 1,000,000 + # 16 ~> 16,000,000 + # 256 ~> 256,000,000 + # + # The maximum iteration multiplier allowed is 16,384. + class KeyPbkdf2 + + + # One million iterations is necessary due to the + # growth of GPU driven cloud based computing power + # that is curently being honed by mining BitCoin and training + # neural networks. + # + # == PBKDF2 Cost Iteration Timings on an Intel i-5 Laptop + # + # An IBM ThinkPad was used to generate the timings. 
+ # + # Memory RAM ~> 15GiB + # Processors ~> Intel(R) Core(TM) i5-7200U CPU @ 2.50GHz + # + # The timing results show that a prudent value is somewhere + # between one hundred thousand and ten million iterations. + # + # Open key sets iteration counts for PBKDF2 in hexadecimal and + # a valid range starts at 1 and counts up in chunks of a hundred + # thousand (100,000). + # + # 1 ~> 100,000 + # 5 ~> 500,000 + # 10 ~> 1,000,000 + # 16 ~> 16,000,000 + # 256 ~> 256,000,000 + PBKDF2_ITERATION_MULTIPLIER = 1 + + # The quantity used to multiply the iteration multiplier by to + # gain the iteration count. + ONE_HUNDRED_THOUSAND = 100000 + + + # Documentation for this algorithm says this about the key length. + # + # Make the key length larger than or equal to the output length + # of the underlying digest function, otherwise an attacker could + # simply try to brute-force the key. + # + # According to PKCS#5, security is limited by the output length of + # the underlying digest function, i.e. security is not improved if a + # key length strictly larger than the digest output length is chosen. + # + # Therefore, when using PKCS5 for password storage, it suffices to + # store values equal to the digest output length, nothing is gained + # by storing larger values. + PBKDF2_EXPORT_KEY_LENGTH = OpenSSL::Digest::SHA384.new.digest_length + + + # For a 384 bit digest the key length is 48 bytes and the bit length + # is 384 bits. + PBKDF2_EXPORT_BIT_LENGTH = PBKDF2_EXPORT_KEY_LENGTH * 8 + + + # The documented recommended salt length in bytes for the PBKDF2 + # algorithm is between 16 and 24 bytes. The setting here is + # at the upper bound of that range. + PBKDF2_SALT_LENGTH_BYTES = 24 + + + # Return a random cryptographic salt generated from twenty-four + # random bytes produced by a secure random number generator. The + # returned salt is primarily a Base64 encoded string that can be + # stored and then passed to the {KeyPbkdf2.generate_key} method. 
+ # + # + ------------ + -------- + ------------ + ------------- + + # | | Bits | Bytes | Base64 | + # | ------------ | -------- | ------------ | ------------- | + # | PBKDF2 Salt | 192 Bits | 24 bytes | 32 characters | + # + ------------ + -------- + ------------ + ------------- + + # + # The leading part of the character sequence indicates the length + # of the salt in chunks of 100,000 and is plus sign separated. + # + # 42+12345678abcdefgh12345678ABCDEFGH ~> 4,200,000 iterations + # 9+12345678abcdefgh12345678ABCDEFGH ~> 900,000 iterations + # 100+12345678abcdefgh12345678ABCDEFGH ~> 10,000,000 iterations + # + # Note that the generate key method will convert the trailing 32 + # base64 characters back into a 24 byte binary string. + # + # @return [String] + # a relatively small iteration count multiplier separated from the + # main salt characters by a plus sign. The salt characters will + # consist of 32 base64 characters which can be stored and fed into + # the {generate_key}. + # + # These 32 characters are a representation of the twenty-four (24) + # randomly and securely generated bytes. + def self.generate_pbkdf2_salt + + pbkdf2_salt = Key64.from_bits( Key.to_random_bits( PBKDF2_SALT_LENGTH_BYTES ) ) + return "#{PBKDF2_ITERATION_MULTIPLIER}+#{pbkdf2_salt}" + + end + + + # Generate a 128 bit binary key from the PBKDF2 password derivation + # function. The most important input to this function is the human + # generated key. The best responsibly sourced key with at least 95% + # entropy will contain about 40 characters spread randomly over the + # set of 95 typable characters. 
+ # + # Aside from the human password the other inputs are + # + # - a base64 encoded randomly generated salt of 16 to 24 bytes + # - an iteration count of at least 1 million (due to GPU advances) + # - an output key length that is at least 16 bytes (128 bits) + # - a digest algorithm implementation (we use SHA512K) + # + # The {Key} returned by this method encapsulates the derived + # key of the byte (bit) length specified. + # + # PBKDF2 Output Key Length Note + # + # Documentation for this algorithm says this about the key length. + # + # Typically, the key length should be larger than or equal to the + # output length of the underlying digest function, otherwise an + # attacker could simply try to brute-force the key. According to + # PKCS#5, security is limited by the output length of the underlying + # digest function, i.e. security is not improved if a key length + # strictly larger than the digest output length is chosen. + # + # Therefore, when using PKCS5 for password storage, it suffices to + # store values equal to the digest output length, nothing is gained + # by storing larger values. + # + # Upgrading the OpenSSL pbkdf2_hmac Behaviour + # + # As soon as the new Ruby and OpenSSL libraries become commonplace this class should + # be upgraded to use the new and improved {OpenSSL::KDF.pbkdf2_hmac} behaviour + # rather than {OpenSSL::PKCS5.pbkdf2_hmac}. + # + # The difficulty is in detecting the operating system's C libraries that are directly + # accessed for OpenSSL functionality. If the distinction can be made accurately, those + # with newer libraries can reap the benefits immediately. + # + # @param human_secret [String] + # a robust human generated password with as much entropy as can + # be mustered. Remember that 40 characters spread randomly over + # the key space of about 95 characters and not relating to any + # dictionary word or name is the way to generate a powerful key + # that has embedded a near 100% entropy rating. 
+ # + # @param pbkdf2_string [String] + # this is a relatively small iteration count multiplier separated + # from the main salt characters by a plus sign. The salt characters + # will consist of 32 base64 characters which can be stored and fed + # into the {generate_key}. + # + # The salt string presented here must have either been recently + # generated by {generate_pbkdf2salt} or read from a persistence + # store and resubmitted here in order to regenerate the same key. + # + # @return [Key] + # a key holder containing the key which can then be accessed via + # many different formats. The {Key} returned by this method + # encapsulates the derived key with the specified byte count. + def self.generate_key human_secret, pbkdf2_string + + KeyError.not_new pbkdf2_string, "PBKDF2 Algorithm Salt" + multiplier = pbkdf2_string.split("+")[0].to_i + pbkdf2_salt = pbkdf2_string.split("+")[1] + + mult_msg = "Iteration multiplier is an integer from 1 to 16,384 not [#{multiplier}]." + raise ArgumentError, mult_msg_msg unless( multiplier > 0 && multiplier < 16385 ) + iteration_count = multiplier * ONE_HUNDRED_THOUSAND + + binary_salt = Key.to_binary_from_bit_string( Key64.to_bits( pbkdf2_salt ) ) + err_msg = "Expected salt of #{PBKDF2_SALT_LENGTH_BYTES} bytes not #{binary_salt.length}." 
+ raise ArgumentError, err_msg unless binary_salt.length == PBKDF2_SALT_LENGTH_BYTES + + pbkdf2_key = OpenSSL::PKCS5.pbkdf2_hmac( + human_secret, + binary_salt, + iteration_count, + PBKDF2_EXPORT_KEY_LENGTH, + OpenSSL::Digest::SHA384.new + ) + + return Key.from_binary( pbkdf2_key ) + + end + + + private + + + # --- + # --- Timings Code + # --- + # --- chopped_radix64_key = NIL + # --- require 'benchmark' + # --- timings = Benchmark.measure { + # --- + # --- -- wrapped up code block + # --- + # --- } + # --- + # --- log.info(x) { "PBKDF2 key generation timings ~> #{timings}" } + # --- + + + end + + +end diff --git a/lib/keytools/kdf.scrypt.rb b/lib/keytools/kdf.scrypt.rb new file mode 100644 index 0000000..95225ff --- /dev/null +++ b/lib/keytools/kdf.scrypt.rb @@ -0,0 +1,190 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + + # SCrypt is a Key Derivation Function (KDF) with a reliable OpenSSL + # implementation that converts low entropy password-like text to a + # high entropy key that is computationally infeasible to acquire through brute + # force. + # + # SCrypt is incredibly resistant to attacks using dedicated hardware with + # massive memory to boot. + # + class KdfSCrypt + + # SCrypt salts are recommended to contain 16 and 32 bytes + # inclusive. Here we opt for 24 bytes which unrolls out to + # 192 bits which serializes into 32 base64 characters. + SCRYPT_SALT_BYTE_LENGTH = 24 + + # The iteration count is determined using the powers of + # two so if the iteration integer is 12 there will be two + # to the power of 12 ( 2^12 ) giving 4096 iterations. + # The minimum number is 4 (16 iterations) and the max is 31. + # @example + # Configuring 16 into this directive results in + # 2^16 = 65,536 iterations + # + # This is a safe default and will slow the derivation time + # to about a second on a powerful 2020 laptop. + SCRYPT_ITERATION_INTEGER = 16 + + # The scrypt algorithm produces a key that is 181 bits in + # length. 
The algorithm then converts the binary 181 bits + # into a (6-bit) Radix64 character. + # + # 181 / 6 = 30 remainder 1 (so 31 characters are needed). + SCRYPT_KEY_LENGTH = 31 + + + # When the key is transported using a 64 character set where + # each character is represented by 6 bits - the Scrypt key + # expands to 186 bits rather than the original 181 bits. + # + # This expansion is because of the remainder. + # + # 181 bits divided by 6 is 30 characters plus 1 character + # for the extra bit. + # + # The 31 transported characters then appear as + # 31 times 6 which equals 186 bits. + SCRYPT_KEY_TRANSPORT_LENGTH = 186 + + # The scrypt algorithm salt string should be 22 characters + # and may include forward slashes and periods. + SCRYPT_SALT_LENGTH = 22 + + # Scrypt outputs a single line of text that holds the prefix + # then the Radix64 encoded salt and finally the Radix64 + # encoded hash key. + # + # The prefix consists of two sections sandwiched within + # two dollar $ signs at the extremeties and a third dollar + # separating them. + # + # The two sections are the + # - Scrypt algorithm version number (2a or 2b) and + # - a power of 2 integer defining the no. of interations + SCRYPT_OUTPUT_TEXT_PREFIX = "$2a$#{SCRYPT_ITERATION_INTEGER}$" + + + # Generate a secure random and unpredictable salt suitable for + # the SCrypt algorithm. SCrypt salts are recommended to contain + # 16 and 32 bytes inclusive. Here we opt for 24 bytes which + # unrolls to 192 bits which in turn is 32 base64 characters. + # + # The {OpenKey::KdfSCrypt::SCRYPT_SALT_BYTE_LENGTH} constant + # defines the number of random bytes required for a robust + # SCrypt salt. + # + # The salt can be persisted and then resubmitted in order to + # regenerate the same key in the future. + # + # @return [String] + # the salt in a bit string format which can be converted to + # in order to feed the derivation function or indeed converted + # to base64 in order to persist it. 
+ def self.generate_scrypt_salt + return Key.to_random_bits( SCRYPT_SALT_BYTE_LENGTH ) + end + + + + # Key generators should first use the {generate_salt} method to create + # a Scrypt salt string and then submit it to this method together with + # a human generated password in order to derive a key. + # + # The salt can be persisted and then resubmitted again to this method + # in order to regenerate the same key at any time in the future. + # + # Generate a binary key from the scrypt password derivation function. + # + # This differs from a server side password to hash usage in that we + # are interested in the 186bit key that scrypt produces. This method + # returns this reproducible key for use during symmetric encryption and + # decryption. + # + # @param secret_text [String] + # a robust human generated password with as much entropy as can + # be mustered. Remember that 40 characters spread randomly over + # the key space of about 90 characters and not relating to any + # dictionary word or name is the way to generate a powerful key + # that has embedded a near 100% entropy rating. + # + # @param scrypt_salt [String] + # the salt string that has either been recently generated via the + # {generate_salt} method or read from a persistence store and + # resubmitted here (in the future) to regenerate the same key. + # + # @return [Key] + # a key holder containing the key which can then be accessed via + # many different formats. 
+ def self.generate_key secret_text, scrypt_salt + + binary_salt = Key.to_binary_from_bit_string( scrypt_salt ) + + require "openssl" + + puts "" + puts $LOADED_FEATURES.grep(/openssl/) + puts "" + + scrypt_key = OpenSSL::KDF.scrypt(secret_text, salt: binary_salt, N: 2**SCRYPT_ITERATION_INTEGER, r: 8, p: 1, length: 33) + + + +=begin + hashed_secret = Scrypt::Engine.hash_secret( secret_text, to_scrypt_salt(scrypt_salt) ) + encoded64_key = Scrypt::Password.new( hashed_secret ).to_s + key_begin_index = SCRYPT_OUTPUT_TEXT_PREFIX.length + SCRYPT_SALT_LENGTH + radix64_key_str = encoded64_key[ key_begin_index .. -1 ] + key_length_mesg = "The scrypt key length should have #{SCRYPT_KEY_LENGTH} characters." + raise RuntimeError, key_length_mesg unless radix64_key_str.length == SCRYPT_KEY_LENGTH + + return Key.new(radix64_key_str) +=end + return scrypt_key + end + + + + private + + + def self.scrypt_test_method + + puts "" + puts "##############################################################################" + + key_count = 20 + for n in 0 .. key_count + scrypt_saltbits = OpenKey::KdfSCrypt.generate_scrypt_salt + scrypt_key = OpenKey::KdfSCrypt.generate_key( "abonekanoby", scrypt_saltbits ) + scrypt_saltchar = OpenKey::Key64.from_bits( scrypt_saltbits ) + puts "#{n} Salt => #{scrypt_saltchar} (#{scrypt_saltchar.length}) => Key => #{scrypt_key} (#{scrypt_key.length})" + end + + puts "##############################################################################" + puts "" + + end + + + + def self.to_scrypt_salt the_salt + return SCRYPT_OUTPUT_TEXT_PREFIX + the_salt + end + + def self.assert_scrypt_salt the_salt + raise RuntimeError, "scrypt salt not expected to be nil." if the_salt.nil? + salt_length_msg = "A scrypt salt is expected to contain #{SCRYPT_SALT_LENGTH} characters." 
+ raise RuntimeError, salt_length_msg unless the_salt.length == SCRYPT_SALT_LENGTH + end + + + end + + +end diff --git a/lib/keytools/key.64.rb b/lib/keytools/key.64.rb new file mode 100644 index 0000000..6c4aacb --- /dev/null +++ b/lib/keytools/key.64.rb @@ -0,0 +1,326 @@ +#!/usr/bin/ruby + +module OpenKey + + # First use the class methods to source keys, then use a key's instance + # methods to access its properties and in concert with other symmetrical + # information, you can use the keys to lock (encrypt) or unlock (decrypt) + # other keys and objects. + # + # == Sourcing and Deriving Keys + # + # Keys can be + # + # - sourced from a secure random byte generating function + # - sourced from ciphertext and another (decryption) key + # - generated by passing a secret through key derivation functions + # - regenerated from a secret and previously stored salts + # - sourced from the current unique workstation shell environment + # - sourced from an environment variable containing ciphertext + # + # + # Keys need to be viewed (represented) in multiple ways and the essence + # of the key viewer is to input keys {as_bits}, {as_bytes} and {as_base64} + # and then output the same key (in as far as is possible) - as bits, as + # bytes and as base64. + # + # == Key | To and From Behaviour + # + # Use the From methods to create Keys from a variety of resources + # such as + # + # - a base64 encoded string + # - a binary byte string + # - a string of one and zero bits + # - a hexadecimal representation + # + # Once you have instantiated the key, you will then be able to convert it + # (within reason due to bit, byte and base64 lengths) to any of the above + # key representations. + # + # == Key | Bits Bytes and Base64 + # + # The shoe doesn't always fit when its on the other foot and this is best + # illustratd with a table that maps bits to 8 bit bytes and 6 bit Base64 + # characters. 
+ # + # | --------- | -------- | ------------ | ------------------------------- | + # | Fit? | Bits | Bytes | (and) Base64 | + # | --------- | -------- | ------------ | ------------------------------- | + # | Perfect | 168 Bits | is 21 bytes | 28 Chars - bcrypt chops to this | + # | Perfect | 216 Bits | is 27 bytes | 36 Chars - | + # | Perfect | 264 Bits | is 33 bytes | 44 Chars - holder 4 256bit keys | + # | Perfect | 384 Bits | is 48 bytes | 64 Chars - 216 + 168 equals 384 | + # | --------- | -------- | ------------ | ------------------------------- | + # | Imperfect | 128 Bits | 16 precisely | 22 Chars - 21 + 2 remain bits | + # | Imperfect | 186 Bits | 23 remain 2 | 31 Characers precisely | + # | Imperfect | 256 Bits | 32 precisely | 43 Chars - 42 + 4 remain bits | + # | --------- | -------- | ------------ | ------------------------------- | + # + # Yes, the shoe doesn't always fit when it's on the other foot. + # + # == Schoolboy Error + # + # The strategy is so simple, we call it a schoolboy error. + # + # If we want to use a key with n bits and either n % 6 or n % 8 (or both) + # are not zero - we instantiate a Key with the lowest common + # denominator of 6 and 8 that exceeds n. + # + # So when we request a byte, or base64 representation the viewer will + # truncate (not round down) to the desired length. + # + # + # == YACHT 64 | Yet Another Character Table + # + # This binary key class is a dab hand at converting base64 strings + # into their 6-bit binary string equivalents. + # + # It can convert non-alphanumeric characters within either Base64 or + # Radix64 into the OpenKey YACHT64 standard which has a forward slash + # but neither a plus sign nor a period character. + # + # The Big4 Character Sets | Base64 | UrlSafe64 | Radix64 | YACHT64 + # + # Base64 and Radix64 (from OpenBSD) differ in both the order of characters + # and their choice of the two non-alphanumeric characters. Base64 can also + # contain line breaks and equal signs for padding. 
UrlSafe base64 has different + # choices for the two non alphanum characters in keeping with URL standards. + # + # The character sets for each of the four 64 fomats are as follows. + # + # - Base-64 is A to Z then a to z then 0 to 9 then + then / + # - Radix64 is . then / then 0 to 9 then A to Z then a to z + # - UrlSafeBase64 is Base64 but chars 63/64 are an underscore (_) and hyphen (-) + # - UrlSafeBase64 does not have line breaks and carriage returns (unlike Base64) + # - OpenKey 64 (YACHT64) uses the same 62 characters plus an @ sign and a forward slash + # - The 64 OpenKey 64 characters are obfuscated into a random order + # + # == 4 Non-AlphaNumerics | Base64 | Radix64 | YACHT64 + # + # The behaviour here is happy to convert base64 strings produced by either + # Radix64 or Base64 or UrlSafe Base64. Howeverr it aware of the + # non alpha-numeric characters and converts them before processing + # with the modus operandi that says + # + # - ignore the forward slash in YACHT64, Base64 and Radix64 + # - convert the plus (+) in Base64 to the @ symbol in YACHT64 + # - convert the period (.) in Radix64 to the @ symbol in YACHT64 + # - convert hyphen (-) in Url Safe Base64 into a fwd slash + # - convert underscore (_) in Url Safe Base64 to an @ sign + # - delete the (=) equals padding character used by Base64 + # + # Neither the OpenBSD backed Radix64 nor the OpenKey (YACHT64) entertain the + # concept of padding. + # + # == Mapping Each Character to 6 Binary Bits + # + # We need 6 binary bits to represent a base64 character (and 4 + # bits for hexadecimal). Here is an example mapping between + # a base 64 character, an integer and the six bit binary. 
+ # + # Character Integer Binary (6 Bit) + # + # a 0 000000 + # b 1 000001 + # c 2 000010 + # + # y 25 011001 + # z 26 011010 + # A 27 011011 + # B 28 011100 + # + # 8 60 111100 + # 9 61 111101 + # / 62 111110 + # + 63 111111 + # + class Key64 + + # YACHT stands for Yet Another Character Table and it + # can map binary sequences onto 64 well chosen characters. + # + # The 64 character sets are all similar in that they hold 64 + # characters and they define two non alphanumeric characters + # because the 26 lowercase, 26 uppercase and 10 digits only + # adds up to an agonisingly close 62 characters. + # + YACHT64_CHARACTER_SET = [ + "a", "9", "W", "B", "f", "K", "O", "z", + "3", "s", "1", "5", "c", "n", "E", "J", + "L", "A", "l", "6", "I", "w", "o", "g", + "k", "N", "t", "Y", "S", "%", "T", "b", + "V", "R", "H", "0", "@", "Z", "8", "F", + "G", "j", "u", "m", "M", "h", "4", "p", + "q", "d", "7", "v", "e", "2", "U", "X", + "r", "C", "y", "Q", "D", "x", "P", "i" + ] + + + # Radix64 strings can contain period characters in their midst. + PERIOD = "." + + # Radix64 strings can contain forward slashes in their midst. + FORWARD_SLASH = "/" + + # YACHT64 strings can contain at symbols in their midst. + AT_SYMBOL = "@" + + # YACHT64 strings can contain percent signs in their midst. + PERCENT_SIGN = "%" + + + # Convert the parameter string of ones and zeroes into an + # internal base64 character set known as YACHT for yet another + # character table. + # + # @param bit_string [String] + # a string of ones and zeroes that can be sliced into + # six character chunks with each chunk then being mapped + # to a YACHT64 character. + # + # @return [String] + # printable characters from a set of 62 alpha-numerics + # plus an @ symbol and a percent % sign. + # + # @raise ArgumentError + # If the bit string is nil. + # Or if the bit string length is not a multiple of six. + # Or if it contains any character that is not a 1 or 0. 
+ def self.from_bits bit_string + + nil_err_msg = "The parameter bit string cannot be nil." + raise ArgumentError, nil_err_msg if bit_string.nil? + + bit_size_msg = "The bit string length is not a multiple of #{SIX}." + raise ArgumentError, bit_size_msg unless bit_string.length % SIX == 0 + + num_unknowns = bit_string.delete("10").length + unknowns_msg = "The bit string has #{num_unknowns} characters that are not 1 or 0." + raise ArgumentError, unknowns_msg if num_unknowns > 0 + + characters64 = "" + char_count = bit_string.length / SIX + for n in 0 .. (char_count-1) + six_bit_chunk = bit_string[ (n*SIX), SIX ] + six_bit_index = six_bit_chunk.to_i(2) + characters64 += Key64.character(six_bit_index) + end + + code_size_msg = "Length is #{characters64.length} but #{char_count} is expected." + raise RuntimeError, code_size_msg unless characters64.length == char_count + + return characters64 + + end + + + # Convert the parameter characters based on an internal base64 + # character set (known as YACHT) into a bit string of ones + # and zeroes. + # + # @param char64_string [String] + # The base64 character sequence which which will be used to + # derive the returned bit string. Naturally this character + # sequencee cannot be nil, nor can it contain any characters + # that are not present in {Key64::YACHT64_CHARACTER_SET}. + # + # @return [String] + # a string of ones and zeroes that have been strung out + # from each YACHT64 character. The returned string length of + # ones and zeroes will be exactly 6 times the length of the + # input parameter. + # + # @raise [ArgumentError] + # If a nil or zero length character string is received. + # Or if the character sequence contains a character not present + # in the {Key64::YACHT64_CHARACTER_SET}. + # + # @raise [RuntimeError] + # if the conversion does not result in 6 bits for every character + # in the parameter string. 
    def self.to_bits char64_string

      bit_string = ""
      char64_string.each_char do |the_char|

        # Index lookup doubles as membership validation.
        yacht64_index = YACHT64_CHARACTER_SET.index(the_char)
        assert_yacht64_index( the_char, yacht64_index )
        # "%06d" zero-pads the binary digit string to six places - this
        # works because to_s(2) only ever yields the digits 0 and 1.
        bit_string += "%06d" % [ yacht64_index.to_s(2) ]

      end

      assert_bit_lengths char64_string, bit_string
      return bit_string

    end


    # Convert a string of Radix64 characters into a bit representation which
    # will be 6 times longer than the input parameter. This method first
    # converts the string into the internal YACHT64 format and then converts
    # that to a bit string using the {Key64.to_bits} method.
    #
    # @param radix64_string [String]
    #    the radix64 string to convert into bits. This string will be a subset
    #    of the usual 62 character suspects together with period and forward
    #    slash characters.
    #
    #    This parameter should not contain newlines nor carriage returns.
    #
    # @return [String]
    #    a string of ones and zeroes that represent the bits converted from the
    #    radix64 input. The return value will be exactly 6 times the number of
    #    input characters.
    def self.from_radix64_to_bits radix64_string

      # NOTE(review): this maps period -> @ and forward slash -> % which
      # differs from the class-level notes above ("ignore the forward
      # slash") - confirm the intended character mapping.
      yacht64_chars = radix64_string.gsub( PERIOD, AT_SYMBOL ).gsub( FORWARD_SLASH, PERCENT_SIGN )
      out_bitstring = to_bits( yacht64_chars )
      assert_bit_lengths( radix64_string, out_bitstring )
      return out_bitstring

    end



    private

    # NOTE(review): 'private' has no effect on 'def self.' singleton
    # methods - the class methods below remain publicly callable.


    # Number of bits represented by one base64 character.
    SIX = 6

    # Map a 6-bit integer index (0 to 63 inclusive) onto its YACHT64
    # character.
    #
    # @raise [ArgumentError] if the index is out of the 0..63 bounds.
    def self.character char_index

      index_oob_msg = "The character index must be between 0 and 63 inclusive."
      index_is_oob = char_index < 0 || char_index > 63
      raise ArgumentError, index_oob_msg if index_is_oob
      return YACHT64_CHARACTER_SET[ char_index ]

    end

    # Assert that the output bit string is exactly SIX times the length
    # of the input character string.
    def self.assert_bit_lengths( in_string, out_string )

      in_length = in_string.length
      out_length = out_string.length
      good_ratio = out_length == in_length * SIX
      size_msg = "Bit string length [#{out_length}] not 6 times more than [#{in_length}]."
+ raise RuntimeError, size_msg unless good_ratio + + end + + def self.assert_yacht64_index the_char, yacht64_index + + nil_msg = "Character [ #{the_char} ] not in YACHT character set." + raise ArgumentError, nil_msg if yacht64_index.nil? + + index_msg = "Index of character [ #{the_char} ] not within expected bounds." + all_good = ( yacht64_index >= 0 ) && ( yacht64_index <= 63 ) + raise ArgumentError, index_msg unless all_good + + end + + + end + + +end diff --git a/lib/keytools/key.algo.rb b/lib/keytools/key.algo.rb new file mode 100644 index 0000000..f273bec --- /dev/null +++ b/lib/keytools/key.algo.rb @@ -0,0 +1,109 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + # Algorithms that are quality catalysts in the derivation and entropy spread + # of keys, identifiers and base64 character numbers. + class KeyAlgo + + + # Cherry pick a given number of characters from the character pool + # so that a good spread is achieved. This picker is the anti-pattern + # of just axing the first 5 characters from a 100 character string + # essentially wasting over 90% of the available entropy. + # + # This is the algorithem to cherry pick a spread of characters + # from the pool in the second parameter. 
+ # + # - if the character pool length is a multiple of num_chars all is good otherwise + # - constrict to the highest multiple of the pick size below the pool length + # - divide that number by num_chars to get the first offset and character spacing + # - if spacing is 3, the first character is the 3rd, the second the 6th and so on + # - then return the cherry picked characters + # + # @param pick_size [FixNum] the number of characters to cherry pick + # @param char_pool [String] a pool of characters to cherry pick from + # @return [String] + # a string whose length is the one indicated by the first parameter + # and whose characters contain a predictable, repeatable spread from + # the character pool parameter + def self.cherry_picker( pick_size, char_pool ) + + hmb_limit = highest_multiple_below( pick_size, char_pool.length ) + jump_size = hmb_limit / pick_size + read_point = jump_size + picked_chars = "" + loop do + picked_chars += char_pool[ read_point - 1 ] + read_point += jump_size + break if read_point > hmb_limit + end + + err_msg = "Expected cherry pick size to be #{pick_size} but it was #{picked_chars.length}." + raise RuntimeError, err_msg unless picked_chars.length == pick_size + + return picked_chars + + end + + + # Affectionately known as a hmb, this method returns the + # highest multiple of the first parameter that is below + # (either less than or equal to) the second parameter. 
+ # + # - -------- - ------- - ----------------- - + # | Small | Big | Highest Multiple | + # | Number | Number | Below Big Num | + # | -------- - ------- - ----------------- | + # | 5 | 25 | 25 | + # | 3 | 20 | 18 | + # | 8 | 63 | 56 | + # | 1 | 1 | 1 | + # | 26 | 28 | 26 | + # | 1 | 7 | 7 | + # | 16 | 16 | 16 | + # | -------- - ------- - ----------------- | + # | 10 | 8 | ERROR | + # | -4 | 17 | ERROR | + # | 4 | -17 | ERROR | + # | 0 | 32 | ERROR | + # | 29 | 0 | ERROR | + # | -4 | 0 | ERROR | + # | -------- - ------- - ----------------- | + # - -------- - ------- - ----------------- - + # + # Zeroes and negative numbers cannot be entertained, nor can the + # small number be larger than the big one. + # + # @param small_num [FixNum] + # the highest multiple of this number below the one in the + # next parameter is what will be returned. + # + # @param big_num [FixNum] + # returns either this number or the nearest below it that is + # a multiple of the number in the first parameter. + # + # @raise [ArgumentError] + # if the first parameter is greater than the second + # if either or both parameters are zero or negative + def self.highest_multiple_below small_num, big_num + + arg_issue = (small_num > big_num) || small_num < 1 || big_num < 1 + err_msg = "Invalid args #{small_num} and #{big_num} to HMB function." + raise ArgumentError, err_msg if arg_issue + + for index in 0 .. 
( big_num - 1 ) + invex = big_num - index # an [invex] is an inverted index + return invex if invex % small_num == 0 + end + + raise ArgumentError, "Could not find a multiple of #{small_num} lower than #{big_num}" + + end + + + end + + +end diff --git a/lib/keytools/key.api.rb b/lib/keytools/key.api.rb new file mode 100644 index 0000000..c150e3d --- /dev/null +++ b/lib/keytools/key.api.rb @@ -0,0 +1,1391 @@ +#!/usr/bin/ruby + +module OpenKey + + # Use RubyMine to understand the correlations and dependencies on + # this now monolithic class that must be broken up before meaningful + # effective and efficient progress can be made. + # + # --- + # + # == REFACTOR KEY API TO DRAW OUT POSSIBLY THESE FIVE CONCEPTS. + # + # - [1] the safe tty token + # - [2] the machine configurations in ~/.config/openkey/openkey.app.config.ini + # - [3] the login / logout session crumbs database + # - [4] the master content database holding local config, chapters and verses + # - [5] the safe databases that unmarshal into either JSON or file content + # + # --- + # + # Use the key applications programming interface to transition the + # state of three (3) core keys in accordance with the needs of the + # executing use case. + # + # == KeyApi | The 3 Keys + # + # The three keys service the needs of a command line application + # that executes within a shell environment in a unix envirronment + # or a command prompt in windows. + # + # So what are the 3 keys and what is their purpose. + # + # - shell key | exists to lock the index key created at login + # - human key | exists to lock the index key created at login + # - index key | exists to lock the application's index file + # + # So why do two keys (the shell key and human key) exist to lock the + # same index key? + # + # == KeyApi | Why Lock the Index Key Twice? + # + # On this login, the previous login's human key is regenerated from + # the human password and the saved salts. 
This old human key + # decrypts and reveals the old index key which in turn + # decrypts and reveals the index string. + # + # Both the old human key and the old index key are discarded. + # + # Then 48 bytes of randomness are sourced to generate the new index key. This + # key encrypts the now decrypted index string and is thrown away. The password + # sources a new human key (the salts are saved), and this new key locks the + # index key's source bytes. + # + # The shell key again locks the index key's source bytes. Why twice? + # + # - during subsequent shell command calls the human key is unavailable however + # the index key can be accessed via the shell key. + # + # - when the shell dies (or logout is issued) the shell key dies. Now the index + # key can only be accessed by a login when the password is made available. + # + # That is why the index key is locked twice. The shell key opens it mid-session + # and the regenerated human key opens it during the login of the next session. + # + # == The LifeCycle of each Key + # + # It seems odd that the human key is born during this login then dies + # at the very next one (as stated below). This is because the human key + # isn't the password, the human key is sourced from the password. + # + # So when are the 3 keys born and when do they cease being. 
+ # + # - shell key | is born when the shell is created and dies when the shell dies + # - human key | is born when the user logs in this time and dies at the next login + # - index key | the life of the index key exactly mirrors that of the human key + # + # == The 7 Key API Calls + # + # | - | -------- | ------------ | ------------------------------- | + # | # | Rationale | Use Case | Goals | Tasks | + # | - | ------------------------------- | ------------ | ------------------------------- | + # | 1 | Create and Obfuscate Shell Key | key | x | y | + # | 2 | New App Instance on Workstation | init | x | y | + # | 3 | Login to App Instance in Shell | login | x | y | + # + class KeyApi + + + # This method should only be called once for each application instance + # resident on a workstation (machine) and it derives and writes the identifiers + # into the openkey configuration file. + # + # The Identifiers to Configure + # + # The principal identifiers to derive and configure are the + # + # - identifier for the application instance on this machine + # - global identifier derived for the application instance + # - keystore url location for this app on this machine + # - time the above two identifiers were burned to disk + # + # Set(App) Configuration File + # + # Neither the file nor its parent folder need to exist. We attempt to create + # the directory path and then the file. After this method has executed the + # below directives will be added to the openkey application coniguration. + # + # Config filepath is $HOME/.config/openkey/openkey.app.config.ini + # + # [srn1-apzd] + # app.instance.id = crnl-d3my + # keystore.url.id = /home/joe/credentials/repo + # initialize.time = Fri May 25 11:59:46 2018 ( 18145.1159.462 ) + # + # @param domain_name [String] + # the string reference that points to the application instance + # that is being initialized on this machine. 
+ # + # @param keystore_url [String] + # The keystore url points to where the key metadata protecting + # this application instance lives. The simplest keystores are + # based on files and for them this url is just a folder path. + # + # The keystore URL cannot be N.E.W (nil, empty, whitespace only). + def self.init_app_domain( domain_name, keystore_url ) + + KeyError.not_new( domain_name, self ) + KeyError.not_new( keystore_url, self ) + + aim_id = KeyId.derive_app_instance_machine_id( domain_name ) + app_id = KeyId.derive_app_instance_identifier( domain_name ) + + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + keypairs.use( aim_id ) + keypairs.set( APP_INSTANCE_ID_KEY, app_id ) + keypairs.set( KEYSTORE_IDENTIFIER_KEY, keystore_url ) + keypairs.set( APP_INITIALIZE_TIME, KeyNow.fetch() ) + + # -- + # -- Switch the dominant application domain being used to + # -- the domain that is being initialized right here. + # -- + use_application_domain( domain_name ) + + end + + + # Has the inter-sessionary key ( derived from a human secret ) been setup + # for the application shard referenced in the parameter? + # + # This method returns yes (true) if and only if + # + # - the application's keystore file exists + # - the file contains a breadcrumbs section + # - crumbs exist for human key rederivation + # + # If false return gives the go-ahead to + # + # - collect the human secret (in one of a number of ways) + # - pass it through key derivation functions + # - generate a high entropy power key and lock some initial content with it + # - use the key sourced from the human secret to lock the power key + # - throw away the secret, power key and human sourced key + # - save crumbs (ciphertext, salts, ivs) for content retrieval given secret + # + # Note that the {init_app_domain} method must have been called on this machine + # with the name of this application instance and the keystore url. An error results + # if no file is found at the {MACHINE_CONFIG_FILE} path. 
+ # + # @param domain_name [String] + # a string reference for the in-focus shard of the application + # + # @return [Boolean] + # return true if the human secret for the parameter application name + # has been collected, transformed into a key, that key used to lock the + # power key, then secret and keys deleted, plus a trail of breadcrumbs + # sprinkled to allow the inter-sessionary key to be regenerated + # at the next login. + # + # Lest we forget - buried within this ensemble of activities, is + # generating the high entropy power key, using it to lock the + # application database before discarding it. + def self.is_domain_keys_setup?( domain_name ) + + KeyError.not_new( domain_name, self ) + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + aim_id = KeyId.derive_app_instance_machine_id( domain_name ) + app_id = KeyId.derive_app_instance_identifier( domain_name ) + keypairs.use( aim_id ) + + keystore_file = get_keystore_file_from_domain_name( domain_name ) + return false unless File.exists?( keystore_file ) + + crumbs_db = KeyPair.new( keystore_file ) + return false unless crumbs_db.has_section?( APP_KEY_DB_BREAD_CRUMBS ) + + crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS ) + return crumbs_db.contains?( INTER_KEY_CIPHERTEXT ) + + end + + + # Transform the domain secret into a key, use that key to lock the + # power key, delete the secret and keys and leave behind a trail of + # breadcrumbs sprinkled to allow the inter-sessionary key + # to be regenerated at the next login. + # + # Lest we forget - buried within this ensemble of activities, is + # generating the high entropy power key, using it to lock the + # application database before discarding it. 
+ # + # The use case steps once the human secret is acquired is to + # + # - pass it through key derivation functions + # - generate a high entropy power key and lock some initial content with it + # - use the key sourced from the human secret to lock the power key + # - throw away the secret, power key and human sourced key + # - save crumbs (ciphertext, salts, ivs) for content retrieval given secret + # + # Note that the {init_app_domain} method must have been called on this machine + # with the name of this application instance and the keystore url. An error results + # if no file is found at the {MACHINE_CONFIG_FILE} path. + # + # @param domain_name [String] + # the string reference that points to the application instance + # that is being initialized on this machine. + # + # @param domain_secret [String] + # the secret text that can potentially be cryptographically weak (low entropy). + # This text is severely strengthened and morphed into a key using multiple key + # derivation functions like PBKDF2, BCrypt and SCrypt. + # + # The secret text is discarded and the derived inter-session key is used + # only to encrypt the randomly generated super strong index key, + # before being itself discarded. + # + # @param content_header [String] + # the content header tops the ciphertext storage file with details of how where + # and why the file came to be. + def self.setup_domain_keys( domain_name, domain_secret, content_header ) + + # -- + # -- Get the breadcrumbs trail and + # -- timestamp the moment. + # -- + crumbs_db = get_crumbs_db_from_domain_name( domain_name ) + crumbs_db.set( APP_INSTANCE_SETUP_TIME, KeyNow.fetch() ) + + # -- + # -- Create a new power key and lock the content with it. + # -- Create a new inter key and lock the power key with it. + # -- Leave the necessary breadcrumbs for regeneration. 
+ # -- + recycle_keys( domain_name, domain_secret, crumbs_db, content_header, get_virgin_content( domain_name ) ) + + end + + + # Recycle the inter-sessionary key (based on the secret) and create a new + # content encryption (power) key and lock the parameter content with it + # before returning the new content encryption key. + # + # The {content_ciphertxt_file_from_domain_name} method is used to produce the path at which + # the ciphertext (resulting from locking the parameter content), is stored. + # + # @param domain_name [String] + # + # the (application instance) domain name chosen by the user or the + # machine that is interacting with the OpenKey software. + # + # @param domain_secret [String] + # + # the domain secret that is put through key derivation functions in order + # to attain the strongest possible inter-sessionary key which is used only + # to encrypt and decrypt the high-entropy content encryption key. + # + # @param crumbs_db [KeyPair] + # + # The crumbs database is expected to be initialized with a section + # ready to receive breadcrumb data. The crumbs data injected are + # + # - a random iv for future AES decryption of the parameter content + # - cryptographic salts for future rederivation of the inter-sessionary key + # - the resultant ciphertext from the inter key locking the content key + # + # @param the_content [String] + # + # the app database content whose ciphertext is to be recycled using the + # recycled (newly derived) high entropy random content encryption key. 
    def self.recycle_keys( domain_name, domain_secret, crumbs_db, content_header, the_content )

      KeyError.not_new( domain_name, self )
      KeyError.not_new( domain_secret, self )
      KeyError.not_new( the_content, self )

      # --
      # -- Create a random initialization vector (iv)
      # -- used for AES encryption of virgin content.
      # -- The iv is persisted (as breadcrumbs) in its
      # -- storage format for future decryption.
      # --
      iv_base64_chars = KeyIV.new().for_storage()
      crumbs_db.set( INDEX_DB_CRYPT_IV_KEY, iv_base64_chars )
      random_iv = KeyIV.in_binary( iv_base64_chars )

      # --
      # -- Create a new high entropy power key
      # -- for encrypting the virgin content.
      # --
      power_key = Key.from_random

      # --
      # -- Encrypt the virgin content using the
      # -- power key and the random iv and write
      # -- the Base64 encoded ciphertext into a
      # -- neighbouring file.
      # --
      to_filepath = content_ciphertxt_file_from_domain_name( domain_name )
      binary_ciphertext = power_key.do_encrypt_text( random_iv, the_content )
      binary_to_write( to_filepath, content_header, binary_ciphertext )

      # --
      # -- Derive new inter-sessionary key.
      # -- Use it to encrypt the power key.
      # -- Set the retrieval breadcrumbs.
      # --
      inter_key = KdfApi.generate_from_password( domain_secret, crumbs_db )
      inter_txt = inter_key.do_encrypt_key( power_key )
      crumbs_db.set( INTER_KEY_CIPHERTEXT, inter_txt )

      # --
      # -- Return the just created high entropy
      # -- content encryption (power) key.
      # --
      return power_key

    end



    # At the end of a successful login the old content crypt key will
    # have been re-acquired and discarded, with a fresh one created and
    # put to work protecting the application's content.
    #
    # After reacquisitioning (but before discarding) the old crypt key, the app's
    # key-value database is silently decrypted with it then immediately re-encrypted
    # with the newly created (and locked down) crypt key.
+ # + # Login Recycles 3 things + # + # The three (3) things recycled by this login are + # + # - the human key (sourced by putting the secret text through two key derivation functions) + # - the content crypt key (sourced from a random 48 byte sequence) + # - the content ciphertext (sourced by decrypting with the old and re-encrypting with the new) + # + # Remember that the content crypt key is itself encrypted by two key entities. + # + # The Inter and Intra Session Crypt Keys + # + # This login use case is the only time in the session that the + # human provided secret is made available - hence the inter-session name. + # + # The intra session key is employed by use case calls on within (intra) the + # session it was created within. + # + # The Weakness of the Human Inter Sessionary Key + # + # The weakest link in the human-sourced key is clearly the human. Yes it is + # strengthened by key derivation functions with cost parameters as high is + # tolerable, but despite and in spite of these efforts, poorly chosen short + # passwords are not infeasible to acquire through brute force. + # + # The fallability is countered by invalidating and recycling the (inter session) + # key on every login, thus reducing the time frame available to an attacker. + # + # The Weakness of the Shell Intra Sessionary Key + # + # The shell key hails from a super random (infeasible to crack) source of + # 48 binary bytes. So what is its achilles heel? + # + # The means of protecting the shell key is the weakness. The source of its + # protection key is a motley crue of data unique not just to the workstation, + # but the parent shell. This is also passed through key derivation functions + # to strengthen it. + # + # Temporary Environment Variables + # + # The shell key's ciphertext lives as a short term environment variable so + # when the shell dies the ciphertext dies and any opportunity to resurrect + # the shell key dies with it. 
+ # + # A logout command removes the random iv and ciphertext forged + # when the shell acted to encrypt the content key. Even mid shell session, a + # logout renders the shell key worthless. + # + # Which (BreadCrumbs) endure? + # + # Only 4 things endure post the login (recycle) activities. + # These are the + # + # - salts and iteration counts used to generate the inter-session key + # - index key ciphertext after encryption using the inter-session key + # - index key ciphertext after encryption using the intra-session key + # - content ciphertext after the decrypt re-encrypt activities + # + # + # @param domain_name [String] + # the string reference that points to the application instance + # that is being initialized on this machine. + # + # @param domain_secret [String] + # the secret text that can potentially be cryptographically weak (low entropy). + # This text is severely strengthened and morphed into a key using multiple key + # derivation functions like PBKDF2, BCrypt and SCrypt. + # + # The secret text is discarded and the derived inter-session key is used + # only to encrypt the randomly generated super strong index key, + # before being itself discarded. + # + # The key ring only stores the salts. This means the secret text based key can + # only be regenerated at the next login, which explains the inter-session label. + # + # Note on Password Key Derivation + # For each guess, a brute force attacker would need to perform + # one million PBKDF2 and 65,536 BCrypt algorithm + # iterations. + # + # Even so, a password of 6 characters or less can be successfully + # attacked. With all earth's computing resources working exclusively + # and in concert on attacking one password, it would take over + # one million years to access the key derived from a well spread + # 24 character password. And the key becomes obsolete the next time + # you login. 
  #
  # Use the above information to decide on secrets with sufficient
  # entropy and spread with at least 12 characters.
  #
  # @param content_header [String]
  #    the content header tops the ciphertext storage file with details of how where
  #    and why the file came to be.
  def self.do_login( domain_name, domain_secret, content_header )

    # Get the breadcrumbs trail.
    crumbs_db = get_crumbs_db_from_domain_name( domain_name )

    # Regenerate the old inter-sessionary key from the stored salts, then
    # use it to unlock the old content encryption (power) key and read the
    # old random initialization vector. All three were created during the
    # previous login.
    old_inter_key = KdfApi.regenerate_from_salts( domain_secret, crumbs_db )
    old_power_key = old_inter_key.do_decrypt_key( crumbs_db.get( INTER_KEY_CIPHERTEXT ) )
    old_random_iv = KeyIV.in_binary( crumbs_db.get( INDEX_DB_CRYPT_IV_KEY ) )

    # Read the binary text representing the encrypted content that was
    # last written by any use case capable of changing the application
    # database content.
    from_filepath = content_ciphertxt_file_from_domain_name( domain_name )
    old_crypt_txt = binary_from_read( from_filepath )

    # Decrypt the binary ciphertext that was last written by a use case
    # capable of changing the application database.
    plain_content = old_power_key.do_decrypt_text( old_random_iv, old_crypt_txt )

    # Create a new power key and lock the content with it.
    # Create a new inter key and lock the power key with it.
    # Leave the necessary breadcrumbs for regeneration.
    # Receive back the new power key that re-locked the content.
    power_key = recycle_keys( domain_name, domain_secret, crumbs_db, content_header, plain_content )

    # Regenerate the intra-session key from the session token and use it
    # to encrypt the power key for intra (in) session retrieval.
    intra_key = KeyLocal.regenerate_shell_key( to_token() )
    intra_txt = intra_key.do_encrypt_key( power_key )

    # Set the (ciphertext) breadcrumbs for re-acquiring the content
    # encryption (power) key during (inside) this shell session.
    app_id = KeyId.derive_app_instance_identifier( domain_name )
    unique_id = KeyId.derive_universal_id( app_id, to_token() )
    crumbs_db.use( unique_id )
    crumbs_db.set( INTRA_KEY_CIPHERTEXT, intra_txt )
    crumbs_db.set( SESSION_LOGIN_DATETIME, KeyNow.fetch() )

    # Switch the dominant application domain being used to the
    # domain that has just logged in.
    use_application_domain( domain_name )

  end


  # Switch the application instance that the current shell session is using.
  # Trigger this method either during the login use case or when the user
  # issues an intent to use a different application instance.
  #
  # The machine configuration file at path {MACHINE_CONFIG_FILE} is changed
  # in the following way
  #
  # - a {SESSION_APP_DOMAINS} section is added if one does not exist
  # - the shell session ID key is added (or updated if it exists)
  # - with a value corresponding to the app instance ID (on this machine)
  #
  # Subsequent use cases can now access the application ID by going first to
  # the {SESSION_APP_DOMAINS} section, reading the ID of the app instance on
  # this machine and then using that in turn to read the {APP_INSTANCE_ID_KEY}
  # value. The {APP_INSTANCE_ID_KEY} value is the global ID of the app
  # instance no matter which machine or shell is being used.
  #
  # @param domain_name [String]
  #    the string reference that points to the global application identifier
  #    no matter the machine being used.
+ def self.use_application_domain( domain_name ) + + KeyError.not_new( domain_name, self ) + + aim_id = KeyId.derive_app_instance_machine_id( domain_name ) + sid_id = KeyId.derive_session_id( to_token() ) + + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + keypairs.use( SESSION_APP_DOMAINS ) + keypairs.set( sid_id, aim_id ) + + end + + + # Logout of the shell key session by making the high entropy content + # encryption key irretrievable for all intents and purposes to anyone + # who does not possess the domain secret. + # + # The key logout action is deleting the ciphertext originally produced when + # the intra-sessionary (shell) key encrypted the content encryption key. + # + # Why Isn't the Session Token Deleted? + # + # The session token is left to die by natural causes so that we don't + # interfere with other domain interactions that may be in progress within + # this shell session. + # + # @param domain_name [String] + # the string reference that points to the application instance that we + # are logging out of from the shell on this machine. 
+ def self.do_logout( domain_name ) + + # --> @todo - user should ONLY type in logout | without domain name + # --> @todo - user should ONLY type in logout | without domain name + # --> @todo - user should ONLY type in logout | without domain name + # --> @todo - user should ONLY type in logout | without domain name + # --> @todo - user should ONLY type in logout | without domain name + + + # --> ###################### + # --> Login / Logout Time + # --> ###################### + # --> + # --> During login you create a section heading same as the session ID + # --> You then put the intra-key ciphertext there (from locking power key) + # --> To check if a login has occurred we ensure this session's ID exists as a header in crumbs DB + # --> On logout we remove the session ID and all the subsection crumbs (intra key ciphertext) + # --> Logout makes it impossible to access the power key (now only by seret delivery and the inter key ciphertext) + # --> + + + # -- + # -- Get the breadcrumbs trail. + # -- + crumbs_db = get_crumbs_db_from_domain_name( domain_name ) + + + # -- + # -- Set the (ciphertext) breadcrumbs for re-acquiring the + # -- content encryption (power) key during (inside) this + # -- shell session. + # -- + unique_id = KeyId.derive_universal_id( domain_name ) + crumbs_db.use( unique_id ) + crumbs_db.set( INTRA_KEY_CIPHERTEXT, intra_txt ) + crumbs_db.set( SESSION_LOGOUT_DATETIME, KeyNow.fetch() ) + + end + + + # Has the user orchestrating this shell session logged in? Yes or no? + # If yes then they appear to have supplied the correct secret + # + # - in this shell session + # - on this machine and + # - for this application instance + # + # Use the crumbs found underneath the universal (session) ID within the + # main breadcrumbs file for this application instance. + # + # Note that the system does not rely on this value for its security, it + # exists only to give a pleasant error message. 
+ # + # @return [Boolean] + # return true if a marker denoting that this shell session with this + # application instance on this machine has logged in. Subverting this + # return value only serves to evoke disgraceful degradation. + def self.is_logged_in?( domain_name ) +############## Write this code. +############## Write this code. +############## Write this code. +############## Write this code. +############## Write this code. +############## Write this code. +############## Write this code. + return false unless File.exists?( frontend_keystore_file() ) + + crumbs_db = KeyPair.new( frontend_keystore_file() ) + crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS ) + return false unless crumbs_db.contains?( LOGGED_IN_APP_SESSION_ID ) + + recorded_id = crumbs_db.get( LOGGED_IN_APP_SESSION_ID ) + return recorded_id.eql?( @uni_id ) + + end + + + # Return a date/time string detailing when the master database was first created. + # + # @param the_master_db [Hash] + # the master database to inspect = REFACTOR convert methods into a class instance + # + # @return [String] + # return a date/time string representation denoting when the master database + # was first created. + def self.to_db_create_date( the_master_db ) + return the_master_db[ DB_CREATE_DATE ] + end + + + # Return the domain name of the master database. + # + # @param the_master_db [Hash] + # the master database to inspect = REFACTOR convert methods into a class instance + # + # @return [String] + # return the domain name of the master database. + def self.to_db_domain_name( the_master_db ) + return the_master_db[ DB_DOMAIN_NAME ] + end + + + # Return the domain ID of the master database. + # + # @param the_master_db [Hash] + # the master database to inspect = REFACTOR convert methods into a class instance + # + # @return [String] + # return the domain ID of the master database. 
  def self.to_db_domain_id( the_master_db )
    return the_master_db[ DB_DOMAIN_ID ]
  end


  # Return a dictionary containing a string key and the corresponding master database
  # value whenever the master database key starts with the parameter string.
  #
  # For example if the master database contains a dictionary like this.
  #
  #      envelope@earth => { radius => 24034km, sun_distance_light_minutes => 8 }
  #      textfile@kepler => { filepath => $HOME/keplers_laws.txt, filekey => Nsf8F34dhDT34jLKsLf52 }
  #      envelope@jupiter => { radius => 852837km, sun_distance_light_minutes => 6 }
  #      envelope@pluto => { radius => 2601km, sun_distance_light_minutes => 52 }
  #      textfile@newton => { filepath => $HOME/newtons_laws.txt, filekey => sdDFRTTYu4567fghFG5Jl }
  #
  # with "envelope@" as the start string to match, the returned dictionary
  # would have three (3) elements whose keys are the unique portion of the string.
  #
  #      earth => { radius => 24034km, sun_distance_light_minutes => 8 }
  #      jupiter => { radius => 852837km, sun_distance_light_minutes => 6 }
  #      pluto => { radius => 2601km, sun_distance_light_minutes => 52 }
  #
  # If no matches are found an empty dictionary is returned.
  #
  # @param the_master_db [Hash]
  #    the master database to inspect = REFACTOR convert methods into a class instance
  #
  # @param start_string [String]
  #    the start string to match. Every key in the master database that
  #    starts with this string is considered a match. The corresponding value
  #    of each matching key is included in the returned dictionary.
  #
  # @return [Hash]
  #    a dictionary whose keys are the unique (2nd) portion of the string with corresponding
  #    values and in no particular order.
+ def self.to_matching_dictionary( the_master_db, start_string ) + + matching_dictionary = {} + the_master_db.each_key do | db_key | + next unless db_key.start_with?( start_string ) + dictionary_key = db_key.gsub( start_string, "" ) + matching_dictionary.store( dictionary_key, the_master_db[db_key] ) + end + return matching_dictionary + + end + + + # To read the content we first find the appropriate shell key and the + # appropriate database ciphertext, one decrypts the other to produce the master + # database decryption key which in turn reveals the JSON representation of the + # master database. + # + # The master database JSON is deserialized as a {Hash} and returned. + # + # Steps Taken To Read the Master Database + # + # Reading the master database requires a rostra of actions namely + # + # - reading the path to the keystore breadcrumbs file + # - using the session token to derive the (unique to the) shell key + # - using the shell key and ciphertext to unlock the index key + # - reading the encrypted and encoded content, decoding and decrypting it + # - employing index key, ciphertext and random iv to reveal the content + # + # @param use_grandparent_pid [Boolean] + # + # Optional boolean parameter. If set to true the PID (process ID) used + # as part of an obfuscator key and normally acquired from the parent + # process should now be acquired from the grandparent's process. + # + # Set to true when accessing the safe's credentials from a sub process + # rather than directly through the logged in shell. + # + # @return [String] + # decode, decrypt and hen return the plain text content that was written + # to a file by the {write_content} method. + def self.read_master_db( use_grandparent_pid = false ) + + # -- + # -- Get the filepath to the breadcrumbs file using the trail in + # -- the global configuration left by {use_application_domain}. 
+ # -- + crumbs_db = get_crumbs_db_from_session_token() + + # -- + # -- Get the path to the file holding the ciphertext of the application + # -- database content locked by the content encryption key. + # -- + crypt_filepath = content_ciphertxt_file_from_session_token() + + # -- + # -- Regenerate intra-session key from the session token. + # -- + intra_key = KeyLocal.regenerate_shell_key( to_token(), use_grandparent_pid ) + + # -- + # -- Decrypt and acquire the content enryption key that was created + # -- during the login use case and encrypted using the intra sessionary + # -- key. + # -- + unique_id = KeyId.derive_universal_id( read_app_id(), to_token() ) + crumbs_db.use( unique_id ) + power_key = intra_key.do_decrypt_key( crumbs_db.get( INTRA_KEY_CIPHERTEXT ) ) + + # -- + # -- Set the (ciphertext) breadcrumbs for re-acquiring the + # -- content encryption (power) key during (inside) this + # -- shell session. + # -- + crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS ) + random_iv = KeyIV.in_binary( crumbs_db.get( INDEX_DB_CRYPT_IV_KEY ) ) + + # -- + # -- Get the full ciphertext file (warts and all) and then top and + # -- tail until just the valuable ciphertext is at hand. Decode then + # -- decrypt the ciphertext and instantiate a key database from the + # -- resulting JSON string. + # -- + crypt_txt = binary_from_read( crypt_filepath ) + json_content = power_key.do_decrypt_text( random_iv, crypt_txt ) + + return KeyDb.from_json( json_content ) + + end + + + # This write content behaviour takes the parameter content, encyrpts and + # encodes it using the index key, which is itself derived from the shell + # key unlocking the intra session ciphertext. The crypted content is + # written to a file whose path is derviced by {content_ciphertxt_file_from_domain_name}. 
  #
  # Writing the content requires a rostra of actions namely
  #
  # - deriving filepaths to both the breadcrumb and ciphertext files
  # - creating a random iv and adding its base64 form to the breadcrumbs
  # - using the session token to derive the (unique to the) shell key
  # - using the shell key and (intra) ciphertext to acquire the index key
  # - using the index key and random iv to encrypt and encode the content
  # - writing the resulting ciphertext to a file at the designated path
  #
  # @param content_header [String]
  #    the string that will top the ciphertext content when it is written
  #
  # @param app_database [KeyDb]
  #    this key database class will be streamed using its {Hash.to_json}
  #    method and the resulting content will be encrypted and written to
  #    the file at path {content_ciphertxt_file_from_session_token}.
  #
  #    This method's mirror is {read_master_db}.
  def self.write_master_db( content_header, app_database )

    # Get the filepath to the breadcrumbs file using the trail in
    # the global configuration left by {use_application_domain}.
    crumbs_db = get_crumbs_db_from_session_token()

    # Get the path to the file holding the ciphertext of the application
    # database content locked by the content encryption key.
    crypt_filepath = content_ciphertxt_file_from_session_token()

    # Regenerate intra-session key from the session token.
    intra_key = KeyLocal.regenerate_shell_key( to_token() )

    # Decrypt and acquire the content encryption key that was created
    # during the login use case and encrypted using the intra sessionary
    # key.
    unique_id = KeyId.derive_universal_id( read_app_id(), to_token() )
    crumbs_db.use( unique_id )
    power_key = intra_key.do_decrypt_key( crumbs_db.get( INTRA_KEY_CIPHERTEXT ) )

    # Create a new random initialization vector (iv) to use when
    # encrypting the incoming database content before writing it
    # out to the file at the crypt filepath.
    iv_base64_chars = KeyIV.new().for_storage()
    crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS )
    crumbs_db.set( INDEX_DB_CRYPT_IV_KEY, iv_base64_chars )
    random_iv = KeyIV.in_binary( iv_base64_chars )

    # Now we use the content encryption (power) key and the random initialization
    # vector (iv) to first encrypt the incoming content and then to Base64 encode
    # the result. This is then written into the crypt filepath derived earlier.
    binary_ciphertext = power_key.do_encrypt_text( random_iv, app_database.to_json )
    binary_to_write( crypt_filepath, content_header, binary_ciphertext )

  end


  # Register the URL to the frontend keystore that is tied to
  # this application instance on this workstation (and user). The default
  # keystore sits on an accessible filesystem that is preferably a
  # removable drive (like a USB key or phone) which allows the keys to
  # your secrets to travel with you in your pocket.
  #
  # If the keystore url has already been configured this method will overwrite
  # (thereby updating) it.
  #
  # The keystore directives in the global configuration file look like this.
  #
  #      [keystore.ids]
  #      dxEy-v2w3-x7y8 = /media/usb_key/family.creds
  #      47S3-Nv0w-8SYf = /media/usb_key/friend.creds
  #      3Dds-8Tts-Jy2G = /media/usb_key/office.creds
  #
  # The keystore url must be provided the very first time init
  # is called for an app instance on a machine. If the configuration
  # is wiped, the next initialize use case must again provide it.
  #
  # To extend storage services (Redis, PostgreSQL, a Rest API), make a
  # keystore ID boss its own section and then add keypairs like
  #
  # - the keystore URL
  # - the keystore Type (or interface class)
  # - keystore create destroy markers
  #
  # @param keystore_url [String]
  #    The keystore url points to where the key metadata protecting
  #    this application instance lives. The simplest keystores are
  #    based on files and for them this url is just a folder path.
  #
  # @raise [KeyError]
  #    The keystore URL cannot be NEW. The NEW acronym asserts
  #    that the attribute is
  #
  #    - neither Nil
  #    - nor Empty
  #    - nor Whitespace only
  #
  # NOTE(review): unlike its siblings this is an instance method (no self.)
  # and it reads @keymap / @aim_id instance variables that are not set in the
  # visible source - confirm the intended receiver before relying on it.
  def register_keystore keystore_url
    KeyError.not_new( keystore_url, self )
    @keymap.write( @aim_id, KEYSTORE_IDENTIFIER_KEY, keystore_url )
  end


  # Generate a new set of envelope breadcrumbs, derive the new envelope
  # filepath, then encrypt the raw envelope content, and write the
  # resulting ciphertext out into the new file.
  #
  # The important parameters in play are the
  #
  # - session token used to find the storage folder
  # - random envelope external ID used to name the ciphertext file
  # - generated random key for encrypting and decrypting the content
  # - generated random initialization vector (IV) for crypting
  # - name of the file in which the locked content is placed
  # - header and footer content that tops and tails the ciphertext
  #
  # @param crumbs_map [Hash]
  #    nothing is read from this crumbs map but 3 things are written to
  #    it with these corresponding key names
  #
  #    - random content external ID {CONTENT_EXTERNAL_ID}
  #    - high entropy crypt key {CONTENT_ENCRYPT_KEY}
  #    - and initialization vector {CONTENT_RANDOM_IV}
  #
  # @param content_body [String]
  #    this is the envelope's latest and greatest content that will
  #    be encrypted, encoded, topped, tailed and then pushed out to
  #    the domain's storage folder.
  #
  # @param content_header [String]
  #    the string that will top the ciphertext content when it is written
  #
  def self.content_lock( crumbs_map, content_body, content_header )

    # Create the external content ID and place it within the crumbs map.
    content_exid = get_random_reference()
    crumbs_map[ CONTENT_EXTERNAL_ID ] = content_exid

    # Create a random initialization vector (iv) for AES encryption
    # and store its base64 form within the breadcrumbs map.
    iv_base64 = KeyIV.new().for_storage()
    random_iv = KeyIV.in_binary( iv_base64 )
    crumbs_map[ CONTENT_RANDOM_IV ] = iv_base64

    # Create a new high entropy random key for locking the content with
    # AES. Place the key within the breadcrumbs map.
    crypt_key = Key.from_random()
    crumbs_map[ CONTENT_ENCRYPT_KEY ] = crypt_key.to_char64()

    # Now use AES to lock the content body and write the encoded
    # ciphertext out to a file that is topped with the parameter
    # content header.
    binary_ctext = crypt_key.do_encrypt_text( random_iv, content_body )
    content_path = content_filepath( content_exid )
    binary_to_write( content_path, content_header, binary_ctext )

  end


  # Use the content's external id expected in the breadcrumbs together with
  # the session token to derive the content's filepath and then unlock and
  # return the content.
  #
  # Unlocking the content means reading it, decoding and then decrypting it using
  # the initialization vector (iv) and decryption key whose values are expected
  # within the breadcrumbs map.
  #
  # @param crumbs_map [Hash]
  #    the three (3) data points expected within the breadcrumbs map are the
  #
  #    - content's external ID {CONTENT_EXTERNAL_ID}
  #    - AES encryption key {CONTENT_ENCRYPT_KEY}
  #    - initialization vector {CONTENT_RANDOM_IV}
  #
  # @return [String] the decoded and decrypted plain text content
  def self.content_unlock( crumbs_map )

    # Get the external ID of the content then use that plus the session
    # context to derive the content's ciphertext filepath.
    content_path = content_filepath( crumbs_map[ CONTENT_EXTERNAL_ID ] )

    # Read the binary ciphertext of the content from the file. Then
    # decrypt it using the AES crypt key and initialization vector.
    crypt_txt = binary_from_read( content_path )
    random_iv = KeyIV.in_binary( crumbs_map[ CONTENT_RANDOM_IV ] )
    crypt_key = Key.from_char64( crumbs_map[ CONTENT_ENCRYPT_KEY ] )
    text_data = crypt_key.do_decrypt_text( random_iv, crypt_txt )

    return text_data

  end


  # This method returns the content filepath which (at its core)
  # is an amalgam of the application's (domain) identifier and the content's
  # external identifier (XID).
  #
  # The filename is prefixed by {CONTENT_FILE_PREFIX}.
+ # + # @param external_id [String] + # + # nothing is read from this crumbs map but 3 things are written to + # it with these corresponding key names + # + # - random content external ID {CONTENT_EXTERNAL_ID} + # - high entropy crypt key {CONTENT_ENCRYPT_KEY} + # - and initialization vector {CONTENT_RANDOM_IV} + def self.content_filepath( external_id ) + + app_identity = read_app_id() + store_folder = get_store_folder() + env_filename = "#{CONTENT_FILE_PREFIX}.#{external_id}.#{app_identity}.txt" + env_filepath = File.join( store_folder, env_filename ) + return env_filepath + + end + + + # If the content dictionary is not nil and contains a key named + # {CONTENT_EXTERNAL_ID} then we return true as we expect the content + # ciphertext and its corresponding file to exist. + # + # This method throws an exception if they key exists but there is no + # file at the expected location. + # + # @param crumbs_map [Hash] + # + # we test for the existence of the constant {CONTENT_EXTERNAL_ID} + # and if it exists we assert that the content filepath should also + # be present. + # + def self.db_envelope_exists?( crumbs_map ) + + return false if crumbs_map.nil? + return false unless crumbs_map.has_key?( CONTENT_EXTERNAL_ID ) + + external_id = crumbs_map[ CONTENT_EXTERNAL_ID ] + the_filepath = content_filepath( external_id ) + error_string = "External ID #{external_id} found but no file at #{the_filepath}" + raise RuntimeException, error_string unless File.file?( the_filepath ) + + return true + + end + + + # Construct the header for the ciphertext content files written out + # onto the filesystem. + # + # @param gem_version [String] the current version number of the calling gem + # @param gem_name [String] the current name of the calling gem + # @param gem_site [String] the current website of the calling gem + # + # @param the_domain_name [String] + # + # This method uses one of the two (2) ways to gain the application id. 
  #
  #    If not logged in callers will have the domain name and should pass it
  #    in so that this method can use {KeyId.derive_app_instance_identifier}
  #    to gain the application id.
  #
  #    If logged in then method {KeyApi.use_application_domain} will have
  #    executed and the application ID will be written inside the
  #    machine configuration file under the application instance on
  #    machine id and referenced in turn from the {SESSION_APP_DOMAINS} map.
  #
  #    In the above case post a NIL domain name and this method will now
  #    turn to {KeyApi.read_app_id} for the application id.
  #
  # @return [String] the multi-line header block for ciphertext files
  def self.format_header( gem_version, gem_name, gem_site, the_domain_name = nil )

    application_id = KeyId.derive_app_instance_identifier(the_domain_name) unless the_domain_name.nil?
    application_id = read_app_id() if the_domain_name.nil?
    universal_id = KeyId.derive_universal_id( application_id, to_token() )

    line1 = "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
    line2 = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
    line3 = "#{gem_name} ciphertext block\n"
    line4 = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
    line5 = "App Ref Num := #{application_id}\n" # application domain reference
    # NOTE(review): KeyNow.grab() is used here while the rest of this class
    # calls KeyNow.fetch() - confirm both exist on KeyNow.
    line6 = "Access Time := #{KeyNow.grab()}\n" # timestamp of the last write
    line7 = "App Version := #{gem_version}\n" # this application semantic version
    line8 = "Website Url := #{gem_site}\n" # app website or github url
    line9 = "Session Ref := #{universal_id}\n" # application domain reference

    return line1 + line2 + line3 + line4 + line5 + line6 + line7 + line8 + line9

  end


  private


  # --------------------------------------------------------
  # In order to separate keys into a new gem we must
  # break knowledge of this variable name and have it
  # instead passed in by clients.
  TOKEN_VARIABLE_NAME = "SAFE_TTY_TOKEN"
  TOKEN_VARIABLE_SIZE = 152
  # --------------------------------------------------------


  # NOTE(review): several constants below still carry the legacy "openkey"
  # name although the gem is safedb - confirm before renaming as the names
  # are baked into on-disk file and section names.
  MACHINE_CONFIG_FILE = File.join( Dir.home, ".config/openkey/openkey.app.config.ini" )
  SESSION_APP_DOMAINS = "session.app.domains"
  SESSION_IDENTIFIER_KEY = "session.identifiers"
  KEYSTORE_IDENTIFIER_KEY = "keystore.url.id"
  APP_INSTANCE_ID_KEY = "app.instance.id"
  AIM_IDENTITY_REF_KEY = "aim.identity.ref"
  LOGIN_TIMESTAMP_KEY = "login.timestamp"
  LOGOUT_TIMESTAMP_KEY = "logout.timestamp"
  MACHINE_CONFIGURATION = "machine.configuration"
  APP_INITIALIZE_TIME = "initialize.time"

  APP_INSTANCE_SETUP_TIME = "app.instance.setup.time"

  APP_KEY_DB_NAME_PREFIX = "openkey.breadcrumbs"
  FILE_CIPHERTEXT_PREFIX = "openkey.cipher.file"
  OK_BASE_FOLDER_PREFIX = "openkey.store"
  OK_BACKEND_CRYPT_PREFIX = "backend.crypt"

  APP_KEY_DB_DIRECTIVES = "key.db.directives"
  APP_KEY_DB_CREATE_TIME_KEY = "initialize.time"
  APP_KEY_DB_BREAD_CRUMBS = "openkey.bread.crumbs"

  LOGGED_IN_APP_SESSION_ID = "logged.in.app.session.id"
  SESSION_LOGIN_DATETIME = "session.login.datetime"
  SESSION_LOGOUT_DATETIME = "session.logout.datetime"

  INTER_KEY_CIPHERTEXT = "inter.key.ciphertext"
  INTRA_KEY_CIPHERTEXT = "intra.key.ciphertext"
  INDEX_DB_CRYPT_IV_KEY = "index.db.cipher.iv"

  # Sentinel lines that top and tail the Base64 ciphertext within the
  # written crypt files so it can be reliably extracted on read.
  BLOCK_64_START_STRING = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789ab\n"
  BLOCK_64_END_STRING = "ba9876543210fedcba9876543210fedcba9876543210fedcba9876543210\n"
  BLOCK_64_DELIMITER = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"

  XID_SOURCE_APPROX_LEN = 11

  CONTENT_FILE_PREFIX = "tree.db"
  CONTENT_EXTERNAL_ID = "content.xid"
  CONTENT_ENCRYPT_KEY = "content.key"
  CONTENT_RANDOM_IV = "content.iv"

  DB_CREATE_DATE = "db.create.date"
  DB_DOMAIN_NAME = "db.domain.name"
  DB_DOMAIN_ID = "db.domain.id"


  # Base64 encode the binary ciphertext, sandwich it between the block
  # sentinels and delimiters, top it with the content header and write
  # the whole lot to the file at to_filepath.
  def self.binary_to_write( to_filepath, content_header, binary_ciphertext )

    base64_ciphertext = Base64.encode64( binary_ciphertext )

    content_to_write =
      content_header +
      BLOCK_64_DELIMITER +
      BLOCK_64_START_STRING +
      base64_ciphertext +
      BLOCK_64_END_STRING +
      BLOCK_64_DELIMITER

    File.write( to_filepath, content_to_write )

  end


  # Read the crypt file at from_filepath, extract the Base64 text found
  # between the block sentinels and decode it back into binary ciphertext.
  # NOTE(review): String#in_between is a project extension (lib/extension/string.rb).
  def self.binary_from_read( from_filepath )

    file_text = File.read( from_filepath )
    core_data = file_text.in_between( BLOCK_64_START_STRING, BLOCK_64_END_STRING ).strip
    return Base64.decode64( core_data )

  end


  # Produce a short random lowercase alphanumeric reference used as the
  # content's external ID (XID).
  def self.get_random_reference

    # Do not forget that you can pass this through
    # the derive identifier method if uniformity is
    # what you seek.
    #
    #    [ KeyId.derive_identifier( reference ) ]
    #
    random_ref = SecureRandom.urlsafe_base64( XID_SOURCE_APPROX_LEN ).delete("-_").downcase
    return random_ref[ 0 .. ( XID_SOURCE_APPROX_LEN - 1 ) ]

  end


  # Build the JSON representation of a brand new (virgin) master database
  # stamped with its creation date, domain name and derived domain id.
  def self.get_virgin_content( domain_name )

    KeyError.not_new( domain_name, self )
    app_id = KeyId.derive_app_instance_identifier( domain_name )

    initial_db = KeyDb.new()
    initial_db.store( DB_CREATE_DATE, KeyNow.fetch() )
    initial_db.store( DB_DOMAIN_NAME, domain_name )
    initial_db.store( DB_DOMAIN_ID, app_id )
    return initial_db.to_json

  end


  # This method depends on {use_application_domain} which sets
  # the application ID against the session identity so only call
  # it if we are in a logged in state.
  #
  # NOTE this will NOT be set until the session is logged in so
  # the call fails before that. For this reason do not call this
  # method from outside this class. If the domain name is
  # available use {KeyId.derive_app_instance_identifier} instead.
+ def self.read_app_id() + + aim_id = read_aim_id() + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + keypairs.use( aim_id ) + return keypairs.get( APP_INSTANCE_ID_KEY ) + + end + + + def self.read_aim_id() + + session_identifier = KeyId.derive_session_id( to_token() ) + + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + keypairs.use( SESSION_APP_DOMAINS ) + return keypairs.get( session_identifier ) + + end + + + def self.get_crumbs_db_from_domain_name( domain_name ) + + KeyError.not_new( domain_name, self ) + keystore_file = get_keystore_file_from_domain_name( domain_name ) + crumbs_db = KeyPair.new( keystore_file ) + crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS ) + return crumbs_db + + end + + + def self.get_crumbs_db_from_session_token() + + keystore_file = get_keystore_file_from_session_token() + crumbs_db = KeyPair.new( keystore_file ) + crumbs_db.use( APP_KEY_DB_BREAD_CRUMBS ) + return crumbs_db + + end + + + def self.get_store_folder() + + aim_id = read_aim_id() + app_id = read_app_id() + return get_app_keystore_folder( aim_id, app_id ) + + end + + + def self.get_app_keystore_folder( aim_id, app_id ) + + keypairs = KeyPair.new( MACHINE_CONFIG_FILE ) + keypairs.use( aim_id ) + keystore_url = keypairs.get( KEYSTORE_IDENTIFIER_KEY ) + basedir_name = "#{OK_BASE_FOLDER_PREFIX}.#{app_id}" + return File.join( keystore_url, basedir_name ) + + end + + + def self.get_keystore_file_from_domain_name( domain_name ) + + aim_id = KeyId.derive_app_instance_machine_id( domain_name ) + app_id = KeyId.derive_app_instance_identifier( domain_name ) + + app_key_db_file = "#{APP_KEY_DB_NAME_PREFIX}.#{app_id}.ini" + return File.join( get_app_keystore_folder( aim_id, app_id ), app_key_db_file ) + + end + + + def self.get_keystore_file_from_session_token() + + aim_id = read_aim_id() + app_id = read_app_id() + + app_key_db_file = "#{APP_KEY_DB_NAME_PREFIX}.#{app_id}.ini" + return File.join( get_app_keystore_folder( aim_id, app_id ), app_key_db_file ) + + end + + + def 
self.content_ciphertxt_file_from_domain_name( domain_name ) + + aim_id = KeyId.derive_app_instance_machine_id( domain_name ) + app_id = KeyId.derive_app_instance_identifier( domain_name ) + + appdb_cipher_file = "#{FILE_CIPHERTEXT_PREFIX}.#{app_id}.txt" + return File.join( get_app_keystore_folder( aim_id, app_id ), appdb_cipher_file ) + + end + + + def self.content_ciphertxt_file_from_session_token() + + aim_id = read_aim_id() + app_id = read_app_id() + + appdb_cipher_file = "#{FILE_CIPHERTEXT_PREFIX}.#{app_id}.txt" + return File.join( get_app_keystore_folder( aim_id, app_id ), appdb_cipher_file ) + + end + + + def self.to_token() + + raw_env_var_value = ENV[TOKEN_VARIABLE_NAME] + raise_token_error( TOKEN_VARIABLE_NAME, "not present") unless raw_env_var_value + + env_var_value = raw_env_var_value.strip + # reject whitespace-only tokens by testing the stripped value (not the raw one) + raise_token_error( TOKEN_VARIABLE_NAME, "consists only of whitespace") if env_var_value.empty? + + size_msg = "length should contain exactly #{TOKEN_VARIABLE_SIZE} characters" + raise_token_error( TOKEN_VARIABLE_NAME, size_msg ) unless env_var_value.length == TOKEN_VARIABLE_SIZE + + return env_var_value + + end + + + def self.raise_token_error env_var_name, message + + puts "" + puts "#{TOKEN_VARIABLE_NAME} environment variable #{message}." + puts "To instantiate it you can use the below command." + puts "" + puts "$ export #{TOKEN_VARIABLE_NAME}=`safe token`" + puts "" + puts "ps => those are backticks around `safe token` (not apostrophes)." + puts "" + + raise RuntimeError, "#{TOKEN_VARIABLE_NAME} environment variable #{message}." 
+ + end + + + end + + +end diff --git a/lib/keytools/key.db.rb b/lib/keytools/key.db.rb new file mode 100644 index 0000000..0d265c5 --- /dev/null +++ b/lib/keytools/key.db.rb @@ -0,0 +1,330 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + require 'json' + + # A Key/Value database knows how to manipulate a JSON backed data structure + # (put, add etc) after reading and then decrypting it from a + # file and before encrypting and then writing it to a file. + # + # It provides behaviour to which we can create, append (add), update + # (change), read parts and delete essentially two structures + # + # - a collection of name/value pairs + # - an ordered list of values + # + # == JSON is Not Exposed in the Interface + # + # A key/value database doesn't expose the data format used in the implementation + # allowing this to be changed seamlessly to YAMl or other formats. + # + # == Symmetric Encryption and Decryption + # + # A key/value database supports operations to read from and write to + # a known filepath and with a symmetric key it can + # + # - decrypt after reading from a file and + # - encrypt before writing to a (the same) file + # + # == Hashes as the Primary Data Structure + # + # The key/value database openly extends {Hash} as the data structure for holding + # + # - strings + # - arrays + # - other hashes + # - booleans + # - integers and floats + class KeyDb < Hash + + # Return a key database data structure that is instantiated from + # the parameter JSON string. + # + # @param db_json_string [String] + # this json formatted data structure will be converted into a + # a Ruby hash (map) data structure and returned. + # + # @return [KeyDb] + # a hash data structure that has been instantiated as per the + # parameter json string content. 
+ def self.from_json( db_json_string ) + + data_db = KeyDb.new() + data_db.merge!( JSON.parse( db_json_string ) ) + return data_db + + end + + + + # Create a new key value entry inside a dictionary with the specified + # name at the root of this database. Successful completion means the + # named dictionary will contain one more entry than it need even if it + # did not previously exist. + # + # @param dictionary_name [String] + # + # if a dictionary with this name exists at the root of the + # database add the parameter key value pair into it. + # + # if no dictionary exists then create one first before adding + # the key value pair as the first entry into it. + # + # @param key_name [String] + # + # the key part of the key value pair that will be added into the + # dictionary whose name was provided in the first parameter. + # + # @param value [String] + # + # the value part of the key value pair that will be added into the + # dictionary whose name was provided in the first parameter. + def create_entry( dictionary_name, key_name, value ) + + KeyError.not_new( dictionary_name, self ) + KeyError.not_new( key_name, self ) + KeyError.not_new( value, self ) + + self[ dictionary_name ] = {} unless self.has_key?( dictionary_name ) + self[ dictionary_name ][ key_name ] = value + + end + + + # Create a new secondary tier map key value entry inside a primary tier + # map at the map_key_name location. + # + # A failure will occur if either the outer or inner keys already exist + # without their values being map objects. + # + # If this method is called against a new empty map, the resulting map + # structure will look like the below. + # + # { outer_keyname ~> { inner_keyname ~> { entry_keyname, entry_value } } } + # + # @param outer_keyname [String] + # + # if a dictionary with this name exists at the root of the + # database add the parameter key value pair into it. 
+ # + # if no dictionary exists then create one first before adding + # the key value pair as the first entry into it. + # + # @param inner_keyname [String] + # + # if a map exists at this key name then an entry comprising of + # a map_entry_key and a entry_value may either be added + # (if the map_entry_key does not already exist), or updated if + # it does. + # + # if the map does not exist it will be created and its first and + # only entry will be a key with inner_keyname along with a new + # single entry map consisting of the entry_keyname and the + # entry_value. + # + # @param entry_keyname [String] + # + # this key will exist in the second tier map after this operation. + # + # @param entry_value [String] + # + # this value will exist in the second tier map after this operation + # and if the entry_keyname already existed its value is overwritten + # with this one. + # + def create_map_entry( outer_keyname, inner_keyname, entry_keyname, entry_value ) + + KeyError.not_new( outer_keyname, self ) + KeyError.not_new( inner_keyname, self ) + KeyError.not_new( entry_keyname, self ) + KeyError.not_new( entry_value, self ) + + self[ outer_keyname ] = {} unless self.has_key?( outer_keyname ) + self[ outer_keyname ][ inner_keyname ] = {} unless self[ outer_keyname ].has_key?( inner_keyname ) + self[ outer_keyname ][ inner_keyname ][ entry_keyname ] = entry_value + + end + + + # Does this database have an entry in the root dictionary named with + # the key_name parameter? + # + # @param dictionary_name [String] + # + # immediately return false if a dictionary with this name does + # not exist at the root of this database. + # + # @param key_name [String] + # + # test whether a key/value pair answering to this name exists inside + # the specified dictionary at the root of this database. 
+ # + def has_entry?( dictionary_name, key_name ) + + KeyError.not_new( dictionary_name, self ) + KeyError.not_new( key_name, self ) + + return false unless self.has_key?( dictionary_name ) + return self[ dictionary_name ].has_key?( key_name ) + + end + + + # Get the entry with the key name in a dictionary that is itself + # inside another dictionary (named in the first parameter) which + # thankfully is at the root of this database. + # + # Only call this method if {has_entry?} returns true for the same + # dictionary and key name parameters. + # + # @param dictionary_name [String] + # + # get the entry inside a dictionary which is itself inside a + # dictionary (with this dictionary name) which is itself at the + # root of this database. + # + # @param key_name [String] + # + # get the value part of the key value pair that is inside a + # dictionary (with the above dictionary name) which is itself + # at the root of this database. + # + def get_entry( dictionary_name, key_name ) + + return self[ dictionary_name ][ key_name ] + + end + + + # Delete an existing key value entry inside the dictionary with the specified + # name at the root of this database. Successful completion means the + # named dictionary will contain one less entry if that key existed. + # + # @param dictionary_name [String] + # + # if a dictionary with this name exists at the root of the + # database add the parameter key value pair into it. + # + # if no dictionary exists throw an error + # + # @param key_name [String] + # + # the key part of the key value pair that will be deleted in the + # dictionary whose name was provided in the first parameter. + def delete_entry( dictionary_name, key_name ) + + KeyError.not_new( dictionary_name, self ) + KeyError.not_new( key_name, self ) + + self[ dictionary_name ].delete( key_name ) + + end + + + # Read and inject into this envelope, the data structure found in a + # file at the path specified in the first parameter. 
+ # + # Symmetric cryptography is mandatory for the envelope so we must + # encrypt before writing and decrypt after reading. + # + # An argument error will result if a suitable key is not provided. + # + # If the file does not exist (denoting the first read) all this method + # does is to stash the filepath as an instance variable and igore the + # decryption key which can be nil (or ommitted). + # + # @param the_filepath [String] + # absolute path to the file which acts as the persistent mirror to + # this data structure envelope. + # + # @param decryption_key [String] + # encryption at rest is a given so this mandatory parameter must + # contain a robust symmetric decryption key. The key will be used + # for decryption after the read and it will not linger (ie not cached + # as an instance variable). + # + # @raise [ArgumentError] if the decryption key is not robust enough. + def read the_filepath, decryption_key = nil + + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + + raise RuntimeError, "This KeyDb.read() software is never called so how can I be here?" + + @filepath = the_filepath + return unless File.exists? 
@filepath + + cipher_text = Base64.decode64( File.read( @filepath ).strip ) + plain_text = ToolBelt::Blowfish.decryptor( cipher_text, decryption_key ) + + data_structure = JSON.parse plain_text + self.merge! data_structure + + end + + + # Write the data in this envelope hash map into a file-system + # backed mirror whose path was specified in the {self.read} method. + # + # Technology for encryption at rest is supported by this dictionary + # and to this aim, please endeavour to post a robust symmetric + # encryption key. + # + # Calling this {self.write} method when the file at the prescribed path + # does not exist results in the directory structure being created + # (if necessary) and then the encrypted file being written. + # + # @param encryption_key [String] + # encryption at rest is a given so this mandatory parameter must + # contain a robust symmetric encryption key. The symmetric key will + # be used for the decryption after the read. Note that the decryption + # key does not linger meaning it isn't cached in an instance variable. + def write encryption_key + + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + # @todo -> this is confused - it uses INI but above methods use JSON + + raise RuntimeError, "This KeyDb.write( key ) software is never called so how can I be here?" 
+ + FileUtils.mkdir_p(File.dirname(@filepath)) + cipher_text = Base64.encode64 ToolBelt::Blowfish.encryptor( self.to_json, encryption_key ) + File.write @filepath, cipher_text + + puts "" + puts "=== ============================" + puts "=== Envelope State =============" + puts "=== ============================" + + a_ini_file = IniFile.new + self.each_key do |section_name| + a_ini_file[section_name] = self[section_name] + end + puts a_ini_file.to_s + + puts "=== ============================" + puts "" + + end + + + end + + +end diff --git a/lib/keytools/key.docs.rb b/lib/keytools/key.docs.rb new file mode 100644 index 0000000..d9c1f4e --- /dev/null +++ b/lib/keytools/key.docs.rb @@ -0,0 +1,195 @@ +#!/usr/bin/ruby + +# The open key library generates keys, it stores their salts, it produces differing +# representations of the keys (like base64 for storage and binary for encrypting). +# +# == Key Class Names their and Responsibility +# +# The 5 core key classes in the open key library are +# +# - {Key} represents keys in bits, binary and base 64 formats +# - {Key64} for converting from to and between base 64 characters +# - {Key256} uses key derivation functions to produce high entropy keys +# - {KeyIO} reads and writes key metadata (like salts) from/to persistent storage +# - {KeyCycle} for creating and locking the keys that underpin the security +# +# == The 5 Core Key Classes +# +# Key To initialize with a 264 binary bit string. To hold the +# key and represent it when requested +# - as a 264 bit binary bit string +# - as a 256 bit binary bit string +# - as a 256 bit raw bytes encryption key +# - as a YACHT64 formatted string +# +# Key64 To map in and out of the Yacht64 character set - from and to +# - a binary bit string sequence +# - a Base64 character encoding +# - a UrlSafe Base64 character encoding +# - a Radix64 character encoding +# +# Key256 It generates a key in 3 different and important ways. 
It can +# generate +# +# (a) from_password +# (b) from_random (or it can) +# (c) regenerate +# +# When generating from a password it takes a dictionary with +# a pre-tailored "section" and writes BCrypt and Pbkdf2 salts +# into it. +# +# When generating random it kicks off by creating a 55 byte +# random key for BCrypt and a 64 byte random key for Pbkdf2. +# It then calls upon generate_from_password. +# +# When regenerating it queries the dictionary provided at the +# pre-tailored "section" for the BCrypt and Pbkdf2 salts and +# then uses input passwords (be they human or randomly sourced) +# and regenerates the keys it produced at an earlier sitting. +# +# KeyIO KeyIO is instantiated with a folder path and a "key reference". +# KeyIO will then manage writing to and rereading from the structure +# held inside the file. The file is named (primarily) by the +# reference string. +# +# KeyCycle KeyLifeCycle implements the merry go round that palms off +# responsibility to the intra-session cycle and then back again +# to the ever rotary inter-session(ary) cycle. +########### Maybe think of a method where we pass in +########### 2 secrets - 1 human and 1 55 random bytes (session) +########### +########### 1 another 55 random key is created (the actual encryption key) +########### 2 then the above key is encrypted TWICE (2 diff salts and keys) +########### 3 Once by key from human password +########### 4 Once by key from machine password +########### 5 then the key from 1 is returned +########### 6 caller encrypts file .................... (go 4 it) + + +# Generates a 256 bit symmetric encryption key derived from a random +# seed sequence of 55 bytes. These 55 bytes are then fed into the +# {from_password} key derivation function and processed in a similar +# way to if a human had generated the string. 
+# + + +# Key derivation functions exist to convert low entropy human +# created passwords into a high entropy key that is computationally difficult +# to acquire through brute force. +# +# == SafeDb's Underlying Security Strategy +# +# Randomly generate a 256 bit encryption key and encrypt it with a key +# derived from a human password and generated by at least two cryptographic +# workhorses known as key derivation functions. +# +# The encryption key (encrypted by the one derived from a human password) sits +# at the beginning of a long chain of keys and encryption - so much so that the +# crypt material being outputted for storage is all but worthless to anyone but +# its rightful owner. +# +# == Key Size vs Crack Time +# +# Cracking a 256 bit key would need roughly 2^255 iterations (half the space) +# and this is akin to the number of atoms in the known universe. +# +# The human key can put security at risk. +# +# The rule of thumb is that a 40 character password with a good spread of the +# roughly 90 typable characters, would produce security equivalent to that of +# an AES 256bit key. As the password size and entropy drop, so does the security, +# exponentially. +# +# As human generated passwords have a relatively small key space, key derivation +# functions must be slow to compute with any implementation. +# +# == Key Derivation Functions for Command Line Apps +# +# A command line app (with no recourse to a central server) uses a Key +# Derivation Function (like BCrypt, Aaron2 or PBKD2) in a manner different +# to that employed by server side software. 
+# +# - server side passwords are hashed then both salt and hash are persisted +# - command line apps do not store the key - they only store the salt +# - both throw away the original password +# +# == One Key | One Session | One Crypt +# +# Command line apps use the derived key to symmetrically encrypt and decrypt +# one and only one 48 character key and a new key is derived at the beginning +# of every session. +# +# At the end of the session all material encrypted by the outgoing key +# is removed. This aggressive key rotation strategy leaves no stone unturned in +# the quest for ultimate security. +# +# == SafeDb's CLI Key Derivation Architecture +# +# SafeDb never accesses another server and giving its users total control +# of their secret crypted materials. It strengthens the key derivation process +# in three important ways. +# +# - [1] it does not store the key nor does it store the password +# +# - [2] a new master key is generated for every session only to hold the master index file +# +# - [3] it uses both BCrypt (Blowfish Crypt) and the indefatigable PBKD2 + + + # After a successful initialization, the application instance is linked to a keystore + # whose contents are responsible for securing the application instance database. + # + # To ascertain what needs to be done to bridge the gap to full initialization the + # app needs to know 3 things from the KeyApi. These things are + # + # - the ID of this app instance on the machine + # - if a keystore been associated with this ID + # - whether the keystore secures the app database + # + # The answers dictate the steps that need to be undertaken to bring the database of + # the application instance under the secure wing of the KeyApi. + # + # + # == 1. What is the App Instance ID on this Machine? + # + # The KeyApi uses the "just given" application reference and the machine environment to + # respond with a digested identifier binding the application instance to the + # present machine (workstation). 
+ # + # + # == 2. Has a Keystore been associated with this ID? + # + # The application's configuration manager is asked to find an associated KeyStore ID + # mapped against the app/machine id garnered by question 1. + # + # No it has not! + # + # If NO then a KeyStore ID is acquired either from the init command's parameter, + # or a suitable default. This new association between the app/machine ID and the + # KeyStore ID is then stored so the answer next time will be YES. + # + # Yes it has! + # + # Great - we now submit the KeyStore ID to the KeyApi so that it may answer question 3. + # + # + # == 3. Does the keystore secure the app instance database? + # + # For the KeyApi to answer, it needs the App's Instance ID and the KeyStore ID. + # + # Not Yet! Now NO means this application instance's database has not been + # brought under the protection of the KeyApi's multi-layered security net. For this it + # needs + # + # - the KeyStore ID + # - the application instance reference + # - the plaintext secret from which nothing of the host survives + # - the current application database plaintext + # + # Yes it does! If the app db keys have been instantiated and the client app is + # sitting pretty in possession of the database ciphertext, no more needs doing. + +module OpenKey + +end diff --git a/lib/keytools/key.error.rb b/lib/keytools/key.error.rb new file mode 100644 index 0000000..4d77db4 --- /dev/null +++ b/lib/keytools/key.error.rb @@ -0,0 +1,110 @@ +#!/usr/bin/ruby + +module OpenKey + + + # This class is the parent to all opensession errors + # that originate from the command line. + # + # All opensession cli originating errors are about + # + # - a problem with the input or + # - a problem with the current state or + # - a predictable future problem + class KeyError < StandardError + + + # Initialize the error and provide a culprit + # object which will be to-stringed and given + # out as evidence (look at this)! 
+ # + # This method will take care of logging the error. + # + # @param message [String] human readable error message + # @param culprit [Object] object that is either pertinent, a culprit or culpable + def initialize message, culprit + + super(message) + + @the_culprit = culprit + + log.info(x) { "An [Error] Occured => #{message}" } + log.info(x) { "Object of Interest => #{culprit.to_s}" } unless culprit.nil? + log.info(x) { "Class Name Culprit => #{culprit.class.name}" } + log.info(x) { "Error Message From => #{self.class.name}" } + + thread_backtrace = Thread.current.backtrace.join("\n") + thread_backtrace.to_s.log_lines + + end + + + # This method gives interested parties the object that + # is at the centre of the exception. This object is either + # very pertinent, culpable or at the very least, interesting. + # + # @return [String] string representation of culpable object + def culprit + return "No culprit identified." if @the_culprit.nil? + return @the_culprit.to_s + end + + + # Assert that the parameter string attribute is not new which + # means neither nil, nor empty nor consists solely of whitespace. + # + # The NEW acronym tells us that a bearer worthy of the name is + # + # - neither Nil + # - nor Empty + # - nor consists solely of Whitespace + # + # @param the_attribute [String] + # raise a {KeyError} if the attribute is not new. + # + # @param the_desc [String] + # a description of the attribute + # + # @raise [KeyError] + # + # The attribute cannot be NEW. The NEW acronym asserts + # that the attribute is + # + # - neither Nil + # - nor Empty + # - nor Whitespace only + # + def self.not_new the_attribute, the_desc + + attribute_new = the_attribute.nil? || the_attribute.chomp.strip.empty? + return unless attribute_new + + # interpolate the description so the error names the offending attribute + msg = "#{the_desc} is either nil, empty or consists solely of whitespace." + raise KeyError.new( msg, the_desc ) + + end + + + end + +=begin + # Throw this error if the configured safe directory points to a file. 
+ class SafeDirectoryIsFile < OpenError::CliError; end; + + # Throw this error if safe directory path is either nil or empty. + class SafeDirNotConfigured < OpenError::CliError; end; + + # Throw this error if the email address is nil, empty or less than 5 characters. + class EmailAddrNotConfigured < OpenError::CliError; end; + + # Throw this error if the store url is either nil or empty. + class StoreUrlNotConfigured < OpenError::CliError; end; + + # Throw if "prime folder" name occurs 2 or more times in the path. + class SafePrimeNameRepeated < OpenError::CliError; end; + + # Throw if "prime folder" name occurs 2 or more times in the path. + class SafePrimeNameNotAtEnd < OpenError::CliError; end; +=end + +end diff --git a/lib/keytools/key.id.rb b/lib/keytools/key.id.rb new file mode 100644 index 0000000..b3dd54b --- /dev/null +++ b/lib/keytools/key.id.rb @@ -0,0 +1,271 @@ +#!/usr/bin/ruby +# coding: utf-8 + + +module OpenKey + + + # This class derives non secret but unique identifiers based on different + # combinations of the application, shell and machine (compute element) + # references. + # + # == Identifier Are Not Secrets + # + # And their starting values are retrievable + # + # Note that the principle and practise of identifiers is not about keeping secrets. + # An identifier can easily give up its starting value/s if and when brute force is + # applied. The properties of a good iidentifier (ID) are + # + # - non repeatability (also known as uniqueness) + # - non predictability (of the next identifier) + # - containing alphanumerics (for file/folder/url names) + # - human readable (hence hyphens and separators) + # - non offensive (no swear words popping out) + # + # == Story | Identifiers Speak Volumes + # + # I told a friend what the turnover of his company was and how many clients he had. + # He was shocked and wanted to know how I had gleened this information. + # + # The invoices he sent me (a year apart). 
Both his invoice IDs (identifiers) and his + # user IDs where integers that counted up. So I could determine how many new clients + # he had in the past year, how many clients he had when I got the invoice, and I + # determined the turnover by guesstimating the average invoice amount. + # + # Many successful website attacks are owed to a predictable customer ID or a counter + # type session ID within the cookies. + # + # == Good Identifiers Need Volumes + # + # IDs are not secrets - but even so, a large number of properties are required + # to produce a high quality ID. + # + class KeyId + + + # The identity chunk length is set at four (4) which means each of the + # fabricated identifiers comprises of four character segments divided by + # hyphens. Only the 62 alpha-numerics ( a-z, A-Z and 0-9 ) will + # appear within identifiers - which maintains simplicity and provides an + # opportunity to re-iterate that identifiers are designed to be + # unpredictable, but not secret. + IDENTITY_CHUNK_LENGTH = 4 + + + # A hyphen is the chosen character for dividing the identifier strings + # into chunks of four (4) as per the {IDENTITY_CHUNK_LENGTH} constant. + SEGMENT_CHAR = "-" + + + # Get an identifier that is always the same for the parameter + # application reference regardless of the machine or shell or + # even the machine user, coming together to make the request. + # + # The returned identifier will consist only of alphanumeric characters + # and one hyphen, plus it always starts and ends with an alphanumeric. + # + # @param app_instance_ref [String] + # the string reference of the application instance (or shard) that + # is in play and needs to be digested into a unique but not-a-secret + # identifier. + # + # @return [String] + # An identifier that is guaranteed to be the same whenever the + # same application reference is provided on any machine, using any + # user through any shell interface or command prompt. 
+ # + # It must be different for any other application reference. + def self.derive_app_instance_identifier( app_instance_ref ) + return derive_identifier( app_instance_ref ) + end + + + # Get an identifier that is always the same for the application + # instance (with reference given in parameter) on this machine + # and is always different when either/or or both the application ref + # and machine are different. + # + # The returned identifier will consist of only alphanumeric characters + # and hyphens - it will always start and end with an alphanumeric. + # + # This behaviour draws a fine line around the concept of machine, virtual + # machine, workstation and/or compute element. + # + # (aka) The AIM ID + # + # Returned ID is aka the Application Instance Machine (AIM) Id. + # + # @param app_ref [String] + # the string reference of the application instance (or shard) that + # is being used. + # + # @return [String] + # an identifier that is guaranteed to be the same whenever the + # same application reference is provided on this machine. + # + # it must be different on another machine even when the same + # application reference is provided. + # + # It will also be different on this workstation if the application + # instance identifier provided is different. + def self.derive_app_instance_machine_id( app_ref ) + return derive_identifier( app_ref + KeyIdent.derive_machine_identifier() ) + end + + + # The 32 character universal identifier bonds a digested + # application state identifier with the shell identifier. + # This method gives dual double guarantees to the effect that + # + # - a change in one, or in the other, or in both returns a different universal id + # - the same app state identifier in the same shell produces the same universal id + # + # The 32 Character Universal Identifier + # + # The universal identifier is an amalgam of two digests which can be individually + # retrieved from other methods in this class. 
    # An example of the three identifiers is
    #
    #    universal id => hg2x0-g3uslf-pa2bl5-09xvbd-n4wcq
    #    the shell id => g3uslf-pa2bl5-09xvbd
    #    app state id => hg2x0-n4wcq
    #
    # The universal identifier comprises the session identifier characters
    # (see {derive_session_id}) sandwiched between digested application
    # identifier characters - some in front and some at the back - all
    # segmented by hyphens.
    #
    # NOTE(review): the examples above imply chunk sizes of five (5) and
    # six (6) characters at the same time. The authoritative chunk size is
    # the IDENTITY_CHUNK_LENGTH constant (declared earlier in this class)
    # - confirm its value and align these examples with it.
    #
    # @param app_reference [String]
    #    the chosen plaintext application reference identifier that
    #    is the input to the digesting (hashing) algorithm.
    #
    # @param session_token [String]
    #    a triply segmented (and one liner) text token instantiated by
    #    {KeyLocal.generate_shell_key_and_token} and provided here
    #    ad verbatim.
    #
    # @return [String]
    #    an identifier that cannot feasibly be repeated due to the use of
    #    one way functions within its derivation. The returned identifier
    #    bonds the application state reference with the present session.
    def self.derive_universal_id( app_reference, session_token )

      shellid = derive_session_id( session_token )
      app_ref = derive_identifier( app_reference + shellid )

      # The range [ 0 .. IDENTITY_CHUNK_LENGTH ] is inclusive, so chunk_1
      # deliberately picks up the hyphen that {derive_identifier} inserted
      # at that index - hence no extra separator is interpolated between
      # chunk_1 and the shell id below.
      chunk_1 = app_ref[ 0 .. IDENTITY_CHUNK_LENGTH ]
      chunk_3 = app_ref[ ( IDENTITY_CHUNK_LENGTH + 1 ) .. -1 ]

      return "#{chunk_1}#{shellid}#{SEGMENT_CHAR}#{chunk_3}".downcase

    end


    # The session ID generated here is a derivative of the session token
    # instantiated by {KeyLocal.generate_shell_key_and_token} and provided
    # here ad verbatim.
    #
    # NOTE(review): the original comment called this a "150 character"
    # token but KeyLocal::SESSION_TOKEN_SIZE evaluates to 152 - the
    # assertion below enforces the constant, not the prose.
    #
    # The algorithm for deriving the session ID is as follows.
    #
    # - convert the token characters to an alphanumeric string
    # - convert the result to a bit string and then to a key
    # - put the key's binary form through a 384 bit digest
    # - convert the digest's output to 64 YACHT64 characters
    # - remove the (on average 2) non-alphanumeric characters
    # - cherry pick ID_TRI_CHUNK_LEN spread out characters from the pool
    # - hyphenate the picked characters into three equal length segments
    # - assert that the resultant ID length equals ID_TRI_TOTAL_LEN
    #
    # The resulting session id will look something like this
    #
    #    g3sf-pab5-9xvd
    #
    # @param session_token [String]
    #    a triply segmented (and one liner) text token instantiated by
    #    {KeyLocal.generate_shell_key_and_token} and provided here ad
    #    verbatim.
    #
    # @return [String]
    #    a hyphen segmented identifier that cannot feasibly be repeated
    #    within the keyspace of even a gigantic organisation.
    #
    # This method guarantees that the session id will always be the same when
    # called by commands within the same shell in the same machine.
    def self.derive_session_id( session_token )

      assert_session_token_size( session_token )

      # Digest the alphanumeric token down to a pool of base64 characters.
      random_length_id_key = Key.from_char64( session_token.to_alphanumeric )
      a_384_bit_key = random_length_id_key.to_384_bit_key()
      a_64_char_str = a_384_bit_key.to_char64()
      base_64_chars = a_64_char_str.to_alphanumeric

      # Cherry pick the ID characters then insert the two segment hyphens.
      id_chars_pool = KeyAlgo.cherry_picker( ID_TRI_CHUNK_LEN, base_64_chars )
      id_hyphen_one = id_chars_pool.insert( IDENTITY_CHUNK_LENGTH, SEGMENT_CHAR )
      id_characters = id_hyphen_one.insert( ( IDENTITY_CHUNK_LENGTH * 2 + 1 ), SEGMENT_CHAR )

      err_msg = "Shell ID needs #{ID_TRI_TOTAL_LEN} not #{id_characters.length} characters."
      raise RuntimeError, err_msg unless id_characters.length == ID_TRI_TOTAL_LEN

      return id_characters.downcase

    end


    # This method returns a short hyphenated digest of the parameter
    # reference string.
    #
    # The algorithm is this.
    #
    # - reverse the reference and feed it to a 256 bit digest
    # - chop away the rightmost digits so that 252 bits are left
    # - convert the one-zero bit str to 42 (YACHT64) characters
    # - remove the (on average 1.5) non-alphanumeric characters
    # - cherry pick ID_TWO_CHUNK_LEN spread out characters
    # - insert a hyphen between the two equal length segments
    #
    # NOTE(review): the original comment promised a "10 character" digest
    # but the assertion below enforces ( ID_TWO_CHUNK_LEN + 1 ) characters,
    # a count that includes the inserted hyphen.
    #
    # @param reference [String]
    #    the plaintext reference input to the digest algorithm
    #
    # @return [String]
    #    a hyphen segmented string that is a digest of the reference
    #    string provided in the parameter.
    def self.derive_identifier( reference )

      bitstr_256 = Key.from_binary( Digest::SHA256.digest( reference.reverse ) ).to_s
      bitstr_252 = bitstr_256[ 0 .. ( BIT_LENGTH_252 - 1 ) ]
      id_err_msg = "The ID digest needs #{BIT_LENGTH_252} not #{bitstr_252.length} chars."
      raise RuntimeError, id_err_msg unless bitstr_252.length == BIT_LENGTH_252

      id_chars_pool = Key64.from_bits( bitstr_252 ).to_alphanumeric
      undivided_str = KeyAlgo.cherry_picker( ID_TWO_CHUNK_LEN, id_chars_pool )
      id_characters = undivided_str.insert( IDENTITY_CHUNK_LENGTH, SEGMENT_CHAR )

      min_size_msg = "Id length #{id_characters.length} is not #{(ID_TWO_CHUNK_LEN + 1)} chars."
      raise RuntimeError, min_size_msg unless id_characters.length == ( ID_TWO_CHUNK_LEN + 1 )

      return id_characters.downcase

    end


    # NOTE(review): a bare `private` does not hide methods defined with
    # `def self.` nor constants, so assert_session_token_size remains
    # publicly callable. Use private_class_method if class level privacy
    # is actually intended here.
    private


    # Character counts for the two and three chunk identifier flavours.
    ID_TWO_CHUNK_LEN = IDENTITY_CHUNK_LENGTH * 2
    ID_TRI_CHUNK_LEN = IDENTITY_CHUNK_LENGTH * 3

    # Three chunk identifier length including its two hyphen separators.
    ID_TRI_TOTAL_LEN = ID_TRI_CHUNK_LEN + 2

    # Number of bits retained from the 256 bit reference digest.
    BIT_LENGTH_252 = 252


    # Raise a RuntimeError unless the given session token holds exactly
    # KeyLocal::SESSION_TOKEN_SIZE characters.
    def self.assert_session_token_size session_token
      err_msg = "Session token has #{session_token.length} and not #{KeyLocal::SESSION_TOKEN_SIZE} chars."
      raise RuntimeError, err_msg unless session_token.length == KeyLocal::SESSION_TOKEN_SIZE
    end


  end


end
#!/usr/bin/ruby
# coding: utf-8

module OpenKey

  # This class knows how to derive information from the machine environment to aid
  # in producing identifiers unique to the machine and/or workstation, with functionality
  # similar to that required by licensing software.
  #
  # == Identity is Similar to Licensing Software | Except Deeper
  #
  # Deriving the identity string follows similar principles to licensing
  # software that attempts to determine whether the operating environment
  # is the same or different. But it goes deeper than licensing software
  # as it is not only concerned about the same workstation - it is
  # also concerned about the same shell or command line interface.
  #
  # == Known Issues
  #
  # The dependent macaddr gem is known to fail in scenarios where a
  # VPN tunnel is active and a tell tale sign is the ifconfig command
  # returning the tun0 interface rather than "eth0" or something that
  # resembles "ensp21".
  #
  # This is one of the error messages resulting from such a case.
  #
  #     macaddr.rb:86 from_getifaddrs undefined method pfamily (NoMethodError)
  #
  class KeyIdent

    # This method returns a plaintext string that is guaranteed to be the same
    # whenever called within the same shell for the same user on the same
    # workstation, virtual machine, container or SSH session and different whenever
    # a new shell is acquired.
    #
    # What is really important is that the shell identity string changes when
    #
    # - the command shell changes
    # - the user switches to another workstation user
    # - the workstation or machine host is changed
    # - the user SSH's into another shell
    #
    # Unchanged | When Should it Remain Unchanged?
    #
    # Remaining unchanged is a feature that is just as important, and it
    # must remain unchanged when and/or after
    #
    # - the user returns to a command shell
    # - the user switches back to using a domain
    # - the user exits their remote SSH session
    # - sudo is used to execute the commands
    # - the user comes back to their workstation
    # - the clock ticks into another day, month, year ...
    #
    # @param use_grandparent_pid [Boolean]
    #
    #    Optional boolean parameter. If set to true the PID (process ID) used
    #    as part of an obfuscator key and normally acquired from the parent
    #    process should now be acquired from the grandparent's process.
    #
    #    Set to true when accessing the safe's credentials from a sub process
    #    rather than directly through the logged in shell.
    #
    # @return [String]
    #    Return a one line textual shell identity string.
    #
    #    As key derivation algorithms enforcing a maximum input length may
    #    be applied, each character must add value, so non-alphanumerics
    #    (mostly hyphens) are cleansed out before returning.
    def self.derive_shell_identifier( use_grandparent_pid = false )

      require 'socket'

      # -- Ensure that the most significant data points
      # -- come first just like with numbers.
      #
      # NOTE(review): Etc.getlogin is used below but no `require 'etc'`
      # appears in this file - presumably loaded elsewhere; confirm.
      identity_text =
      [
        get_ancestor_pid( use_grandparent_pid ),
        get_bootup_id(),
        Etc.getlogin(),
        Socket.gethostname()
      ].join

      return identity_text.to_alphanumeric

    end


    # Return an ancestor process ID meaning return either the parent process
    # ID or the grandparent process ID. The one returned depends on the parameter
    # boolean value.
    #
    # == Command Used to find the grandparent process ID.
    #
    #     $ ps -fp 31870 | awk "/tty/"' { print $3 } '
    #     $ ps -fp 31870 | awk "/31870/"' { print $3 } '
    #
    # The one liner finds the parental process ID of the process with the given
    # parameter process ID.
    #
    #     $ ps -fp 31870
    #
    #     UID   PID    PPID  C STIME TTY   TIME     CMD
    #     joe   31870  2618  0 12:55 tty2  00:01:03 /usr/bin/emacs25
    #
    # The ps command outputs two (2) lines and **awk** is employed to select the
    # line containing the already known ID. We then print the 3rd string in the
    # line which we expect to be the parent PID of the PID.
    #
    # == Warning | Do Not Use $PPID
    #
    # Using $PPID is fools gold because the ps command itself runs as another
    # process so $PPID is this (calling) process ID and the number returned is
    # exactly the same as the parent ID of this process - which is actually the
    # grandparent of the invoked ps process.
    #
    # @param use_grandparent_pid [Boolean]
    #    Set to true if the grandparent process ID is required and false if
    #    only the parent process ID should be returned.
    #
    # @return [String]
    #    Return ancestor process ID that belongs to either the parent process
    #    or the grandparent process.
    def self.get_ancestor_pid( use_grandparent_pid )

      parental_process_id = Process.ppid.to_s()
      grandparent_pid_cmd = "ps -fp #{parental_process_id} | awk \"/#{parental_process_id}/\"' { print $3 } '"
      raw_grandparent_pid = %x[#{grandparent_pid_cmd}]
      the_grandparent_pid = raw_grandparent_pid.chomp

      log.debug(x) { "QQQQQ ~> QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" }
      log.debug(x) { "QQQQQ ~> Request Bool Use GPPID is ~> [[ #{use_grandparent_pid} ]]" }
      log.debug(x) { "QQQQQ ~> Main Parent Process ID is ~> [[ #{parental_process_id} ]]" }
      log.debug(x) { "QQQQQ ~> GrandParent Process ID is ~> [[ #{the_grandparent_pid} ]]" }
      log.debug(x) { "QQQQQ ~> QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" }

      return ( use_grandparent_pid ? the_grandparent_pid : parental_process_id )

    end


    # This method uses a one-way function to return a combinatorial digested
    # machine identification string using a number of distinct input parameters
    # to deliver the characteristic of producing the same identifier for the
    # same machine, virtual machine, workstation and/or compute element, and
    # reciprocally, a different one on a different machine.
    #
    # The userspace is also a key machine identifier so a different machine user
    # generates a different identifier when all other things remain equal.
    #
    # @return [String]
    #    a one line textual machine workstation or compute element identifier
    #    that is (surprisingly) different when the machine user changes.
    def self.derive_machine_identifier

      require 'socket'

      identity_text = [
        Etc.getlogin,
        get_machine_id(),
        Socket.gethostname()
      ].join.reverse

      return identity_text

    end


    # If you need to know whether a Linux computer has been rebooted or
    # you need an identifier that stays the same until the computer reboots,
    # look no further than the read only (non sudoer accessible) **boot id**.
    #
    # In the modern era of virtualization you should always check the behaviour
    # of the above identifiers when used inside
    #
    # - docker containers
    # - Amazon EC2 servers (or Azure or GCE)
    # - vagrant (VirtualBox/VMWare)
    # - Windows MSGYWIN (Ubuntu) environments
    # - Kubernetes pods
    #
    # @return [String] the bootup ID hash value
    def self.get_bootup_id

      bootup_id_cmd = "cat /proc/sys/kernel/random/boot_id"
      bootup_id_str = %x[ #{bootup_id_cmd} ]
      return bootup_id_str.chomp

    end


    # The machine identifier is a UUID based hash value that is tied to the
    # CPU and motherboard of the machine. This read-only identifier can be
    # accessed without sudoer permissions so is perfect for license generators
    # and environment sensitive software.
    #
    # In the modern era of virtualization you should always check the behaviour
    # of the above identifiers when used inside
    #
    # - docker containers
    # - Amazon EC2 servers (or Azure or GCE)
    # - vagrant (VirtualBox/VMWare)
    # - Windows MSGYWIN (Ubuntu) environments
    # - Kubernetes pods
    #
    # @return [String] the machine ID hash value
    def self.get_machine_id

      machine_id_cmd = "cat /etc/machine-id"
      machine_id_str = %x[ #{machine_id_cmd} ]
      return machine_id_str.chomp

    end


    # If the system was rebooted on April 23rd, 2018 at 22:00:16 we
    # expect this method not to return 2018-04-23 22:00:16, but
    # to return the 8 least significant bootup time digits, which in
    # this case are 23220016.
    #
    # Investigate all Linux flavours to understand whether this command
    # works (or is it just Ubuntu). Also does Docker return a sensible
    # value here?
    #
    # This method is not production ready. Not only is the time within
    # a small range, also the most significant digit can fluctuate up
    # or down randomly (in a non-deterministic manner).
    #
    # @return [String] the time when the system was booted.
    def self.get_bootup_time_digits

      boot_time_cmd = "uptime -s"
      uptime_string = %x[ #{boot_time_cmd} ]
      return uptime_string.chomp.to_alphanumeric[ 6 .. -1 ]

    end


  end


end
#!/usr/bin/ruby

module OpenKey

  # Create and deliver representations of a random initialization vector
  # suitable for the AES symmetric encryption algorithm which demands a
  # 16 byte binary string (see NO_OF_BINARY_BYTES below - the original
  # comment incorrectly said 18 bytes).
  #
  # The initialization vector is sourced from {SecureRandom} which provides
  # a highly random (and secure) byte sequence, typically sourced from
  # /dev/urandom.
  #
  #     + ------------------ + -------- + ------------ + ------------------- +
  #     | Random IV Format   | Bits     | Bytes        | Base64              |
  #     | ------------------ | -------- | ------------ | ------------------- |
  #     | Random IV Stored   | 192 Bits | 24 bytes     | 32 characters       |
  #     | Random IV Binary   | 128 Bits | 16 bytes     | (not stored)        |
  #     + ------------------ + -------- + ------------ + ------------------- +
  #
  # This table shows that the initialization vector can be represented by
  # both a 32 character base64 string suitable for storage and a
  # 16 byte binary for feeding the algorithm.
  class KeyIV


    # The 24 random bytes is equivalent to 192 bits which when sliced into 6 bit
    # blocks (one for each base64 character) results in 32 base64 characters.
    NO_OF_BASE64_CHARS = 32

    # We ask for 24 secure random bytes that are individually created to ensure
    # we get exactly the right number.
    NO_OF_SOURCE_BYTES = 24

    # We truncate the source random bytes so that 16 bytes are returned for the
    # random initialization vector.
    NO_OF_BINARY_BYTES = 16


    # Initialize an initialization vector from a source of random bytes
    # which can then be presented in both a (base64) storage format
    # and a binary string format.
    #
    #     + ------------------ + -------- + ------------ + ------------------- +
    #     | Random IV Format   | Bits     | Bytes        | Base64              |
    #     | ------------------ | -------- | ------------ | ------------------- |
    #     | Random IV Stored   | 192 Bits | 24 bytes     | 32 characters       |
    #     | Random IV Binary   | 128 Bits | 16 bytes     | (not stored)        |
    #     + ------------------ + -------- + ------------ + ------------------- +
    #
    # We ask for 24 secure random bytes that are individually created to ensure
    # we get exactly the right number.
    #
    # If the storage format is requested a 32 character base64 string is
    # returned but if the binary form is requested the first 16 bytes are
    # issued.
    def initialize
      @bit_string = Key.to_random_bits( NO_OF_SOURCE_BYTES )
    end


    # When the storage format is requested a 32 character base64 string is
    # returned - created from the initialized 24 secure random bytes.
    #
    #     + ---------------- + -------- + ------------ + ------------------- +
    #     | Random IV Stored | 192 Bits | 24 bytes     | 32 characters       |
    #     + ---------------- + -------- + ------------ + ------------------- +
    #
    # @return [String]
    #    a 32 character base64 formatted string is returned.
    def for_storage
      return Key64.from_bits( @bit_string )
    end


    # Convert a stored (base64) initialization vector back into the 16 byte
    # binary form that the AES algorithm consumes.
    #
    #     + ---------------- + -------- + ------------ + ------------------- +
    #     | Random IV Binary | 128 Bits | 16 bytes     | (not stored)        |
    #     + ---------------- + -------- + ------------ + ------------------- +
    #
    # @param iv_base64_chars [String]
    #    the 32 characters in base64 format that will be converted into a binary
    #    string (24 byte) representation and then truncated to 16 bytes and outputted
    #    in binary form.
    #
    # @return [String]
    #    a 16 byte binary string is returned.
    #
    # @raise [ArgumentError]
    #    if 32 base64 characters are not presented in the parameter.
    def self.in_binary iv_base64_chars

      b64_msg = "Expected #{NO_OF_BASE64_CHARS} base64 chars not #{iv_base64_chars.length}."
      raise ArgumentError, b64_msg unless iv_base64_chars.length == NO_OF_BASE64_CHARS

      binary_string = Key.to_binary_from_bit_string( Key64.to_bits( iv_base64_chars ) )

      bin_msg = "Expected #{NO_OF_SOURCE_BYTES} binary bytes not #{binary_string.length}."
      raise RuntimeError, bin_msg unless binary_string.length == NO_OF_SOURCE_BYTES

      return binary_string[ 0 .. ( NO_OF_BINARY_BYTES - 1 ) ]

    end


  end


end
#!/usr/bin/ruby
# coding: utf-8

module OpenKey

  # The command line interface has a high entropy randomly generated
  # key whose purpose is to lock the application's data key for
  # the duration of the session which is between a login and a logout.
  #
  # These keys are unique to only one shell session on one workstation
  # and they live lives that are no longer (and mostly shorter) than
  # the life of the parent shell.
  #
  # == The 4 CLI Shell Entities
  #
  # The four (4) important entities within the shell session are
  #
  # - an obfuscator key for locking the shell key during a session
  # - a high entropy randomly generated shell key for locking the app data key
  # - one environment variable whose value embodies three (3) data segments
  # - a session id derived by pushing the env var through a one-way function
  class KeyLocal


    # The number of Radix64 characters that make up a valid BCrypt salt.
    BCRYPT_SALT_LENGTH = 22


    # There are two digits representing the BCrypt iteration count.
    # The minimum is 10 and the maximum is 16.
    BCRYPT_ITER_COUNT_SIZE = 2


    # The session token comprises of 3 segments with fixed lengths.
    # This triply segmented text token can be used to decrypt and
    # deliver the shell key.
    #
    # 128 ciphertext chars + 22 salt chars + 2 iteration count chars = 152.
    SESSION_TOKEN_SIZE = 128 + 22 + BCRYPT_ITER_COUNT_SIZE


    # Given a 152 character session token, what is the index that pinpoints
    # the beginning of the 22 character BCrypt salt? The answer is given
    # by this BCRYPT_SALT_START_INDEX constant.
    BCRYPT_SALT_START_INDEX = SESSION_TOKEN_SIZE - BCRYPT_SALT_LENGTH - BCRYPT_ITER_COUNT_SIZE


    # What index pinpoints the end of the BCrypt salt itself.
    # This is easy as the final 2 characters are the iteration count
    # so the end index is the length subtract 1 subtract 2.
    BCRYPT_SALT_END_INDEX = SESSION_TOKEN_SIZE - 1


    # Initialize the session by generating a random high entropy shell token
    # and then generate an obfuscator key which we use to lock the shell
    # key and return a triply segmented text token that can be used to decrypt
    # and deliver the shell key as long as the same shell on the same machine
    # is employed to make the call.
    #
    # The 3 Session Token Segments
    #
    # NOTE(review): the original table below claimed a 150 character total
    # but SESSION_TOKEN_SIZE evaluates to 152 - the 2 BCrypt iteration count
    # characters ride along with the 22 character salt segment.
    #
    #     | -------- | ------------ | ------------------------------------- |
    #     | Segment  | Length       | Purpose                               |
    #     | -------- | ------------ | ------------------------------------- |
    #     |    1     | 16 bytes     | AES Encrypt Initialization Vector(IV) |
    #     |    2     | 80 bytes     | Cipher text from Random Key AES crypt |
    #     |    3     | 22 chars     | Salt for obfuscator key derivation    |
    #     | -------- | ------------ | ------------------------------------- |
    #     |  Total   | 152 chars    | Session Token in Environment Variable |
    #     | -------- | ------------ | ------------------------------------- |
    #
    # Why are the 16 byte IV and the 80 byte ciphertext represented
    # by 128 base64 characters?
    #
    #     16 bytes + 80 bytes = 96 bytes
    #     96 bytes x 8 bits   = 768 bits
    #     768 bits / 6 bits   = 128 base64 characters
    #
    # @return [String]
    #    return a triply segmented text token that can be used to decrypt
    #    and redeliver the high entropy session shell key on the same machine
    #    and within the same shell on the same machine.
    def self.generate_shell_key_and_token

      bcrypt_salt_key = KdfBCrypt.generate_bcrypt_salt
      obfuscator_key = derive_session_crypt_key( bcrypt_salt_key )
      random_key_ciphertext = obfuscator_key.do_encrypt_key( Key.from_random() )
      # The salt travels reversed at the token tail - regenerate_shell_key
      # reverses it back before use.
      session_token = random_key_ciphertext + bcrypt_salt_key.reverse
      assert_session_token_size( session_token )

      return session_token

    end


    # Regenerate the random shell key that was instantiated and locked
    # during the {generate_shell_key_and_token} method.
    #
    # To successfully reacquire the randomly generated (and then locked)
    # shell key we must be provided with five (5) data points, four (4)
    # of which are embalmed within the 152 character session token
    # parameter.
    #
    # What we need to Regenerate the Shell Key
    #
    # Regenerating the shell key is done in two steps when given the
    # four (4) session token segments described below, and the
    # shell identity key (NOTE(review): the original doc referenced
    # {OpenKey::Identifier} - presumably {OpenKey::KeyIdent} - confirm).
    #
    # The session token is divided up into 4 segments with a total of 152
    # characters.
    #
    #     | -------- | ------------ | ------------------------------------- |
    #     | Segment  | Length       | Purpose                               |
    #     | -------- | ------------ | ------------------------------------- |
    #     |    1     | 16 bytes     | AES Encrypt Initialization Vector(IV) |
    #     |    2     | 80 bytes     | Cipher text from Random Key AES crypt |
    #     |    3     | 22 chars     | Salt 4 shell identity key derivation  |
    #     |    4     | 2 chars      | BCrypt iteration parameter (10 to 16) |
    #     | -------- | ------------ | ------------------------------------- |
    #     |  Total   | 152 chars    | Session Token in Environment Variable |
    #     | -------- | ------------ | ------------------------------------- |
    #
    # @param session_token [String]
    #    a triply segmented (and one liner) text token instantiated by
    #    {generate_shell_key_and_token} and provided here ad verbatim.
    #
    # @param use_grandparent_pid [Boolean]
    #
    #    Optional boolean parameter. If set to true the PID (process ID) used
    #    as part of an obfuscator key and normally acquired from the parent
    #    process should now be acquired from the grandparent's process.
    #
    #    Set to true when accessing the safe's credentials from a sub process
    #    rather than directly through the logged in shell.
    #
    # @return [OpenKey::Key]
    #    an extremely high entropy 256 bit key derived (digested) from 48
    #    random bytes at the beginning of the shell (cli) session.
    def self.regenerate_shell_key( session_token, use_grandparent_pid = false )

      assert_session_token_size( session_token )
      bcrypt_salt = session_token[ BCRYPT_SALT_START_INDEX .. BCRYPT_SALT_END_INDEX ].reverse
      assert_bcrypt_salt_size( bcrypt_salt )

      key_ciphertext = session_token[ 0 .. ( BCRYPT_SALT_START_INDEX - 1 ) ]
      obfuscator_key = derive_session_crypt_key( bcrypt_salt, use_grandparent_pid )
      regenerated_key = obfuscator_key.do_decrypt_key( key_ciphertext )

      return regenerated_key

    end


    # Derive a short term (session scoped) encryption key from the
    # surrounding shell and workstation (machine) environment with an
    # important same/different guarantee.
    #
    # The same / different guarantee promises us that the derived
    # key will be
    #
    # - the same whenever called from within this executing shell
    # - different when the shell and/or workstation are different
    #
    # Change | When Should the key Change?
    #
    # What is really important is that the key changes when
    #
    # - the command shell changes
    # - the workstation shell user is switched
    # - the host machine workstation is changed
    # - the user SSH's into another shell
    #
    # A distinct workstation is identified by the first MAC address and the
    # hostname of the machine.
    #
    # Unchanged | When Should the Key Remain Unchanged?
    #
    # Remaining unchanged in certain scenarios is a feature that is
    # just as important as changing in others. The key must remain
    # unchanged when
    #
    # - the user returns to a command shell
    # - the user exits their remote SSH session
    # - sudo is used to execute the commands
    # - the user comes back to their workstation
    # - the clock ticks into another day, month, year ...
    #
    # @param bcrypt_salt_key [OpenKey::Key]
    #
    #    Either use BCrypt to generate the salt or retrieve and post in a
    #    previously generated salt which must hold 22 printable characters.
    #
    # @param use_grandparent_pid [Boolean]
    #
    #    Optional boolean parameter. If set to true the PID (process ID) used
    #    as part of an obfuscator key and normally acquired from the parent
    #    process should now be acquired from the grandparent's process.
    #
    #    Set to true when accessing the safe's credentials from a sub process
    #    rather than directly through the logged in shell.
    #
    # @return [OpenKey::Key]
    #    a digested key suitable for short term (session scoped) use with the
    #    guarantee that the same key will be returned whenever called from within
    #    the same executing shell environment and a different key when not.
    def self.derive_session_crypt_key bcrypt_salt_key, use_grandparent_pid = false

      shell_id_text = KeyIdent.derive_shell_identifier( use_grandparent_pid )

      # BCrypt ignores input beyond its maximum text length so truncate
      # the identifier text up front when it is too long.
      truncate_text = shell_id_text.length > KdfBCrypt::BCRYPT_MAX_IN_TEXT_LENGTH
      shell_id_trim = shell_id_text unless truncate_text
      shell_id_trim = shell_id_text[ 0 .. ( KdfBCrypt::BCRYPT_MAX_IN_TEXT_LENGTH - 1 ) ] if truncate_text

      return KdfBCrypt.generate_key( shell_id_trim, bcrypt_salt_key )

    end


    private


    # 000000000000000000000000000000000000000000000000000000000000000
    # How to determine the caller.
    # Better strategy would be just to print the stack trace
    # That gives you much more bang for the one line buck.
    # 000000000000000000000000000000000000000000000000000000000000000
    # calling_module = File.basename caller_locations(1,1).first.absolute_path, ".rb"
    # calling_method = caller_locations(1,1).first.base_label
    # calling_lineno = caller_locations(1,1).first.lineno
    # caller_details = "#{calling_module} | #{calling_method} | (line #{calling_lineno})"
    # log.info(x) { "### Caller Details =>> =>> #{caller_details}" }
    # 000000000000000000000000000000000000000000000000000000000000000


    # Raise a RuntimeError unless the token holds exactly
    # SESSION_TOKEN_SIZE characters.
    def self.assert_session_token_size session_token
      err_msg = "Session token has #{session_token.length} and not #{SESSION_TOKEN_SIZE} chars."
      raise RuntimeError, err_msg unless session_token.length == SESSION_TOKEN_SIZE
    end


    # Raise a RuntimeError unless the salt (including its trailing 2 digit
    # iteration count) holds exactly 24 characters.
    def self.assert_bcrypt_salt_size bcrypt_salt
      amalgam_length = BCRYPT_SALT_LENGTH + BCRYPT_ITER_COUNT_SIZE
      err_msg = "Expected BCrypt salt length of #{amalgam_length} not #{bcrypt_salt.length}."
      raise RuntimeError, err_msg unless bcrypt_salt.length == amalgam_length
    end


  end


end
#!/usr/bin/ruby

module OpenKey

  require 'singleton'

  # This stamp sits at the centre of a fundamental DevOps pattern concerned
  # with infrastructure provisioning and configuration management.
  #
  # The central idea behind the pattern is to link every infrastructure
  # object created during a session with a reference accurate to the nearest
  # centi-second denoting the moment the software runtime (session) began.
  class KeyNow
    include Singleton

    # The wall clock moment captured once when this singleton was created.
    attr_reader :time_now

    # Return two digit [mo] month index from 01 to 12.
    # @example 02 => in February
    #
    def self.mo
      return KeyNow.instance.time_now.strftime "%m"
    end


    # Return three character abbreviated month name.
    # @example feb => in February
    #
    def self.mmm
      return KeyNow.instance.time_now.strftime( "%b" ).downcase
    end


    #
    # Return three character abbreviated day of week.
    # @example tue => on Tuesday
    #
    def self.ddd
      return KeyNow.instance.time_now.strftime( "%a" ).downcase
    end


    #
    # Return two digit (character) hour of day from 00 to 23.
    # @example 22 => between 22.00.00 and 22.59.59 inclusive
    #
    def self.hh
      return KeyNow.instance.time_now.strftime "%H"
    end


    #
    # Return two digit minute of hour from [00] to [59].
    #
    def self.mm
      return KeyNow.instance.time_now.strftime "%M"
    end


    #
    # Return two digit second of minute from [00] to [59].
    #
    def self.ss
      return KeyNow.instance.time_now.strftime "%S"
    end


    #
    # Return a [3 digit] second and tenth of second
    # representation.
    #
    # The final digit is derived from the 1000 sliced
    # millisecond of second running from 000 to 999.
    #
    # Truncation (Not Rounding)
    #
    # The [final] digit is acquired by TRUNCATING
    # (chopping off) the last 2 of the 3 millisecond
    # digits. No rounding is applied.
    #
    # The 3 returned digits comprise of the
    #
    # - second of minute => 2 digits | [00] to [59] (and)
    # - tenth of second  => 1 digit from [0] to [9]
    #
    # @example
    #
    #    => The time at the 562nd millisecond of the 49th
    #       second of the minute.
    #
    #    => 3 chars
    #    => 495
    #
    def self.sst
      millisec_string = KeyNow.instance.time_now.strftime "%L"
      return "#{ss}#{millisec_string[0]}"
    end


    #
    # Return the [two] digit year (eg 19 for 2019)
    # that we are currently in.
    #
    def self.yy
      return KeyNow.instance.time_now.strftime("%Y")[2..-1]
    end


    #
    # Return the [four] digit year (eg 2019)
    # that we are currently in.
    #
    def self.yyyy
      return KeyNow.instance.time_now.strftime("%Y")
    end


    # ------------------------------------------------- -- #
    # Return 3 digit julian day of year [001] to [366]. -- #
    # ------------------------------------------------- -- #
    def self.jjj
      return KeyNow.instance.time_now.strftime "%j"
    end


    # [yymo_mmm] returns an amalgam of
    #
    # => the two-digit year
    # => the two-digit month index (starting at 01)
    # => a period (separator)
    # => the abbreviated month name
    #
    # @example
    #    => 1908.aug
    #    => for August 2019
    #
    def self.yymo_mmm
      return "#{yy}#{mo}.#{mmm}"
    end


    #
    # Given two integer parameters (month index and 4 digit year) representing
    # the month in question this method returns the [PREVIOUS MONTHS] character
    # amalgam in the format [yymo_mmm] where
    #
    # => yy  | previous month's two-digit year
    # => mo  | previous month's two-digit month index
    # => .   | a period (separator)
    # => mmm | previous month's abbreviated month name
    #
    # @example
    #    returns char => 1907.jul
    #    parameters   => 8, 2019
    #    representing => August, 2019
    #
    # @example (last year)
    #    returns char => 1812.dec
    #    parameters   => 1, 2019
    #    representing => January, 2019
    #
    # NOTE(review): Date::ABBR_MONTHNAMES below needs `require 'date'` -
    # no such require appears in this file; presumably loaded elsewhere.
    def self.previous_month_chars this_month_index, this_4digit_year

      prev_month_index = this_month_index == 1 ? 12 : ( this_month_index - 1 )
      prev_2dig_mn_pad = sprintf '%02d', prev_month_index
      prev_4digit_year = this_month_index == 1 ? ( this_4digit_year - 1 ) : this_4digit_year
      prev_twodigit_yr = "#{prev_4digit_year.to_s}"[2..-1]
      prev_months_name = Date::ABBR_MONTHNAMES[prev_month_index].downcase

      return "#{prev_twodigit_yr}#{prev_2dig_mn_pad}.#{prev_months_name}"

    end

    #
    # Using the current class time this method returns
    # the character amalgam for the [PREVIOUS MONTH] in
    # the format [yymo_mmm] where
    #
    # => yy  | last month's two-digit year
    # => mo  | last month's two-digit month index
    # => .   | a period (separator)
    # => mmm | last month's abbreviated month name
    #
    # @example
    #    returns          => 1907.jul
    #    if this month is => August 2019
    #
    # @example (last year)
    #    returns          => 1812.dec
    #    if this month is => January 2019
    #
    def self.yymo_mmm_prev
      return previous_month_chars mo.to_i, yyyy.to_i
    end


    # Return 5 digit amalgam of year and julian day.
    # eg [19003] for [January 3rd 2019]
    def self.yyjjj
      return "#{yy}#{jjj}"
    end


    # Return the 4 digit amalgam of the hour and minute
    # using the 24 hour clock.
    #
    # @example
    #    => 1525
    #    => 03:25 pm
    #
    def self.hhmm
      return "#{hh}#{mm}"
    end


    #
    # Return the time of day to a TENTH of a second accuracy.
    # [8] characters will always be returned with the 5th one
    # being the (period) separator.
    #
    # The first (separated) segment delivers a hhmm 24 hour
    # clock representation of the stamped time.
    #
    # The 3 digits of the second segment comprise of
    #
    #    second of minute => 2 digits | [00] to [59]
    #    tenth of second  => 1 digit from [0] to [9]
    #
    # @example
    #    => The time at the 562nd millisecond of the 49th
    #       second of the 23rd minute of the 17th hour of
    #       the day ( 17:23:49.562 )
    #
    #    => 8 chars
    #    => 1723.495
    #
    def self.hhmm_sst
      return "#{hhmm}.#{sst}"
    end


    # Return a string timestamp that is a period separated
    # amalgam of the 2 digit year, 3 digit julian day, 2 digit
    # hour, 2 digit minute, 2 digit second and 1 digit rounded
    # down tenth of second.
    #
    # @example
    #    => 19003.1025
    #    => 10:25 am on January 3rd 2019
    #
    # @example
    #    => The time at the 562nd millisecond of the 49th
    #       second of the 23rd minute of the 17th hour of
    #       the day ( 17:23:49.562 )
    #
    #    => 1723.495
    #
    def self.yyjjj_hhmm_sst
      return "#{yyjjj}.#{hhmm}.#{sst}"
    end


    # Return a string timestamp that is a period separated
    # amalgam of the 2 digit year, 3 digit julian day, 2 digit
    # hour, 2 digit minute, 2 digit second and 9 digit
    # nanosecond.
    #
    # @example
    #    return => 19003.1725.42.836592034
    #    4 time => 17:25:42 am on January 3rd 2019
    #
    # As per the above example, the time returned
    #
    # - is the 836592034 nanosecond
    # - of the 42nd second
    # - of the 25th minute
    # - of the 17th hour
    # - of the 3rd day
    # - of the 20th year
    # - of the 21st century
    #
    # @return [String]
    #    Return the time of day to nanosecond accuracy.
    #    23 characters are always returned with three (3) period
    #    separators at the 6th, 11th and 14th positions.
    def self.yyjjj_hhmm_ss_nanosec
      nanosec_str = KeyNow.instance.time_now.strftime "%9N"
      return "#{yyjjj}.#{hhmm}.#{ss}.#{nanosec_str}"
    end


    # Fetch the double barreled time stamp that is an amalgam of
    # the human readable time now and a machine time representation
    # from the moment this class was initialized.
    #
    # See the {yyjjj_hhmm_ss_nanosec} method for documentation of
    # the nanosecond accurate time stamp.
    #
    # @return [String]
    #    the double barreled time stamp containing a human readable
    #    (right this moment) time and a class initialized time
    #    representation with nanosecond accuracy.
    def self.fetch
      return "#{Time.now.ctime} ( #{yyjjj_hhmm_ss_nanosec} )"
    end


    # Grab the double barreled time stamp that is an amalgam of
    # the human readable time now and a machine time representation
    # from the moment this class was initialized.
    #
    # On Friday June the 8th at about 6:26 pm.
    #
    #    Fri Jun 8 18:26:17 2018 ( 18159.1826.138 )
    #
    # See the {yyjjj_hhmm_sst} method for documentation of stamp
    # that is accurate to the tenth of a second.
    #
    # @return [String]
    #    the double barreled time stamp containing a human readable
    #    (right this moment) time and a class initialized time
    #    representation with tenth of a second accuracy.
    def self.grab
      # Time.ctime pads single digit days with a double space - collapse it.
      time_with_consecutive_spaces = Time.now.ctime
      human_readable_str = time_with_consecutive_spaces.gsub( "  ", " " )
      return "#{human_readable_str} ( #{yyjjj_hhmm_sst} )"
    end


    # Return the Rubyfied time zone being used.
    def self.zone
      return KeyNow.instance.time_now.zone
    end


    # Log segments of time pertaining to the time stamp.
    # @todo
    #    move method contents into test class
    def self.log_instance_time

      log.info(x) { "[stamp] -------------- => -------------------------------- #" }
      log.info(x) { "[stamp] eco time stamp => [#{KeyNow.instance.time_now.ctime}]" }
      log.info(x) { "[stamp] -------------- => -------------------------------- #" }
      log.info(x) { "[stamp] Univ Time Zone => #{zone}" }
      log.info(x) { "[stamp] Month Index is => #{mo}" }
      log.info(x) { "[stamp] Month Name is  => #{mmm}" }
      log.info(x) { "[stamp] Day Of Week is => #{ddd}" }
      log.info(x) { "[stamp] -------------- => -------------------------------- #" }
      log.info(x) { "[stamp] Two Digit Year => #{yy}" }
      log.info(x) { "[stamp] Julian Cal Day => #{jjj}" }
      log.info(x) { "[stamp] Yr and Jul Day => #{yyjjj}" }
      log.info(x) { "[stamp] Hour of Theday => #{hh}" }
      log.info(x) { "[stamp] Minute of Hour => #{mm}" }
      log.info(x) { "[stamp] Hour + Minute  => #{hhmm}" }
      log.info(x) { "[stamp] Second of Min  => #{ss}" }
      log.info(x) { "[stamp] 600 Min Slices => #{sst}" }
      log.info(x) { "[stamp] -------------- => -------------------------------- #" }
      log.info(x) { "[stamp] The Time Stamp => #{yyjjj_hhmm_sst}" }
      log.info(x) { "[stamp] -------------- => -------------------------------- #" }

    end


    # This singleton (one instance) class sets the time just once.
    def initialize
      @time_now = Time.now;
    end


    # NOTE(review): this call executes at class load time, so merely
    # requiring this file emits the log lines above and demands that the
    # `log`/`x` logging helpers are already mixed in - confirm intended.
    KeyNow.log_instance_time


  end


end
+ # + # + # == Example Data Exchange + # + # Issue the below ruby calls and specify a /path/to/file + # + # keymap = KeyPair.new "/path/to/file" + # + # keymap.use "phone_numbers" + # keymap.set "joe", "0044 7500 123456" + # keymap.set "amy", "0044 7678 123456" + # + # Now visit the file to see your exchanged data. + # + # [phone_numbers] + # joe = 0044 7500 123456 + # amy = 0044 7678 123456 + # + # + # == The Current Section + # + # You can set the current section with the {use} method and then + # subsequent read, write, or query behaviour will reference the section that + # you stated. + # + # You do not need a new object to switch sections - just go ahead and + # use another another one. + # + # Remember that KeyPair is two-dimensional data structure so all + # key-value pairs are stored under the auspices of a section. + # + # == Key-Value Pair Exchanges + # + # Representational state transfer occurs with four methods with + # + # - custom sections referenced through {read} and {write} + # - said sections transfered via ubiquitous {get} and {set} + # + # The name given to the default group can be specified to the constructor. + # If none is provided the aptly named "default" is used. + class KeyPair + + # Initialize the key value store and auto write a time stamp that + # has nano-second accuracy with a key whose name is gleened from + # the constant {KeyData::INIT_TIME_STAMP_NAME}. + # + # The path to the backing INI file is gleened from the first + # backing file path parameter. + # + # @param backing_file_path [String] + # the expected location of the file-backed key-value store. + # If the folder and/or file do not exist the folder is created + # and then the file is created along with the time stamps. + # + # @param the_default_group [String] + # the name of the default group. If none is presented this value + # will default to the aptly named "default". 
+ def initialize backing_file_path + @file_path = backing_file_path + create_dir_if_necessary + end + + + # Set the section to use for future data exchanges via the ubiquitous {get} + # and {set} methods as well as the query {contains} key method. + # + # @param the_section_name [String] + # the non-nil and non whitespace only section name that will lead a + # set of key-value pairs in the INI formatted file. + def use the_section_name + raise ArgumentError, "Cannot use a Nil section name." if the_section_name.nil? + @section_to_use = the_section_name + end + + # Stash the setting directive and its value into the configuration file + # using the default settings group. + # + # @param key_name [String] the name of the key whose value is to be written + # @param key_value [String] the data item value of the key specified + def set key_name, key_value + raise ArgumentError, "Cannot set a Nil section name." if @section_to_use.nil? + write @section_to_use, key_name, key_value + end + + + # Stash the setting directive and its value into the configuration file + # using the default settings group. + # + # @param key_name [String] the name of the key whose value is to be written + # @return [String] + # return the value of the configuration directive in the default group + def get key_name + raise ArgumentError, "Cannot get from a Nil section name." if @section_to_use.nil? + read @section_to_use, key_name + end + + + # Write the key/value pair in the parameter into this key/value store's + # base file-system backing INI file. + # + # This method assumes the existence of the backing configuration file at + # the @file_path instance variable that was set during initialization. + # + # Observable value is the written key/value pair within the specified + # section. 
The alternate flows are + # + # - if the section does not exist it is created + # - if the section and key exist the value is inserted or overwritten + # + # @param section_name [String] name grouping the section of config values + # @param key [String] the key name of config directive to be written into the file + # @param value [String] value of the config directive to be written into the file + # + def write section_name, key, value + + config_map = IniFile.new( :filename => @file_path, :encoding => 'UTF-8' ) + config_map = IniFile.load( @file_path ) if File.file? @file_path + config_map[section_name][key] = value + config_map.write + + end + + + # Given the configuration key name and the context name, get the + # corresponding key value from the configuration file whose path + # is acquired using the {self#get_filepath} method. + # + # @param key_name [String] the key whose value is to be retrieved + # + # @return [String] the value configured for the parameter key + # + # @raise ArgumentError for any one of a long list of reasons that + # cause the key value to not be retrieved. This can range from + # non-existent directories and files, non readable files, incorrect + # configurations right down to missing keys or even missing values. + def read section_name, key_name + + raise ArgumentError.new "No section given." if section_name.nil? || section_name.strip.empty? + raise ArgumentError.new "No parameter key given." if key_name.nil? || key_name.strip.empty? + raise ArgumentError.new "No file found at [ #{@file_path} ]" unless File.exists? @file_path + the_text = File.read @file_path + raise ArgumentError.new "This file is empty => [ #{@file_path} ]" if the_text.empty? 
+ + the_data = IniFile.load @file_path + key_exists = the_data[ section_name ].has_key?( key_name ) + key_err_msg = "Key [#{key_name}] not found in section [#{section_name}]" + raise ArgumentError, key_err_msg unless key_exists + + rawvalue = the_data[section_name][key_name] + key_val_msg = "Nil empty or whitespace value for key [#{section_name}][#{key_name}]" + nil_empty_or_whitespace = rawvalue.nil? || rawvalue.chomp.strip.empty? + raise ArgumentError, key_val_msg if nil_empty_or_whitespace + + return rawvalue.chomp.strip + + end + + + # Return true if the settings configuration file contains the specified + # parameter key within the current section name that has been set via + # the {use} method. + # + # This method does not check the contents (value) of the key. Even if it + # is an empty string, this method returns true so long as the section + # exists and the key exists within that. + # + # @param key_name [String] + # does a key with this name exist within the current map section. + # + # @return [Boolean] + # return true if the current section exists and a key with the parameter + # name exists within it. + # return false if either the section or the key do not exist. + # + # raise [ArgumentError] + # if the configuration file does not exist or is empty + # if the paramter key_name is nil, empty or contains only whitespace + def contains? key_name + + raise ArgumentError.new "No parameter key given." if key_name.nil? || key_name.strip.empty? + raise ArgumentError.new "No file found at [ #{@file_path} ]" unless File.exists? @file_path + the_text = File.read @file_path + raise ArgumentError.new "This file is empty => [ #{@file_path} ]" if the_text.empty? + + the_data = IniFile.load @file_path + return false unless the_data.has_section?( @section_to_use ) + return the_data[ @section_to_use ].has_key?( key_name ) + + end + + + + # Return true if the settings configuration file contains the specified + # section name. 
This method ignores whatever section that may or may not + # have been pointed to by the use command. + # + # @param section_name [String] + # does a section with this name exist within the file data structure + # + # @return [Boolean] + # return true if a section exists with the specified name + def has_section? section_name + + KeyError.not_new( section_name, self ) + + raise ArgumentError.new "No file found at [ #{@file_path} ]" unless File.exists? @file_path + the_text = File.read @file_path + raise ArgumentError.new "This file is empty => [ #{@file_path} ]" if the_text.empty? + + the_data = IniFile.load @file_path + return the_data.has_section?( section_name ) + + end + + + + # Get the time stamp that was written to the key-value store at + # the point it was first initialized and then subsequently written + # out (serialized) onto the file-system. + # + # The time stamp returned marks the first time this key-value store + # was conceived by a use case actor and subsequently serialized. + # + # @return [String] + # the string time stamp denoting the first time this key-value + # store was first initialized and then subsequently written out + # (serialized) onto the file-system. + def time_stamp + return get INIT_TIME_STAMP_NAME + end + + + + private + + + + def create_dir_if_necessary + + config_directory = File.dirname @file_path + return if (File.exist? config_directory) && (File.directory? config_directory) + FileUtils.mkdir_p config_directory + + end + + + end + + +end diff --git a/lib/keytools/key.pass.rb b/lib/keytools/key.pass.rb new file mode 100644 index 0000000..0742d50 --- /dev/null +++ b/lib/keytools/key.pass.rb @@ -0,0 +1,120 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + class KeyPass + + + # Collect something sensitive from the command line with a + # minimum length specified in the first parameter. 
This method can't + # know whether the information is a password, a pin number or whatever + # so it takes the integer minimum size at its word. + # + # Question 5 to App Config | What is the Secret? + # + # The client may need to acquire the secret if the answer to question 4 indicates the need + # to instantiate the keys and encrypt the application's plaintext database. The application + # should facilitate communication of the secret via + # + # - an environment variable + # - the system clipboard (cleared after reading) + # - a file whose path is a command parameter + # - a file in a pre-agreed location + # - a file in the present directory (with a pre-agreed name) + # - a URL from a parameter or pre-agreed + # - the shell's secure password reader + # - the DConf / GConf or GSettings configuration stores + # - a REST API + # - password managers like LastPass, KeePassX or 1Pass + # - the Amazon KMS (Key Management Store) + # - vaults from Ansible, Terraform and Kubernetes + # - credential managers like GitSecrets and Credstash + # + # @param prompt_twice [Boolean] indicate whether the user should be + # prompted twice. If true the prompt_2 text must be provided and + # converse is also true. A true value asserts that both times the + # user enters the same (case sensitive) string. 
+ # + # @return [String] the collected string text ( watch out for non-ascii chars) + # @raise [ArgumentError] if the minimum size is less than one + def self.password_from_shell prompt_twice + + assert_min_size MINIMUM_PASSWORD_SIZE + + sleep(1) + puts "Password:" + first_secret = STDIN.noecho(&:gets).chomp + + assert_input_text_size first_secret.length, MINIMUM_PASSWORD_SIZE + return first_secret unless prompt_twice + + sleep(1) + puts "Re-enter the password:" + check_secret = STDIN.noecho(&:gets).chomp + + assert_same_size_text first_secret, check_secret + + return first_secret + + end + + + # -- + # -- Raise an exception if asked to collect text that is less + # -- than 3 characters in length. + # -- + def self.assert_min_size min_size + + min_length_msg = "\n\nCrypts with 2 (or less) characters open up exploitable holes.\n\n" + raise ArgumentError.new min_length_msg if min_size < 3 + + end + + + # -- + # -- Output an error message and then exit if the entered input + # -- text size does not meet the minimum requirements. + # -- + def self.assert_input_text_size input_size, min_size + + if( input_size < min_size ) + + puts + puts "Input is too short. Please enter at least #{min_size} characters." + puts + + exit + + end + + end + + + # -- + # -- Assert that the text entered the second time is exactly (case sensitive) + # -- the same as the text entered the first time. + # -- + def self.assert_same_size_text first_text, second_text + + unless( first_text.eql? second_text ) + + puts + puts "Those two bits of text are not the same (in my book)!" 
+ puts + + exit + + end + + end + + private + + MINIMUM_PASSWORD_SIZE = 4 + + + end + + +end diff --git a/lib/keytools/key.rb b/lib/keytools/key.rb new file mode 100644 index 0000000..8f48ee6 --- /dev/null +++ b/lib/keytools/key.rb @@ -0,0 +1,585 @@ +#!/usr/bin/ruby + +module OpenKey + + # First use the class methods to source keys, then use a key's instance + # methods to access its properties and in concert with other symmetrical + # information, you can use the keys to lock (encrypt) or unlock (decrypt) + # other keys and objecs. + # + # == Sourcing and Deriving Keys + # + # Keys can be + # + # - sourced from a secure random byte generating function + # - sourced from ciphertext and another (decryption) key + # - generated by passing a secret through key derivation functions + # - regenerated from a secret and previously stored salts + # - sourced from the current unique workstation shell environment + # - sourced from an environment variable containing ciphertext + # + # + # Keys need to be viewed (represented) in multiple ways and the essence + # of the key viewer is to input keys {as_bits}, {as_bytes} and {as_base64} + # and then output the same key (in as far as is possible) - as bits, as + # bytes and as base64. + # + # == Key | To and From Behaviour + # + # Use the From methods to create Keys from a variety of resources + # such as + # + # - a base64 encoded string + # - a binary byte string + # - a string of one and zero bits + # - a hexadecimal representation + # + # Once you have instantiated the key, you will then be able to convert it + # (within reason due to bit, byte and base64 lengths) to any of the above + # key representations. + # + # == Key | Bits Bytes and Base64 + # + # The shoe doesn't always fit when its on the other foot and this is best + # illustratd with a table that maps bits to 8 bit bytes and 6 bit Base64 + # characters. + # + # | --------- | -------- | ------------ | ------------------------------- | + # | Fit? 
| Bits | Bytes | (and) Base64 | + # | --------- | -------- | ------------ | ------------------------------- | + # | Perfect | 168 Bits | is 21 bytes | 28 Chars - bcrypt chops to this | + # | Perfect | 216 Bits | is 27 bytes | 36 Chars - | + # | Perfect | 264 Bits | is 33 bytes | 44 Chars - holder 4 256bit keys | + # | Perfect | 384 Bits | is 48 bytes | 64 Chars - 216 + 168 equals 384 | + # | --------- | -------- | ------------ | ------------------------------- | + # | Imperfect | 128 Bits | 16 precisely | 22 Chars - 21 + 2 remain bits | + # | Imperfect | 186 Bits | 23 remain 2 | 31 Characers precisely | + # | Imperfect | 256 Bits | 32 precisely | 43 Chars - 42 + 4 remain bits | + # | --------- | -------- | ------------ | ------------------------------- | + # + # Yes, the shoe doesn't always fit when it's on the other foot. + # + # == Schoolboy Error + # + # The strategy is so simple, we call it a schoolboy error. + # + # If we want to use a key with n bits and either n % 6 or n % 8 (or both) + # are not zero - we instantiate a Key with the lowest common + # denominator of 6 and 8 that exceeds n. + # + # So when we request a byte, or base64 representation the viewer will + # truncate (not round down) to the desired length. + # + # == Mapping Each Character to 6 Binary Bits + # + # We need 6 binary bits to represent a base64 character (and 4 + # bits for hexadecimal). Here is an example mapping between + # a base 64 character, an integer and the six bit binary. + # + # Character Integer Binary (6 Bit) + # + # a 0 000000 + # b 1 000001 + # c 2 000010 + # + # y 25 011001 + # z 26 011010 + # A 27 011011 + # B 28 011100 + # + # 8 60 111100 + # 9 61 111101 + # / 62 111110 + # + 63 111111 + # + class Key + + # Initialize a key object from a bit string of ones and zeroes provided + # in the parameter string. 
+ # + # For example a string of 384 bits (ones and zeroes) can be thought of + # as a 48 byte key which can also be represented with 64 more compact + # base64 characters. + # + # | -------- | ------------ | -------------------------------- | + # | Bits | Bytes | Base64 | + # | -------- | ------------ | -------------------------------- | + # | 384 Bits | is 48 bytes | and 64 characters | + # | -------- | ------------ | -------------------------------- | + # + # @param the_bit_string [String] + # the bit string of ones and zeroes that represents the bits that + # represent this key + def initialize the_bit_string + @bit_string = the_bit_string + end + + + # Return a (secure) randomly generated super high entropy 384 bit key + # that can be stored with 64 base64 characters and used to + # source digest functions that can unreversibly convert + # the key to a 256 bit symmetric encryption key. + # + # | -------- | ------------ | -------------------------------- | + # | Bits | Bytes | Base64 | + # | -------- | ------------ | -------------------------------- | + # | 384 Bits | is 48 bytes | and 64 characters | + # | -------- | ------------ | -------------------------------- | + # + # This key easily translates to a base64 and/or byte array format because + # the 384 bit count is a multiple of both 6 and 8. + # + # @return [OpenKey::Key] + # return a key containing 384 random bits (or a random array of 48 bytes) + # which can if necessary be serialized into 64 base64 characters. + # + # @raise [ArgumentError] + # If a nil or zero length byte array is received. + # Or if the number of bytes multiplied by 8 + # is not a multiple of 6. + def self.from_random + return Key.new( to_random_bits( RANDOM_KEY_BYTE_LENGTH ) ) + end + + + def self.to_random_bits the_byte_length + random_bit_string = "" + for n in 1 .. 
the_byte_length + random_integer = SecureRandom.random_number( EIGHT_BIT_INTEGER_SIZE ) + random_bit_string += "%08d" % [ random_integer.to_s(2) ] + end + return random_bit_string + end + + + # Return the key represented by the parameter sequence of base64 + # characters. + # + # @param char64_string [String] + # + # The base64 character sequence which the returned key is + # instantiated from. Naturally this character sequencee cannot + # be nil, nor can it contain any characters that are not + # present in {Key64::YACHT64_CHARACTER_SET}. + # + # Ideally the number of parameter characters multiplied by 6 + # should be a multiple of eight (8) otherwise the new + # key's bit string will require padding and extension. + # + # @return [OpenKey::Key] + # return a key from the parameter sequence of base64 characters. + # + # @raise [ArgumentError] + # If a nil or zero length byte array is received. + # Or if the number of bytes multiplied by 8 + # is not a multiple of 6. + def self.from_char64 char64_string + return Key.new( Key64.to_bits( char64_string ) ) + end + + + # Return a key represented by the parameter binary string. + # + # @param binary_text [String] + # The binary string that the returned key will be + # instantiated from. + # + # @return [OpenKey::Key] + # return a key from the binary byte string parameter + def self.from_binary binary_text + ones_and_zeroes = binary_text.unpack("B*")[0] + return Key.new( ones_and_zeroes ) + end + + + # Convert a string of Radix64 characters into a key. + # + # This method converts the base64 string into the internal YACHT64 format + # and then converts that into a bit string so that a key can be instantiated. + # + # @param radix64_string [String] + # the radix64 string to convert into akey. This string will be a subset + # of the usual 62 character suspects together with period and forward + # slash characters. + # + # This parameter should not contain newlines nor carriage returns. 
+ # + # @return [OpenKey::Key] + # return a key from the parameter sequence of base64 characters. + # + # @raise [ArgumentError] + # If a nil or zero length parameter array is received. + def self.from_radix64 radix64_string + return Key.new( Key64.from_radix64_to_bits( radix64_string ) ) + end + + + # When a key is initialized, it is internally represented as a + # string of ones and zeroes primarily for simplicity and can be + # visualized as bits that are either off or on. + # + # Once internalized a key can also be represented as + # + # - a sequence of base64 (or radix64) characters (1 per 6 bits) + # - a binary string suitable for encryption (1 byte per 8 bits) + # - a 256bit encryption key from Digest(ing) the binary form + # + # @return [String] + # a string of literally ones and zeroes that represent the + # sequence of bits making up this key. + def to_s + + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + ## Write duplicate ALIAS method called ==> to_bits() <== (bits and pieces) + + ## --------------------------------------------- + ## +++++++++ WARNING ++++++++ + ## --------------------------------------------- + ## + ## to_s does not need 2b called + ## So both the below print the same. + ## + ## So YOU MUST KEEP the to_s method until a proper test suite is in place. + ## So YOU MUST KEEP the to_s method until a proper test suite is in place. + ## + ## puts "#{the_key}" + ## puts "#{the_key.to_s}" + ## + ## So YOU MUST KEEP the to_s method until a proper test suite is in place. 
+ ## So YOU MUST KEEP the to_s method until a proper test suite is in place. + ## + ## --------------------------------------------- + + return @bit_string + end + + + # Convert this keys bit value into a printable character set + # that is suitable for storing in multiple places such as + # environment variables and INI files. + # + # @return [String] + # printable characters from a set of 62 alpha-numerics + # plus an @ symbol and a percent % sign. + # + # @raise ArgumentError + # If the bit value string for this key is nil. + # Or if the bit string length is not a multiple of six. + # Or if it contains any character that is not a 1 or 0. + def to_char64 + assert_non_nil_bits + return Key64.from_bits( @bit_string ) + end + + + # Return the un-printable binary bytes representation + # of this key. If you store 128 bits it will produce 22 characters + # because 128 divide by 6 is 21 characters and a remainder of two (2) + # bits. + # + # The re-conversion of the 22 characters will now produce 132 bits which + # is different from the original 128 bits. + # + # @return [Byte] + # a non-printable binary string of eight (8) bit bytes which can be + # used as input to both digest and symmetric cipher functions. + def to_binary + return [ to_s ].pack("B*") + end + + + # Return the un-printable binary bytes representation + # of this key. If you store 128 bits it will produce 22 characters + # because 128 divide by 6 is 21 characters and a remainder of two (2) + # bits. + # + # The re-conversion of the 22 characters will now produce 132 bits which + # is different from the original 128 bits. + # + # @return [Byte] + # a non-printable binary string of eight (8) bit bytes which can be + # used as input to both digest and symmetric cipher functions. 
+ def self.to_binary_from_bit_string bit_string_to_convert + return [ bit_string_to_convert ].pack("B*") + end + + + # This method uses digests to convert the key's binary representation + # (which is either 48 bytes for purely random keys or 64 bytes for keys + # derived from human sourced secrets) into a key whose size is ideal for + # plying the ubiquitous AES256 symmetric encryption algorithm. + # + # This method should only ever be called when this key has been derived + # from either a (huge) 48 byte random source or from a key derivation + # function (KDF) such as BCrypt, SCrypt, PBKDF2 or a union from which the + # 512 bit (64 byte) key can be reduced to 256 bits. + # + # @return [String] + # a binary string of thirty-two (32) eight (8) bit bytes which + # if appropriate can be used as a symmetric encryption key especially + # to the powerful AES256 cipher. + def to_aes_key + return Digest::SHA256.digest( to_binary() ) + end + + + # This method uses the SHA384 digest to convert this key's binary + # representation into another (newly instantiated) key whose size + # is precisely 384 bits. + # + # If you take the returned key and call + # + # - {to_char64} you get a 64 character base64 string + # - {to_s} you get a string of 384 ones and zeroes + # - {to_binary} you get a 48 byte binary string + # + # @return [OpenKey::Key] + # a key with a bit length (ones and zeroes) of precisely 384. + def to_384_bit_key + + a_384_bit_key = Key.from_binary( Digest::SHA384.digest( to_binary() ) ) + + has_384_chars = a_384_bit_key.to_s.length == 384 + err_msg = "Digested key length is #{a_384_bit_key.to_s.length} instead of 384." + raise RuntimeError, err_msg unless has_384_chars + + return a_384_bit_key + + end + + + # Use the {OpenSSL::Cipher::AES256} block cipher in CBC mode and the binary + # 256bit representation of this key to encrypt the parameter key. + # + # Store the ciphertext provided by this method. 
To re-acquire (reconstitute) + # the parameter key use the {do_decrypt_key} decryption method with + # the ciphertext produced here. + # + # Only Encrypt Strong Keys + # + # Never encrypt a potentially weak key, like one derived from a human password + # (even though it is put through key derivation functions). + # + # Once generated (or regenerated) a potentially weak key should live only as + # long as it takes for it to encrypt a strong key. The strong key can then + # be used to encrypt valuable assets. + # + # Enforcing Strong Key Size + # + # If one key is potentially weaker than the other, the weaker key must be this + # object and the strong key is the parameter key. + # + # This method thus enforces the size of the strong key. A strong key has + # 384 bits of entropy, and is represented by 64 base64 characters. + # + # @param key_to_encrypt [OpenKey::Key] + # this is the key that will first be serialized into base64 and then locked + # down using the 256 bit binary string from this host object as the symmetric + # encryption key. + # + # This method is sensitive to the size of the parameter key and expects to + # encrypt exactly 64 base64 characters within the parameter key. + # + # @return [String] + # The returned ciphertext should be stored. Its breakdown is as follows. + # 96 bytes are returned which equates to 128 base64 characters. + # The random initialization vector (iv) accounts for the first 16 bytes. + # The actual crypt ciphertext then accounts for the final 80 bytes. + # + # @raise [ArgumentError] + # the size of the parameter (strong) key is enforced to ensure that it has + # exactly 384 bits of entropy which are represented by 64 base64 characters. 
+ def do_encrypt_key key_to_encrypt + + crypt_cipher = OpenSSL::Cipher::AES256.new(:CBC) + + crypt_cipher.encrypt() + random_iv = crypt_cipher.random_iv() + crypt_cipher.key = to_aes_key() + + calling_module = File.basename caller_locations(1,1).first.absolute_path, ".rb" + calling_method = caller_locations(1,1).first.base_label + calling_lineno = caller_locations(1,1).first.lineno + caller_details = "#{calling_module} | #{calling_method} | (line #{calling_lineno})" + + cipher_text = crypt_cipher.update( key_to_encrypt.to_char64 ) + crypt_cipher.final + + binary_text = random_iv + cipher_text + ones_zeroes = binary_text.unpack("B*")[0] + ciphertxt64 = Key64.from_bits( ones_zeroes ) + + size_msg = "Expected bit count is #{EXPECTED_CIPHER_BIT_LENGTH} not #{ones_zeroes.length}." + raise RuntimeError, size_msg unless ones_zeroes.length == EXPECTED_CIPHER_BIT_LENGTH + + return ciphertxt64 + + end + + + # Use the {OpenSSL::Cipher::AES256} block cipher in CBC mode and the binary + # 256bit representation of this key to decrypt the parameter ciphertext and + # return the previously encrypted key. + # + # To re-acquire (reconstitute) the original key call this method with the + # stored ciphertext that was returned by the {do_encrypt_key}. + # + # Only Encrypt Strong Keys + # + # Never encrypt a potentially weak key, like one derived from a human password + # (even though it is put through key derivation functions). + # + # Once generated (or regenerated) a potentially weak key should live only as + # long as it takes for it to encrypt a strong key. The strong key can then + # be used to encrypt valuable assets. + # + # Enforcing Strong Key Size + # + # If one key is potentially weaker than the other, the weaker key must be this + # object and the strong key is reconstituted and returned by this method. + # + # @param ciphertext_to_decrypt [String] + # Provide the ciphertext produced by our sister key encryption method. 
+ # The ciphertext should hold 96 bytes which equates to 128 base64 characters. + # The random initialization vector (iv) accounts for the first 16 bytes. + # The actual crypt ciphertext then accounts for the final 80 bytes. + # + # @return [Key] + # return the key that was serialized into base64 and then encrypted (locked down) + # with the 256 bit binary symmetric encryption key from this host object. + # + # @raise [ArgumentError] + # the size of the parameter ciphertext must be 128 base 64 characters. + def do_decrypt_key ciphertext_to_decrypt + + bit_text = Key64.to_bits(ciphertext_to_decrypt) + size_msg = "Expected bit count is #{EXPECTED_CIPHER_BIT_LENGTH} not #{bit_text.length}." + raise RuntimeError, size_msg unless bit_text.length == EXPECTED_CIPHER_BIT_LENGTH + + cipher_x = OpenSSL::Cipher::AES256.new(:CBC) + cipher_x.decrypt() + + rawbytes = [ bit_text ].pack("B*") + + cipher_x.key = to_aes_key() + cipher_x.iv = rawbytes[ 0 .. ( RANDOM_IV_BYTE_COUNT - 1 ) ] + key_chars_64 = cipher_x.update( rawbytes[ RANDOM_IV_BYTE_COUNT .. -1 ] ) + cipher_x.final + + return Key.from_char64( key_chars_64 ) + + end + + + # Use the {OpenSSL::Cipher::AES256} block cipher in CBC mode and the binary + # 256bit representation of this key to encrypt the parameter plaintext using + # the parameter random initialization vector. + # + # Store the ciphertext provided by this method. To re-acquire (reconstitute) + # the plaintext use the {do_decrypt_text} decryption method, giving + # it the same initialization vector and the ciphertext produced here. + # + # Only Encrypt Once + # + # Despite the initialization vector protecting against switch attacks you + # should only use this or any other key once to encrypt an object. + # While it is okay to encrypt small targets using two different keys, it + # pays not to do the same when the target is large. 
+ # + # @param random_iv [String] + # a randomly generated 16 byte binary string that is to be used as the + # initialization vector (IV) - this is a requirement for AES encryption + # in CBC mode - this IV does not need to be treated as a secret + # + # @param plain_text [String] + # the plaintext or binary string to be encrypted. To re-acquire this string + # use the {do_decrypt_text} decryption method, giving it the same + # initialization vector (provided in the first parameter) and the ciphertext + # returned from this method. + # + # @return [String] + # The returned binary ciphertext should be encoded and persisted until such + # a time as its re-acquisition by authorized parties becomes necessary. + def do_encrypt_text random_iv, plain_text + + crypt_cipher = OpenSSL::Cipher::AES256.new(:CBC) + + crypt_cipher.encrypt() + crypt_cipher.iv = random_iv + crypt_cipher.key = to_aes_key() + + return crypt_cipher.update( plain_text ) + crypt_cipher.final + + end + + + # Use the {OpenSSL::Cipher::AES256} block cipher in CBC mode and the binary + # 256bit representation of this key to decrypt the parameter ciphertext using + # the parameter random initialization vector. + # + # Use this method to re-acquire (reconstitute) the plaintext that was + # converted to ciphertext by the {do_encrypt_text} encryption method, + # naturally using the same initialization vector for both calls. + # + # Only Decrypt Once + # + # Consider a key spent as soon as it decrypts the one object it was + # created to decrypt. Like a bee dying after a sting, a key should die after + # it decrypts an object. Should re-decryption be necessary - another key + # should be derived or generated. 
+ # + # @param random_iv [String] + # a randomly generated 16 byte binary string that is to be used as the + # initialization vector (IV) - this is a requirement for AES decryption + # in CBC mode - this IV does not need to be treated as a secret + # + # @param cipher_text [String] + # the ciphertext or binary string to be decrypted in order to re-acquire + # (reconstitute) the plaintext that was converted to ciphertext by the + # {do_encrypt_text} encryption method. + # + # @return [String] + # if the plaintext (or binary string) returned here still needs to be + # kept on the low, derive or generate another key to protect it. + def do_decrypt_text random_iv, cipher_text + + raise ArgumentError, "Incoming cipher text cannot be nil." if cipher_text.nil? + + crypt_cipher = OpenSSL::Cipher::AES256.new(:CBC) + + crypt_cipher.decrypt() + crypt_cipher.iv = random_iv + crypt_cipher.key = to_aes_key() + + return crypt_cipher.update( cipher_text ) + crypt_cipher.final + + end + + + private + + + RANDOM_KEY_BYTE_LENGTH = 48 + + EIGHT_BIT_INTEGER_SIZE = 256 + + RANDOM_IV_BYTE_COUNT = 16 + + CIPHERTEXT_BYTE_COUNT = 80 + + EXPECTED_CIPHER_BIT_LENGTH = ( CIPHERTEXT_BYTE_COUNT + RANDOM_IV_BYTE_COUNT ) * 8 + + + def assert_non_nil_bits + nil_err_msg = "The bit string for this key is nil." + raise RuntimeError, nil_err_msg if @bit_string.nil? + end + + + end + + +end diff --git a/lib/logging/gem.logging.rb b/lib/logging/gem.logging.rb new file mode 100644 index 0000000..b380edf --- /dev/null +++ b/lib/logging/gem.logging.rb @@ -0,0 +1,132 @@ +require "logger" +require "session/user.home" + +# [MIXIN] magic is deployed to hand out DevOps quality logging +# features to any class that includes the logging module. +# +# When logging facilities are not ready we need to log just to +# stdout but when they are we need to use them. +# +# mixin power enables one class to give the logfile path and all +# classes will suddenly retrieve a another logger and use that. 
+# +# include Logging +# def doImportant +# log.warn(x) "unhappy about doing this" +# do_anyway +# log.debug(x) "all good it was okay" +# end +# +# So what are Mixins? +# +# Refer to the below link for excellent coverage of mixins. +# @see http://ruby-doc.com/docs/ProgrammingRuby/html/tut_modules.html +# +module OpenLogger + + @@gem_name = "safedb.net" + @@gem_base = File.join OpenSession::Home.dir, ".#{@@gem_name}" + FileUtils.mkdir_p @@gem_base unless File.exists? @@gem_base + @@log_path = File.join @@gem_base, "safedb.net-cli.log" + + + # Classes that include (MIXIN) this logging module will + # have access to this logger method. + # + # [memoization] is implemented here for performance and + # will only initiate a logger under 2 circumstances + # + # [1] - the first call (returns a STDOUT logger) + # [2] - the call after the logfile path is set + # (returns a more sophisticated logger) + def log + + @@log_class ||= get_logger + + end + + + # This Ruby behavioural snippet allows the logger to print 3 crucial + # pieces of information for the troubleshooter (detective) so that they + # may ascertain + # + # - the [module] the logging call came from + # - the [method] the logging call came from + # - line number origins of the logging call + # + # To use this method you can make calls like this + # + # - log.info(x) { "Log many things about where I am now." } + # - log.warn(x) { "Log many things about where I am now." } + # + def x + + module_name = File.basename caller_locations(1,1).first.absolute_path, ".rb" + method_name = caller_locations(1,1).first.base_label + line_number = caller_locations(1,1).first.lineno + + "#{module_name} | #{method_name} | (line #{line_number}) " + + end + + + # This method returns an initialized logger. 
+ # + # The logger returned may write to + # + # - a simple file + # - a service like fluentd + # - a message queue + # - a nosql database + # - all of the above + # + # Not that [memoization] should be used so that this method + # gets called ideally just once although in practise it may + # turn out to be a handful of times. + # + # @return [Logger] return an initialized logger object + def get_logger + + file_logger = Logger.new @@log_path + original_formatter = Logger::Formatter.new + + file_logger.formatter = proc { |severity, datetime, progname, msg| + original_formatter.call( severity, datetime, progname, msg.dump.chomp.strip ) + } + + return file_logger + + end + + + # Overtly long file paths in the log files sometimes hamper readability + # and this method improves the situation by returning just the two + # immediate ancestors of the file (or folder) path. + # + # @example A really long input like + # /home/joe/project/degrees/math/2020 + # is reduced to + # degrees/math/2020 + # + # So this method returns the name of the grandparent folder then parent folder + # and then the most significant file (or folder) name. + # + # When this is not possible due to the filepath being colisively near the + # filesystem's root, it returns the parameter name. 
+ # + # @param object_path [String] overtly long path that will be made more readable + # @return [String] the (separated) three most significant path name segments + def nickname object_path + + object_name = File.basename object_path + parent_folder = File.dirname object_path + parent_name = File.basename parent_folder + granny_folder = File.dirname parent_folder + granny_name = File.basename granny_folder + + return [granny_name,parent_name,object_name].join("/") + + end + + +end diff --git a/lib/modules/README.md b/lib/modules/README.md new file mode 100644 index 0000000..074b0bd --- /dev/null +++ b/lib/modules/README.md @@ -0,0 +1,43 @@ + + +In order for data to be secured for storage or transmission, it must be transformed in such a manner that it would be difficult for an unauthorized individual to be able to discover its true meaning. To do this, certain mathematical equations are used, which are very difficult to solve unless certain strict criteria are met. The level of difficulty of solving a given equation is known as its intractability. These types of equations form the basis of cryptography. + +Some of the most important are: + +== The Discrete Logarithm Problem + +The best way to describe this problem is first to show how its inverse concept works. The following applies to Galois fields (groups). Assume we have a prime number P (a number that is not divisible except by 1 and itself, P). This P is a large prime number of over 300 digits. Let us now assume we have two other integers, a and b. Now say we want to find the value of N, so that value is found by the following formula: + +N = a^b mod P, where 0 <= N <= (P - 1) + +(meaning a to the power b mod P) +(learn to do powers in markdown)! + + +This is known as discrete exponentiation and is quite simple to compute. However, the opposite is true when we invert it. If we are given P, a, and N and are required to find b so that the equation is valid, then we face a tremendous level of difficulty.
+ + +This problem forms the basis for a number of public key infrastructure algorithms, such as Diffie-Hellman and ElGamal. This problem has been studied for many years and cryptography based on it has withstood many forms of attacks. + +== The Integer Factorization Problem + +This is simple in concept. Say that one takes two prime numbers, P2 and P1, which are both "large" (a relative term, the definition of which continues to move forward as computing power increases). We then multiply these two primes to produce the product, N. The difficulty arises when, being given N, we try to find the original P1 and P2. The Rivest-Shamir-Adleman public key infrastructure encryption protocol is one of many based on this problem. To simplify matters to a great degree, the N product is the public key and the P1 and P2 numbers are, together, the private key. + +This problem is one of the most fundamental of all mathematical concepts. It has been studied intensely for the past 20 years and the consensus seems to be that there is some unproven or undiscovered law of mathematics that forbids any shortcuts. That said, the mere fact that it is being studied intensely leads many others to worry that, somehow, a breakthrough may be discovered. + +== The Elliptic Curve Discrete Logarithm Problem + +This is a new cryptographic protocol based upon a reasonably well-known mathematical problem. The properties of elliptic curves have been well known for centuries, but it is only recently that their application to the field of cryptography has been undertaken. + +First, imagine a huge piece of paper on which is printed a series of vertical and horizontal lines. Each line represents an integer with the vertical lines forming x class components and horizontal lines forming the y class components. The intersection of a horizontal and vertical line gives a set of coordinates (x,y).
In the highly simplified example below, we have an elliptic curve that is defined by the equation: + +y^2 + y = x^3 - x^2 (this is way too small for use in a real life application, but it will illustrate the general idea) + +For the above, given a definable operator, we can determine any third point on the curve given any two other points. This definable operator forms a "group" of finite length. To add two points on an elliptic curve, we first need to understand that any straight line that passes through this curve intersects it at precisely three points. Now, say we define two of these points as u and v: we can then draw a straight line through two of these points to find another intersecting point, at w. We can then draw a vertical line through w to find the final intersecting point at x. Now, we can see that u + v = x. This rule works when we define another imaginary point, the Origin, or O, which exists at (theoretically) extreme points on the curve. As strange as this problem may seem, it does permit for an effective encryption system, but it does have its detractors. + +On the positive side, the problem appears to be quite intractable, requiring a shorter key length (thus allowing for quicker processing time) for equivalent security levels as compared to the Integer Factorization Problem and the Discrete Logarithm Problem. On the negative side, critics contend that this problem, since it has only recently begun to be implemented in cryptography, has not had the intense scrutiny of many years that is required to give it a sufficient level of trust as being secure. + +This leads us to a more general problem of cryptology than of the intractability of the various mathematical concepts, which is that the more time, effort, and resources that can be devoted to studying a problem, then the greater the possibility that a solution, or at least a weakness, will be found.
+ + + diff --git a/lib/modules/cryptology/aes-256.rb b/lib/modules/cryptology/aes-256.rb new file mode 100644 index 0000000..8d13a7a --- /dev/null +++ b/lib/modules/cryptology/aes-256.rb @@ -0,0 +1,154 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + module ToolBelt + + + # Aes256 is a symmetric encryption cipher which inherits extends the + # {SafeDb::Cipher} base class in order to implement plug and play + # symmetric encryption. + # + # == Aes256 Symmetric Encrypt/Decrypt + # + # To facilitate decryption - this cipher produces a key/value pair + # dictionary which will be stored along with the ciphertext itself. + # The dictionary includes + # + # - symmetric.cipher - the algorithm used to encrypt and decrypt + # - encryption.key - hex encoded key for encrypting and decrypting + # - initialize.vector - the initialization vector known as a IV (four) + # + # == Aes256 Implemented Methods + # + # This cipher brings the cryptographic mathematics and implementation algorithms + # for the 256Bit Advanced Encryption Standard. No serious practical (nor theoretical) + # challenge has ever been mounted against this algorithm (or this implementation). + # + # This class implements the below methods + # + # - do_symmetric_encryption(plain_text) - resulting in ciphertext + # - do_symmetric_decryption(ciphertext, encryption_dictionary) » plaintext + # + # and it also sets the @dictionary hash (map) of pertinent + # key/value pairs including the +encryption algorithm+ and +encryption key+. + # + # That's It. Cipher children can rely on the {SafeDb::Cipher} parent to + # do the nitty gritty of file-handling plus managing stores and paths. + + class Aes256 + + # Use the AES 256 bit block cipher and a robust strong random key plus + # initialization vector (IV) to symmetrically encrypt the plain text. + # + # Cryptographic Properties + # + # This encrypt event populates key/value pairs to the hash (dictionary) instance + # given in the parameter. 
+ # + # A crypt properties dictionary acts as output from every encryption event + # and input to every decryption event. The most common properties include + # + # - the symmetric key used for the encryption and decryption + # - the iv (initialization vector) that adds another dimension of strength + # - authorization data that thwarts switch attacks by tying context to content + # - the cipher algorithm, its implementation and its encryption strength + # - the digest of the original message for validation purposes + # + # @param e_properties [Hash] + # instantiated hash map in which the encrryption properties will + # be stuffed. + # + # @param plain_text [String] the plain (or base64 encoded) text to encrypt + # @return [String] the symmetrically encrypted cipher text + def self.do_encrypt e_properties, plain_text + + crypt_cipher = OpenSSL::Cipher::AES256.new(:CBC) + crypt_cipher.encrypt + plain_text_digest = Digest::SHA256.digest plain_text + + e_properties[CryptIO::DICT_CIPHER_NAME] = crypt_cipher.class.name + e_properties[CryptIO::DICT_CRYPT_KEY] = Base64.urlsafe_encode64 crypt_cipher.random_key + e_properties[CryptIO::DICT_CRYPT_IV] = Base64.urlsafe_encode64 crypt_cipher.random_iv + e_properties[CryptIO::DICT_TEXT_DIGEST] = Base64.urlsafe_encode64 plain_text_digest + + return crypt_cipher.update( plain_text ) + crypt_cipher.final + + end + + + # Use the AES 256 bit block cipher together with the encryption key, + # initialization vector (iv) and other data found within the decryption + # properties dictionary to symmetrically decrypt the cipher text. + # + # This encrypt event in {self.do_encrypt} populated the property dictionary + # that was presumably serialized, stored, retrieved then deserialized and + # (at last) presented in the first parameter. + # + # Cryptographic Properties + # + # A crypt properties dictionary is the output from every encryption event + # and input to every decryption event. 
The most common properties include + # + # - the symmetric key used for the encryption and decryption + # - the iv (initialization vector) that adds another dimension of strength + # - authorization data that thwarts switch attacks by tying context to content + # - the cipher algorithm, its implementation and its encryption strength + # - the digest of the original message for validation purposes + # + # @param d_properties [Hash] + # the crypt properties dictionary is the output from every encryption event + # and (as in this case) input to every decryption event. + # + # @param cipher_text [String] + # the (already decoded) cipher text for decryption by this method using the + # encryption properties setup during the past encrypt event. + # + # @return [String] + # the plain text message originally given to be encrypted. If the message digest + # is provided within the decryption properties dictionary a sanity check will + # occur. + # + # @raise [RuntimeError] + # if decryption fails or the recalculated message digest fails an equivalence test. + def self.do_decrypt d_properties, cipher_text + + decode_cipher = OpenSSL::Cipher::AES256.new(:CBC) + decode_cipher.decrypt + + decode_cipher.key = Base64.urlsafe_decode64( d_properties[CryptIO::DICT_CRYPT_KEY] ) + decode_cipher.iv = Base64.urlsafe_decode64( d_properties[CryptIO::DICT_CRYPT_IV] ) + + plain_text = decode_cipher.update( cipher_text ) + decode_cipher.final + assert_digest_equivalence( d_properties[CryptIO::DICT_TEXT_DIGEST], plain_text ) + + return plain_text + + end + + + private + + + def self.assert_digest_equivalence( digest_b4_encryption, plain_text_message ) + + plain_text_digest = Base64.urlsafe_encode64( Digest::SHA256.digest( plain_text_message ) ) + return if digest_b4_encryption.eql? 
plain_text_digest + + msg1 = "\nEquivalence check of original and decrypted plain text digests failed.\n" + msg2 = "Digest before encryption => #{digest_b4_encryption}\n" + msg3 = "Digest after decryption => #{plain_text_digest}\n" + error_message = msg1 + msg2 + msg3 + raise RuntimeError, error_message + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/amalgam.rb b/lib/modules/cryptology/amalgam.rb new file mode 100644 index 0000000..6eacf34 --- /dev/null +++ b/lib/modules/cryptology/amalgam.rb @@ -0,0 +1,70 @@ +#!/usr/bin/ruby + +module SafeDb + + module ToolBelt + + + # This class knows how to amalgamate passwords, keys and string data in + # a manner that is the cryptographical equivalent of synergy. + # + # The amalgamated keys are synergially (cryptographically) greater than + # the sum of their parts. + class Amalgam + + # Amalgamate the two parameter passwords in a manner that is the + # cryptographical equivalent of synergy. The amalgamated keys are + # synergially greater than the sum of their parts. + # + # -- Get a viable machine password taking into account the human + # -- password length and the specified mix_ratio. + # + # + # @param human_password [String] the password originating from a human + # @param machine_key [String] a machine engineered ascii password (key) + # @mixparam machine_key [String] a machine engineered ascii password (key) + # + # @return [String] the union of the two parameter passwords + # + # @raise [ArgumentError] when the size of the two passwords and the + # mix ratio do not conform to the constraint imposed by the below + # equation which must hold true. + # machine password length = human password length * mix_ratio - 1 + # + def self.passwords human_password, machine_password, mix_ratio + + size_error_msg = "Human pass length times mix_ratio must equal machine pass length." 
+ lengths_are_perfect = human_password.length * mix_ratio == machine_password.length + raise ArgumentError.new size_error_msg unless lengths_are_perfect + + machine_passwd_chunk = 0 + amalgam_passwd_index = 0 + amalgamated_password = "" + + human_password.each_char do |passwd_char| + + amalgamated_password[amalgam_passwd_index] = passwd_char + amalgam_passwd_index += 1 + + for i in 0..(mix_ratio-1) do + machine_pass_index = machine_passwd_chunk * mix_ratio + i + amalgamated_password[amalgam_passwd_index] = machine_password[machine_pass_index] + amalgam_passwd_index += 1 + end + + machine_passwd_chunk += 1 + + end + + return amalgamated_password + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/blowfish.rb b/lib/modules/cryptology/blowfish.rb new file mode 100644 index 0000000..ebe0316 --- /dev/null +++ b/lib/modules/cryptology/blowfish.rb @@ -0,0 +1,130 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + module ToolBelt + + # Blowfish is a symmetric encryption cipher which inherits extends the + # {SafeDb::Cipher} base class in order to implement plug and play + # symmetric encryption. + # + # Blowfish is still uncrackable - however its successor (TwoFish) has + # been reinforced to counter the growth of super-computer brute force + # resources. + class Blowfish + + # The blowfish cipher id constant is used to +initialize+ + # an {OpenSSL::Cipher} class instance. + BLOWFISH_CIPHER_ID = "BF-ECB" + + + # Blowfish constrains the length of +incoming plain text+ forcing it + # to be a multiple of eight (8). + BLOWFISH_BLOCK_LEN = 8 + + + # Encrypt the (plain) text parameter using the symmetric encryption key + # specified in the second parameter and return the base64 encoded + # representation of the cipher text. + # + # Blowfish is a block cipher meaning it needs both the key and the plain + # text inputted to conform to a divisible block length. 
+ # + # Don't worry about this block length requirement as this encrption method + # takes care of it and its sister method {self.decryptor} will also perform + # the correct reversal activities to give you back the original plain text. + # + # {Base64.urlsafe_encode64} facilitates the ciphertext encoding returning text that + # is safe to write to a file. + # + # @param plain_text [String] + # This parameter should be the non-nil text to encrypt using Blowfish. + # Before encryption the text will be padded using a text string from + # the {SafeDb::Cipher::TEXT_PADDER} constant until it results in + # a string with the required block length. + # + # @param encryption_key [String] + # send a long strong unencoded key which does not have to be a multiple of + # eight even though the algorithm demands it. Before the encryption this key + # will be passed through a digest using behaviour from {Digest::SHA256.digest} + # + # This behaviour returns a key whose length is a multiple of eight. + # + # @return [String] base64 representation of blowfish crypted ciphertext + # + # @raise [OpenSSL::Cipher::CipherError] + # An (encryption) key length too short error is raised for short keys. + def self.encryptor plain_text, encryption_key + + shortkey_msg = "The #{encryption_key.length} character encryption key is too short." + raise ArgumentError, shortkey_msg unless encryption_key.length > 8 + log.info(x) { "os blowfish request to encrypt plain text with provided key." } + + block_txt = plain_text + block_txt += CryptIO::TEXT_PADDER until block_txt.bytesize % BLOWFISH_BLOCK_LEN == 0 + raw_stretched_key = Digest::SHA256.digest(encryption_key) + + blowfish_encryptor = OpenSSL::Cipher.new(BLOWFISH_CIPHER_ID).encrypt + blowfish_encryptor.key = raw_stretched_key + return blowfish_encryptor.update(block_txt) << blowfish_encryptor.final + + end + + + # Decrypt the cipher text parameter using the symmetric decryption key + # specified in the second parameter. 
The cipher text is expected to have + # already been decoded if necessary. + # + # Its okay to use a bespoke encryptor - just ensure you encode the result + # and override the padding constant. + # + # Blowfish is a block cipher meaning it needs both the key and the plain + # text inputted to conform to a divisible block length. + # + # Don't worry about this block length requirement as this decrption method + # takes care of the reversing the activities carried out by {self.encryptor}. + # + # @param cipher_text [String] + # This incoming cipher text should already be encoded but it + # will chomped and stripped upon receipt followed by + # decryption using the Blowfish algorithm. + # + # @param decryption_key [String] + # Send the same key that was used during the encryption phase. The encryption + # phase passed the key through the {Digest::SHA256.digest} digest so here + # the decryption does the exact same thing. + # + # The digest processing guarantees a symmetric key whose length conforms to + # the multiple of eight block length requirement. + # + # @return [String] + # After decoding and decryption the plain text string will still be padded, + # +but not with spaces+. The unlikely to occur padding string constant used + # is the {SafeDb::Cipher::TEXT_PADDER}. + # + # If the plaintext ended with spaces these would be preserved. After padder + # removal any trailing spaces will be preserved in the returned plain text. + # + def self.decryptor cipher_text, decryption_key + + digested_key = Digest::SHA256.digest decryption_key + + decrypt_tool = OpenSSL::Cipher.new(BLOWFISH_CIPHER_ID).decrypt + decrypt_tool.key = digested_key + + padded_plaintxt = decrypt_tool.update(cipher_text) << decrypt_tool.final + pad_begin_index = padded_plaintxt.index CryptIO::TEXT_PADDER + return padded_plaintxt if pad_begin_index.nil? + return padded_plaintxt[ 0 .. 
(pad_begin_index-1) ] + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/cipher.rb b/lib/modules/cryptology/cipher.rb new file mode 100644 index 0000000..4b94d08 --- /dev/null +++ b/lib/modules/cryptology/cipher.rb @@ -0,0 +1,207 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + module ToolBelt + + require "base64" + + # {SafeDb::Cipher} is a base class that enables cipher varieties + # to be plugged and played with minimal effort. This Cipher implements much + # of the use case functionality - all extension classes need to do, is + # to subclass and implement only the core behaviour that define its identity. + # + # == Double Encryption | Cipher Parent vs Cipher Child + # + # Double encryption first with a symmetric and then an asymmetric one fulfills + # the +safe+ promise of making the stored ciphertext utterly worthless. + # + # The child ciphers implement the inner symmetric encyption whilst the parent + # implements the outer asymmetric encryption algorithm. + # + # The process is done twice resulting in two stores that are mirrored in structure. + # The front end store holds doubly encrypted keys whist the backend store holds + # the doubly encrypted secrets. + # + # Attackers wouldn't be able to distinguish one from the other. Even if they + # theoretically cracked the asymmetric encryption - they would then be faced + # with a powerful symmetric encryption algorithm which could be any one of the + # leading ciphers such as TwoFish or the Advanced Encryption Standard (AES). + # + # == Ciphers at 3 Levels + # + # Ciphers are implemented at three distinct levels. + # + # Low Level Ciphers + # + # Low level ciphers are given text to encrypt and an instantiated dictionary + # in which to place the encryption parameters such as keys and initialization + # vectors (iv)s. + # + # Some more specific ciphers can handle authorization data for example the + # Galois Counter Mode (GCM) cipher. 
+ # + # Low level ciphers know nothing about text IO nor reading and writing to + # persistence structures like files, queues and databases. + # + # Mid Level Ciphers + # + # Mid level ciphers talk to the low level ciphers and bring in input and output + # textual formats like SafeDb's two-part block structures. + # + # Mid level ciphers still know nothing of persistence structures like files, + # queues and databases. + # + # Use Case Level Ciphers + # + # The ciphers operating at the use case level talk to mid level ciphers. They + # interact with the safe store API which brings persistence + # functions such as read/write as well as remoting functions such as + # push/pull. + # + # Use Case level ciphers interact with the latest crypt technologies due to + # interface separation. Also they talk classes implementing persistence stores + # allowing assets liek Git, S3, DropBox, simple files, SSH filesystems, Samba + # to hold locked key and material crypts. + # + # Databases stores will be introduced soon allowing safe to plug in and + # exploit database managers like Mongo, Hadoop, MySQL, Maria, and PostgreSQL. + # + # Plugging into DevOps orchestration platforms like Terraform, Ansible, Chef + # and Puppet will soon be available. Add this with integrations to other credential + # managers like HashiCorp's Vault, Credstash, Amazon KMS, Git Secrets, PGP, + # LastPass, KeePass and KeePassX. 
+ # + # == How to Implement a Cipher + # + # Extend this base class to inherit lots of +unexciting+ functionality + # that essentially + # + # - manages the main encryption and decryption use case flow + # - +concatenates+ the symmetric encryption meta data with ciphertext +after encryption+ + # - _splits_ and objectifies the key/value metadata plus ciphertext +before decryption+ + # - +handles file read/writes+ in conjunction with the store plugins + # - handles +exceptions+ and +malicious input detection+ and incubation + # - +_performs the asymmetric encryption_+ of the cipher's symmetrically encrypted output + # + # == What Behaviour Must Ciphers Implement + # + # Ciphers bring the cryptographic mathematics and implementation algorithms + # to the table. So when at home they must implement + # + # - do_symmetric_encryption(plain_text) - resulting in ciphertext + # - do_symmetric_decryption(ciphertext, encryption_dictionary) » plaintext + # + # and also set the @dictionary hash (map) of pertinent + # key/value pairs including the encryption algorithm, the encryption key and + # the ciphertext signature to thwart any at-rest tampering. + # + # That's It. Cipher children can rely on the {SafeDb::Cipher} parent to + # do the nitty gritty of file-handling plus managing stores and paths. + class Cipher + + # Ciphers use symmetric algorithms to encrypt the given text, which + # is then wrapped up along with the encryption key and other metadata + # pertinent to the algorithm, they then encrypt this bundle with the + # public key provided and return the text that can safely be stored in + # a text file. + # + # Ciphers should never interact with the filesystem which makes them + # reusable in API and remote store scenarios. + # + # Binary files should be converted into the base64 format before being + # presented to ciphers. + # + # Every component in the pipeline bears the responsibility for nullifying + # and rejecting malicious content. 
+ # + # @param public_key [OpenSSL::PKey::RSA] + # an {OpenSSL::PKey::RSA} public key. The unique selling point of + # asymmetric encryption is it can be done without recourse to the heavily + # protected private key. Thus the encryption process can continue with + # just a public key as long as its authenticity is assured. + # + # @param payload_text [String] + # plaintext (or base64 encoded) text to encrypt + # + # @return [String] doubly (symmetric and asymmetric) encrypted cipher text + def self.encrypt_it public_key, payload_text + + crypt_data = {} + crypted_payload = Base64.encode64( Aes256.do_encrypt( crypt_data, payload_text ) ) + unified_material = CryptIO.inner_crypt_serialize crypt_data, crypted_payload + + outer_crypt_key = Engineer.strong_key( 128 ) + crypted_cryptkey = Base64.encode64( public_key.public_encrypt( outer_crypt_key ) ) + + crypted_material = Base64.encode64(Blowfish.encryptor unified_material, outer_crypt_key) + + return CryptIO.outer_crypt_serialize( crypted_cryptkey, crypted_material ) + + end + + + # This method takes and safe formatted cipher-text block + # generated by {self.encrypt_it} and returns the original message that has effectively + # been doubly encrypted using a symmetric and asymmetric cipher. This type of + # encryption is standard best practice when serializing secrets. + # + # safe cipher-text blocks look like a two(2) part bundle + # but they are actually a three(3) part bundle because the second + # part is in itself an amalgam of two distinct objects, serialized as text blocks. + # + # The 3 SafeDb Blocks + # + # Even though the incoming text appears to contain two (2) blocks, + # it actually contains three (3). 
+ # + # - a massive symmetric encryption key (locked by an asymmetric keypair) + # - a dictionary denoting the algorithm and parameters used to encrypt the 3rd block + # - the true message whose encryption is parameterized by the dictionary (in 2nd block) + # + # The second and third blocks are only revealed by asymmetrically decrypting + # the key in the first block and using it to symmetrically decrypt what appears + # to be a unified second block. + # + # @param private_key [OpenSSL::PKey::RSA] + # the asymmetric private key whose corresponding public key was + # employed during the encryption of a super-strong 128 character symmetric + # key embalmed by the first ciphertext block. + # + # @param os_block_text [String] + # the locked cipher text is the safe formatted block which comes + # in two main chunks. First is the long strong symmetric encryption + # key crypted with the public key portion of the private key in the first + # parameter. + # + # The second chunk is the symmetrically crypted text that was locked with + # the encryption key revealed in the first chunk. + # + # @return [String] + # the doubly encrypted plain text that is locked by a symmetric key and + # that symmetric key itself locked using the public key portion of the + # private key whose crypted form is presented in the first parameter.
+ def self.decrypt_it private_key, os_block_text + + first_block = Base64.decode64( CryptIO.outer_crypt_deserialize os_block_text, true ) + trail_block = Base64.decode64( CryptIO.outer_crypt_deserialize os_block_text, false ) + + decrypt_key = private_key.private_decrypt first_block + inner_block = Blowfish.decryptor( trail_block, decrypt_key ) + + crypt_props = Hash.new + cipher_text = CryptIO.inner_crypt_deserialize( crypt_props, inner_block ) + + return Aes256.do_decrypt( crypt_props, cipher_text ) + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/collect.rb b/lib/modules/cryptology/collect.rb new file mode 100644 index 0000000..15d5d1d --- /dev/null +++ b/lib/modules/cryptology/collect.rb @@ -0,0 +1,138 @@ +#!/usr/bin/ruby + +module SafeDb + + module ToolBelt + + require 'io/console' + + # This class will be refactored into an interface implemented by a set + # of plugins that will capture sensitive information from users from an + # Ubuntu, Windows, RHEL, CoreOS, iOS or CentOS command line interface. + # + # An equivalent REST API will also be available for bringing in sensitive + # information in the most secure (but simple) manner. + class Collect + + + # Collect something sensitive from the command line with a + # minimum length specified in the first parameter. This method can't + # know whether the information is a password, a pin number or whatever + # so it takes the integer minimum size at its word. + # + # Question 5 to App Config | What is the Secret? + # + # The client may need to acquire the secret if the answer to question 4 indicates the need + # to instantiate the keys and encrypt the application's plaintext database. 
The application + # should facilitate communication of the secret via + # + # - an environment variable + # - the system clipboard (cleared after reading) + # - a file whose path is a command parameter + # - a file in a pre-agreed location + # - a file in the present directory (with a pre-agreed name) + # - a URL from a parameter or pre-agreed + # - the shell's secure password reader + # - the DConf / GConf or GSettings configuration stores + # - a REST API + # - password managers like LastPass, KeePassX or 1Pass + # - the Amazon KMS (Key Management Store) + # - vaults from Ansible, Terraform and Kubernetes + # - credential managers like GitSecrets and Credstash + # + # @param min_size [Integer] the minimum size of the collected secret + # whereby one (1) is the least we can expect. The maximum bound is + # not constrained here so will fall under what is allowed by the + # interface, be it a CLI, Rest API, Web UI or Mobile App. + # + # @param prompt_twice [Boolean] indicate whether the user should be + # prompted twice. If true the prompt_2 text must be provided and + # converse is also true. A true value asserts that both times the + # user enters the same (case sensitive) string. + # + # @param prompt_1 [String] the text (aide memoire) used to prompt the user + # + # @param prompt_2 [String] if the prompt twice boolean is TRUE, this + # second prompt (aide memoire) must be provided. 
+ # + # @return [String] the collected string text ( watch out for non-ascii chars) + # @raise [ArgumentError] if the minimum size is less than one + def self.secret_text min_size, prompt_twice, prompt_1, prompt_2=nil + + assert_min_size min_size + + sleep(1) + puts "\n#{prompt_1} : " + first_secret = STDIN.noecho(&:gets).chomp + + assert_input_text_size first_secret.length, min_size + return first_secret unless prompt_twice + + sleep(1) + puts "\n#{prompt_2} : " + check_secret = STDIN.noecho(&:gets).chomp + + assert_same_size_text first_secret, check_secret + + return first_secret + + end + + + # -- + # -- Raise an exception if asked to collect text that is less + # -- than 3 characters in length. + # -- + def self.assert_min_size min_size + + min_length_msg = "\n\nCrypts with 2 (or less) characters open up exploitable holes.\n\n" + raise ArgumentError.new min_length_msg if min_size < 3 + + end + + + # -- + # -- Output an error message and then exit if the entered input + # -- text size does not meet the minimum requirements. + # -- + def self.assert_input_text_size input_size, min_size + + if( input_size < min_size ) + + puts + puts "Input is too short. Please enter at least #{min_size} characters." + puts + + exit + + end + + end + + + # -- + # -- Assert that the text entered the second time is exactly (case sensitive) + # -- the same as the text entered the first time. + # -- + def self.assert_same_size_text first_text, second_text + + unless( first_text.eql? second_text ) + + puts + puts "Those two bits of text are not the same (in my book)!" 
+ puts + + exit + + end + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/crypt.io.rb b/lib/modules/cryptology/crypt.io.rb new file mode 100644 index 0000000..77e7fa7 --- /dev/null +++ b/lib/modules/cryptology/crypt.io.rb @@ -0,0 +1,225 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + module ToolBelt + + # CryptIO concentrates on injecting and ingesting crypt properties into and + # out of a key/value dictionary as well as injecting and ingesting cryptographic + # materials into and out of text files. + # + # == Cryptographic Properties + # + # A crypt properties dictionary acts as output from every encryption event + # and input to every decryption event. The most common properties include + # + # - the symmetric key used for the encryption and decryption + # - the iv (initialization vector) that adds another dimension of strength + # - authorization data that thwarts switch attacks by tying context to content + # - the cipher algorithm, its implementation and its encryption strength + # - the various glue strings that allow related ciphertext to occupy a file + # + # == Why Pad? + # + # Many ciphers (like Blowfish) constrains plain text lengths to multiples + # of 8 (or 16) and a common right pad with spaces strategy is employed + # as a workaround. safe does it diferently. + # + # == Why isn't Space Padding Used? + # + # If safe padded plaintext (ending in one or more spaces) with + # spaces, the decrypt phase (after right stripping spaces) would return + # plain text string shorter than the original. + # + # == Why Unusual Padding and Separators + # + # Why does safe employ unusual strings for padding and separation. + # + # The separator string must be unusual to make it unlikely for it to occur in any + # of the map's key value pairs nor indeed the chunk of text being glued. Were + # this to happen, the separate and reconstitute phase may not accurately return + # the same two entities we are employed to unite. 
+ # + # == So How is Padding Done? + # + # Instead of single space padding - safe uses an unlikely 7 character + # padder which is repeated until the multiple is reached. + # + # <-|@|-> + # + # == So How is Padding Done? + # + # The padder length must be a prime number or infinite loops could occur. + # + # If the padder string is likely to occur in the plain text, another + # padder (or strategy) should and could be employed. + # + class CryptIO + + + # The safe text padder. See the class description for an analysis + # of the use of this type of padder. + TEXT_PADDER = "<-|@|->" + + # An unusual string that glues together an encryption dictionary and + # a chunk of base64 encoded and encrypted ciphertext. + # The string must be unusual enough to ensure it does not occur within + # the dictionary metadata keys or values. + INNER_GLUE_STRING = "\n<-|@| < || safe inner crypt material axis || > |@|->\n\n" + + # An unusual string that glues together the asymmetrically encrypted outer + # encryption key with the outer crypted text. + OUTER_GLUE_STRING = "\n<-|@| < || safe outer crypt material axis || > |@|->\n\n" + + # Text header for key-value pairs hash map that will be serialized. + DICT_HEADER_NAME = "crypt.properties" + + # Name for the class of cipher employed. + DICT_CIPHER_NAME = "cipher.class" + + # Name for the {Base64} encoded symmetric (lock/unlock) crypt key. + DICT_CRYPT_KEY = "encryption.key" + + # Dictionary name for the encryption iv (initialization vector) + DICT_CRYPT_IV = "encryption.iv" + + # Dictionary name for the Base64 (urlsafe) encoded plaintext digest. + DICT_TEXT_DIGEST = "plaintext.digest" + + + # Serialize and then unify a hash map and a textual chunk using + # a known but unusual separator string in a manner that protects + # content integrity during the serialize / deserialize process. 
+ # + # This crypt serialization uses a specific "inner glue" as the + # string that separates the serialized key/value dictionary and + # the encoded textual block. + # + # @param hash_map [String] + # this hash (dictionary) will be serialized into INI formatted text + # using behaviour from {Hash} and {IniFile}. + # + # @param text_chunk [String] + # the usually Base64 encrypted textual block to be glued at the + # bottom of the returned block. + # + # @return [String] serialized and glued together result of map plus text + # + # @raise [ArgumentError] + # if the dictionary hash_map is either nil or empty. + def self.inner_crypt_serialize hash_map, text_chunk + + nil_or_empty_hash = hash_map.nil? || hash_map.empty? + raise ArgumentError, "Cannot serialize nil or empty properties." if nil_or_empty_hash + ini_map = IniFile.new + ini_map[ DICT_HEADER_NAME ] = hash_map + return ini_map.to_s + INNER_GLUE_STRING + text_chunk + + end + + + # Deserialize an safe formatted text which contains an + # encryption properties dictionary (serialized in INI format) + # and a Base64 encoded crypt block which is the subject of the + # encryption dictionary. + # + # The crypt serialization used a specific "inner glue" as the + # string that separates the serialized key/value dictionary and + # the encoded textual block. We now employ this glue to split + # the serialized dictionary from the textual block. + # + # @param hash_map [String] + # send an instantiated hash (dictionary) which will be populated + # by this deserialize operation. The dictionary propeties can + # then be used to decrypt the returned ciphertext. + # + # @param text_block [String] + # the first of a two-part amalgamation is a hash (dictionary) in + # INI serialized form and the second part is a Base64 encrypted + # textual block. 
+ # + # The deserialized key/value pairs will be stuffed into the + # non nil (usually empty) hash map in the first parameter and + # the block (in the 2nd part) will be Base64 decoded and + # returned by this method. + # + # @return [String] + # The encoded block in the 2nd part of the 2nd parameter will be + # Base64 decoded and returned. + # + # @raise [ArgumentError] + # if the dictionary hash_map is either nil or empty. Also if + # the inner glue tying the two parts together is missing an + # {ArgumentError} will be thrown. + def self.inner_crypt_deserialize hash_map, text_block + + raise ArgumentError, "Cannot populate a nil hash map." if hash_map.nil? + assert_contains_glue text_block, INNER_GLUE_STRING + + serialized_map = text_block.split(INNER_GLUE_STRING).first.strip + encoded64_text = text_block.split(INNER_GLUE_STRING).last.strip + ini_props_hash = IniFile.new( :content => serialized_map ) + encrypt_values = ini_props_hash[DICT_HEADER_NAME] + + hash_map.merge!( encrypt_values ) + return Base64.decode64( encoded64_text ) + + end + + + # Using an outer divider (glue) - attach the asymmetrically encrypted outer + # encryption key with the outer encrypted text. + # + # @param crypt_material_x [String] asymmetrically encrypted (encoded) outer encryption key + # @param crypt_material_y [String] symmetrically encrypted inner metadata and payload crypt + # + # @return [String] concatenated result of the two crypt materials and divider string + def self.outer_crypt_serialize crypt_material_x, crypt_material_y + return crypt_material_x + OUTER_GLUE_STRING + crypt_material_y + end + + + # Given two blocks of text that were bounded together by the + # {self.outer_crypt_serialize} method we must return either the + # first block (true) or the second (false). + # + # @param crypt_material [String] + # large block of text in two parts that is divided by the + # outer glue string. 
+ # + # @param top_block [Boolean] + # if true the top (of the two) blocks will be returned + # otherwise the bottom block is returned. + # + # @return [String] either the first or second block of text + # + # @raise [ArgumentError] + # If the outer glue string tying the two parts together is + # missing an {ArgumentError} will be thrown. + def self.outer_crypt_deserialize os_material, top_block + + assert_contains_glue os_material, OUTER_GLUE_STRING + return os_material.split(OUTER_GLUE_STRING).first.strip if top_block + return os_material.split(OUTER_GLUE_STRING).last.strip + + end + + + private + + def self.assert_contains_glue os_crypted_block, glue_string + + no_glue_msg = "\nGlue string not in safe cipher block.\n#{glue_string}\n" + raise ArgumentError, no_glue_msg unless os_crypted_block.include? glue_string + + end + + + end + + + end + + +end diff --git a/lib/modules/cryptology/engineer.rb b/lib/modules/cryptology/engineer.rb new file mode 100644 index 0000000..64d6a3d --- /dev/null +++ b/lib/modules/cryptology/engineer.rb @@ -0,0 +1,99 @@ +#!/usr/bin/ruby + +module SafeDb + + module ToolBelt + + + require 'securerandom' + + # This class will be refactored into an interface implemented by a set + # of plugins that will capture sensitive information from users from an + # Ubuntu, Windows, RHEL, CoreOS, iOS or CentOS command line interface. + # + # An equivalent REST API will also be available for bringing in sensitive + # information in the most secure (but simple) manner. + class Engineer + + + # -- + # -- Get a viable machine password taking into account the human + # -- password length and the specified mix_ratio. 
+ # -- + # -- machine password length = human password length * mix_ratio - 1 + # -- + def self.machine_key human_password_length, mix_ratio + + machine_raw_secret = strong_key( human_password_length * ( mix_ratio + 1) ) + return machine_raw_secret[ 0..( human_password_length * mix_ratio - 1 ) ] + + end + + + # -- + # -- Engineer a raw password that is similar (approximate) in + # -- length to the integer parameter. + # -- + def self.strong_key approx_length + + non_alphanum = SecureRandom.urlsafe_base64(approx_length); + return non_alphanum.delete("-_") + + end + + + # Amalgamate the parameter passwords using a specific mix ratio. This method + # produces cryptographically stronger secrets than algorithms that simply + # concatenate two string keys together. If knowledge of one key were gained, this + # amalgamation algorithm still provides extremely strong protection even when + # one of the keys has a single digit length. + # + # This +length constraint formula+ binds the two input strings together with + # the integer mix ratio. + # + # machine password length = human password length * mix_ratio - 1 + # + # @param human_password [String] the first password (shorter one) to amalgamate + # @param machine_password [String] the second password (longer one) to amalgamate + # @param mix_ratio [Fixnum] the mix ratio that must be respected by the + # previous two parameters. + # @return [String] an amalgamated (reproducible) union of the 2 parameter passwords + # + # @raise [ArgumentError] if the length constraint assertion does not hold true + def self.get_amalgam_password human_password, machine_password, mix_ratio + + size_error_msg = "Human pass length times mix_ratio must equal machine pass length." 
+ lengths_are_perfect = human_password.length * mix_ratio == machine_password.length + raise ArgumentError.new size_error_msg unless lengths_are_perfect + + machine_passwd_chunk = 0 + amalgam_passwd_index = 0 + amalgamated_password = "" + + human_password.each_char do |passwd_char| + + amalgamated_password[amalgam_passwd_index] = passwd_char + amalgam_passwd_index += 1 + + for i in 0..(mix_ratio-1) do + machine_pass_index = machine_passwd_chunk * mix_ratio + i + amalgamated_password[amalgam_passwd_index] = machine_password[machine_pass_index] + amalgam_passwd_index += 1 + end + + machine_passwd_chunk += 1 + + end + + return amalgamated_password + + end + + + end + + + end + + +end diff --git a/lib/modules/mappers/dictionary.rb b/lib/modules/mappers/dictionary.rb new file mode 100644 index 0000000..2822485 --- /dev/null +++ b/lib/modules/mappers/dictionary.rb @@ -0,0 +1,288 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenKey + + require 'inifile' + + # An OpenSession dictionary is a 2D (two dimensional) hash data + # structure backed by an encrypted file. + # + # It supports operations to read from and write to a known + # filepath and given the correct symmetric encryption key it will + # + # - decrypt after reading from the file and + # - encrypt before writing to the file + # + # This dictionary extends {Hash} in order to deliver on its core key value + # storage and retrieve use cases. Extend this dictionary and provide + # context specific methods through constants to read and write context + # specific data. + # + # == The Current Dictionary Section + # + # This Dictionary is two-dimensional so all key-value pairs are stored + # under the auspices of a section. + # + # The Dictionary can track the current section for you and all data + # exchanges can occur in lieu of a single section if you so wish by using + # the provided {put} and {get} methods. 
+ # + # To employ section management functionality you should pass in a current + # section id when creating the dictionary. + # + # @example + # To use the dictionary in the raw (unextended) format you create + # write and read it like this. + # + # ---------------------------------------------------------------------- + # + # my_dictionary = Dictionary.create( "/path/to/backing/file" ) + # + # my_dictionary["user23"] = {} + # my_dictionary["user23"]["Name"] = "Joe Bloggs" + # my_dictionary["user23"]["Email"] = "joebloggs@example.com" + # my_dictionary["user23"]["Phone"] = "+44 07342 800080" + # + # my_dictionary.write( "crypt-key-1234-wxyz" ) + # + # ---------------------------------------------------------------------- + # + # my_dictionary = Dictionary.create( "/path/to/backing/file", "crypt-key-1234-wxyz" ) + # puts my_dictionary.has_key? "user23" # => true + # puts my_dictionary["user23"].length # => 3 + # puts my_dictionary["user23"]["Email"] # => "joebloggs@example.com" + # + # ---------------------------------------------------------------------- + class Dictionary < Hash + + attr_accessor :backing_filepath, :section_id + + + # Create either a new empty dictionary or unmarshal (deserialize) the + # dictionary from an encrypted file depending on whether a file exists + # at the backing_file parameter location. + # + # If the backing file indeed exists, the crypt key will be employed to + # decode and then decrypt the contents before the unmarshal operation. + # + # The filepath is stored as an instance variable hence the {write} + # operation does not need to be told where to? 
+ # + # @example + # # Create Dictionary the first time + # my_dictionary = Dictionary.create( "/path/to/backing/file" ) + # + # # Create Dictionary from an Encrypted Backing File + # my_dictionary = Dictionary.create( "/path/to/backing/file", "crypt-key-1234-wxyz" ) + # + # @param backing_file [String] + # the backing file is the filepath to this Dictionary's encrypted + # backing file when serialized. If no file exists at this path the + # operation will instantiate and return a new empty {Hash} object. + # + # @param crypt_key [String] + # if the backing file exists then this parameter must contain a + # robust symmetric decryption key. The symmetric key will be used + # for decryption after the base64 encoded file is read. + # + # Note that the decryption key is never part of the dictionary object. + # This class method knows it but the new Dictionary has no crypt key + # instance variable. Another crypt key must then be introduced when + # serializing (writing) the dictionary back into a file. + # + # @return [Dictionary] + # return a new Dictionary that knows where to go if it needs + # to read (deserialize) or write (serialize) itself. + # + # If no file exists at the path a new empty {Hash} object is + # returned. + # + # If a file exists, then the crypt_key parameter is expected + # to be the decryption and key and the dictionary will be based + # on the decrypted contents of the file. + # + # @raise [ArgumentError] + # An {ArgumentError} is raised if either no decryption key is provided + # or one that is unsuitable (ie was not used within the encryption). + # Errors can also arise if the block coding and decoding has not been + # done satisfactorily. + def self.create backing_file, crypt_key = nil + + key_missing = File.file?( backing_file ) && crypt_key.nil? 
+ raise ArgumentError, "No crypt key provided for file #{backing_file}" if key_missing + + dictionary = Dictionary.new + dictionary.backing_filepath = backing_file + + return dictionary unless File.file? backing_file + + file_contents = File.read( backing_file ).strip + plaintext_str = file_contents.block_decode_decrypt( crypt_key ) + dictionary.ingest_contents( plaintext_str ) + + return dictionary + + end + + + # Create either a new dictionary containing the specified section or unmarshal + # (deserialize) the dictionary from an encrypted file depending on whether a + # file exists at the backing_file parameter location and then create the + # section only if it does not exist. + # + # If the backing file indeed exists, the crypt key will be employed to + # decode and then decrypt the contents before the unmarshal operation. + # + # The filepath is stored as an instance variable hence the {write} + # operation does not need to be told where to? + # + # This dictionary will also know which "section" should be used to + # put, add, update and delete key/value data. You can employ this dictionary + # such that each instance only creates, updates, removes and/or reads + # from a single section. + # + # @example + # # Create Dictionary the first time with a section. + # my_dictionary = Dictionary.create( "/path/to/file", "Europe" ) + # + # # Create Dictionary from an Encrypted Backing File + # my_dictionary = Dictionary.create( "/path/to/file", "Europe", "1234-wxyz" ) + # + # @param backing_file [String] + # the backing file is the filepath to this Dictionary's encrypted + # backing file when serialized. + # + # @param section_id [String] + # the created dictionary know which section should be used to + # put, add, update and delete key/value data. If the backing file + # does not exist a new section is created in the empty dictionary. + # + # If the file exists a new section is created only if it is not + # already present inside the dictionary. 
+ # + # @param crypt_key [String] + # if the backing file exists then this parameter must contain a + # robust symmetric decryption key. The symmetric key will be used + # for decryption after the base64 encoded file is read. + # + # Note that the decryption key is never part of the dictionary object. + # This class method knows it but the new Dictionary has no crypt key + # instance variable. Another crypt key must then be introduced when + # serializing (writing) the dictionary back into a file. + # + # @return [Dictionary] + # return a new Dictionary that knows where to go if it needs + # to read (deserialize) or write (serialize) itself. + # + # If no file exists at the path a new empty {Hash} object is + # returned. + # + # If a file exists, then the crypt_key parameter is expected + # to be the decryption and key and the dictionary will be based + # on the decrypted contents of the file. + # + # @raise [ArgumentError] + # An {ArgumentError} is raised if either no decryption key is provided + # or one that is unsuitable (ie was not used within the encryption). + # Errors can also arise if the block coding and decoding has not been + # done satisfactorily. + def self.create_with_section backing_file, section_id, crypt_key = nil + + dictionary = create( backing_file, crypt_key = nil ) + dictionary.section_id = section_id + dictionary[section_id] = {} unless dictionary.has_key?( section_id ) + + return dictionary + + end + + + # Write the data in this dictionary hash map into a file-system + # backend mirror whose path was specified in the {Dictionary.create} + # factory method. + # + # Technology for encryption at rest is mandatory when using this + # Dictionary to write and read files from the filesystem. + # + # Calling this {self.write} method when the file at the prescribed path + # does not exist results in the directory structure being created + # (if necessary) and then the (possibly encrypted) file being written. 
+ # + # @param crypt_key [String] + # this parameter must contain a robust symmetric crypt key to use for + # the encryption before writing to the filesystem. + # + # Note that the decryption key is never part of the dictionary object. + # For uncrackable security this key must be changed every time the + # file is written. + def write crypt_key + + ini_file = IniFile.new + self.each_key do |section_name| + ini_file[section_name] = self[section_name] + end + + crypted_text = ini_file.to_s.encrypt_block_encode( crypt_key ) + + FileUtils.mkdir_p File.dirname(@backing_filepath) + File.write @backing_filepath, crypted_text + + end + + + + def get key_name + return self[@section_id][key_name] + end + + + + def put key_name, key_value + self[@section_id][key_name] = key_value + end + + + + + # Ingest the contents of the INI string and merge it into a + # this object which is a {Hash}. + # + # @param the_contents [String] + # the INI string that will be ingested and morphed into + # this dictionary. + # + # @raise [ArgumentError] + # if the content contains any nil section name, key name + # or key value. + def ingest_contents the_contents + + ini_file = IniFile.new( :content => the_contents ) + ini_file.each do | data_group, data_key, data_value | + ingest_entry data_group, data_key, data_value + end + + end + + + private + + + def ingest_entry section_name, key_name, value + + msg = "A NIL object detected during ingestion of file [#{@filepath}]." + raise ArgumentError.new msg if section_name.nil? || key_name.nil? || value.nil? + + if self.has_key? 
section_name then + self[section_name][key_name] = value + else + self.store section_name, { key_name => value } + end + + end + + + end + + +end diff --git a/lib/modules/storage/coldstore.rb b/lib/modules/storage/coldstore.rb new file mode 100644 index 0000000..bc22b75 --- /dev/null +++ b/lib/modules/storage/coldstore.rb @@ -0,0 +1,186 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + module Store + + # Cold storage can sync repositories with a bias during conflicts + # either to the remote repository when pulling, and then + # conversely to the local reposiory when pushing. + # + # In between the sync operations a ColdStore can create, read, update and + # delete to and from the local mirror. + # + # == ColdStore | Use Cases + # + # Any self-respecting coldstore must, after initialization, provide + # some basic (and mandatory) behaviour. + # + # These include + # + # - read - reading text from a (possibly unavailable) frozen path + # - write - writing text (effectively freezing it) to a path + # - pull - sync with a collision bias that favours the remote mirror + # - push - sync with a collision bias that favours the local mirror + # + # Cold Storage is borrowed from BitCoin and represents offline storage + # for keys and crypts. safe separates keys and crypts so that you can + # transfer and share secrets by moving keys (not the crypts). + # + # == Houses and Gold Bullion + # + # You don't carry houses or gold bullion around to rent, share or transfer + # their ownership. + # + # You copy keys to rent secrets and when the tenure is up (or you change your + # mind) you revoke access with a metaphorical lock change. + # + # safe embodies concepts like an owner who rents as opposed to a change + # in ownership. + # + # == trade secrets | commoditizing secrets + # + # safe is a conduit through which secrets can be bought and sold. + # + # It commoditizes secrets so that they can be owned, traded, leased and + # auctioned. 
class ColdStore

  # Construct a cold store around the given base (mirror) directory.
  #
  # @param local_path [String]
  #    path to the store's (mirror) base directory.
  #    If the denoted directory does not exist an attempt will be made
  #    to create it (along with any missing parent folders). If a file
  #    exists at this path an error will be thrown.
  def initialize local_path

    @store_path = local_path
    FileUtils.mkdir_p @store_path

  end


  # Read the file frozen (in this store mirror) at this path and
  # return its contents.
  #
  # Coldstores are usually frozen offline (offmachine) so for this
  # to work the {ColdStore.pull} behaviour must have executed to
  # create a local store mirror. This method reads from that mirror.
  #
  # @param from_path [String]
  #    read the file frozen at this path and return its contents
  #    so that the defreeze process can begin.
  #
  #    This path is relative to the base of the store defined in
  #    the constructor.
  #
  # @return [String]
  #    the text frozen in the single matching (*.os.txt) file below
  #    the denoted local path. (Note - this method never returns nil;
  #    a missing file raises a RuntimeError.)
  #
  # @raise [RuntimeError]
  #    if the directory derived from the path does not exist in this
  #    coldstore, if that path is a file (not a directory), or if no
  #    envelope file matches below it.
  #
  # @raise [ArgumentError]
  #    if more than one file match is made at the path specified.
  def read from_path

    frozen_filepath = File.join @store_path, from_path
    frozen_dir_path = File.dirname(frozen_filepath)

    log.info(x) { "Coldstore will search in folder [#{frozen_dir_path.hr_path}]" }

    exists_msg = "Directory #{frozen_dir_path} does not exist in store."
    is_dir_msg = "Path #{frozen_dir_path} should be a directory (not a file)."

    # File.exists? is deprecated (removed in Ruby 3.2) - use File.exist?
    raise RuntimeError, exists_msg unless File.exist? frozen_dir_path
    raise RuntimeError, is_dir_msg unless File.directory? frozen_dir_path

    full_filepath = ""
    file_matched = false

    # Recursively glob for envelope (*.os.txt) files below the directory.
    # Exactly one match is demanded - zero matches or two (or more)
    # matches both raise errors.
    Dir.glob("#{frozen_dir_path}/**/*.os.txt").each do |matched_path|

      log.info(x) { "Coldstore search with [#{from_path}] has matched [#{matched_path.hr_path}]" }
      log.info(x) { "Ignore directory at [#{matched_path.hr_path}]." } if File.directory? matched_path
      next if File.directory? matched_path

      two_match_msg = "More than one file matched. The second is #{matched_path}."
      raise ArgumentError, two_match_msg if file_matched
      file_matched = true

      full_filepath = matched_path

    end

    no_file_msg = "Coldstore could not find path [#{from_path}] from [#{@store_path}]."
    raise RuntimeError, no_file_msg unless file_matched

    log.info(x) { "Coldstore matched exactly one envelope at [#{full_filepath.hr_path}]." }
    return File.read full_filepath

  end


  # Write (freeze) the text into a file at the denoted path. The
  # folder path will be created if need be.
  #
  # Coldstores are usually frozen offline (offmachine) so after
  # this method completes the {ColdStore.push} behaviour must be
  # executed to synchronize the local coldstore freezer with the
  # remote mirror.
  #
  # @param this_text [String]
  #    this is the text that needs to be frozen into the local and
  #    subsequently the remote coldstore freezer.
  #
  # @param to_path [String]
  #    write the text (effectively freezing it) into the file at
  #    this path. An attempt will be made to put down the necessary
  #    directory structure.
  #
  #    This path is relative to the base of the store defined in
  #    the constructor.
  def write this_text, to_path

    freeze_filepath = File.join @store_path, to_path

    log.info(x) { "ColdStore freezing #{this_text.length} characters of worthless text."}
    log.info(x) { "ColdStore freeze file path => #{freeze_filepath.hr_path}"}

    FileUtils.mkdir_p(File.dirname(freeze_filepath))
    File.write freeze_filepath, this_text

  end


  private

  # @todo - write sync (with a local bias during conflicts)
  # then open up to the public (published) api.
  def push

  end

  # @todo - write sync (with a remote bias during conflicts)
  # then open up to the public (published) api.
  def pull

  end


end
# Provision the git branch involved in our present working directory.
# The [present directory] may not relate to version control at all or
# it may relate to the master or other branch in the source mgt tool.
class GitFlow

  # Cloneable repository urls are expected to end with this postfix.
  @@url_postfix = ".git/"

  # Check in whatever has changed in the local repository at the
  # path stated in the first parameter.
  #
  # The text in the second parameter helps to distinguish what was
  # to be pushed up and forms part of the git commit message.
  #
  # @param repo_root_dir [String] folder containing the .git directory
  # @param what_changed_string [String] woven into the commit message
  # @param time_stamp [String] woven into the commit message
  def self.push repo_root_dir, what_changed_string, time_stamp

    dot_git_path = File.join repo_root_dir, ".git"
    Throw.if_not_exists dot_git_path

    Dir.chdir repo_root_dir

    git_diff_cmd = "git status -vv; echo;"
    git_diff_output = %x[#{git_diff_cmd}]
    git_diff_output.log_lines

    git_add_cmd = "git add -A; echo;"
    git_add_output = %x[#{git_add_cmd}]
    git_add_output.log_lines

    git_commit_cmd = "git commit -m \"Writing #{what_changed_string} at #{time_stamp}.\";"
    git_commit_output = %x[#{git_commit_cmd}]
    git_commit_output.log_lines

    # This command may require input (username/password) from the
    # user hence we don't wrap inside output trapping executors.
    system "git push origin master"

  end


  # Return the branch name of a local git repository.
  #
  # Dependencies and assumptions: git is installed on the machine.
  #
  # @param path_to_dot_git [String] local path to the .git folder
  # @return [String] name of the currently checked out branch
  # @raise [ArgumentError] if no branch line starts with an asterisk
  def self.wc_branch_name path_to_dot_git

    cmd = "git --git-dir=#{path_to_dot_git} branch"
    branch_names = %x[#{cmd}]
    branch_names.each_line do |line|
      # The current branch is the line git prefixes with "* ".
      return line[2, line.length].strip if line.start_with?('*')
    end
    raise ArgumentError, "No branch name starts with asterisk.\n#{cmd}\n#{branch_names}\n"

  end


  # Get the remote origin url of a git working copy.
  #
  # Dependencies and assumptions: git is installed on the machine,
  # the working copy exists and has a remote origin configured.
  #
  # @param path_to_dot_git [String] local path to the .git folder
  # @return [String] the (stripped) remote origin url
  # @raise [ArgumentError] if git reports no remote origin url
  def self.wc_origin_url path_to_dot_git

    cmd = "git --git-dir=#{path_to_dot_git} config --get remote.origin.url"
    url = %x[#{cmd}]

    # %x never returns nil so the empty-output case must be
    # checked as well for the guard to be meaningful.
    raise ArgumentError, "No remote origin url.\n#{cmd}\n" if url.nil? || url.strip.empty?

    return url.strip

  end


  # Get the uncut revision of a git repo working copy.
  #
  # Dependencies and assumptions: git is installed on the machine,
  # the working copy exists and has a remote origin configured.
  #
  # @param path_to_dot_git [String] local path to the .git folder
  # @return [String] the full (uncut) revision hash of the remote branch head
  def self.wc_revision_uncut path_to_dot_git

    log.info(ere) { "##### GitFlow path to dot git is => #{path_to_dot_git}" }
    repo_url = wc_origin_url path_to_dot_git
    log.info(ere) { "##### The GitFlow repo url is => #{repo_url}" }

    ## Bug HERE - On Ubuntu the branch name is like => (HEAD detached at 067f9a3)
    ## Bug HERE - This creates a failure of => sh: 1: Syntax error: "(" unexpected
    ## Bug HERE - The unexpected failure occurs in the ls-remote command below
    ## Bug HERE - So hardcoding this to "master" for now
    # branch_name = wc_branch_name path_to_dot_git
    branch_name = "master"

    log.info(ere) { "##### The GitFlow branch name is => #{branch_name}" }

    # FIX - the original command duplicated the "ls-remote" token and
    # passed "-b" (a git-clone flag that ls-remote does not accept).
    # ls-remote takes the repository url followed by a ref pattern.
    cmd = "git ls-remote #{repo_url} #{branch_name}"
    log.info(ere) { "##### The GitFlow get dirty rev command is => #{cmd}" }
    dirty_revision = %x[#{cmd}]
    log.info(ere) { "##### The dirty revision is => #{dirty_revision}" }

    return dirty_revision.partition("refs/heads").first.strip

  end


  # Get brief revision of repo from working copy path.
  #
  # Dependencies and assumptions: we return the first 7 revision chars,
  # git is installed on the machine, the working copy exists and has
  # a remote origin configured.
  #
  # @param path_to_dot_git [String] local path to the .git folder
  # @return [String] "r" followed by the first 7 revision characters
  def self.wc_revision path_to_dot_git

    log.info(ere) { "GitFlow path to dot git is => #{path_to_dot_git}" }
    Throw.if_not_exists path_to_dot_git

    uncut_revision = wc_revision_uncut path_to_dot_git
    log.info(ere) { "GitFlow uncut full revision is => #{uncut_revision}" }

    # Gits [short revision] hash has 7 chars. Note 4 is the usable minimum.
    # For usage in stamps where space comes at a premium - 6 chars will do.
    ref_length = 7
    return "r" + uncut_revision[0..(ref_length - 1)]

  end


  # Clone the branch of a local git repo working copy.
  #
  # Why clone from a working copy (instead of a remote url)?
  # When actively [DEVELOPING] an eco plugin and you want to
  #
  #   1 - [test] the behaviour without a git commit/git push
  #   2 - test whatever [branch] the working copy is now at
  #
  # This use case requires us to clone from a working copy.
  #
  # @param path_to_dot_git [String] local path to the .git folder
  # @param path_to_new_dir [String] path to a new non-existent dir
  def self.do_clone_wc path_to_dot_git, path_to_new_dir

    ### Bug here - see getting branch name issue (wc_revision_uncut)
    ### branch_name = wc_branch_name path_to_dot_git
    branch_name = "master"
    cmd = "git clone #{path_to_dot_git} #{path_to_new_dir}"
    clone_output = %x[#{cmd}]

    log.info(ere) { "[gitflow] cloning working copy" }
    log.info(ere) { "[gitflow] repo branch name : #{branch_name}" }
    log.info(ere) { "[gitflow] src dot git path : #{path_to_dot_git}" }
    log.info(ere) { "[gitflow] new wc dir path : #{path_to_new_dir}" }
    log.info(ere) { "[gitflow] git clone command : #{cmd}" }
    log.info(ere) { "[gitflow] git clone output : #{clone_output}" }

  end


  # Clone a remote repository at the specified [url] into a
  # [NON-EXISTENT] folder path.
  #
  # The parent directory of a non existent folder path must [exist]
  # whilst the full path itself does not. The clone operation will
  # create the final folder in the path and then it [puts] the
  # repository contents within it.
  #
  # Dependencies and assumptions: git is installed on the machine,
  # the repo exists and is publicly readable, the master branch is
  # the one to clone, the current Dir.pwd() is writeable.
  #
  # @param repo_url [String] url (ends in dot git f-slash)
  # @param non_existent_path [String] path to new non-existent dir
  def self.do_clone_repo repo_url, non_existent_path

    cmd = "git clone #{repo_url} #{non_existent_path}"
    clone_output = %x[#{cmd}]

    log.info(ere) { "[gitflow] cloning remote repository" }
    log.info(ere) { "[gitflow] git repository url : #{repo_url}" }
    # NOTE(review): `nickname` is not defined in this class - presumably
    # a project-wide String extension; confirm it exists at runtime.
    log.info(ere) { "[gitflow] git clone dir path : #{nickname non_existent_path}" }
    log.info(ere) { "[gitflow] git clone command : #{cmd}" }
    log.info(ere) { "[gitflow] git clone output : #{clone_output}" }

  end


  # Clone [many] git repositories given an array of urls along with
  # a corresponding array of the working copy folder names and a
  # [parental] base (offset) folder.
  #
  # Dependencies and assumptions: the arrays have corresponding
  # entries, the parent dir is created if not exists, repos exist
  # and are publicly readable, master branches are the ones to clone.
  #
  # @param repo_urls [Array<String>] array of git repository urls
  # @param base_names [Array<String>] array of cloned repo base names
  # @param parent_dir [String] path to local [parent] folder
  def self.do_clone_repos repo_urls, base_names, parent_dir

    # File.exists? is deprecated (removed in Ruby 3.2) - use File.exist?
    Dir.mkdir parent_dir unless File.exist? parent_dir
    Throw.if_not_found parent_dir, "clone repos"

    repo_urls.each_with_index do | repo_url, repo_index |

      # Normalize the url so it always carries the ".git/" postfix.
      git_url = repo_url if repo_url.end_with? @@url_postfix
      git_url = "#{repo_url}#{@@url_postfix}" unless repo_url.end_with? @@url_postfix

      proj_folder = File.join parent_dir, base_names[repo_index]

      log.info(ere) { "[clone repos] proj [index] => #{repo_index}" }
      log.info(ere) { "[clone repos] repo url 1st => #{repo_url}" }
      log.info(ere) { "[clone repos] repo url 2nd => #{git_url}" }
      log.info(ere) { "[clone repos] project name => #{base_names[repo_index]}" }
      log.info(ere) { "[clone repos] project path => #{proj_folder}" }

      GitFlow.do_clone_repo git_url, proj_folder

    end

  end


  # Move assets from a git repo to a local zip file.
  #
  # Dependencies and assumptions: END PATH OFFSET WITH A FORWARD SLASH
  # (if no offset send "/" for path_offset), git is installed on the
  # machine, the repo exists with the path offset, the master branch
  # is archived, the name is unique as it is used to create a dir.
  #
  # @param repo_url [String] the url of the git repository
  # @param path_offset [String] FWD-SLASH ENDED PATH in repo
  # @param target_dir [String] the target folder for the new zip
  # @param zip_basename [String] extensionless name of the zip
  # @return [String] path to the zip file created in a tmp folder
  def self.git2zip repo_url, path_offset, target_dir, zip_basename

    log.info(ere) { "[git2zip] ------------------------------------------- -- #" }
    log.info(ere) { "[git2zip] archiving repo assets at path offset -- #" }
    log.info(ere) { "[git2zip] ------------------------------------------- -- #" }
    log.info(ere) { "[git2zip] git repository url : #{repo_url}" }
    log.info(ere) { "[git2zip] slash tail dir offset : #{path_offset}" }
    log.info(ere) { "[git2zip] target zip directory : #{target_dir}" }
    log.info(ere) { "[git2zip] zip file [base] name : #{zip_basename}" }

    clone_dir = File.join Dir.tmpdir(), zip_basename
    do_clone_repo repo_url, clone_dir
    dot_git_path = File.join clone_dir, ".git"
    dst_zip_path = File.join target_dir, "#{zip_basename}.zip"

    # A single-character offset means "repo root" - archive from HEAD.
    the_offset = path_offset
    the_offset = "" if path_offset.length == 1
    cmd = "git --git-dir=#{dot_git_path} archive -o #{dst_zip_path} HEAD:#{the_offset}"
    clone_output = %x[#{cmd}]

    log.info(ere) { "[git2zip] tmp clone src folder : #{clone_dir}" }
    log.info(ere) { "[git2zip] cloned dot git path : #{dot_git_path}" }
    log.info(ere) { "[git2zip] target zip full path : #{dst_zip_path}" }
    log.info(ere) { "[git2zip] git archive command : #{cmd}" }
    log.info(ere) { "[git2zip] ------------------------------------------- -- #" }
    log.info(ere) { "#{clone_output}" }
    log.info(ere) { "[git2zip] ------------------------------------------- -- #" }

    return dst_zip_path

  end


  # Return an array of simple file names in the repo.
  #
  # Dependencies and assumptions: we are not interested in folders,
  # the trawl is recursive (infinite depth), git is installed.
  #
  # @param repo_url [String] the url of the repository to read
  # @return [Array<String>] the file names within the repository
  def self.file_names repo_url

    random_text = SecureRandom.urlsafe_base64(12).delete("-_").downcase
    cloned_name = "eco.repo.clone.#{random_text}"
    cloned_path = File.join Dir.tmpdir(), cloned_name

    do_clone_repo repo_url, cloned_path
    dot_git_path = File.join cloned_path, ".git"

    cmd = "git --git-dir=#{dot_git_path} ls-tree -r master --name-only"
    filename_lines = %x[#{cmd}]
    names_list = Array.new
    filename_lines.each_line do |line|
      names_list.push line.strip
    end

    log.info(ere) { "[git2files] ----------------------------------------------" }
    log.info(ere) { "[git2files] [#{names_list.length}] files in [#{repo_url}]" }
    log.info(ere) { "[git2files] ----------------------------------------------" }
    log.info(ere) { "[git2files] Random Text : #{random_text}" }
    log.info(ere) { "[git2files] Cloned Name : #{cloned_name}" }
    log.info(ere) { "[git2files] Cloned Path : #{cloned_path}" }
    log.info(ere) { "[git2files] Repo Folder : #{dot_git_path}" }
    log.info(ere) { "[git2files] Reading Cmd : #{cmd}" }
    log.info(ere) { "[git2files] ----------------------------------------------" }
    pp names_list
    log.info(ere) { "[git2files] ----------------------------------------------" }

    return names_list

  end


end
class FactFind

  # Fact statements carrying this prefix are evaluated as Ruby code
  # rather than taken as literal string values.
  @@eval_prefix = "rb>>"

  # The fact tree values can be referenced using the @f
  # specifier with a 2 dimensional key.
  attr_reader :f

  # This method constructs the FactFind object and tree database
  # and initializes the root fact container structures.
  #
  # @f holds the whole two-dimensional fact tree while @s aliases
  # the group (section) most recently written to, enabling shorthand
  # references from within INI fact statements.
  def initialize

    @f = {}
    @s = {}

  end


  # Assimilate the gem's main factbase fact file into the structure
  # that is exposed to outside classes as the instance variable @f
  # (a 2D array type).
  #
  # The factfile to assimilate is always expected to exist in
  # folder [ ../factbase ] relative to this source file.
  #
  # @param factfile_name [String] name of factfile to assimilate
  def assimilate factfile_name

    factfile_dir = File.join(File.dirname(File.expand_path(__FILE__)), "../factbase")
    factfile_path = File.join factfile_dir, factfile_name

    log.info(x) { "Assimilating factfile in folder => #{factfile_dir}" }
    log.info(x) { "Assimilating factfile with name => #{factfile_name}" }

    assimilate_ini_file factfile_path

  end


  # Parse the INI formatted fact file at the given path and fold
  # every (group, key, value) triplet into the fact tree.
  #
  # Dependencies and assumptions: the [inifile] gem is installed,
  # a file exists at the ini_filepath, the fact containers are
  # instantiated.
  #
  # @param ini_filepath [String] path to the factfile to read
  # @raise [ArgumentError] if no file exists at the given path
  def assimilate_ini_file ini_filepath

    fact_filename = File.basename ini_filepath
    log_begin fact_filename

    no_file = "No (ini) factfile found here => #{ini_filepath}"
    # File.exists? is deprecated (removed in Ruby 3.2) - use File.exist?
    raise ArgumentError, no_file unless File.exist? ini_filepath

    # Use the inifile gem to parse and read the fact
    # file contents into Ruby's map structures.
    begin

      map_facts = IniFile.load ini_filepath
      map_facts.each do | group_str, key_str, input_value |
        assimilate_fact group_str, key_str, input_value
      end

    # Rescue StandardError (not Exception) so signals and exit
    # requests are never swallowed; the error is logged and re-raised.
    rescue StandardError => e

      log.fatal(x) { "## ############################ #################################" }
      log.fatal(x) { "## Fact File Assimilation Error ---------------------------------" }
      log.fatal(x) { "## ############################ #################################" }
      log.fatal(x) { "## File => #{ini_filepath}" }
      log.fatal(x) { "## Error => #{e.message}" }
      log.fatal(x) { "## ############################ #################################" }
      e.backtrace.log_lines
      log.fatal(x) { "## ############################ #################################" }

      raise

    end

    log_end fact_filename

  end


  # This method assimilates a two-dimensional fact bringing it into
  # the fact tree fold.
  #
  # Once assimilated, this fact with a 2D index can be reused
  # - for future fact resolution
  # - by classes with access to the fact tree
  # - for dynamic template resolution
  #
  # @param fact_group_str [String] the first dimensional fact key
  # @param fact_key_str [String] the second dimensional fact key
  # @param fact_value_str [String] value of the fact to assimilate
  # @raise [ArgumentError] if the fact value is nil
  def assimilate_fact fact_group_str, fact_key_str, fact_value_str

    # Both coordinates are symbolized through the one helper so the
    # (strip + dot-to-underscore) rule is applied consistently.
    grp_symbol = to_symbol( fact_group_str )
    key_symbol = to_symbol( fact_key_str )

    raise ArgumentError, "Assimilating Fact [ #{fact_group_str} ][ #{fact_key_str} ] => Value is NIL" if fact_value_str.nil?
    fact_string = fact_value_str.strip

    # Capture whether this group silo is new BEFORE add_fact inserts
    # it - the original checked afterwards so the log below never fired.
    is_new_group = !@f.key?( grp_symbol )

    begin

      eval_value = evaluate( fact_string )
      add_fact grp_symbol, key_symbol, eval_value

    rescue StandardError => e

      log.fatal(x) { "## ##################### #################################" }
      log.fatal(x) { "## Fact Evaluation Error ---------------------------------" }
      log.fatal(x) { "## ##################### #################################" }
      log.fatal(x) { "## Fact Family => #{fact_group_str}" }
      log.fatal(x) { "## Fact Key => #{fact_key_str}" }
      log.fatal(x) { "## Fact Stmt => #{fact_string}" }
      log.fatal(x) { "## Fact Error => #{e.message}" }
      log.fatal(x) { "## ##################### #################################" }
      e.backtrace.log_lines

      raise

    end

    if is_new_group then

      log.debug(x){ "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ #" }
      log.debug(x){ "# @@ the [#{fact_group_str}] silo facts." }
      log.debug(x){ "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ #" }

    end

    # Mask values whose key looks secret so credentials never land
    # in the logs in the clear.
    id_keystring = "#{grp_symbol}#{key_symbol}".downcase
    sensitive = id_keystring.includes_any? [ "secret", "password", "credential", "creds" ]
    print_value = "****************"
    print_value = eval_value unless sensitive

    fw_key = sprintf '%-33s', "@f[:#{grp_symbol}][:#{key_symbol}]"
    log.debug(x){ "#{fw_key} => #{print_value}" }

  end


  # This method converts from string to symbol.
  # @param from_string [String] neither nil nor empty string to convert
  # @return [Symbol] a symbol representation of the input string
  def to_symbol from_string
    return from_string.strip.gsub(".", "_").to_sym
  end


  private


  # Insert the key/value pair into the fact tree under the given
  # group, creating the group silo when it does not yet exist, and
  # repoint the @s shorthand alias at the group just written to.
  def add_fact group_symbol, key_symbol, key_value

    fact_component = "[group]=> #{group_symbol} [key]=> #{key_symbol} [value]=> #{key_value}"
    nil_error_text = "Neither fact coordinates nor values can be nil. #{fact_component}"
    raise ArgumentError, nil_error_text if group_symbol.nil? || key_symbol.nil? || key_value.nil?

    if @f.key? group_symbol then

      # This isn't the first fact within this group so store the
      # new fact key/value pair within the group's namespace.
      @f[group_symbol][key_symbol] = key_value

    else

      # Create a new umbrella grouping against which the new
      # key-value pairing will be inserted.
      @f.store group_symbol, { key_symbol => key_value }

    end

    # The @s sibling hash is updated to reflect the key-value pairs
    # within the current group. This allows @s to be used as shorthand
    # within INI file fact definition statements.
    @s = @f[group_symbol]

  end


  # Return the string as-is unless it carries the eval prefix, in
  # which case strip the prefix and evaluate the remainder as Ruby.
  #
  # NOTE(review): eval on factfile content executes arbitrary code -
  # factfiles must only ever come from trusted sources.
  def evaluate string

    # -----> @todo raise a FactError here
    raise RuntimeError, "Fact to Evaluate is Nil." if string.nil?
    return string unless string.start_with? @@eval_prefix
    return eval( string.gsub @@eval_prefix, "" )

  end


  # Emit the banner that brackets the start of a factfile assimilation.
  def log_begin the_filename

    log.info(x) { "- -" }
    log.info(x) { "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@ #" }
    log.info(x) { "# -- ------------------------------------------------- -- #" }
    log.info(x) { "# -- [= BEGIN THE ASSIMILATION =] #{the_filename}" }
    log.info(x) { "# -- ------------------------------------------------- -- #" }
    log.info(x) { "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@ #" }

  end


  # Emit the banner that brackets the end of a factfile assimilation.
  def log_end the_filename

    log.info(x) { "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@ #" }
    log.info(x) { "# -- ------------------------------------------------- -- #" }
    log.info(x) { "# -- [= END ASSIMILATION =] #{the_filename}" }
    log.info(x) { "# -- ------------------------------------------------- -- #" }
    log.info(x) { "# @@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@ #" }
    log.info(x) { "- -" }
    log.info(x) { "- -" }
    log.info(x) { "- -" }

  end


end
@@ +#!/usr/bin/ruby +# coding: utf-8 + +module OpenSession + + + # Require every file with a dot rb extension that is + # +either directly in or recursively below+ the calling gem's + # directory. + # + # Note that this class and its methods depend on an initialized + # logger so as a pre-condition, ensure the logging has been + # instantiated before calling. + # + # == The Base Require Path + # + # Here is an example of the base require path being derived. + # + # @example + # Let's assume that the + # + # - ruby gems version is 2.3.0, and the + # - safe version is 0.0.944, and the + # - calling class is in the lib directory + # + # +then+ the gem base path would be + # + # /var/lib/gems/2.3.0/gems/safe-0.0.944/lib + # + # This means every ruby (.rb) file both +directly in+ and + # +recursively below+ the lib directory will be + # required. + # + # + # == Requiring Parental Classes Before Child Classes + # + # This is a common problem when bringing classes in to join + # the fray. We must require the +Planet+ class before + # we require the +Neptune+ class. + # + # class Neptune < Planet + # + # The solution lies in the directory structure between parent + # and child classes and this is illustrated by +plugins+. + # + # ------------------------ + # Plugins Folder Structure + # ------------------------ + # + # In the plugins hierarchy, you'll notice that the child classes + # are always below the parents. This strategy works if the +inheritors+ + # are in the same gem as the +inherited+. + class RecursivelyRequire + + # Require every file with a dot rb extension that is + # +either in or recursively below+ the file path given + # in the parameter. + # + # This method logs every file that is required using + # the INFO log level. + # + # == Requiring Parental Classes Before Child Classes + # + # This is a common problem when bringing classes in to join + # the fray. We must require the +Planet+ class before + # we require the +Neptune+ class. 
+ # + # class Neptune < Planet + # + # The solution lies in the directory structure between parent + # and child classes and this is illustrated by +plugins+. + # + # ------------------------ + # Plugins Folder Structure + # ------------------------ + # + # In the plugins hierarchy, you'll notice that the child classes + # are always below the parents. This strategy works if the +inheritors+ + # are in the same gem as the +inherited+. + # + # This require loop is breadth first not depth first + # so all the parent (base) classes in plugins will be required before + # their extension classes in the lower subdirectories. + # + # @param gem_filepath [String] path to callling gem (use __FILE) + def self.now gem_filepath + + +############ require_relative "../cryptools/keygen" + require_relative "../usecase/cmd" + + + gem_basepath = File.expand_path "..", gem_filepath + + log.info(x) { "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" } + log.info(x) { "@@@@ Require Gems In or Under [#{gem_basepath}]" } + log.info(x) { "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" } + + Dir["#{gem_basepath}/**/*.rb"].each do |gem_path| + + log.info(x) { "@@@@ => #{gem_path}" } + require gem_path + + end + + log.info(x) { "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" } + + end + + end + +end diff --git a/lib/session/time.stamp.rb b/lib/session/time.stamp.rb new file mode 100644 index 0000000..3536f72 --- /dev/null +++ b/lib/session/time.stamp.rb @@ -0,0 +1,340 @@ +#!/usr/bin/ruby + +module OpenSession + + require 'singleton' + + # This stamp sits at the centre of a fundamental DevOps pattern concerned + # with infrastructure provisioning and configuraion management. + # + # The central idea behind the pattern is to link every infrastructure + # object created during a session with a reference accurate to the nearest + # centi-second denoting the moment the software runtime (session) began. 
+ class Stamp + include Singleton + + attr_reader :time_now + + # Return two digit [mo] month index from 01 to 12. + # @example 02 => in February + def self.mo + return Stamp.instance.time_now.strftime "%m" + end + + + # Return three character abbreviated month name. + # @example feb => in February + def self.mmm + return Stamp.instance.time_now.strftime( "%b" ).downcase + end + + + # Return three character abbreviated day of week. + # @example tue => on Tuesday + def self.ddd + return Stamp.instance.time_now.strftime( "%a" ).downcase + end + + + # Return two digit (character) hour of day from 00 to 23. + # @example 22 => between 22.00.00 and 22.59.59 inclusive + def self.hh + return Stamp.instance.time_now.strftime "%H" + end + + + # Return two digit minute of hour from [00] to [59]. + def self.mm + return Stamp.instance.time_now.strftime "%M" + end + + + # Return two digit second of minute from [00] to [59]. + def self.ss + return Stamp.instance.time_now.strftime "%S" + end + + + # Return a [3 digit] second and tenth of second + # representation. + # + # The final digit is derived from the 1000 sliced + # millisecond of second running from 000 to 999. + # + # Truncation (Not Rounding) + # + # The [final] digit is acquired by TRUNCATING + # (chopping off) the last 2 of the 3 millisecond + # digits. No rounding is applied. + # + # The 3 returned digits comprise of the + # + # - second of minute => 2 digits | [00] to [59] (and) + # - tenth of second => 1 digit from [0] to [9] + # + # @example + # + # => The time at the 562nd millisecond of the 49th + # second of the minute. + # + # => 3 chars + # => 495 + def self.sst + millisec_string = Stamp.instance.time_now.strftime "%L" + return "#{ss}#{millisec_string[0]}" + end + + + # Return the [two] digit year (eg 19 for 2019). + # that we are currently in. + def self.yy + return Stamp.instance.time_now.strftime("%Y")[2..-1] + end + + + # Return the [four] digit year (eg 2019) + # that we are currently in. 
+ def self.yyyy + return Stamp.instance.time_now.strftime("%Y") + end + + + # Return 3 digit julian day of year [001] to [366]. + def self.jjj + return Stamp.instance.time_now.strftime "%j" + end + + + # [yymo_mmm] returns an amalgam of + # + # => the two-digit year + # => the two-digit month index (starting at 01) + # => a period (separator) + # => the abbreviated month name + # + # @example + # => 1908.aug + # => for August 2019 + # + def self.yymo_mmm + return "#{yy}#{mo}.#{mmm}" + end + + + # Given two integer parameters (month index and 4 digit year) representing + # the month in question this method returns the [PREVIOUS MONTHS] character + # amalgam in the format [yymo_mmm] where + # + # => yy | previous month's two-digit year + # => mo | previous month's two-digit month index + # => . | a period (separator) + # => mmm | previous month's abbreviated month name + # + # ------------------- + # Example 1 (Simple) + # ------------------- + # + # returns char => 1907.jul + # 4 parameters => 8, 2019 + # representing => August, 2019 + # + # ---------------------- + # Example 2 (Last Year) + # ---------------------- + # + # returns char => 1812.dec + # 4 parameters => 1, 2019 + # representing => January, 2019 + def self.previous_month_chars this_month_index, this_4digit_year + + prev_month_index = this_month_index == 1 ? 12 : ( this_month_index - 1 ) + prev_2dig_mn_pad = sprintf '%02d', prev_month_index + prev_4digit_year = this_month_index == 1 ? ( this_4digit_year - 1 ) : this_4digit_year + prev_twodigit_yr = "#{prev_4digit_year.to_s}"[2..-1] + prev_months_name = Date::ABBR_MONTHNAMES[prev_month_index].downcase + + return "#{prev_twodigit_yr}#{prev_2dig_mn_pad}.#{prev_months_name}" + + end + + # Using the current class time this method returns + # the character amalgam for the [PREVIOUS MONTH] in + # the format [yymo_mmm] where + # + # => yy | last month's two-digit year + # => mo | last month's two-digit month index + # => . 
| a period (separator) + # => mmm | last month's abbreviated month name + # + # ------------------- + # Example 1 (Simple) + # ------------------- + # + # returns => 1907.jul + # if this month is => August 2019 + # + # ---------------------- + # Example 2 (Last Year) + # ---------------------- + # + # returns => 1812.dec + # if this month is => January 2019 + def self.yymo_mmm_prev + return previous_month_chars mo.to_i, yyyy.to_i + end + + + # Return 5 digit amalgam of year and julian day. + # eg [19003] for [January 3rd 2019] + def self.yyjjj + return "#{yy}#{jjj}" + end + + + # Return the 4 digit amalgam of the hour and minute + # using the 24 hour clock. + # + # @example + # => 1525 + # => 03:25 pm + def self.hhmm + return "#{hh}#{mm}" + end + + + # Return the time of day to a TENTH of a second accuracy. + # [8] characters will always be returned with the 5th one + # being the (period) separator. + # + # The first (separated) segment delivers a hhmm 24 hour + # clock representation of the stamped time. + # + # The 3 digits of the second segment comprise of + # + # second of minute => 2 digits | [00] to [59] + # tenth of second => 1 digit from [0] to [9] + # + # @example + # => The time at the 562nd millisecond of the 49th + # second of the 23rd minute of the 17th hour of + # the day ( 17:23:49.562 ) + # + # => 8 chars + # => 1723.495 + def self.hhmm_sst + return "#{hhmm}.#{sst}" + end + + + # Return a string timestampt that is a period separated + # amalgam of the 2 digit year, 3 digit julian day, 2 digit + # hour, 2 digit minute, 2 digit second and 1 digit rounded + # down tenth of second. + # + # @example + # => 19003.1025 + # => 10:25 am on January 3rd 2019 + # + # + # Return the time of day to a TENTH of a second accuracy. + # [8] characters will always be returned with the 5th one + # being the (period) separator. + # + # The first (separated) segment delivers a hhmm 24 hour + # clock representation of the stamped time. 
+ # + # The 3 digits of the second segment comprise of + # + # - second of minute => 2 digits | [00] to [59] + # - tenth of second => 1 digit from [0] to [9] + # + # @example + # => The time at the 562nd millisecond of the 49th + # second of the 23rd minute of the 17th hour of + # the day ( 17:23:49.562 ) + # + # => 8 chars + # => 1723.495 + def self.yyjjj_hhmm_sst + return "#{yyjjj}.#{hhmm}.#{sst}" + end + + + # Return a string timestampt that is a period separated + # amalgam of the 2 digit year, 3 digit julian day, 2 digit + # hour, 2 digit minute, 2 digit second and 9 digit + # nanosecond. + # + # @example + # return => 19003.1725.42.836592034 + # 4 time => 17:25:42 am on January 3rd 2019 + # + # As per the above example, the time returned + # + # - is the 836592034 nanosecond + # - of the 42nd second + # - of the 25th minute + # - of the 17th hour + # - of the 3rd day + # - of the 20th year + # - of the 21st century + # + # @return [String] + # Return the time of day to nanosecond accuracy. + # 23 characters are always returned with three (3) period + # separators at the 6th, 11th and 14th positions. + def self.yyjjj_hhmm_ss_nanosec + nanosec_str = Stamp.instance.time_now.strftime "%9N" + return "#{yyjjj}.#{hhmm}.#{ss}.#{nanosec_str}" + end + + + # Return the Rubyfied time zone being used. + def self.zone + return Stamp.instance.time_now.zone + end + + + # Log segments of time pertaining to the time stamp. 
+ # @todo + # move method contents into test class + def self.log_instance_time + + log.info(x) { "[stamp] -------------- => -------------------------------- #" } + log.info(x) { "[stamp] eco time stamp => [#{Stamp.instance.time_now.ctime}]" } + log.info(x) { "[stamp] -------------- => -------------------------------- #" } + log.info(x) { "[stamp] Univ Time Zone => #{zone}" } + log.info(x) { "[stamp] Month Index is => #{mo}" } + log.info(x) { "[stamp] Month Name is => #{mmm}" } + log.info(x) { "[stamp] Day Of Week is => #{ddd}" } + log.info(x) { "[stamp] -------------- => -------------------------------- #" } + log.info(x) { "[stamp] Two Digit Year => #{yy}" } + log.info(x) { "[stamp] Julian Cal Day => #{jjj}" } + log.info(x) { "[stamp] Yr and Jul Day => #{yyjjj}" } + log.info(x) { "[stamp] Hour of Theday => #{hh}" } + log.info(x) { "[stamp] Minute of Hour => #{mm}" } + log.info(x) { "[stamp] Hour + Minute => #{hhmm}" } + log.info(x) { "[stamp] Second of Min => #{ss}" } + log.info(x) { "[stamp] 600 Min Slices => #{sst}" } + log.info(x) { "[stamp] -------------- => -------------------------------- #" } + log.info(x) { "[stamp] The Time Stamp => #{yyjjj_hhmm_sst}" } + log.info(x) { "[stamp] -------------- => -------------------------------- #" } + + end + + + # This singleton (one instance) class sets the time just once. + def initialize + + @time_now = Time.now; + + end + + +############ Stamp.log_instance_time + + + end + + +end diff --git a/lib/session/user.home.rb b/lib/session/user.home.rb new file mode 100644 index 0000000..35982fa --- /dev/null +++ b/lib/session/user.home.rb @@ -0,0 +1,49 @@ +#!/usr/bin/ruby + +# coding: utf-8 + +# opensession contains basic behaviour for managing a client only +# (serverless) session. Configuration directives are read and written +# from an INI off the home directory that is created when the session +# is first initiated. 
+module OpenSession + + # This singleton class ascertains the users home folder in a manner + # agnositic to whether the software is running on Linux or Windows. + class Home + include Singleton + + # This static behaviour reads the [home folder] just once. + def self.dir + return Home.instance.folder + end + + # This static behaviour reads the [username] just once. + def self.usr + return Home.instance.username + end + + attr_reader :folder + attr_reader :username + + # Ascertain the home folder location. + def initialize + + # On Windows the home folder may end with [AppData/Roaming]. + extraneous_path = "/AppData/Roaming" + + @folder = Dir.home + @username = @folder.split("/").last + return unless Dir.home.end_with? extraneous_path + + # Remove the tail [AppData/Roaming] from the home path. + @folder = Dir.home.gsub extraneous_path, "" + @username = @folder.split("/").last + + end + + + end + + +end diff --git a/lib/usecase/cmd.rb b/lib/usecase/cmd.rb new file mode 100644 index 0000000..8fb7a5c --- /dev/null +++ b/lib/usecase/cmd.rb @@ -0,0 +1,490 @@ +#!/usr/bin/ruby +# coding: utf-8 + +module SafeDb + + # The parent SafeDb use case is designed to be extended by the cli + # (command line) use cases like {SafeDb::Open}, {SafeDb::Put} and + # {SafeDb::Lock} because it describes behaviour common to at least two + # (but usually more) of the use cases. + # + # == Common Use Case Behaviour + # + # This {SafeDb::UseCase} use case is designed to be extended and does preparatory + # work to create favourable and useful conditions to make use cases readable, + # less repetitive, simpler and concise. + # + # == Machine (Workstation) Configuration File + # + # The global configuration filepath is found off the home directory using {Dir.home}. 
+ # + # ~/.safedb.net/safedb.net.configuration.ini + # + # The global configuration file in INI format is managed through the methods + # + # - {grab} read the value at key_name from the default section + # - {stash} put directive key/value pair in default section + # - {read} read the value at key_name from the parameter section + # - {write} put directive key/value pair in parameter section + class UseCase + + # This variable should be set to true if the use case call + # originates from a shell different from the one through which + # the login ocurred. + # + # To proceed, the shell that hosted the safe login must be a + # parent or at least an ancestor of this shell. + # + # This variable need not be set if the login shell is the direct + # parent of this one (the every day manual usage scenario). + # + # If however the login occurred from a grand or great grandparent + # shell (as is the case when nested scripts make an agent-like call), + # this variable must be set to true. + attr_writer :from_script + + # This prefix denotes keys and their values should be posted as environment + # variables within the context (for example terraform) before instigating the + # main action like terraform apply. + ENV_VAR_PREFIX_A = "evar." + ENV_VAR_PREFIX_B = "@evar." + + # This prefix precedes keynames whose map value represents a file object. + FILE_KEY_PREFIX = "file::" + + # The base64 encoded representation of the file content is placed into + # a map with this keyname. + FILE_CONTENT_KEY = "content64" + + # The file base name is placed into a map with this keyname. + FILE_NAME_KEY = "filename" + + + # This is the root command typed into the shell to invoke one of the + # safe use cases. + COMMANDMENT = "safe" + + + # The name of the environment variable that will hold the session token + # generated by {self.generate_session_token}. This environment variable + # is typically instantiated either manually (for ad-hoc use) or through + # media such as dot profile. 
+ ENV_VAR_KEY_NAME = "SAFE_TTY_TOKEN" + + + # If and when this command line credentials management app needs to write + # any configuration directives to the machine's userspace it will use this + # constant to denote the (off-home) directory name. + APP_DIR_NAME = "safedb.net" + + + # Get the master database. This behaviour can only complete + # correctly if a successful login precedes this call either + # in this or an ancestral shell environment. + # + # @return [Hash] + # the hash data structure returned represents the master + # database. + def get_master_database + + begin + + log.info(x) { "Request for master db with from_script set to #{@from_script}" } + return OpenKey::KeyApi.read_master_db( @from_script ) + + rescue OpenSSL::Cipher::CipherError => e + + log.fatal(x) { "Exception getting master db for the safe book." } + log.fatal(x) { "The from_script parameter came set as [ #{@from_script} ]" } + log.fatal(x) { "The exception message is ~> [[ #{e.message} ]]" } + e.backtrace.log_lines + abort e.message + + end + + end + + # The path to the initial configuration file below the user's home + # directory. The directory name the configuration file sits in is + # a dot prefixed context name derived from the value inside the + # {APP_DIR_NAME} constant. + # + # ~/.<>/<>-configuration.ini + # + # You can see the filename too is derived from the context with a + # trailing string ending in .ini. + # + # @return [String] full path to the context configuration file + def config_file + return File.join config_directory(), "#{APP_DIR_NAME}.configuration.ini" + end + + + # This method returns the absolute path to the directory that the + # configuration file sits in. It is basically just the dot prefixed + # context name (the {APP_DIR_NAME} constant). 
+ # + # ~/.<> + # + # @return [String] path to directory holding context configuration file + def config_directory + return File.join( Dir.home, ".#{APP_DIR_NAME}" ) + end + + + # Execute the use cases's flow from beginning when + # you validate the input and parameters through the + # memorize, execute and the final cleanup. + def flow_of_events + + check_pre_conditions + execute + cleanup + check_post_conditions + + end + + + # Validate the input parameters and check that the current + # state is perfect for executing the use case. + # + # If either of the above fail - the validation function should + # set a human readable string and then throw an exception. + def check_pre_conditions + + begin + + pre_validation + + rescue OpenError::CliError => e + + puts "" + puts "Your command did not complete successfully." + puts "Pre validation checks failed." + puts "" + puts " => #{e.message}" + puts "" + abort e.message + end + + end + + + # Override me if you need to + def pre_validation + + end + + + # After the main flow of events certain state conditions + # must hold true thus demonstrating that the observable + # value has indeed ben delivered. + # + # Child classes should subclass this method and place any + # post execution (post condition) checks in it and then + # make a call to this method through the "super" keyword. + def check_post_conditions + + begin + + post_validation + + rescue OpenError::CliError => e + + puts "" + puts "Your command did not complete successfully." + puts "Post validation checks failed." + puts "" + puts " => #{e.message}" + #### puts " => #{e.culprit}" + puts "" + abort e.message + end + + end + + + # Child classes should subclass this method and place any + # post execution (post condition) checks in it and then + # make a call to this method through the "super" keyword if + # this method gets any global behaviour in it worth calling. + def post_validation + + end + + + # Execute the main flow of events of the use case. 
Any + # exceptions thrown are captured and if the instance + # variale [@human_readable_message] is set - tell the + # user about it. Without any message - just tell the + # user something went wrong and tell them where the logs + # are that might carry more information. + def execute + + end + + + # If the use case validation went well, the memorization + # went well the + def cleanup + + end + + + # This use case is initialized primary by resolving the configured + # +general and use case specific facts+. To access the general facts, + # a domain name is expected in the parameter delegated by the extension + # use case classes. + def initialize + + class_name = self.class.name.split(":").last.downcase + is_no_token_usecase = [ "token", "init", "id" ].include? class_name + return if is_no_token_usecase + + exit(100) unless ops_key_exists? + + fact_filepath = File.sister_filepath( self, "ini", :execute ) + log.info(x) { "Search location for INI factfile is [#{fact_filepath}]" } + return unless File.exists?( fact_filepath ) + + @facts = FactFind.new() + add_secret_facts @facts + @facts.assimilate_ini_file( fact_filepath ) + @dictionary = @facts.f[ @facts.to_symbol( class_name ) ] + + end + + + private + + + ENV_PATH = "env.path" + KEY_PATH = "key.path" + ENVELOPE_KEY_PREFIX = "envelope@" + + LAST_ACCESSED = "last.accessed.time" + + SESSION_DICT_LOCK_SIZE = 32 + + SESSION_DICT_LOCK_NAME = "crypted.session.dict.lock" + + ENVELOPE_KEY_SIZE = 32 + + ENVELOPE_KEY_NAME = "crypted.envelope.key" + + ENVELOPE_ID_SIZE = 16 + + ENVELOPE_ID_NAME = "crypted.envelope.id" + + SESSION_ID_SIZE = 64 + + SESSION_FILENAME_ID_SIZE = 24 + + SESSION_START_TIMESTAMP_NAME = "session.creation.time" + + MASTER_LOCK_KEY_NAME = "master.session.lock.key" + + APPLICATION_GEM_NAME = "safedb" + APPLICATION_GEM_WEBSITE = "https://www.safedb.net" + APPLICATION_GITHUB_URL = "https://github.com/devops4me/safedb.net" + + + def add_secret_facts fact_db + + master_db = get_master_database() + raise 
ArgumentError.new "There is no open chapter here." if unopened_envelope?( master_db ) + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + mini_dictionary = chapter_data[ master_db[ KEY_PATH ] ] + + mini_dictionary.each do | key_str, value_str| + fact_db.assimilate_fact( "secrets", key_str, value_str ) + end + + end + + + def create_header() + + return OpenKey::KeyApi.format_header( + SafeDb::VERSION, + APPLICATION_GEM_NAME, + APPLICATION_GITHUB_URL, + @domain_name + ) + + end + + + + def ops_key_exists? + + log_env() + + if ( ENV.has_key? ENV_VAR_KEY_NAME ) + return true + end + + puts "" + puts "safe needs you to create a session key." + puts "To automate this step see the documentation." + puts "To create the key run the below command." + puts "" + puts " export #{ENV_VAR_KEY_NAME}=`#{COMMANDMENT} token`" + puts "" + puts "Those are backticks surrounding `#{COMMANDMENT} token`" + puts "Not apostrophes." 
+ puts "" + + return false + + end + + + def log_env() + + log.info(x) { "Gem Root Folder => #{Gem.dir()}" } + log.info(x) { "Gem Config File => #{Gem.config_file()}" } + log.info(x) { "Gem Binary Path => #{Gem.default_bindir()}" } + log.info(x) { "Gem Host Path => #{Gem.host()}" } + log.info(x) { "Gem Spec Info => #{Gem.latest_spec_for(APPLICATION_GEM_NAME)}" } + log.info(x) { "Gem Spec Name => #{Gem.latest_spec_for(APPLICATION_GEM_NAME).name}" } + log.info(x) { "Gem Spec Version => #{Gem.latest_spec_for(APPLICATION_GEM_NAME).version}" } + log.info(x) { "Gem Caller Folder => #{Gem.location_of_caller()}" } + log.info(x) { "Gem Paths List => #{Gem.path()}" } + log.info(x) { "Gem Platforms => #{Gem.platforms()}" } + log.info(x) { "Gem Ruby Version X => #{Gem.ruby()}" } + log.info(x) { "Gem Ruby Version Y => #{Gem::VERSION}" } + log.info(x) { "Gem Ruby Version Z => #{Gem.latest_rubygems_version()}" } + log.info(x) { "Gem User Folder => #{Gem.user_dir()}" } + log.info(x) { "Gem User Home => #{Gem.user_home()}" } + + return + + end + + + def unopened_envelope?( key_database ) + + return false if key_database.has_key?( ENV_PATH ) + print_unopened_envelope() + return true + + end + + + def print_unopened_envelope() + + puts "" + puts "Problem - before creating, reading or changing data you" + puts "must first open a path to it like this." + puts "" + puts " #{COMMANDMENT} open email.accounts joe@gmail.com" + puts "" + puts " then you put data at that path" + puts "" + puts " #{COMMANDMENT} put username joebloggs" + puts " #{COMMANDMENT} put password jo3s-s3cr3t" + puts " #{COMMANDMENT} put phone-no 07123456789" + puts " #{COMMANDMENT} put question \"Mums maiden name\"" + puts "" + puts " and why not read it back" + puts "" + puts " #{COMMANDMENT} get password" + puts "" + puts " then close the path." + puts "" + puts " #{COMMANDMENT} close" + puts "" + + end + + + def print_already_logged_in + + puts "" + puts "We are already logged in. 
Open a secret envelope, put, then seal." + puts "" + puts " #{COMMANDMENT} open aws.credentials:s3reader" + puts " #{COMMANDMENT} put access_key ABCD1234" + puts " #{COMMANDMENT} put secret_key FGHIJ56789" + puts " #{COMMANDMENT} put region_key eu-central-1" + puts " #{COMMANDMENT} seal" + puts "" + + end + + + def print_already_initialized + + puts "" + puts "You can go ahead and login." + puts "Your domain [#{@domain_name}] is already setup." + puts "You should already know the password." + puts "" + puts " #{COMMANDMENT} login #{@domain_name}" + puts "" + + end + + + def print_domain_initialized + + puts "" + puts "It is time to login." + puts "The protector keys for [#{@domain_name}] have been setup." + puts "From now on you simply login to use this domain." + puts "" + puts " #{COMMANDMENT} login #{@domain_name}" + puts "" + + end + + + def print_not_initialized + + puts "" + puts "Please initialize the app domain on this machine." + puts "Give a domain name and a folder for key storage." + puts "" + puts " #{COMMANDMENT} init \"$HOME/open.world\"" + puts "" + + end + + + def print_success_initializing + + puts "" + puts "Success - now open a secret envelope, put, then seal." + puts "" + puts " #{COMMANDMENT} open aws.credentials:s3reader" + puts " #{COMMANDMENT} put access_key ABCD1234" + puts " #{COMMANDMENT} put secret_key FGHIJ56789" + puts " #{COMMANDMENT} put region_key eu-central-1" + puts " #{COMMANDMENT} seal" + puts "" + + end + + + def print_login_success + + puts "" + puts "Success - you are logged in." 
+ puts "" + puts " #{COMMANDMENT} open aws.credentials:s3reader" + puts " #{COMMANDMENT} put access_key ABCD1234" + puts " #{COMMANDMENT} put secret_key FGHIJ56789" + puts " #{COMMANDMENT} put region_key eu-central-1" + puts " #{COMMANDMENT} seal" + puts "" + + end + + + end + + +end diff --git a/lib/usecase/config/README.md b/lib/usecase/config/README.md new file mode 100644 index 0000000..a1d7680 --- /dev/null +++ b/lib/usecase/config/README.md @@ -0,0 +1,57 @@ + +# Safe | Increasing Key Derivation Function Cost + +Safe uses two **key derivation functions** (**BCrypt** and **PBKDF2**) to transform the human sourced password into a key. The only role the resulting key plays is the encryption and decryption of a large **highly random computer generated key** which in turn protects the **master database**. + +### Why are two (2) key derivation algorithms used? + +Your credentials are still safe even in the rare case of a successful analytical attack being discovered on one of the algorithms. + +## Why High Computational Costs are Desirable? + +Unlike most algorithms, key derivation functions **work best when they run slowly!** This protects against brute force attacks where attackers use "rainbow tables" or try to iterate over common passwords in an attempt to rederive the key. + +Your responsibility is to make **safe as slow as is tolerable** by increasing the number of iterations required to derive each key. + +## Safe | Increasing the Cost of Both Key Derivation Functions + +You should increase the cost of **safe's** key derivation functions until safe commands run as slow as is tolerably and no less! + +```bash +safe cost bcrypt 3 +safe cost pbkdf2 4 +``` + +Both algorithms can be configured with a cost parameter from 1 to 7 inclusive. The default cost is 1 for both and is moderately secure and runs as slowly as is tolerable on an IBM ThinkPad laptop with an Intel Pentium i5 processor with 16G of RAM. + +Note that PBKDF2 has no maximum. 
BCrypt limits the cost to 2^16. + +
+    -------- - ------------ - --------------- - --------------------- - ---------------- -
+    |  Cost  |     BCrypt   |       BCrypt    |        PBKDF2         |     PBKDF2       |
+    |        |     Cost     |    Iterations   |         Cost          |   Iterations     |
+    | ------ - ------------ - --------------- - --------------------- - ---------------- |
+    |    1   |     2^10     |       1,024     |      3^0 x 100,000    |       100,000    |
+    |    2   |     2^11     |       2,048     |      3^1 x 100,000    |       300,000    |
+    |    3   |     2^12     |       4,096     |      3^2 x 100,000    |       900,000    |
+    |    4   |     2^13     |       8,192     |      3^3 x 100,000    |     2,700,000    |
+    |    5   |     2^14     |      16,384     |      3^4 x 100,000    |     8,100,000    |
+    |    6   |     2^15     |      32,768     |      3^5 x 100,000    |    24,300,000    |
+    |    7   |     2^16     |      65,536     |      3^6 x 100,000    |    72,900,000    |
+    -------- - ------------ - --------------- - --------------------- - ---------------- -
+
+ +When you increase the cost **safe will become perceivably slower**. With a cost of 7, a laptop takes many minutes but an AWS cloud compute optimized M5 server crunches through in mere seconds. + +## What is Your Data Worth? + +Attackers can bring a significant amount of modern data centre hardware to the table in order to access your credentials. + +However, these computing resources cost money and the amount of money an attacker spends will be proportional to the perceived gains from a successfully attack. The bigger the dollar signs in their eyes, the more they will spend. + +The default settings coupled with a **12 character password** takes (on average) 50 years to crack with computing resources that will cost $1,000 every single day. + +### Twenty Million Dollars + +If what you are protecting is worth more than **$(50 x 366 x 1,000)**, you should use an at least 16 character password and increase the computational cost parameters for both key derivation functions. + diff --git a/lib/usecase/docker/README.md b/lib/usecase/docker/README.md new file mode 100644 index 0000000..5739852 --- /dev/null +++ b/lib/usecase/docker/README.md @@ -0,0 +1,146 @@ + +# safe jenkins + + +### safe jenkins post [aws|docker|git] <> | introduction + +Use **`safe jenkins post`** to inject both your **AWS IAM User** and **docker login/password** credentials into your Jenkins 2.0 continuous integration portal reachable by the **jenkins host url** given in the 4th parameter of the safe command. + +--- + +## safe jenkins post | prerequisite + +Before you can inject credentials into jenkins using **`safe jenkins post`** you must + +- be logged into your safe +- have opened the appropriate chapter/verse +- have put the required credential key/value pairs into the safe +- have the jenkins service up and running + +After the post (to jenkins), your continuous integration jobs will be able to access the credential values via their IDs as stated in the below table. 
+ +--- + +## safe jenkins post aws | key names table + +As credentials are WORO (write once, read often), safe makes the reading part very very easy (and secure) so your effort is frontloaded. + +| Safe Key | Jenkins Credential IDs | Environment Variable | Description | +|:-----------:|:----------------------:|:--------------------- |:-------------------------------------------------------- | +| @access.key | safe.aws.access.key | AWS_ACCESS_KEY_ID | The AWS IAM user's access key credential. | +| @secret.key | safe.aws.secret.key | AWS_SECRET_ACCESS_KEY | The AWS IAM user's secret key credential. | +| region.key | safe.aws.region.key | AWS_REGION | The AWS region key that your Jenkins service points to. | + +So you can see that by convention, safe expects the credential keys in the safe to be named a particular way, and likewise, you can be assured of the IDs it gives those credentials when posted to Jenkins. + + +## safe jenkins post | credentials lifecycle + +The life of the credentials begins when you create an IAM user and record its access and secret keys. Then + +- you login to safe and store the 3 keys and their values +- safe jenkins post will read the values and post them to Jenkins +- Jenkins stores the values in conjunction with the Jenkins Credential IDs +- pipeline jobs ask Jenkins to put the Credential ID values against environment variables +- tools like Terraform and AwsCli use the environment variables to work in the cloud + + +## Jenkinsfile | Usage in Pipeline Jobs + +Here is a pipeline declaration within a Jenkinsfile that asks Jenkins to put the credential values in its secrets store into the stated environment variables. 
+ + environment + { + AWS_ACCESS_KEY_ID = credentials( 'safe.aws.access.key' ) + AWS_SECRET_ACCESS_KEY = credentials( 'safe.aws.secret.key' ) + AWS_REGION = credentials( 'safe.aws.region.key' ) + } + +After **`safe jenkins post aws`** you can **click into the Credentials item in the Jenkins main menu** to assure yourself that the credentials have indeed been properly injected. + +--- + +## How to Write AWS Credentials into your Safe + +In order to **`safe terraform apply`** or **`safe jenkins post aws <>`** or `safe visit` you must first put those ubiquitous IAM programmatic user credentials into your safe. + + $ safe login joebloggs.com # open the book + + $ safe open iam dev.s3.reader # open chapter and verse + $ safe put @access.key ABCD1234EFGH5678 # Put IAM access key in safe + $ safe put @secret.key xyzabcd1234efgh5678 # Put IAM secret key in safe + $ safe put region.key eu-west-3 # infrastructure in Paris + + $ safe open iam canary.admin # open chapter and verse + $ safe put @access.key 4321DCBA8765WXYZ # Put IAM access key in safe + $ safe put @secret.key 5678uvwx4321abcd9876 # Put IAM secret key in safe + $ safe put region.key eu-west-1 # infrastructure in Dublin + + $ safe logout + + +--- + + +## How to write DockerHub Credentials into your Safe + +#### safe jenkins post docker https://jenkins.example.com + +Before you can issue a **`safe jenkins post docker http://localhost:8080`** you must insert your docker login credentials in the form of a docker.username and @docker.password into your safe. Remember that any key starting with the `@ sign` tells the safe to keep it a secret like when you issue a **`safe show`** command. 
+ + $ safe login joebloggs.com # open the book + $ safe open docker production # at the docker (for production) chapter and verse + $ safe put docker.username admin # Put the Docker repository login docker.username into the safe + $ safe put @docker.password s3cr3t # Put the Docker repository login @docker.password into the safe + $ safe logout + +When docker credentials are injected into a Jenkins service the safe will expect to find **lines** at the open chapter and verse location with key names **`docker.username`** and **`@docker.password`**. + +The safe promises to inject credentials with an ID of **safe.docker.login.id** so any jenkins jobs that need to use the docker login docker.username and password must specify this ID when talking to the Jenkins credentials service. + + +### DockerHub Credentials Inject Response + +Here is an example of posting dockerhub credentials into a Jenkins service running on the local machine. + +``` bash +safe jenkins post docker http://localhost:8080 +``` + +If successful safe provides a polite response detailing what just happened. + +``` + - Jenkins Host Url : http://localhost:8080/credentials/store/system/domain/_/createCredentials + - Credentials ID : safe.docker.login.id + - Inject Username : devops4me + - So what is this? : The docker repository login credentials in the shape of a username and password. + + % Total % Received % Xferd Average Speed Time Time Time Current + Dload Upload Total Spent Left Speed +100 428 0 0 100 428 0 47555 --:--:-- --:--:-- --:--:-- 47555 +``` + +--- + + +## safe integrations | we need your help + +**You can help to extend safe's integrations.** + +By design - safe integrations are simple to write. They primarily integrate with producers and consumers. 
To deliver efficacy to devops engineers safe will endeavour to + +- **send** credentials to **downstream consumers** and +- **receive** credentials from **upstream producers** + +safe needs pull requests from the devops community and it promises to always strive to keep the task of writing an integration extremely simple. + +### integrations | what giving takes? + +Currently, writing an integration entails delivering 3 or 4 artifacts which are + +- 1 simple Ruby class +- 1 README.md documenting the command structure, the prerequisites and the expected outcome +- 1 class containing unit tests +- (optionally) an INI file if many configurations and facts are involved + +Giving doesn't take much so roll up your sleeves (or frocks) and get writing. diff --git a/lib/usecase/docker/docker.rb b/lib/usecase/docker/docker.rb new file mode 100644 index 0000000..7fd731a --- /dev/null +++ b/lib/usecase/docker/docker.rb @@ -0,0 +1,49 @@ +#!/usr/bin/ruby + +module SafeDb + + # This docker use case handles the ... + # + # safe docker login + # safe docker logout + + class Docker < UseCase + + # The command which currently must be login, logout or + # an empty string. + attr_writer :command + + def execute + + return unless ops_key_exists? + master_db = get_master_database() + return if unopened_envelope?( master_db ) + + # Get the open chapter identifier (id). + # Decide whether chapter already exists. + # Then get (or instantiate) the chapter's hash data structure + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + # Unlock the chapter data structure by supplying + # key/value mini-dictionary breadcrumbs sitting + # within the master database at the section labelled + # envelope@<>. 
+ chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + + key_value_dictionary = chapter_data[ verse_id ] + docker_username = key_value_dictionary[ "docker.username" ] + docker_password = key_value_dictionary[ "@docker.password" ] + docker_login_cmd = "docker login --username #{docker_username} --password #{docker_password} 2>/dev/null" + docker_logout_cmd = "docker logout" + docker_cmd = @command.eql?( "logout" ) ? docker_logout_cmd : docker_login_cmd + system docker_cmd + + end + + + end + + +end diff --git a/lib/usecase/edit/README.md b/lib/usecase/edit/README.md new file mode 100644 index 0000000..9ca83c8 --- /dev/null +++ b/lib/usecase/edit/README.md @@ -0,0 +1,43 @@ + +### safe put | safe delete | safe copy | safe paste + +# edit use cases | copy | paste | delete + +The edit use cases create, delete and update the credentials and configuration inside your safe. + +## Common Usage + +Typically you login to a book, open a chapter and verse, then you put **`key/value`** pairs, known as **lines**. + +``` +safe login joe@home + +# -- -------------------------------------------------- -- # +# -- Create chapter (email) and verse <> -- # +# -- -------------------------------------------------- -- # +safe open email joebloggs@gmail.com + +# -- ---------------------------- -- # +# -- Populate it with credentials -- # +# -- ---------------------------- -- # +safe put gmail.id joebloggs +safe put @password s3cr3et +safe put recovery.phone 07500875278 + +# -- ----------------------------------------------- -- # +# -- Now copy and then paste a line (key/value pair) -- # +# -- ----------------------------------------------- -- # +safe copy recovery.phone +safe open email joe@ywork.com +safe paste +``` + +## editing behaviour + +**These use cases are intuitive and behave almost like what you would expect.** The safe ethos is for commands to behave according to which of the 5 levels you are at. 
+ + +| Command | Verse | Chapter | Book | +|:---------------- |:----------------------------- |:-------------------------------- |:------------------------------- | +| safe copy <> | Copy one of the verse's lines | Copy one of the chapter's verses | Copy one of the book's chapters | +| safe copy | Copy all of the verse's lines | Copy all of the chapter's verses | Copy all of the book's chapters | diff --git a/lib/usecase/edit/delete.rb b/lib/usecase/edit/delete.rb new file mode 100644 index 0000000..981cb8f --- /dev/null +++ b/lib/usecase/edit/delete.rb @@ -0,0 +1,46 @@ +#!/usr/bin/ruby + +module SafeDb + + # The delete use case deletes one or more of the safe's entities. + # + # - at verse level - it can delete one or more lines + # - at chapter level - it can delete one or more verses + # - at book level - it can delete one or more chapters + # - at safe level - it can delete one book + # + class DeleteMe < UseCase + + attr_writer :entity_id + + # Deletion that currently expects an open chapter and verse and always + # wants to delete only one line (key/value pair). + def execute + + return unless ops_key_exists? 
+ master_db = OpenKey::KeyApi.read_master_db() + return if unopened_envelope?( master_db ) + + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) if chapter_exists + chapter_data = OpenKey::KeyDb.new() unless chapter_exists + + content_hdr = create_header() + master_db[ chapter_id ] = {} unless chapter_exists + verse_id = master_db[ KEY_PATH ] + + chapter_data.delete_entry( verse_id, @entity_id ) + chapter_data.delete_entry( verse_id, "#{FILE_KEY_PREFIX}#{@entity_id}" ) + + OpenKey::KeyApi.content_lock( master_db[ chapter_id ], chapter_data.to_json, content_hdr ) + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + Show.new.flow_of_events + + end + + + end + + +end diff --git a/lib/usecase/export.rb b/lib/usecase/export.rb new file mode 100644 index 0000000..b705c8c --- /dev/null +++ b/lib/usecase/export.rb @@ -0,0 +1,40 @@ +#!/usr/bin/ruby + +module SafeDb + + # Export the entire book if no chapter and verse is specified (achieved with a safe close), + # or the chapter if only the chapter is open (safe shut or safe open <>, or the + # mini-dictionary at the verse if both chapter and verse are open. + class Export < UseCase + + def get_chapter_data( chapter_key ) + return OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( chapter_key ) ) + end + + def execute + + return unless ops_key_exists? 
+ master_db = OpenKey::KeyApi.read_master_db() + + return if unopened_envelope?( master_db ) + + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_chapter = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + unless has_chapter + puts "{}" + return + end + + chapter_data = get_chapter_data( master_db[ chapter_id ] ) + puts JSON.pretty_generate( chapter_data ) + + return + + end + + + end + + +end diff --git a/lib/usecase/files/README.md b/lib/usecase/files/README.md new file mode 100644 index 0000000..d0a5948 --- /dev/null +++ b/lib/usecase/files/README.md @@ -0,0 +1,37 @@ + +# safe file | ingest and eject files + +You ingest a file with **safe file** and then **safe eject** will output that file into the present working directory. + +If safe detects during an eject, that a file already exists with the same name - it backs it up with a timestamp before ejecting and clobbering the existing file. + +```bash +safe open <> <> +safe file <> <> +safe eject <> +safe show +``` + +To pull in 3 certificate oriented files for Kubernetes one could use these commands. + +```bash +safe open production kubernetes +safe file kubernetes.cert ~/.kubectl/kube.prod.cert.pem +safe file kubernetes.ca.cert ~/.kubectl/kube.prod.ca.cert.pem +safe file kubernetes.key ~/.kubectl/kube.prod.key.pem +cd /tmp +safe eject +``` + +The safe ingests the files and spits them out whenever you so desire. +**Binary files** are supported and can be safely pulled in with safe file and ejected at any point in the future. + +## remote (external) files + +The **local filesystem** is the most common, but by no means the only file storage location. 
You can read from and write to + +- a zip file **`zip://`** +- an S3 filesystem **`s3://`** +- SSH locations **`<>@<>:/path/to/file`** +- a git repository **`git@github.com`** +- a **google drive** store diff --git a/lib/usecase/files/eject.rb b/lib/usecase/files/eject.rb new file mode 100644 index 0000000..a62f8f7 --- /dev/null +++ b/lib/usecase/files/eject.rb @@ -0,0 +1,56 @@ +#!/usr/bin/ruby + +module SafeDb + + # The eject use case writes (or overwrites) a file or files. + # Files are always ejected into the present working directory. If an + # overwrite is detected a backup is taken of the about to be clobbered + # file. + # + # If a keyname is provided then only the file against that key is ejected. + # No keyname will eject every file in the opened chapter and verse. + class Eject < UseCase + + attr_writer :file_key + + # Files are always ejected into the present working directory and any + # about to be clobbered files are backed up with a timestamp. + # + # If a keyname is provided then only the file against that key is ejected. + # No keyname will eject every file in the opened chapter and verse. + def execute + + return unless ops_key_exists? 
+ master_db = get_master_database() + return if unopened_envelope?( master_db ) + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + + base64_content = chapter_data[ verse_id ][ "#{FILE_KEY_PREFIX}#{@file_key}" ][ FILE_CONTENT_KEY ] + simple_filename = chapter_data[ verse_id ][ "#{FILE_KEY_PREFIX}#{@file_key}" ][ FILE_NAME_KEY ] + file_full_path = File.join( Dir.pwd, simple_filename ) + backup_filename = OpenKey::KeyNow.yyjjj_hhmm_sst() + "-" + simple_filename + backup_file_path = File.join( Dir.pwd, backup_filename ) + will_clobber = File.file?( file_full_path ) + + File.write( backup_file_path, File.read( file_full_path ) ) if will_clobber + ::File.write( file_full_path, Base64.urlsafe_decode64( base64_content ) ) + + puts "" + puts "File successfully ejected from safe into current directory." + puts "" + puts "Clobbered File = #{backup_filename}" if will_clobber + puts "Current Directory = #{Dir.pwd}" + puts "Ejected Filename = #{simple_filename}" + puts "Chapter and Verse = #{master_db[ENV_PATH]}:#{verse_id}" + puts "Ejected File Key = #{@file_key}" + puts "" + + end + + + end + + +end diff --git a/lib/usecase/files/file_me.rb b/lib/usecase/files/file_me.rb new file mode 100644 index 0000000..d6eaeee --- /dev/null +++ b/lib/usecase/files/file_me.rb @@ -0,0 +1,78 @@ +#!/usr/bin/ruby + +module SafeDb + + # The file use case pulls a file in from either an accessible filesystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + # + # The @file_url is the most common parameter given to this use case. + class FileMe < UseCase + + attr_writer :file_key, :file_url + + # There are 3 maps involved in the implementation and they are all (or in part) retrieved and/or + # created as necessary. 
They are + # + # - the current chapter as a map + # - the current verse as a map + # - the file's keyname as a map + # + # Once the maps have been found and/or created if necessary the file's keyname map is either + # populated or amended with the following data. + # + # - filename | {UseCase::FILE_NAME_KEY} | the file's simple name + # - content64 | {UseCase::FILE_CONTENT_KEY} | the file's base64 content + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + return if unopened_envelope?( master_db ) + + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) if chapter_exists + chapter_data = OpenKey::KeyDb.new() unless chapter_exists + + content_hdr = create_header() + master_db[ chapter_id ] = {} unless chapter_exists + verse_id = master_db[ KEY_PATH ] + + file_full_path = ::File.absolute_path( @file_url ) + file_base_name = ::File.basename( file_full_path ) + file_content64 = Base64.urlsafe_encode64( ::File.read( file_full_path ) ) + + log.info(x) { "Key name of the file to ingest => #{@file_key}" } + log.info(x) { "Ingesting file at path => #{file_full_path}" } + log.info(x) { "The name of the file to ingest is => #{file_base_name}" } + log.info(x) { "Size of base64 file content => [#{file_content64.length}]" } + + chapter_data.create_map_entry( verse_id, "#{FILE_KEY_PREFIX}#{@file_key}", FILE_NAME_KEY, file_base_name ) + chapter_data.create_map_entry( verse_id, "#{FILE_KEY_PREFIX}#{@file_key}", FILE_CONTENT_KEY, file_content64 ) + + OpenKey::KeyApi.content_lock( master_db[ chapter_id ], chapter_data.to_json, content_hdr ) + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + + Show.new.flow_of_events + + end + + + private + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for 
this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end diff --git a/lib/usecase/files/read.rb b/lib/usecase/files/read.rb new file mode 100644 index 0000000..6e4b6ca --- /dev/null +++ b/lib/usecase/files/read.rb @@ -0,0 +1,169 @@ +#!/usr/bin/ruby + +module SafeDb + + # The read use case pulls a file in from either an accessible filesystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + # + # This use case expects a @file_url parameter. The actions it takes are to + # + # - register @in.url to mirror @file_url + # - register @out.url to mirror @file_url + # - check the location of @file_url + # - if no file exists it humbly finishes up + # + # If a file does exist at the @in.url this use case + # + # - handles HOME directory enabling portability + # - creates an encryption key and random iv + # - creates a file (name) id + # - stores the file byte and human readable size + # - stores the extension if it has one + # - stores the last created date + # - stores the last modified date + # - stores the (now) in date + # + # Once done it displays key facts about the file. 
+ class Read < UseCase + +# -- ---------------------- --# +# -- ---------------------- --# +# -- [SAFE] Name Changes --# +# -- ---------------------- --# +# -- Change env.path ~> open.chapter +# -- Change key.path ~> open.verse +# -- Change envelope@xxxx ~> chapter@xxxx +# -- +# -- Change filenames to ~~~~~> book.db.breadcrumbs +# -- Change filenames to ~~~~~> chapter.cipher.file +# -- Change filenames to ~~~~~> safe.db.abc123xyzpq +# -- ---------------------- --# +# -- { +# -- "db.create.date": "Sat Aug 11 11:20:16 2018 ( 18223.1120.07.511467675 )", +# -- "db.domain.name": "ab.com", +# -- "db.domain.id": "uhow-ku9l", +# -- "env.path": "aa", +# -- "key.path": "aa", +# -- "envelope@aa": { +# -- "content.xid": "3uzk12dxity", +# -- "content.iv": "XTVe%qIGKVvWw@EKcgSa153nfVPaMVJH", +# -- "content.key": "1u3b2o6KLiAUmt11yYEDThJw1E5Mh4%1iHYOpJQjWiYLthUGgl8IZ5szus8Fz2Jt" +# -- } +# -- } +# -- ---------------------- --# +# -- ---------------------- --# + + attr_writer :file_url + + # The read use case pulls a file in from either an accessible filesystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + return if unopened_envelope?( master_db ) + + # -- Get the open chapter identifier (id). + # -- Decide whether chapter already exists. + # -- Then get (or instantiate) the chapter's hash data structure + # -- + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) if chapter_exists + chapter_data = OpenKey::KeyDb.new() unless chapter_exists + + content_hdr = create_header() + + # -- If no content envelope exists we need to place + # -- an empty one inside the appdb content database. 
+ # -- + master_db[ chapter_id ] = {} unless chapter_exists + + # -- We populate (PUT) file instance attributes into + # -- the mini-dictionary at the [VERSE] location. + # -- + verse_id = master_db[ KEY_PATH ] + file_absolute_path = ::File.absolute_path( @file_url ) + chapter_data.create_entry( verse_id, "@in.url", file_absolute_path ) + chapter_data.create_entry( verse_id, "@out.url", file_absolute_path ) + + # -- Lock No.1 + # -- + # -- Lock the file content and leave the 3 breadcrumbs + # -- (content id, content iv and content key) inside + # -- the file attributes mini dictionary to facilitate + # -- decrypting and writing out the file again. + # -- + OpenKey::KeyApi.content_lock( chapter_data[ verse_id ], ::File.read( @file_url ), content_hdr ) + + # -- Lock No.2 + # -- + # -- Lock the chapter's data which includes the new or + # -- updated mini-dictionary that holds the breadcrumbs + # -- (content id, content iv and content key) that will + # -- be used to decrypt and write out the file content. + # -- + # -- Leave another set of breadcrumbs inside the master + # -- database (content id, content iv and content key) + # -- to facilitate decrypting the chapter's data. + # -- + OpenKey::KeyApi.content_lock( master_db[ chapter_id ], chapter_data.to_json, content_hdr ) + + # -- Lock No.3 + # -- + # -- Re-lock the master database including the breadcrumbs + # -- (content id, content iv and content key) that will + # -- (in the future) decrypt this chapter's data. + # -- + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + + + # -- Communicate that the indicated file has just been + # -- successfully ingested into the safe. 
+ # -- + print_file_success master_db[ ENV_PATH ], verse_id, file_absolute_path + + end + + + private + + + def print_file_success chapter_id, verse_id, file_url + + puts "" + puts "|-" + puts "|- Chapter ~> #{chapter_id}" + puts "|- + Verse ~> #{verse_id}" + puts "|-" + puts "|- In File ~> #{file_url}" + puts "|-" + puts "|- File cocooned inside your safe." + puts "|-" + puts "|-Command Options" + puts "|-" + puts "|- #{COMMANDMENT} put out.dir ~/this/folder" + puts "|- #{COMMANDMENT} put out.name new-filename.txt" + puts "|- #{COMMANDMENT} write" + puts "|-" + puts "" + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end diff --git a/lib/usecase/files/write.rb b/lib/usecase/files/write.rb new file mode 100644 index 0000000..62d801d --- /dev/null +++ b/lib/usecase/files/write.rb @@ -0,0 +1,89 @@ +#!/usr/bin/ruby + +module SafeDb + + # The write use case writes (or overwrites) a file at the + # out url destination. + class Write < UseCase + + attr_writer :file_url + + # The read use case pulls a file in from either an accessible filesystem + # or from a remote http, https, git, S3, GoogleDrive and/or ssh source. + def execute + + return unless ops_key_exists? + master_db = get_master_database() + return if unopened_envelope?( master_db ) + + # Get the open chapter identifier (id). + # Decide whether chapter already exists. 
+ # Then get (or instantiate) the chapter's hash data structure + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + + # @todo begin + # Throw an exception (error) if the chapter + # either exists and is empty or does not exist. + # @todo end + + + # Unlock the chapter data structure by supplying + # key/value mini-dictionary breadcrumbs sitting + # within the master database at the section labelled + # envelope@<>. + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + + + # Unlock the file content by supplying the + # key/value mini-dictionary breadcrumbs sitting + # within the chapter's data structure in the + # section labelled <>. + file_content = OpenKey::KeyApi.content_unlock( chapter_data[ verse_id ] ) + + + # We read the location url we plan to eject the + # file out into. + file_path = @file_url ? @file_url : chapter_data[ verse_id ][ "@out.url" ] + file_name = ::File.basename( file_path) + + # If the directory the file will be exported to does + # not exist we promptly create it. + FileUtils.mkdir_p( File.dirname( file_path ) ) + + # Create a backup file if we can detect that a + # file occupies the eject (write) filepath. + backup_file_path = ::File.join( ::File.dirname( file_path ), OpenKey::KeyNow.yyjjj_hhmm_sst() + "-" + file_name ) + ::File.write( backup_file_path, ::File.read( file_path ) ) if ::File.file?( file_path ) + + + # Now write (and if necessary overwrite) the eject + # file url path with the previously ingested content. + ::File.write( file_path, file_content ) + + + # Communicate that the indicated file has just been + # successfully written out from the safe. + print_file_success( master_db[ ENV_PATH ], verse_id, file_path ) + + end + + + private + + + # Document a successful write of a file cocooned in the safe. 
+ # @param chapter_id the chapter of the file written out + # @param verse_id the verse of the file written out + # @param file_url the filepath the file was written to + def print_file_success chapter_id, verse_id, file_url + puts "File [#{file_url}] written out of safe at chapter [#{chapter_id}] and verse [#{verse_id}]." + end + + + end + + +end diff --git a/lib/usecase/goto.rb b/lib/usecase/goto.rb new file mode 100644 index 0000000..1eff242 --- /dev/null +++ b/lib/usecase/goto.rb @@ -0,0 +1,57 @@ +#!/usr/bin/ruby + +module SafeDb + + # Goto is a shortcut (or alias even) for the open command that takes an integer + # index that effectively specifies which envelope and key to open. + # + # Use view to list the valid integer indices for each envelope and key + # combination. + # + # View maps out and numbers each envelope/key combination. + # Goto with the number effectively shortcuts the open pin pointer command. + # Show prints the dictionary at the opened path masking any secrets. + # + # Once goto is enacted all path CRUD commands come into play as if you had + # opened the path. These include put, copy, paste, show, tell and delete. + class Goto < UseCase + + # The index (number) starting with 1 of the envelope and key-path + # combination that should be opened. + attr_writer :index + + def execute + + return unless ops_key_exists? 
+ master_db = OpenKey::KeyApi.read_master_db() + + goto_location = 0 + envelope_dictionaries = OpenKey::KeyApi.to_matching_dictionary( master_db, ENVELOPE_KEY_PREFIX ) + envelope_dictionaries.each_pair do | envelope_name, crumb_dictionary | + + envelope_content = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( crumb_dictionary ) ) + envelope_content.each_key do | envelope_key | + + goto_location += 1 + next unless @index.to_i == goto_location + + open_uc = Open.new + open_uc.env_path = envelope_name + open_uc.key_path = envelope_key + open_uc.flow_of_events + + return + + end + + + end + + + end + + + end + + +end diff --git a/lib/usecase/id.rb b/lib/usecase/id.rb new file mode 100644 index 0000000..f8b24d5 --- /dev/null +++ b/lib/usecase/id.rb @@ -0,0 +1,36 @@ +#!/usr/bin/ruby + +module SafeDb + + + class Id < UseCase + + + def execute + + puts "" + puts OpenKey::KeyNow.grab() + puts OpenKey::KeyNow.fetch() + puts "" + + return + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end diff --git a/lib/usecase/import.rb b/lib/usecase/import.rb new file mode 100644 index 0000000..ce3790f --- /dev/null +++ b/lib/usecase/import.rb @@ -0,0 +1,157 @@ +#!/usr/bin/ruby + +module SafeDb + + # The import use case follows open and it pulls a file into an + # (encrypted at rest) envelope while writing metadata about + # the file into the opened tree dictionary position. 
+ # + # == import and reimport commands + # + # - the import command expects a path parameter and errors if not recvd + # - the reimport command is happy with either one or zero parameters + # + # If the reimport command has no parameters it expects that the opened path + # already contains an imported file. It uses the import.path key to locate + # the file. + # + # If the path parameter is given to reimport it uses it and also resets the + # import.path key to reflect the path it was given. + # + # == garbage collect dangling files + # + # Like dangling envelopes - dangling files will pop up when re-imported. + # These are handled by the garbage collection policy which can be to + # remove immediately - remove on next login - remove after a time period + # or to never remove (manual garbage collection). + # + class Import < UseCase + + attr_writer :secret_id, :secret_value + + # The put use case follows open and it adds secrets into an + # (encrypted at rest) envelope. Put can be called many times to + # add secrets. Finally the lock use case commits all opened secrets + # into the configured storage engines. + # + # Calling put before calling open or after calling lock + # is not allowed and will result in an error. + # + # == Put Pre-Conditions + # + # When the put use case is called - the below conditions ring true. + # + # - the folder path ending in ../../my must exist + # - a session id, filename and encryption key ( in workstation config ) + # + # == Observable Value + # + # The observable value delivered by +put+ boils down to + # + # - a new friends.xyz123abc.os.txt file if this is the first put. + # - a new group_name/key_name (like monica/surname) entry is added if required + # - a secret value is added against the key or updated if it already exists + # - a new session id and encryption key is generated and used to re-encrypt + def execute + + return unless ops_key_exists? 
+ master_db = OpenKey::KeyApi.read_master_db() + + puts "---\n" + puts "--- The Master Database (Before)\n" + puts "---\n" + puts JSON.pretty_generate( master_db ) + puts "---\n" + + return if unopened_envelope?( master_db ) + + envelope_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_content = OpenKey::KeyApi.db_envelope_exists?( master_db[ envelope_id ] ) + + # -- + # -- To get hold of the content we must either + # -- + # -- a) unlock it using the breadcrumbs or + # -- b) start afresh with a new content db + # -- + content_box = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ envelope_id ] ) ) if has_content + content_box = OpenKey::KeyDb.new() unless has_content + content_hdr = create_header() + + # -- + # -- If no content envelope exists we need to place + # -- an empty one inside the appdb content database. + # -- + master_db[ envelope_id ] = {} unless has_content + + # -- + # -- This is the PUT use case so we append a + # -- + # -- a) key for the new dictionary entry + # -- b) value for the new dictionary entry + # -- + # -- into the current content envelope and write + # -- the envelope to the content filepath. + # -- + crumbs_dict = master_db[ envelope_id ] + content_box.create_entry( master_db[ KEY_PATH ], @secret_id, @secret_value ) + OpenKey::KeyApi.content_lock( crumbs_dict, content_box.to_json, content_hdr ) + + puts "---\n" + puts "--- The Master Database (After)\n" + puts "---\n" + puts JSON.pretty_generate( master_db ) + puts "---\n" + + # -- + # -- Three envelope crumbs namely the external ID, the + # -- random iv and the crypt key are written afresh into + # -- the master database. + # -- + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + print_put_success + + return + + +# ---> secret_ids = @secret_id.split("/") +# ---> if ( envelope.has_key? 
secret_ids.first ) +# ---> envelope[secret_ids.first][secret_ids.last] = @secret_value +# ---> else +# ---> envelope[secret_ids.first] = { secret_ids.last => @secret_value } +# ---> end + + end + + + private + + + def print_put_success + + puts "" + puts "Success putting a key/value pair into the open envelope." + puts "You can put more in and then close the envelope." + puts "" + puts " #{COMMANDMENT} close" + puts "" + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end diff --git a/lib/usecase/init.rb b/lib/usecase/init.rb new file mode 100644 index 0000000..f4746a2 --- /dev/null +++ b/lib/usecase/init.rb @@ -0,0 +1,63 @@ +#!/usr/bin/ruby + +module SafeDb + + # The init use case initializes safe thus preparing it + # for the ability to lock secrets, unlock them, transport their keys and + # much more. + # + # safe is a (glorified) placeholder. It takes things in now, + # keeps them safe and gives them back later, in a helpful manner. + # + # == Alternate Error Flows + # + # An error will be thrown + # + # - if safe cannot create, extend, read or write the drive folder + # - if the domain is already in the configuration file + # - if domain has non alphanums, excl hyphens, underscores, @ symbols, periods + # - if domain does not begin or end with alphanums. 
+ # - if non alpha-nums (excl at signs) appear consecutively + # - if no alpha-nums appear in the string + # - if the domain string's length is less than 5 + # - if "safedb.net" appears twice (or more) in a directory tree + # + class Init < UseCase + + attr_writer :master_p4ss, :domain_name, :base_path + + + # The init use case prepares the safe so that you can open an envelope, + # put secrets into it and then seal (lock) it. Locking effectively writes + # crypted blocks to both keystore and crypt store. + def execute + + return unless ops_key_exists? + + OpenKey::KeyApi.init_app_domain( @domain_name, @base_path ) + keys_setup = OpenKey::KeyApi.is_domain_keys_setup?( @domain_name ) + + if ( keys_setup ) + print_already_initialized + return + end + + domain_password = OpenKey::KeyPass.password_from_shell( true ) + OpenKey::KeyApi.setup_domain_keys( @domain_name, domain_password, create_header() ) + print_domain_initialized + +# --> unless @base_path.nil? +# --> key_api.register_keystore( @base_path ) +# --> end + + end + + + def pre_validation + end + + + end + + +end diff --git a/lib/usecase/jenkins/README.md b/lib/usecase/jenkins/README.md new file mode 100644 index 0000000..26523cf --- /dev/null +++ b/lib/usecase/jenkins/README.md @@ -0,0 +1,146 @@ + +# safe jenkins + + +### safe jenkins post [aws|docker|git] <> | introduction + +Use **`safe jenkins post`** to inject both your **AWS IAM User** and **docker login/password** credentials into your Jenkins 2.0 continuous integration portal reachable by the **jenkins host url** given in the 4th parameter of the safe command. 
+ +--- + +## safe jenkins post | prerequisite + +Before you can inject credentials into jenkins using **`safe jenkins post`** you must + +- be logged into your safe +- have opened the appropriate chapter/verse +- have put the required credential key/value pairs into the safe +- have the jenkins service up and running + +After the post (to jenkins), your continuous integration jobs will be able to access the credential values via their IDs as stated in the below table. + +--- + +## safe jenkins post aws | key names table + +As credentials are WORO (write once, read often), safe makes the reading part very very easy (and secure) so your effort is frontloaded. + +| Safe Key | Jenkins Credential IDs | Environment Variable | Description | +|:-----------:|:----------------------:|:--------------------- |:-------------------------------------------------------- | +| @access.key | safe.aws.access.key | AWS_ACCESS_KEY_ID | The AWS IAM user's access key credential. | +| @secret.key | safe.aws.secret.key | AWS_SECRET_ACCESS_KEY | The AWS IAM user's secret key credential. | +| region.key | safe.aws.region.key | AWS_REGION | The AWS region key that your Jenkins service points to. | + +So you can see that by convention, safe expects the credential keys in the safe to be named a particular way, and likewise, you can be assured of the IDs it gives those credentials when posted to Jenkins. + + +## safe jenkins post | credentials lifecycle + +The life of the credentials begins when you create an IAM user and record its access and secret keys. 
Then + +- you login to safe and store the 3 keys and their values +- safe jenkins post will read the values and post them to Jenkins +- Jenkins stores the values in conjunction with the Jenkins Credential IDs +- pipeline jobs ask Jenkins to put the Credential ID values against environment variables +- tools like Terraform and AwsCli use the environment variables to work in the cloud + + +## Jenkinsfile | Usage in Pipeline Jobs + +Here is a pipeline declaration within a Jenkinsfile that asks Jenkins to put the credential values in its secrets store into the stated environment variables. + + environment + { + AWS_ACCESS_KEY_ID = credentials( 'safe.aws.access.key' ) + AWS_SECRET_ACCESS_KEY = credentials( 'safe.aws.secret.key' ) + AWS_REGION = credentials( 'safe.aws.region.key' ) + } + +After **`safe jenkins post aws`** you can **click into the Credentials item in the Jenkins main menu** to assure yourself that the credentials have indeed been properly injected. + +--- + +## How to Write AWS Credentials into your Safe + +In order to **`safe terraform apply`** or **`safe jenkins post aws <>`** or `safe visit` you must first put those ubiquitous IAM programmatic user credentials into your safe. 
+ + $ safe login joebloggs.com # open the book + + $ safe open iam dev.s3.reader # open chapter and verse + $ safe put @access.key ABCD1234EFGH5678 # Put IAM access key in safe + $ safe put @secret.key xyzabcd1234efgh5678 # Put IAM secret key in safe + $ safe put region.key eu-west-3 # infrastructure in Paris + + $ safe open iam canary.admin # open chapter and verse + $ safe put @access.key 4321DCBA8765WXYZ # Put IAM access key in safe + $ safe put @secret.key 5678uvwx4321abcd9876 # Put IAM secret key in safe + $ safe put region.key eu-west-1 # infrastructure in Dublin + + $ safe logout + + +--- + + +## How to write DockerHub Credentials into your Safe + +#### safe jenkins post docker https://jenkins.example.com + +Before you can issue a **`safe jenkins post docker http://localhost:8080`** you must insert your docker login credentials in the form of a username and @password into your safe. Remember that any key starting with the `@ sign` tells the safe to keep it a secret like when you issue a **`safe show`** command. + + $ safe login joebloggs.com # open the book + $ safe open docker production # at the docker (for production) chapter and verse + $ safe put username admin # Put the Docker repository login username into the safe + $ safe put @password secret12345 # Put the Docker repository login @password into the safe + $ safe logout + +When docker credentials are injected into a Jenkins service the safe will expect to find a key at the open chapter and verse called username and another one called password. + +The safe promises to inject credentials with an ID of **safe.docker.login.id** so any jenkins jobs that need to use the docker login username and password must specify this ID when talking to the Jenkins credentials service. + + +### DockerHub Credentials Inject Response + +Here is an example of posting dockerhub credentials into a Jenkins service running on the local machine. 
 + +``` bash +safe jenkins post docker http://localhost:8080 +``` + +If successful safe provides a polite response detailing what just happened. + +``` + - Jenkins Host Url : http://localhost:8080/credentials/store/system/domain/_/createCredentials + - Credentials ID : safe.docker.login.id + - Inject Username : devops4me + - So what is this? : The docker repository login credentials in the shape of a username and password. + + % Total % Received % Xferd Average Speed Time Time Time Current + Dload Upload Total Spent Left Speed +100 428 0 0 100 428 0 47555 --:--:-- --:--:-- --:--:-- 47555 +``` + +--- + + +## safe integrations | we need your help + +**You can help to extend safe's integrations.** + +By design - safe integrations are simple to write. They primarily integrate with producers and consumers. To deliver efficacy to devops engineers safe will endeavour to + +- **send** credentials to **downstream consumers** and +- **receive** credentials from **upstream producers** + +safe needs pull requests from the devops community and it promises to always strive to keep the task of writing an integration extremely simple. + +### integrations | what giving takes? + +Currently, writing an integration entails delivering 3 or 4 artifacts which are + +- 1 simple Ruby class +- 1 README.md documenting the command structure, the prerequisites and the expected outcome +- 1 class containing unit tests +- (optionally) an INI file if many configuration and facts are involved + +Giving doesn't take much so roll up your sleeves (or frocks) and get writing. 
diff --git a/lib/usecase/jenkins/jenkins.rb b/lib/usecase/jenkins/jenkins.rb new file mode 100644 index 0000000..e0ed3ae --- /dev/null +++ b/lib/usecase/jenkins/jenkins.rb @@ -0,0 +1,208 @@ +#!/usr/bin/ruby + +module SafeDb + + # This Jenkins use case handles the to and fro integration of secrets and sensitive information + # between the safe database under management and a Jenkins service pinpointed by an incoming + # host url parameter. + # + # This Jenkins use case injects for example the AWS IAM user access key, secret key and region key + # into a running Jenkins CI (Continuous Integration) service at the specified (url) location. + # + # safe jenkins post <<[ aws | docker | git ]>> <> + + class Jenkins < UseCase + + # The three instance variables provided through the command line like + # for example $ safe jenkins post aws http://localhost:8080 + # For more info visit the documentation in the command interpreter class. + attr_writer :command, :service, :url + + # If string variables EXPLODE throughout (and come to dominate) this class + # we should consider introducing an INI factfile like the [vpn] use case. + JENKINS_URI_PATH = "credentials/store/system/domain/_/createCredentials" + + # If string variables EXPLODE throughout (and come to dominate) this class + # we should consider introducing an INI factfile like the [vpn] use case. + SECRET_KEY_VALUE_PAIR_DICTIONARY = + { + "scope" => "GLOBAL", + "$class" => "org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl" + } + + # If string variables EXPLODE throughout (and come to dominate) this class + # we should consider introducing an INI factfile like the [vpn] use case. + SECRET_KEY_VALUE_PAIR_TO_POST = { "" => "0", "credentials" => SECRET_KEY_VALUE_PAIR_DICTIONARY } + + + # If string variables EXPLODE throughout (and come to dominate) this class + # we should consider introducing an INI factfile like the [vpn] use case. 
+ USERNAME_AND_PASSWORD_DICTIONARY = + { + "scope" => "GLOBAL", + "$class" => "com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl" + } + + # If string variables EXPLODE throughout (and come to dominate) this class + # we should consider introducing an INI factfile like the [vpn] use case. + USERNAME_AND_PASSWORD_TO_POST = { "" => "0", "credentials" => USERNAME_AND_PASSWORD_DICTIONARY } + + + + # Inject a Jenkins credential key-value pair that is secret and/or sensitive and + # needs to be referenced by executing continuous integration jobs. + # + # @param jenkins_base_url [String] + # + # This base url includes the scheme (protocol) which can be either http + # or https. It can include the port if it is not either 80 or 443. A common + # example is http://localhost:8080 but can also be https://jenkins.example.com + # It pays not to provide a trailing backslash on this url. + # + # @param credentials_id [String] + # + # The ID that Jenkins jobs will use to reference this credential's value. + # + # @param secret_value [String] + # + # The value of this credential (secret) that will be injected for SafeKeeping + # to the Jenkins service at the provided URL. + # + # @param description [String] + # + # Description of the credential that will be posted and can be viewed via + # the Jenkins user interface. + def inject_secret_key_value_pair( jenkins_base_url, credentials_id, secret_value, description ) + + jenkins_url = File.join( jenkins_base_url, JENKINS_URI_PATH ) + + credentials_dictionary = SECRET_KEY_VALUE_PAIR_DICTIONARY + credentials_dictionary.store( "id", credentials_id ) + credentials_dictionary.store( "secret", secret_value ) + credentials_dictionary.store( "description", description ) + + curl_cmd = "curl -X POST '#{jenkins_url}' --data-urlencode 'json=#{SECRET_KEY_VALUE_PAIR_TO_POST.to_json}'" + + puts "" + puts " - Jenkins Host Url : #{jenkins_url}" + puts " - Credentials ID : #{credentials_id}" + puts " - So what is this? 
: #{description}" + puts "" + + %x[ #{curl_cmd} ] + + puts "" + + end + + + + # Inject into Jenkins a username and password pairing against an ID key that the + # continuous integration jobs know and can use to access the credentials pair. + # + # @param jenkins_base_url [String] + # + # This base url includes the scheme (protocol) which can be either http + # or https. It can include the port if it is not either 80 or 443. A common + # example is http://localhost:8080 but can also be https://jenkins.example.com + # It pays not to provide a trailing backslash on this url. + # + # @param credentials_id [String] + # + # The ID that Jenkins jobs will use to reference this credential's value. + # + # @param username [String] + # + # The value of this username (secret) that will be injected for SafeKeeping + # to the Jenkins service at the provided URL. + # + # @param password [String] + # + # The value of this password (secret) that will be injected for SafeKeeping + # to the Jenkins service at the provided URL. + # + # @param description [String] + # + # Description of the username and password pairing that will be posted and + # can be viewed via the Jenkins user interface. + def inject_username_and_password( jenkins_base_url, credentials_id, username, password, description ) + + jenkins_url = File.join( jenkins_base_url, JENKINS_URI_PATH ) + + credentials_dictionary = USERNAME_AND_PASSWORD_DICTIONARY + credentials_dictionary.store( "id", credentials_id ) + credentials_dictionary.store( "username", username ) + credentials_dictionary.store( "password", password ) + credentials_dictionary.store( "description", description ) + + curl_cmd = "curl -X POST '#{jenkins_url}' --data-urlencode 'json=#{USERNAME_AND_PASSWORD_TO_POST.to_json}'" + + puts "" + puts " - Jenkins Host Url : #{jenkins_url}" + puts " - Credentials ID : #{credentials_id}" + puts " - Inject Username : #{username}" + puts " - So what is this? 
: #{description}" + puts "" + + %x[ #{curl_cmd} ] + + puts "" + + end + + + + def execute + + return unless ops_key_exists? + master_db = get_master_database() + return if unopened_envelope?( master_db ) + + # Get the open chapter identifier (id). + # Decide whether chapter already exists. + # Then get (or instantiate) the chapter's hash data structure + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + # Unlock the chapter data structure by supplying + # key/value mini-dictionary breadcrumbs sitting + # within the master database at the section labelled + # envelope@<>. + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + + key_value_dictionary = chapter_data[ verse_id ] + + inject_aws_credentials( key_value_dictionary ) if @service.eql?( "aws" ) + inject_docker_credentials( key_value_dictionary ) if @service.eql?( "docker" ) + + end + + + + def inject_aws_credentials( mini_dictionary ) + + access_key_desc = "The access key of the AWS IAM (programmatic) user credentials." + secret_key_desc = "The secret key of the AWS IAM (programmatic) user credentials." + region_key_desc = "The AWS region key for example eu-west-1 for Dublin in Ireland." + + inject_secret_key_value_pair( @url, "safe.aws.access.key", mini_dictionary[ "@access.key" ], access_key_desc ) + inject_secret_key_value_pair( @url, "safe.aws.secret.key", mini_dictionary[ "@secret.key" ], secret_key_desc ) + inject_secret_key_value_pair( @url, "safe.aws.region.key", mini_dictionary[ "region.key" ], region_key_desc ) + + end + + + def inject_docker_credentials( mini_dictionary ) + + docker_desc = "The docker repository login credentials in the shape of a username and password." 
+ + inject_username_and_password( @url, "safe.docker.login.id", mini_dictionary[ "docker.username" ], mini_dictionary[ "@docker.password" ], docker_desc ) + + end + + + end + + +end diff --git a/lib/usecase/login.rb b/lib/usecase/login.rb new file mode 100644 index 0000000..e208615 --- /dev/null +++ b/lib/usecase/login.rb @@ -0,0 +1,71 @@ +#!/usr/bin/ruby + +module SafeDb + + # The login use case is given the domain name and if needs be + # it collects the password then (if correct) logs the user in. + # + # Here are some key facts about the login command + # + # - its domain name parameter is mandatory + # - it is called at the start of every session + # - it is undone by the logout command + # - it requires the shell token environment variable to be set + # - you can nest login commands thus using multiple domains + # - you can call it with a --with=password switch + # - a space before the command prevents it being logged in .bash_history + # - you can deliver the password in multiple ways + class Login < UseCase + + attr_writer :master_p4ss, :domain_name + + + def execute + + return unless ops_key_exists? + + unless ( OpenKey::KeyApi.is_domain_keys_setup?( @domain_name ) ) + print_not_initialized + return + end + +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below +############## Call [[ KeyApi.is_logged_in? ]] - then print msg and skip password collection below + + domain_secret = OpenKey::KeyPass.password_from_shell( false ) + +############## Use [[ KeyApi.valid_password? 
]] and give error if not valid +############## Use [[ KeyApi.valid_password? ]] and give error if not valid +############## Use [[ KeyApi.valid_password? ]] and give error if not valid +############## Use [[ KeyApi.valid_password? ]] and give error if not valid +############## Use [[ KeyApi.valid_password? ]] and give error if not valid + + OpenKey::KeyApi.do_login( @domain_name, domain_secret, create_header() ) + + view_uc = View.new + view_uc.flow_of_events + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + end + + + end + + +end + + diff --git a/lib/usecase/logout.rb b/lib/usecase/logout.rb new file mode 100644 index 0000000..18d82e8 --- /dev/null +++ b/lib/usecase/logout.rb @@ -0,0 +1,28 @@ +#!/usr/bin/ruby + +module SafeDb + + class Logout < UseCase + + def execute + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. 
+ # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + end + + + end + + +end + + diff --git a/lib/usecase/open.rb b/lib/usecase/open.rb new file mode 100644 index 0000000..a3b2de7 --- /dev/null +++ b/lib/usecase/open.rb @@ -0,0 +1,71 @@ +#!/usr/bin/ruby + +module SafeDb + + # The open use case allows us to add (put), subtract (del)ete, change + # (update) and list the secrets within an envelope (outer path) at a given + # position (inner path), whether that envelope exists or not. + # + # Also see the reopen command which only differs from open in that it + # fails if the path specified does not exist in either the sealed or session + # envelopes. + # + # == The Open Path Parameter + # + # Open must be called with a single path parameter with an optional + # single colon separating the outer (path to envelope) from the inner (path + # within envelope). + # + # == Open (Path) Pre-Conditions + # + # The domain must have been initialized on this machine stating the path to + # the base folder that contains the key and crypt material. + # + # To open a path these conditions must be true. 
+ # + # - the shell session token must have been set at the session beginning + # - a successful login command must have been issued + # - the external drive (eg usb key) must be configured and accessible + # + # == Observable Value + # + # The observable value delivered by +[open]+ boils down to + # + # - an openkey (eg asdfx1234) and corresponding open encryption key + # - open encryption key written to ~/.safedb.net/open.keys/asdfx1234.x.txt + # - the opened path (ending in filename) written to session.cache base in [safe] + # - the INI string (were the file to be decrypted) would look like the below + # + # [session] + # base.path = home/wifi + # + class Open < UseCase + + # The two paths that have been posted to the open command. + # First is a relative path to the obfuscated envelope and then + # the path in envelope to the point of interest. + attr_writer :env_path, :key_path + + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + + master_db[ ENV_PATH ] = @env_path + master_db[ KEY_PATH ] = @key_path + + OpenKey::KeyApi.write_master_db( create_header(), master_db ) + + # Show the mini dictionary at the opened chapter and verse location + # More work is needed when for when only the chapter is opened in + # which case we should show the list of verses and perhaps the count + # of key value pairs each verse contains. + Show.new.flow_of_events + + end + + + end + + +end diff --git a/lib/usecase/print.rb b/lib/usecase/print.rb new file mode 100644 index 0000000..db9b6e8 --- /dev/null +++ b/lib/usecase/print.rb @@ -0,0 +1,40 @@ +#!/usr/bin/ruby + +module SafeDb + + class Print < UseCase + + attr_writer :key_name + + def get_chapter_data( chapter_key ) + return OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( chapter_key ) ) + end + + def execute + + return unless ops_key_exists? 
+ + master_db = get_master_database() + + return if unopened_envelope?( master_db ) + + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_chapter = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + chapter_data = get_chapter_data( master_db[ chapter_id ] ) if has_chapter + has_verse = has_chapter && chapter_data.has_key?( master_db[ KEY_PATH ] ) + + chapter_err_msg = "Nothing was found at chapter " + master_db[ ENV_PATH ] + raise ArgumentError, chapter_err_msg unless has_chapter + verse_err_msg = "Nothing was found at chapter " + master_db[ ENV_PATH ] + " verse " + master_db[ KEY_PATH ] + raise ArgumentError, verse_err_msg unless has_verse + + print chapter_data[ master_db[ KEY_PATH ] ][ @key_name ] + + end + + + end + + +end diff --git a/lib/usecase/put.rb b/lib/usecase/put.rb new file mode 100644 index 0000000..65cbd47 --- /dev/null +++ b/lib/usecase/put.rb @@ -0,0 +1,81 @@ +#!/usr/bin/ruby + +module SafeDb + + # The put use case follows open and it adds secrets into an + # (encrypted at rest) envelope. Put can be called many times + # and when done, the lock use case can be called to commit all opened + # secrets into the configured storage engines. + # + # Calling put before calling open or after calling lock + # is not allowed and will result in an error. + # + # == Put Pre-Conditions + # + # When the put use case is called - the below conditions ring true. + # + # - the folder path ending in ../../my must exist + # - a session id, filename and encryption key ( in workstation config ) + # + # == Observable Value + # + # The observable value delivered by +put+ boils down to + # + # - a new friends.xyz123abc.os.txt file if this is the first put. 
+ # - a new group_name/key_name (like monica/surname) entry is added if required + # - a secret value is added against the key or updated if it already exists + # - a new session id and encryption key is generated and used to re-encrypt + class Put < UseCase + + attr_writer :secret_id, :secret_value + + # Execute the act of putting a string key and string value pair into a + # map at the chapter and verse location, overwriting if need be. + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + + return if unopened_envelope?( master_db ) + + envelope_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_content = OpenKey::KeyApi.db_envelope_exists?( master_db[ envelope_id ] ) + + # To get hold of the content we must either + # + # a) unlock it using the breadcrumbs or + # b) start afresh with a new content db + content_box = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ envelope_id ] ) ) if has_content + content_box = OpenKey::KeyDb.new() unless has_content + content_hdr = create_header() + + # If no content envelope exists we need to place + # an empty one inside the appdb content database. + master_db[ envelope_id ] = {} unless has_content + + # This is the PUT use case so we append a + # + # a) key for the new dictionary entry + # b) value for the new dictionary entry + # + # into the current content envelope and write + # the envelope to the content filepath. + crumbs_dict = master_db[ envelope_id ] + content_box.create_entry( master_db[ KEY_PATH ], @secret_id, @secret_value ) + OpenKey::KeyApi.content_lock( crumbs_dict, content_box.to_json, content_hdr ) + + # Three envelope crumbs namely the external ID, the + # random iv and the crypt key are written afresh into + # the master database. 
 + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + + # Show the mini dictionary at the opened chapter and verse location + Show.new.flow_of_events + + end + + + end + + +end diff --git a/lib/usecase/set.rb b/lib/usecase/set.rb new file mode 100644 index 0000000..fd0d0da --- /dev/null +++ b/lib/usecase/set.rb @@ -0,0 +1,44 @@ +#!/usr/bin/ruby + +module SafeDb + + # The set use case is the generic tool for setting configuration + # directives inside the safe workstation INI formatted file. + # + # The mirror of this use case is unset. + # + # == Observable Value + # + # The configuration directive will either be created (or will overwrite) an existing + # directive with the same path. + # + # The configuration file is printed to inform the user of the current state. + # + # == Alternative / Error Flows + # + # Error - if the directive path is not composed of two (fwd slash separated) parts + # Error - if the directive path and/or value contains (or not) unacceptable characters + # + class Set < UseCase + + attr_writer :domain_name + + + # The use use case is borrowed from the database world and it denotes + # the domain to be used for now (and evermore) for this workstation until another + # use command is issued. + # + # The parameter domain_name must be set after an object instance is acquired but + # before the execute method runs. + def execute + end + + + def pre_validation + end + + + end + + +end diff --git a/lib/usecase/show.rb b/lib/usecase/show.rb new file mode 100644 index 0000000..1f71027 --- /dev/null +++ b/lib/usecase/show.rb @@ -0,0 +1,138 @@ +#!/usr/bin/ruby + +module SafeDb + + # Show the mini dictionary of key-value pairs within the logged in book + # at the opened chapter and verse. + # + # If no dictionary exists at the opened chapter and verse a suitable + # message is pushed out to the console. 
+ class Show < UseCase + + def get_chapter_data( chapter_key ) + return OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( chapter_key ) ) + end + + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + + return if unopened_envelope?( master_db ) + + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_chapter = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + chapter_data = get_chapter_data( master_db[ chapter_id ] ) if has_chapter + has_verse = has_chapter && chapter_data.has_key?( master_db[ KEY_PATH ] ) + + +##global_variables - DONE +##local_variables - DONE +##instance_variables - DONE +##class_variables - tough nut to crack with very little benefit (method class_variables not defined) + +=begin + puts "" + puts "QQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQ ~~~~~~~~~~~~~ Global Variable Array List ~~~~~~~~~~~~~~~~ QQQQQ" + puts "QQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + + puts global_variables.inspect + + puts "QQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQ ~~~~~~~~~~~~~ Global Variable Values Printed ~~~~~~~~~~~~~~~~ QQQQQ" + puts "QQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + + global_variables.sort.each do |name| + + puts "<<< ------------------------------------------------------------------->>>" + puts "<<< #{name.to_s} >>>" + puts "<<< ------------------------------------------------------------------->>>" + next if name.to_s.eql?( "$FILENAME" ) + global_variable_value = eval "#{name}.inspect" + puts "<<< #{global_variable_value}" + + end + + puts "" + puts "QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "" + puts "QQQQQQQQQQQ QQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQ Bug Finder 
QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQ QQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "" + self.instance_variables.map do |attribute| + puts "==============================================" + puts "----------------------------------------------" + puts attribute + pp self.instance_variable_get(attribute) + end + puts "==============================================" + puts "QQQQQQQQQQQ QQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQ QQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "" + puts "### ------------------------------------" + puts "### Inspect View" + puts "### ------------------------------------" + pp self.inspect + puts "### ------------------------------------" + puts "QQQQQQQQQQQ QQQQQQQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQ Local Variables QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + puts "QQQQQQQQQQQ QQQQQQQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + + local_variables.map do |attribute| + puts "==============================================" + puts "----------------------------------------------" + puts attribute + pp binding.local_variable_get(attribute.to_sym) + end + puts "QQQQQQQQQQQ QQQQQQQQQQQQQQQ QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ" + + puts "" +=end + + + return unless has_verse + + line_dictionary = chapter_data[ master_db[ KEY_PATH ] ] + + puts "" + puts "### ##################################\n" + puts "### chapter =>> #{master_db[ ENV_PATH ]}\n" + puts "### & verse =>> #{master_db[ KEY_PATH ]}\n" + puts "### # lines =>> #{line_dictionary.length}\n" + puts "### ##################################\n" + puts "--- ----------------------------------\n" + puts "" + + showable_content = {} + line_dictionary.each do | key_str, value_object | + + is_file = key_str.start_with? FILE_KEY_PREFIX + value_object.store( FILE_CONTENT_KEY, SECRET_MASK_STRING ) if is_file + showable_content.store( key_str[ FILE_KEY_PREFIX.length .. 
-1 ], value_object ) if is_file + next if is_file + + is_secret = key_str.start_with? "@" + showable_val = SECRET_MASK_STRING if is_secret + showable_val = value_object unless is_secret + showable_content.store( key_str, showable_val ) + + end + + puts JSON.pretty_generate( showable_content ) + puts "--- ----------------------------------\n" + puts "### ##################################\n" + puts "" + + end + + private + + SECRET_MASK_STRING = "***********************" + + end + + +end diff --git a/lib/usecase/terraform/README.md b/lib/usecase/terraform/README.md new file mode 100644 index 0000000..c453714 --- /dev/null +++ b/lib/usecase/terraform/README.md @@ -0,0 +1,91 @@ + +# safe terraform + +### safe terraform | introduction + +This terraform use case exports the AWS IAM user access key, secret key and region key into (very safe) environment variables and then runs the specified terraform be it **init**, **plan**, **apply** or **destroy**. + + +## safe terraform | credential creation + +The first use case is importing the IAM user credentials into safe. + + $ safe login joebloggs.com # open the book + $ safe open iam dev.s3.writer # open chapter and verse + $ safe put @access.key ABCD1234EFGH5678 # Put IAM access key in safe + $ safe put @secret.key xyzabcd1234efgh5678 # Put IAM secret key in safe + $ safe put region.key eu-west-3 # infrastructure in Paris + + $ safe open iam prod.provisioner # open chapter and verse + $ safe put @access.key 4321DCBA8765WXYZ # Put IAM access key in safe + $ safe put @secret.key 5678uvwx4321abcd9876 # Put IAM secret key in safe + $ safe put region.key eu-west-1 # infrastructure in Dublin + + safe logout + +Take care to specify these 3 key names **@access.key**, **@secret.key**, **region.key** and note that safe's convention is to sensitively treat the value's of keys beginning with an **@** sign. **safe show** and other readers **mask out (redact)** these sensitive values. 
+ + +## safe terraform | running terraform + +Now and forever you can return to the chapter and verse and enjoy a secure credentials transfer where safe makes the IAM user credentials available to Terraform via environment variables. **Never do the plain text credentials touch the floor (disk).** + +### Why no safe terraform init? +**safe only gets involved when credentials are involved**. +**safe** is not trying to wrap command willy nilly. safe's policy is to keep external tool interfaces as **small** as possible. **`terraform init .`** does not involve credentials so safe does not get involved. + + $ cd /path/to/terraform/dir # go to directory holding your .tf file + $ safe login joebloggs.com # login to your chosen book + $ safe open iam dev.s3.writer # open chapter and verse holding IAM creds + $ terraform init . # the usual terraform init command + $ safe terraform plan # credentials are exported then terraform plan is run + $ safe terraform apply # credentials are exported then terraform apply is run + $ safe terraform destroy # credentials are exported then terraform destroy is run + +You can even change directories and run other terraform projects against the opened IAM user. You can also open an IAM user, run commands, open another run commands and then reopen the first and run commands. + +As long as you stay within your shell window - your safe login will persist. Once your session is finished you either logout or exit the shell. 
+ +### Shortcut Alert + +**safe terraform** is a shortcut for **safe terraform apply** + + $ safe terraform apply + $ safe terraform + +## safe terraform | pre-conditions + +To enact a successful safe terraform call you will need + +- to have created an IAM user +- to open chapter and verse which +- has these 3 keys @access.key @secret.key and region.key (at least) +- terraform installed on the machine or container + + +## safe terraform | benefits + +The safe terraform command is both an ultra secure and extremely convenient way of launching terraform. + +Your precious AWS IAM user credentials do not leave the safe and exist within (environment variable) memory only for the duration of the terraform command. + +It is safe as you need neither expose your AWS credentials in plain text in **~/.aws/credentials**, nor risk them sliding into version control. It is convenient because switching IAM users and AWS regions is as easy as typing the now ubiquitous safe open command. + + +## quick tip | view then goto + +No need to type out the safe open command everytime. Use it the very first time you create a path to chapter and verse. + + safe open <> <> + +Then use safe view and safe goto instead of safe open. + + $ safe view # list all chapter and verses + $ safe goto <> # use the number from safe view to open the location + $ safe show # look at your mini dictionary + + +## safe terraform | only for aws + +This command currently only supports the AWS provider but will be extended to support Google's Compute Engine and more besides. + diff --git a/lib/usecase/terraform/terraform.rb b/lib/usecase/terraform/terraform.rb new file mode 100644 index 0000000..357ec9f --- /dev/null +++ b/lib/usecase/terraform/terraform.rb @@ -0,0 +1,121 @@ +#!/usr/bin/ruby + +module SafeDb + + # This terraform use case exports the AWS IAM user access key, secret key and region key + # into (very safe) environment variables and then runs terraform init, plan, apply or destroy. 
+ # + # This is both ultra secure and extremely convenient because the credentials do not leave + # the safe and exist within (environment variable) memory only for the duration of the + # terraform command. + # + # It is safe because you do not need to expose your AWS credentials in plain text. + # It is convenient because switching IAM users and AWS regions is as easy as typing the now + # ubiquitous safe open command. + # + # safe open <> <> + class Terraform < UseCase + + attr_writer :command + + # This prefix is tagged onto environment variables which Terraform will read + # and convert for consumption into module input variables. + TERRAFORM_EVAR_PREFIX = "TF_VAR_" + + def execute + + return unless ops_key_exists? + master_db = get_master_database() + return if unopened_envelope?( master_db ) + + # Get the open chapter identifier (id). + # Decide whether chapter already exists. + # Then get (or instantiate) the chapter's hash data structure + chapter_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + verse_id = master_db[ KEY_PATH ] + chapter_exists = OpenKey::KeyApi.db_envelope_exists?( master_db[ chapter_id ] ) + + + # -- @todo begin + # -- Throw an exception (error) if the chapter + # -- either exists and is empty or does not exist. + # -- @todo end + + + # Unlock the chapter data structure by supplying + # key/value mini-dictionary breadcrumbs sitting + # within the master database at the section labelled + # envelope@<>. + chapter_data = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ chapter_id ] ) ) + + # Now read the three AWS IAM credentials @access.key, @secret.key and region.key + # into the 3 environment variables terraform expects to find. 
+ + # ############## | ############################################################ + # @todo refactor | ############################################################ + # -------------- | 000000000000000000000000000000000000000000000000000000000000 + # export-then-execute + # ------------------- + # Put all the code above in a generic export-then-execute use case + # Then you pass in a Key/Value Dictionary + # + # { "AWS_ACCESS_KEY_ID" => "@access_key", + # "AWS_SECRET_ACCESS_KEY" => "@secret_key", + # "AWS_DEFAULT_REGION" => "region_key" + # } + # + # And pass in a command array [ "terraform #{command_name} #{auto_approve}", "terraform graph ..." ] + # + # Validation is done by the generic use case (which loops checking that every value exists + # as a key at the opened location. + # + # If all good the generic use case exports the ENV vars and runs each command in the list. + # PS - configure map in INI not code file + # + # The extra power will speed up generation of environment variable use cases including + # ansible, s3 bucket operations, git interactions and more. + # + # ############## | ############################################################ + # ############## | ############################################################ + + puts "" + puts "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + puts "" + + ENV[ "AWS_ACCESS_KEY_ID" ] = chapter_data[ verse_id ][ "@access.key" ] + ENV[ "AWS_SECRET_ACCESS_KEY" ] = chapter_data[ verse_id ][ "@secret.key" ] + ENV[ "AWS_DEFAULT_REGION" ] = chapter_data[ verse_id ][ "region.key" ] + + mini_dictionary = chapter_data[ verse_id ] + mini_dictionary.each do | key_str, value_object | + + is_env_var = key_str.start_with?( ENV_VAR_PREFIX_A ) || key_str.start_with?( ENV_VAR_PREFIX_B ) + next unless is_env_var + + env_var_name = key_str[ ENV_VAR_PREFIX_A.length .. -1 ] if key_str.start_with? ENV_VAR_PREFIX_A + env_var_name = key_str[ ENV_VAR_PREFIX_B.length .. -1 ] if key_str.start_with? 
ENV_VAR_PREFIX_B + env_var_keyname = TERRAFORM_EVAR_PREFIX + env_var_name + ENV[ env_var_keyname ] = value_object + puts "Environment variable #{env_var_keyname} has been set." + + end + + puts "" + puts "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + puts "" + + auto_approve = @command && @command.eql?( "plan" ) ? "" : "-auto-approve" + command_name = @command ? @command : "apply" + system "terraform #{command_name} #{auto_approve}" + + puts "" + puts "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + puts "" + + end + + + end + + +end diff --git a/lib/usecase/token.rb b/lib/usecase/token.rb new file mode 100644 index 0000000..8d5faf6 --- /dev/null +++ b/lib/usecase/token.rb @@ -0,0 +1,35 @@ +#!/usr/bin/ruby + +module SafeDb + + # The token use case prints out an encrypted session token tied + # to the workstation and shell environment. See the root README.md on how + # to export it and create a simple command alias for it in the ~/.bash_aliases + # script which is executed when the shell starts. + class Token < UseCase + + + def execute + + print OpenKey::KeyLocal.generate_shell_key_and_token() + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end + diff --git a/lib/usecase/update/README.md b/lib/usecase/update/README.md new file mode 100644 index 0000000..cff81ee --- /dev/null +++ b/lib/usecase/update/README.md @@ -0,0 +1,55 @@ + + +# safe rename + +Changing your mind is a basic human right! 
In light of this, safe provides a **rename** use case that can be used to rename + +- a chapter +- a verse +- a key (at a chapter and verse location) + +
+As yet safe has no command for renaming books. You can achieve this by first cloning the book and then deleting the original. +
+ +## safe rename | chapter + +To rename a chapter you must not have an open location. If you do you must first close it before renaming. + + $ safe close + $ safe view + $ safe rename + +When safe sees that the book is not open, it knows that you want to rename the chapter. + +The rename command returns a view allowing you to check that the chapter name has indeed been updated. + + +## safe rename | verse + +To rename the verse you must have its chapter (and only its chapter) open. + + $ safe close + $ safe open + $ safe view + $ safe rename + +The rename command returns a view of all the verses in the open chapter allowing you to check that the verse name has indeed been updated. + +## safe rename | key + +Most of the time you will want to rename keys in the mini-dictionary at a chapter and verse location. To do this you must open the chapter and verse first. + + $ safe open + $ safe show + $ safe rename + +The rename command shows you the mini-dictionary (hashing out sensitive credentials) allowing you to check that the key name has indeed been updated. + +## safe rename | be aware + +Be aware of the following when renaming. + +- key names that start with @ guard the key's value during a safe show +- renaming keys that are required for integration functionality will need you pass the --force switch + diff --git a/lib/usecase/update/rename.rb b/lib/usecase/update/rename.rb new file mode 100644 index 0000000..2105307 --- /dev/null +++ b/lib/usecase/update/rename.rb @@ -0,0 +1,180 @@ +#!/usr/bin/ruby + +module SafeDb + + # The put use case follows open and it adds secrets into an + # (encrypted at rest) envelope. Put can be called many times + # and when done, the lock use case can be called to commit all opened + # secrets into the configured storage engines. + # + # Calling put before calling open or after calling lock + # is not allowed and will result in an error. 
+ # + # == Put Pre-Conditions + # + # When the put use case is called - the below conditions ring true. + # + # - the folder path ending in ../../my must exist + # - a session id, filename and encryption key ( in workstation config ) + # + # == Observable Value + # + # The observable value delivered by +put+ boils down to + # + # - a new friends.xyz123abc.os.txt file if this is the first put. + # - a new group_name/key_name (like monica/surname) entry is added if required + # - a secret value is added against the key or updated if it already exists + # - a new session id and encryption key is generated and used to re-encrypt + # + # == Example | Bill Clinton's Secrets + # + # In our fictitious example Bill Clinton uses safe to lock away the + # names and dates of his lady friends. + # + # $ safe init bill.clinton@example.com + # $ safe open my/friends + # + # $ safe put monica/surname lewinsky + # $ safe put monica/from "April 1989" + # $ safe put monica/to "September 1994" + # + # $ safe put hilary/surname clinton + # $ safe put hilary/from "January 1988" + # $ safe put hilary/to "Present Day" + # + # $ safe lock + # + # Soon follow up use cases will be unveiled, enabling us to + # + # - get + # - read + # - list + # - look + # - peep and + # - peek + class Rename < UseCase + + + attr_writer :secret_id, :secret_value + + + # The put use case follows open and it adds secrets into an + # (encrypted at rest) envelope. Put can be called many times to + # add secrets. Finally the lock use case commits all opened secrets + # into the configured storage engines. + # + # Calling put before calling open or after calling lock + # is not allowed and will result in an error. + # + # == Put Pre-Conditions + # + # When the put use case is called - the below conditions ring true. 
+ # + # - the folder path ending in ../../my must exist + # - a session id, filename and encryption key ( in workstation config ) + # + # == Observable Value + # + # The observable value delivered by +put+ boils down to + # + # - a new friends.xyz123abc.os.txt file if this is the first put. + # - a new group_name/key_name (like monica/surname) entry is added if required + # - a secret value is added against the key or updated if it already exists + # - a new session id and encryption key is generated and used to re-encrypt + # + # == How to Pretty Print a Hash in JSON Format + # + # This pretty prints a Hash (dictionary) data structure in JSON format. + # + # puts "---\n" + # puts JSON.pretty_generate( master_db ) + # puts "---\n" + # + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + + return if unopened_envelope?( master_db ) + + envelope_id = ENVELOPE_KEY_PREFIX + master_db[ ENV_PATH ] + has_content = OpenKey::KeyApi.db_envelope_exists?( master_db[ envelope_id ] ) + + # -- + # -- To get hold of the content we must either + # -- + # -- a) unlock it using the breadcrumbs or + # -- b) start afresh with a new content db + # -- + content_box = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( master_db[ envelope_id ] ) ) if has_content + content_box = OpenKey::KeyDb.new() unless has_content + content_hdr = create_header() + + # -- + # -- If no content envelope exists we need to place + # -- an empty one inside the appdb content database. + # -- + master_db[ envelope_id ] = {} unless has_content + + # -- + # -- This is the PUT use case so we append a + # -- + # -- a) key for the new dictionary entry + # -- b) value for the new dictionary entry + # -- + # -- into the current content envelope and write + # -- the envelope to the content filepath. 
+ # -- + crumbs_dict = master_db[ envelope_id ] + content_box.create_entry( master_db[ KEY_PATH ], @secret_id, @secret_value ) + OpenKey::KeyApi.content_lock( crumbs_dict, content_box.to_json, content_hdr ) + + # -- + # -- Three envelope crumbs namely the external ID, the + # -- random iv and the crypt key are written afresh into + # -- the master database. + # -- + OpenKey::KeyApi.write_master_db( content_hdr, master_db ) + print_put_success + +# ---> secret_ids = @secret_id.split("/") +# ---> if ( envelope.has_key? secret_ids.first ) +# ---> envelope[secret_ids.first][secret_ids.last] = @secret_value +# ---> else +# ---> envelope[secret_ids.first] = { secret_ids.last => @secret_value } +# ---> end + + end + + + private + + + def print_put_success + + puts "" + puts "Success putting a key/value pair into the open envelope." + puts "You can put more in and then close the envelope." + puts "" + puts " #{COMMANDMENT} close" + puts "" + + end + + + # Perform pre-conditional validations in preparation to executing the main flow + # of events for this use case. This method may throw the below exceptions. + # + # @raise [SafeDirNotConfigured] if the safe's url has not been configured + # @raise [EmailAddrNotConfigured] if the email address has not been configured + # @raise [StoreUrlNotConfigured] if the crypt store url is not configured + def pre_validation + + + end + + + end + + +end diff --git a/lib/usecase/use.rb b/lib/usecase/use.rb new file mode 100644 index 0000000..ce432a7 --- /dev/null +++ b/lib/usecase/use.rb @@ -0,0 +1,41 @@ +#!/usr/bin/ruby + +module SafeDb + + # The use use case borrowed from the database world denotes which + # domain will be used for now (and evermore) on the workstation until another + # use command is issued. + # + # == Observable Value + # + # The workstation configuration file will point to the domain name specified + # marking it as the current and correct domain to use. 
+ # + # == Alternative / Error Flows + # + # Error - if the domain name is not listed in the configuration file. + # Error - if the (dictionary) path to the domain's base does not exist + # + class Use < UseCase + + attr_writer :domain_name + + + # The use use case is borrowed from the database world and it denotes + # the domain to be used for now (and evermore) for this workstation until another + # use command is issued. + # + # The parameter domain_name must be set after an object instance is acquired but + # before the execute method runs. + def execute + end + + + def pre_validation + end + + + end + + +end diff --git a/lib/usecase/verse.rb b/lib/usecase/verse.rb new file mode 100644 index 0000000..b14c0f6 --- /dev/null +++ b/lib/usecase/verse.rb @@ -0,0 +1,20 @@ +#!/usr/bin/ruby + +module SafeDb + + class Verse < UseCase + + def execute + + return unless ops_key_exists? + master_db = get_master_database() + return if unopened_envelope?( master_db ) + print master_db[ KEY_PATH ] + + end + + + end + + +end diff --git a/lib/usecase/view.rb b/lib/usecase/view.rb new file mode 100644 index 0000000..044670f --- /dev/null +++ b/lib/usecase/view.rb @@ -0,0 +1,71 @@ +#!/usr/bin/ruby + +module SafeDb + + # View provides a bird's eye view of the domain's content and links well with + # the goto, show and tell commands. + # + # $ xxx view + # $ xxx goto 5 # shortcut for xxx open <> <> + # $ xxx show + # $ xxx tell + # $ xxx tell url + # + # View maps out and numbers each envelope/key combination. + # Goto with the number effectively shortcuts the open pinpointer. + # Show prints out the dictionary at the opened path but masks any secrets. + # Tell without a parameter echoes the secret. + # Tell with parameter echoes the value of the parameter key (eg url). + # + # Once goto is enacted all path CRUD commands come into play as if you had + # opened the path. These include put, copy, paste, show, tell and delete. 
+ class View < UseCase + + def execute + + return unless ops_key_exists? + master_db = OpenKey::KeyApi.read_master_db() + + open_envelope = "(none)" if master_db[ ENV_PATH ].nil? + open_envelope = master_db[ ENV_PATH ] unless master_db[ ENV_PATH ].nil? + open_key_path = "(none)" if master_db[ KEY_PATH ].nil? + open_key_path = master_db[ KEY_PATH ] unless master_db[ KEY_PATH ].nil? + + puts "" + puts "--- Book Birthday ~> #{OpenKey::KeyApi.to_db_create_date(master_db)}\n" + puts "--- The Book Name ~> #{OpenKey::KeyApi.to_db_domain_name(master_db)}\n" + puts "--- The Book (Id) ~> #{OpenKey::KeyApi.to_db_domain_id(master_db)}\n" + puts "---\n" + puts "--- Chapter ~> #{open_envelope}\n" + puts "--- + Verse ~> #{open_key_path}\n" + puts "---\n" + + goto_location = 1 + envelope_dictionaries = OpenKey::KeyApi.to_matching_dictionary( master_db, ENVELOPE_KEY_PREFIX ) + envelope_dictionaries.each_pair do | envelope_name, crumb_dictionary | + is_opened_chapter = envelope_name.eql?( open_envelope ) + envelope_content = OpenKey::KeyDb.from_json( OpenKey::KeyApi.content_unlock( crumb_dictionary ) ) + envelope_content.each_key do | envelope_key | + is_opened_verse = envelope_key.eql?( open_key_path ) + is_open = is_opened_chapter && is_opened_verse + openend = is_open ? " (( open location ))" : "" + fixdint = format( "%02d", goto_location ) + goindex = is_open ? 
"" : "[#{fixdint}] " + puts "--- --- --------------------------------------" if is_open + puts "--- #{goindex}#{envelope_name} ~> #{envelope_key}#{openend}\n" + puts "--- --- --------------------------------------" if is_open + goto_location += 1 + end + end + + puts "" + + return + + end + + + end + + +end diff --git a/lib/usecase/vpn/README.md b/lib/usecase/vpn/README.md new file mode 100644 index 0000000..acdb7b4 --- /dev/null +++ b/lib/usecase/vpn/README.md @@ -0,0 +1,150 @@ + +# Switch On an OpenVPN Client Connection + + safe vpn + +## Introduction + +This DevOps task is a collaboration to **switch on a VPN connection** with safe as the credentials provider, nmcli on Ubuntu and an OpenVPN account embodied details within an ovpn file. + +## Task Preconditions + +To switch on a client OpenVPN connection the following must hold true + +- a shell safe tokenize, login and open has ocurred +- the opened safe location must have a key vpn.id +- safe write <> must eject <>.ovpn +- the ovpn file must be valid and point to a running accessible openvpn server +- the ubiquitous @password field must hold a credible value +- the VPN connection is assumed to be not just switched off, but deleted (at the start) + + + + +# Switch Off an OpenVPN Client Connection + + dot vpn down + +## Introduction + +This DevOps task is a collaboration to **switch on a VPN connection** with safe as the credentials provider, nmcli on Ubuntu and an OpenVPN account embodied details within an ovpn file. 
+ +## Task Preconditions + +To switch off a client OpenVPN connection the following must hold true + +- a shell safe tokenize, login and open has occurred +- the opened safe location must have a key vpn.id +- safe write <> must eject <>.ovpn +- the ovpn file must be valid and point to a running accessible openvpn server +- the ubiquitous @password field must hold a credible value +- the VPN connection is assumed to be not just switched off, but deleted (at the start) + + +# safe vpn up | safe vpn down + + $ safe open vpn production + $ safe vpn up + $ ... (do work using vpn) + $ safe vpn down + +## safe vpn | introduction + +Once you put VPN credentials into a mini-dictionary (in a safe book chapter and verse), you can bring up a VPN connection and after doing your work through the VPN you can tear it down. + +**[The strategy used to bring the OpenVPN connection up and down can be found here.](http://www.devopswiki.co.uk/wiki/middleware/network/openvpn/openvpn)** + + +### safe vpn | ovpn | requirements + +Currently the safe vpn command is only integration tested with the following tech requirements + +- an Ubuntu 16.04 and Ubuntu 18.04 operating system +- the nmcli (network manager command line) client which is installed if absent +- an OpenVPN server +- VPN configuration imported via an OpenVPN **`*.ovpn`** file + + +## safe terraform | credential creation + +The first use case is importing the IAM user credentials into safe.
+ + $ safe login joebloggs.com # open the book + $ safe open iam dev.s3.writer # open chapter and verse + $ safe put @access.key ABCD1234EFGH5678 # Put IAM access key in safe + $ safe put @secret.key xyzabcd1234efgh5678 # Put IAM secret key in safe + $ safe put region.key eu-west-3 # infrastructure in Paris + + $ safe open iam prod.provisioner # open chapter and verse + $ safe put @access.key 4321DCBA8765WXYZ # Put IAM access key in safe + $ safe put @secret.key 5678uvwx4321abcd9876 # Put IAM secret key in safe + $ safe put region.key eu-west-1 # infrastructure in Dublin + + safe logout + +Take care to specify these 3 key names **@access.key**, **@secret.key**, **region.key** and note that safe's convention is to sensitively treat the value's of keys beginning with an **@** sign. **safe show** and other readers **mask out (redact)** these sensitive values. + + +## safe terraform | running terraform + +Now and forever you can return to the chapter and verse and enjoy a secure credentials transfer where safe makes the IAM user credentials available to Terraform via environment variables. **Never do the plain text credentials touch the floor (disk).** + +### Why no safe terraform init? +**safe only gets involved when credentials are involved**. +**safe** is not trying to wrap command willy nilly. safe's policy is to keep external tool interfaces as **small** as possible. **`terraform init .`** does not involve credentials so safe does not get involved. + + $ cd /path/to/terraform/dir # go to directory holding your .tf file + $ safe login joebloggs.com # login to your chosen book + $ safe open iam dev.s3.writer # open chapter and verse holding IAM creds + $ terraform init . 
# the usual terraform init command + $ safe terraform plan # credentials are exported then terraform plan is run + $ safe terraform apply # credentials are exported then terraform apply is run + $ safe terraform destroy # credentials are exported then terraform destroy is run + +You can even change directories and run other terraform projects against the opened IAM user. You can also open an IAM user, run commands, open another run commands and then reopen the first and run commands. + +As long as you stay within your shell window - your safe login will persist. Once your session is finished you either logout or exit the shell. + +### Shortcut Alert + +**safe terraform** is a shortcut for **safe terraform apply** + + $ safe terraform apply + $ safe terraform + +## safe terraform | pre-conditions + +To enact a successful safe terraform call you will need + +- to have created an IAM user +- to open chapter and verse which +- has these 3 keys @access.key @secret.key and region.key (at least) +- terraform installed on the machine or container + + +## safe terraform | benefits + +The safe terraform command is both an ultra secure and extremely convenient way of launching terraform. + +Your precious AWS IAM user credentials do not leave the safe and exist within (environment variable) memory only for the duration of the terraform command. + +It is safe as you need neither expose your AWS credentials in plain text in **~/.aws/credentials**, nor risk them sliding into version control. It is convenient because switching IAM users and AWS regions is as easy as typing the now ubiquitous safe open command. + + +## quick tip | view then goto + +No need to type out the safe open command everytime. Use it the very first time you create a path to chapter and verse. + + safe open <> <> + +Then use safe view and safe goto instead of safe open. 
+ + $ safe view # list all chapter and verses + $ safe goto <> # use the number from safe view to open the location + $ safe show # look at your mini dictionary + + + + + + diff --git a/lib/usecase/vpn/vpn.ini b/lib/usecase/vpn/vpn.ini new file mode 100644 index 0000000..675ae2d --- /dev/null +++ b/lib/usecase/vpn/vpn.ini @@ -0,0 +1,31 @@ + +[vpn] + +vpn.id = rb>> @f[:secrets][:vpn_id] +vpn.filename = rb>> @s[:vpn_id] + ".ovpn" +export.folder = rb>> File.join( Dir.home, ".config/safe.db" ) +vpn.filepath = rb>> File.join( @s[:export_folder], @s[:vpn_filename] ) +vpn.username = rb>> @f[:secrets][:username] +vpn.password = rb>> @f[:secrets][:@password] +safe.write.cmd = rb>> "safe write --script " + @s[:vpn_filepath] + +nm.import.cmd = rb>> "sudo nmcli connection import type openvpn file " + @s[:vpn_filepath] +nm.default.cmd = rb>> "nmcli connection modify " + @s[:vpn_id] + " ipv4.never-default true" +nm.user.cmd = rb>> "nmcli connection modify " + @s[:vpn_id] + " +vpn.data username=" + @s[:vpn_username] +nm.reload.cmd = rb>> "sudo nmcli connection reload " + @s[:vpn_id] +nm.flags.cmd = rb>> "nmcli connection modify " + @s[:vpn_id] + " +vpn.data password-flags=0" +this.user = rb>> Etc.getlogin() + +nm.directory = /etc/NetworkManager/system-connections +nm.filepath = rb>> File.join @s[:nm_directory], @s[:vpn_id] +nm.cache.name = rb>> @s[:vpn_id] + ".ini" +nm.cache.path = rb>> File.join( Gem.user_home(), @s[:nm_cache_name] ) + +chown.cmd.1 = rb>> "sudo chown " + @s[:this_user] + ":" + @s[:this_user] + " " + @s[:nm_filepath] +chown.cmd.2 = rb>> "sudo chown root:root " + @s[:nm_filepath] + +nm.conn.up = rb>> "nmcli connection up " + @s[:vpn_id] +nm.restart = sudo service network-manager restart + +nm.conn.off = rb>> "nmcli con down id " + @s[:vpn_id] +nm.conn.del = rb>> "nmcli connection delete " + @s[:vpn_id] diff --git a/lib/usecase/vpn/vpn.rb b/lib/usecase/vpn/vpn.rb new file mode 100644 index 0000000..045c99d --- /dev/null +++ b/lib/usecase/vpn/vpn.rb @@ -0,0 +1,54 @@ 
+#!/usr/bin/ruby + +module SafeDb + + # This vpn use case sets up vpn connection paraphernelia and can bring up a VPN connection + # and then tear it down. + # + # safe vpn up + # safe vpn down + class Vpn < UseCase + + attr_writer :command + + def execute + + if( @command && @command.eql?( "down" ) ) + + puts "" + system @dictionary[ :nm_conn_off ]; sleep 2; + system @dictionary[ :nm_conn_del ] + puts "" + return + + end + + puts "" + system @dictionary[ :safe_write_cmd ] + puts "[#{@dictionary[ :vpn_filename ]}] temporarily exported to [#{@dictionary[ :vpn_filepath ]}]." + system @dictionary[ :nm_import_cmd ] + File.delete( @dictionary[ :vpn_filepath ] ) + puts "Exported file [#{@dictionary[ :vpn_filepath ]}] has now been deleted." + + system @dictionary[ :nm_default_cmd ] + system @dictionary[ :nm_user_cmd ] + system @dictionary[ :nm_reload_cmd ] + system @dictionary[ :nm_flags_cmd ] + system @dictionary[ :chown_cmd_1 ] + + vpn_data = IniFile.load( @dictionary[:nm_filepath] ) + vpn_data['vpn-secrets'] = { 'password' => @dictionary[:vpn_password] } + vpn_data.write() + + system @dictionary[ :chown_cmd_2 ] + system @dictionary[ :nm_restart ]; sleep 2; + system @dictionary[ :nm_conn_up ] + puts "" + + end + + + end + + +end diff --git a/lib/version.rb b/lib/version.rb new file mode 100644 index 0000000..0bc2a66 --- /dev/null +++ b/lib/version.rb @@ -0,0 +1,3 @@ +module SafeDb + VERSION = "v0.01.0001" +end diff --git a/safedb.gemspec b/safedb.gemspec new file mode 100644 index 0000000..45a0204 --- /dev/null +++ b/safedb.gemspec @@ -0,0 +1,34 @@ +lib = File.expand_path("../lib", __FILE__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) + +require 'version' + +Gem::Specification.new do |spec| + + spec.name = "safedb" + spec.version = SafeDb::VERSION + spec.authors = [ "Apollo Akora" ] + spec.email = [ "devopsassets@gmail.com" ] + + spec.summary = %q{safe locks and unlocks secrets in a simple, secure and intuitive way.} + spec.description = %q{safe is a 
credentials manager for the linux command line written in Ruby. It locks and unlocks secrets in a safe simple and intuitive manner. You can then visit websites, manufacture keys and passwords, inject credentials into Jenkins, and interact with many tools including S3, GoogleDrive, Terraform, Git and Docker.} + spec.homepage = "https://www.safedb.net" + spec.license = "MIT" + + spec.files = `git ls-files -z`.split("\x0").reject do |f| + f.match(%r{^(test|spec|features)/}) + end + + spec.metadata["yard.run"] = "yri" + spec.bindir = "bin" + spec.executables = [ 'safe', 'safedb' ] + spec.require_paths = ["lib"] + spec.required_ruby_version = '>= 2.5.0' + + spec.add_dependency 'inifile', '~> 3.0' + spec.add_dependency 'thor', '~> 0.2' + spec.add_dependency 'bcrypt' + + spec.add_development_dependency "bundler", "~> 1.16" + +end