#!/usr/bin/ruby

# BEGIN: Requires

require 'digest'

# END: Requires

# BEGIN: Helper Functions

def usage
  puts <<EOT

#{__FILE__} [ACTION <arg>] [<flags>]

Description:
  This utility is meant to be used to interact with & manage the filebucket on P4 nodes, because the
  built-in utility for that (`puppet filebucket -l <action>`) is nonfunctional.

  This implements the same functionality (minus the puppet tie-in) and will allow the user to
  search the filebucket and restore from it.

Actions:
  search <term>    : Search for bucket entries matching a portion of the filepath
  list             : List all bucket entries
  list-files       : List all files/paths that have been backed up to the bucket
  get <entry-hash> : Get the content of a specific entry (by hash)
  restore <value>  : Restore the previous state of a file stored in the bucket. The value can be a hash or a filename/filepath
                   : Note: To restore to an alternate path you will need to provide the path via the -o flag
                   :
  backup <file>    : Back up the file to the bucket (works relative to the current directory unless a full path is provided)

Restore Flags:
  -o | --output-file <file> : Provide an alternate restore path for the restore action

Global Flags:
  -d | --debug : Set the debug flag
  -h | --help  : Show this help message

Listing Filter Flags:
  --from-date <DATE> : Only list entries modified after this point (FORMAT: YYYY-MM-DD HH:MM:SS)
                     : EX:
                     :   1. --from-date 2023-05-10
                     :   2. --from-date "2023-05-10 13:23"
                     :
  --to-date <DATE>   : Only list entries modified before this point (FORMAT: YYYY-MM-DD HH:MM:SS)
                     : EX:
                     :   1. --to-date 2024-05-10
                     :   2. --to-date "2024-05-10 05:27"
                     :

Info Format Flags:
  -i | --inline : Set the info format to inline (MTIME : HASH : FILENAME)
  -l | --long   : Set the info format to long
                :   Entry [HASH]:
                :     Paths: path1,path2,...,pathn
                :     MTIME: YYYY-MM-DD HH:MM:SS -####
                :
  -c | --csv    : Set the info format to csv ( MTIME,HASH,FILENAME1[;FILENAMEn] )

Author:
  Name: Tristan Ancelet
  Email: tristan.ancelet@acumera.com
  Phone (Work) #: +1 (337) 965-1855

EOT
end
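
# A few illustrative invocations; the script name, paths, and hashes below are
# placeholders, not values taken from a real node:
#   sudo ./filebucket.rb list --from-date 2024-05-01 -l
#   sudo ./filebucket.rb search /etc/motd
#   sudo ./filebucket.rb restore /etc/motd -o /tmp/motd.restored
#   sudo ./filebucket.rb backup /etc/motd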

def log (message, level = 0)
  if $DEBUG == true or $CONFIG[:log_file] != ""
    if message == ""
      puts "log was called without providing a message"
      exit
    end

    case level
    when 0
      level="INFO"
    when 1
      level="WARN"
    when 2
      level="CRIT"
    else
      level="UNDF"
    end

    datestamp=Time.now
    log_message="#{datestamp} : #{$HOSTNAME} : #{level} : #{caller[0]} : #{message}"

    if $CONFIG[:log_file] != ""
      File.open($CONFIG[:log_file],'a') do |file|
        file.write("#{log_message}\n")
      end
    else
      puts log_message
    end
  end
end
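
# A log line produced above has this shape (hostname and caller shown are
# illustrative placeholders):
#   2024-05-26 17:21:07 +0000 : some-p4-node : INFO : ./filebucket.rb:123:in `restore_entry' : message text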

def which(cmd)
  # https://stackoverflow.com/questions/2108727/which-in-ruby-checking-if-program-exists-in-path-from-ruby
  exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
  ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
    exts.each do |ext|
      exe = File.join(path, "#{cmd}#{ext}")
      return exe if File.executable?(exe) && !File.directory?(exe)
    end
  end
  nil
end

def get_verification (prompt = "Do you want to continue?")
  while true
    puts "#{prompt} (y/n): "
    answer = STDIN.gets
    answer = answer.strip()
    case answer
    when 'y','yes'
      return true
    when 'n', 'no'
      return false
    else
      puts "#{answer} is not an acceptable answer"
    end
  end
end

def get_selections (reference_array, prompt = "Which of the following do you want to select? ", options = { :multiple => false, }, &procedure)
  ## Making a clone of the array since the selections were passed by reference
  selections = reference_array.clone

  def put_prompt (selections, prompt)
    puts prompt
    selections.each_with_index do |value,index|
      puts "#{index} : #{value}"
    end
  end

  if options[:multiple] == true
    output = Array.new
  else
    output = ""
  end

  put_prompt selections, prompt

  while true
    answer = STDIN.gets.strip()
    # Integer() raises on non-numeric input, so fall back to nil instead of crashing
    choice = Integer(answer) rescue nil
    if choice.is_a? Integer
      if choice >= 0 and choice < selections.count
        if options[:multiple] == true
          output.push(selections[choice])
          selections.delete_at(choice)
          if get_verification "Are you done selecting?"
            break
          end
          put_prompt selections, prompt
        else
          output = selections[choice]
          break
        end
      else
        puts "#{choice} is not between the values of 0 and #{selections.count - 1}. Please try again."
      end
    else
      puts "#{answer} is not a valid option. Please try again."
    end
  end

  if procedure.respond_to? "call"
    output = procedure.call(output)
  end
  output
end
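
# Minimal usage sketch for get_selections (values are illustrative):
#   file = get_selections(["/etc/motd", "/etc/hosts"], "Which file do you want to restore?")
#   # prints "0 : /etc/motd" and "1 : /etc/hosts", reads an index from stdin, and
#   # returns the chosen element (an Array instead when :multiple => true).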

# END: Helper Functions



# BEGIN: Variables

if not (ARGV & ['-h', '--help']).empty?
  usage
  exit
end

if ENV["USER"] != 'root'
  puts "This script should only be run by root (permissions issues). Please rerun it as root or prepend \"sudo\""
  exit
end

$DEBUG=false

puppet_exe = which "puppet"

if puppet_exe == nil
  puts "The puppet utility was not found in $PATH. This utility will not be able to function"
  exit
end

$CONFIG = Hash.new
$CONFIG[:bucket_dir]=` #{puppet_exe} agent --configprint clientbucketdir `.strip()
$CONFIG[:puppet_version]=` #{puppet_exe} --version `.strip()
$CONFIG[:action]=""
$CONFIG[:search_term]=""
$CONFIG[:log_file]=""
$CONFIG[:alt_filepath]=""
$CONFIG[:info_format]="inline"
$CONFIG[:from_time]=Time.at(0) # Beginning of epoch time
$CONFIG[:to_time]=Time.now # Now

File.open('/etc/hostname') do |file|
  $HOSTNAME=file.read().strip()
end

FLAG_REGEX=/^\-+\S+/
# The time portion (and the seconds within it) is optional, matching the examples shown in usage
DATE_REGEX=/^(?<year>[0-9]{4})-(?<month>[0-9]{1,2})-(?<day>[0-9]{1,2})[[:space:]]*((?<hour>[0-9]{1,2}):(?<minute>[0-9]{1,2})(:(?<second>[0-9]{1,2}))?)?$/
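
# Illustrative values accepted by DATE_REGEX and the Time they map to in the
# argument handling further down:
#   "2024-05-10"          -> Time.new(2024, 5, 10)
#   "2024-05-10 05:27"    -> Time.new(2024, 5, 10, 5, 27)
#   "2024-05-10 05:27:30" -> Time.new(2024, 5, 10, 5, 27, 30)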

# END: Variables



# BEGIN: Handle CLI Args

if ARGV.count == 0
  puts "No arguments were provided"
  usage
  exit
end

if not (ARGV & ['-d', '--debug']).empty?
  $DEBUG=true
end

i=0
case ARGV[i]
when 'search', 'get', 'restore', 'backup'
  $CONFIG[:action]=ARGV[i]
  log "$CONFIG[:action] was set to #{ARGV[i]}"
  log "user provided search action ARGV[i.next] == #{ARGV[i.next]}"
  # ARGV[i.next] is nil when no argument follows the action
  if ARGV[i.next].to_s != "" and not ARGV[i.next] =~ FLAG_REGEX
    $CONFIG[:search_term]=ARGV[i.next]
    log "search_term was set to #{ARGV[i.next]}"
    i+=2
  else
    puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
    usage
    exit
  end

when 'list', 'list-files'
  $CONFIG[:action] = ARGV[i]
  log "$CONFIG[:action] was set to #{ARGV[i]}"
  i+=1

else
  puts "#{ARGV[i]} is not a valid action. Please make sure you use a valid action as the first argument of the script"
  usage
  exit
end

while i < ARGV.count
  case ARGV[i]
  when '-c', '--csv'
    $CONFIG[:info_format]='csv'
    log "$CONFIG[:info_format] was set to #{$CONFIG[:info_format]}"
    i+=1
  when '-l', '--long'
    $CONFIG[:info_format]='long'
    log "$CONFIG[:info_format] was set to #{$CONFIG[:info_format]}"
    i+=1
  when '-i', '--inline'
    $CONFIG[:info_format]='inline'
    log "$CONFIG[:info_format] was set to #{$CONFIG[:info_format]}"
    i+=1
  when '-o', '--output-file'
    log "user provided ARGV[i.next] == #{ARGV[i.next]}"
    if ARGV[i.next].to_s != "" and not ARGV[i.next] =~ FLAG_REGEX
      $CONFIG[:alt_filepath]=ARGV[i.next]
      log "alt_filepath was set to #{ARGV[i.next]}"
      i+=2
    else
      puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
      usage
      exit
    end
  when '--to-date', '--from-date'
    if ARGV[i.next] != "" and ARGV[i.next] =~ DATE_REGEX
      date_matches = DATE_REGEX.match(ARGV[i.next]).named_captures.each_value.select{|val| not val.nil?}.map(&:to_i)
      case ARGV[i]
      when '--to-date'
        $CONFIG[:to_time]=Time.new(*date_matches)
        log "$CONFIG[:to_time] was set to #{$CONFIG[:to_time]}"
      when '--from-date'
        $CONFIG[:from_time]=Time.new(*date_matches)
        log "$CONFIG[:from_time] was set to #{$CONFIG[:from_time]}"
      end
      i+=2
    else
      puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
      usage
      exit
    end

  when FLAG_REGEX
    # Catch-all to prevent the user from specifying an unrecognized flag
    puts "#{ARGV[i]} is not a valid flag."
    usage
    exit

  else
    i+=1
  end
end

## BEGIN: Checks

if $CONFIG[:action] == ""
  puts "Action was not provided"
end

case $CONFIG[:action]
when 'search', 'get'
  if $CONFIG[:search_term] == ""
    puts "Search Term was not provided"
    usage
    exit
  end

when 'backup'
  if not File.exist? $CONFIG[:search_term]
    puts "File #{$CONFIG[:search_term]} does not exist. Please check to make sure that there are no typos and attempt the run again."
    exit
  end
end

## END: Checks

# END: Handle CLI Args



# BEGIN: Classes
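
# The classes below read the on-disk layout this script expects under the
# clientbucketdir (the same layout backup_file recreates further down); the hash
# shown here is only a placeholder:
#   <clientbucketdir>/d/4/1/d/8/c/d/9/d41d8cd98f00b204e9800998ecf8427e/contents  <- file content at backup time
#   <clientbucketdir>/d/4/1/d/8/c/d/9/d41d8cd98f00b204e9800998ecf8427e/paths     <- newline-separated list of original paths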

class BucketEntry
  attr_reader :hash, :filepaths, :mtime

  def initialize (entry_dir)
    @entry_dir = entry_dir
    @hash = File.basename(entry_dir)
    File.open("#{entry_dir}/paths") do |file|
      @filepaths = file.read.split(/\n/)
    end
    @mtime = File.mtime(entry_dir)
    log "BucketEntry was created from #{entry_dir}"
  end

  def path_include? (path_string)
    log "BucketEntry[#{hash}] was called with #{path_string}"
    @filepaths.any?{|path| path.include? path_string}
  end

  def long_info
    "Entry [#{@hash}]:
  Paths: #{@filepaths.join(',')}
  MTIME: #{@mtime}

"
  end

  def csv_info
    [@mtime,@hash,@filepaths.join(';')].join(',')
  end

  def inline_info
    "#{@mtime} : #{@hash} : #{@filepaths.join(',')}"
  end

  def info
    case $CONFIG[:info_format]
    when 'long'
      long_info
    when 'inline'
      inline_info
    when 'csv'
      csv_info
    end
  end

  def content
    log "BucketEntry[#{@hash}] getting contents"
    File.open("#{@entry_dir}/contents",'r') do |file|
      file.read()
    end
  end
end

class Bucket
  attr_reader :bucketdir, :entries

  def initialize (clientbucketdir)
    log "Bucket is being created from #{clientbucketdir}"
    @bucketdir = clientbucketdir
    @entries = Hash.new
    load_bucket
  end

  def select(&proc)
    @entries.each_value.select(&proc)
  end

  def any?(&proc)
    @entries.each_value.any?(&proc)
  end

  def load_bucket
    log "Bucket[#{@bucketdir}] is loading entries"
    Dir["#{@bucketdir}/**/paths"].map{|path| File.dirname(path)}.each do |directory|
      log "\"#{directory}\" was grabbed from bucket directory. Making new BucketEntry"
      entry = BucketEntry.new(directory)
      if entry.mtime <= $CONFIG[:to_time] and entry.mtime >= $CONFIG[:from_time]
        @entries[entry.hash]=entry
        log "BucketEntry[#{entry.hash}] was added to @entries Size=#{@entries.count()}"
      else
        log "Entry[#{entry.hash}] was filtered out by the user provided time constraints"
      end
    end
    log "Bucket[#{@bucketdir}] was loaded"
  end

  def filenames
    filenames = Array.new
    @entries.each_value do |entry|
      entry.filepaths.each do |path|
        if not filenames.include? path
          filenames.push(path)
        end
      end
    end
    filenames
  end
end

# END: Classes

# BEGIN: Work Functions

def search_entries_paths (bucket)
  puts bucket.select{|entry| entry.path_include? $CONFIG[:search_term]}.sort_by(&:mtime).map(&:info)
end
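
# Example of one inline-format result line printed above (values are placeholders):
#   2024-05-20 14:03:11 +0000 : d41d8cd98f00b204e9800998ecf8427e : /etc/motd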

def get_content_of_entry_hash (bucket)
  if bucket.entries.has_key? $CONFIG[:search_term]
    puts bucket.entries[$CONFIG[:search_term]].content
  else
    puts "There were no entries corresponding to #{$CONFIG[:search_term]}"
    exit
  end
end

def list_all_entries (bucket)
  puts bucket.filenames.map{|filename| bucket.select{|entry| entry.path_include? filename}.sort_by(&:mtime).map(&:info)}
end

def list_entry_files (bucket)
  puts bucket.filenames.sort.join("\n")
end

def get_entry_by_file (bucket, filenames)
  entry = nil
  if filenames.count == 1
    filename = filenames[0]
  else
    filename = get_selections filenames, "Your filename matched multiple files. Please select one to restore"
  end

  entries = bucket.select{|entry| entry.path_include? filename}
  if entries.count == 1
    entry = entries.first
  else
    while true
      mtimes = entries.map{|entry| entry.mtime}
      entry_mtime = get_selections(mtimes, "Which timestamp do you want to revert the file to?")
      entry = entries.lazy.select{|entry| entry.mtime == entry_mtime}.first

      if get_verification "Do you want to see the contents of #{filename} at this time?"
        puts entry.content
        if get_verification "Is this the entry you want to overwrite #{filename} with?"
          break
        end
      else
        break
      end
    end
  end

  if filename[0] != '/'
    filename = "/#{filename}"
  end

  return entry, filename
end

def get_entry_by_hash (bucket)
  if bucket.entries.has_key? $CONFIG[:search_term]
    entry = bucket.entries[$CONFIG[:search_term]]
    filepath = ""
    if entry.filepaths.count == 1
      filepath = entry.filepaths[0]
    else
      filepath = get_selections entry.filepaths, "What filepath do you wish to restore to?"
    end

    if filepath[0] != '/'
      filepath = "/#{filepath}"
    end

    return entry, filepath
  else
    puts "There were no entries corresponding to #{$CONFIG[:search_term]}"
    # Bail out here; restore_entry cannot continue without an entry
    exit
  end
end

def restore_entry (bucket)
  entry = nil
  if bucket.any?{ |entry| entry.path_include? $CONFIG[:search_term] }
    filenames = bucket.filenames.select {|filename| filename.include? $CONFIG[:search_term]}
    entry, filepath = get_entry_by_file bucket, filenames
  else
    entry, filepath = get_entry_by_hash bucket
  end

  if $CONFIG[:alt_filepath] != ""
    filepath=$CONFIG[:alt_filepath]
  end

  if get_verification "Are you sure you want to overwrite #{filepath}?"
    File.open(filepath,'w') do |file|
      file.write(entry.content)
    end
  else
    puts "OK, not overwriting."
  end
end

def backup_file (bucket)
  hash=""
  if Gem::Version.new($CONFIG[:puppet_version]) >= Gem::Version.new("7.0.0")
    log "Puppet Version is 7.* or newer so hashing algo will be SHA256"
    File.open($CONFIG[:search_term],'r') do |file|
      hash = Digest::SHA2.hexdigest file.read()
    end
  else
    log "Puppet Version is not 7.* so hashing algo will be MD5"
    File.open($CONFIG[:search_term],'r') do |file|
      hash = Digest::MD5.hexdigest file.read()
    end
  end

  log "Hash for #{$CONFIG[:search_term]} was generated to be #{hash}"

  if bucket.entries.has_key? hash
    log "Hash was found to already be backed up to bucket"
    puts "This file (hash: #{hash}) has already been backed up to the bucket"
    puts bucket.entries[hash].info
    exit
  end

  puppet_uid=0
  puppet_gid=0
  File.open('/etc/passwd','r') do |file|
    log "Getting puppet UID & GID from /etc/passwd"
    passd = file.read.split(/\n/).select{|line| line =~ /^puppet:/}.first
    puppet_uid=Integer(passd.split(':')[2])
    log "Retrieved Puppet UID as #{puppet_uid}"
    puppet_gid=Integer(passd.split(':')[3])
    log "Retrieved Puppet GID as #{puppet_gid}"
  end

  log "Creating preceding directories for the hash directory"
  dir=$CONFIG[:bucket_dir]
  hash.chars[0,8].each{|char|
    dir+="/#{char}"
    log "Checking whether #{dir} already exists"
    if not Dir.exist? dir
      log "#{dir} didn't exist so creating the directory & changing the UID to #{puppet_uid} & GID to #{puppet_gid}"
      Dir.mkdir dir
      # File.chown takes the owner (UID) first, then the group (GID)
      File.chown(puppet_uid, puppet_gid, dir)
    end
  }

  entry_dir="#{dir}/#{hash}"
  if not Dir.exist? entry_dir
    log "#{entry_dir} didn't exist so creating the directory & changing the UID to #{puppet_uid} & GID to #{puppet_gid}"
    Dir.mkdir entry_dir
    File.chown(puppet_uid, puppet_gid, entry_dir)
  end

  contents_path="#{entry_dir}/contents"
  log "#{contents_path} will be created"
  paths_path="#{entry_dir}/paths"
  log "#{paths_path} will be created"

  log "Creating #{contents_path}"
  File.open(contents_path, 'w') do |contents_file|
    log "Opened #{contents_path} to be written to"
    File.open($CONFIG[:search_term], 'r') do |source_file|
      log "Opened #{$CONFIG[:search_term]} to be read from"
      contents_file.write(source_file.read)
      contents_file.chown(puppet_uid, puppet_gid)
      log "#{contents_path} was created & was chowned to UID #{puppet_uid} & GID #{puppet_gid}"
    end
  end

  log "Creating #{paths_path}"
  File.open(paths_path, 'w') do |paths_file|
    log "Opened #{paths_path} to be written to"
    paths_file.write($CONFIG[:search_term])
    log "Just wrote #{$CONFIG[:search_term]} to #{paths_path}"
    paths_file.chown(puppet_uid, puppet_gid)
    log "#{paths_path} was created & was chowned to UID #{puppet_uid} & GID #{puppet_gid}"
  end

  puts "File #{$CONFIG[:search_term]} was backed up to #{entry_dir}"
end

# END: Work Functions



# BEGIN: Work

if __FILE__ == $0
  bucket = Bucket.new($CONFIG[:bucket_dir])

  case $CONFIG[:action]
  when 'search'
    search_entries_paths bucket

  when 'get'
    get_content_of_entry_hash bucket

  when 'list'
    list_all_entries bucket

  when 'list-files'
    list_entry_files bucket

  when 'restore'
    restore_entry bucket

  when 'backup'
    backup_file bucket
  end
end

# END: Work