Compare commits
No commits in common. "master" and "feature-restore-by-file" have entirely different histories.
@@ -26,6 +26,3 @@ This will allow you to restore a file on disk to the specific version that is st

### Get

The utility will allow you to get the contents of a backed-up file and have it output to the terminal.

### Delete

The utility allows you to delete entries/backups in the bucket if you choose to do so.
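For orientation, a "get" boils down to locating the entry directory for a hash and printing its `contents` file. A minimal sketch of that idea, assuming the usual clientbucket layout shown later in the diff; the bucket path and hash below are hypothetical examples:

```ruby
# Minimal sketch of what "get" amounts to: print the stored contents of one
# bucket entry. The bucket directory and hash below are hypothetical examples.
bucket_dir = '/opt/puppetlabs/puppet/cache/clientbucket'
hash       = 'd41d8cd98f00b204e9800998ecf8427e'

# Entries live under nested single-character directories taken from the start
# of the hash, with the file body stored in a file named "contents".
entry_dir = File.join(bucket_dir, *hash.chars[0, 8], hash)
contents  = File.join(entry_dir, 'contents')

if File.exist?(contents)
  puts File.read(contents)
else
  warn "No bucket entry found for #{hash}"
end
```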
309 bucket-tool
Executable file → Normal file
@@ -6,68 +6,44 @@ require 'digest'

# END: Requires


# BEGIN: Helper Functions

def usage
puts <<EOT
#{__FILE__} [ACTION <arg>] [<flags>]
puts "#{__FILE__} [ACTION <arg>] [<flags>]

Description:
This utility is meant to be used to interact with & manage the filebucket in the same capacity as
the builtin filebucket utility. However, it does not handle any puppet tie-ins.
This utility is meant to be used to interact with & manage the filebucket on P4 nodes due to the
utility for that `puppet filebucket -l <action>` being nonfunctional.

I created this due to the puppet filebucket utility in my company's P4 environments not
functioning
This implements the same functionality (minus the puppet tie-in) and will allow the user to
search the filebucket and restore from it.

Actions:
search <term>    : Search for bucket entries matching a portion of the filepath
list             : List all Bucket entries
list-files       : List all files/paths that have been backed up to the bucket
get <entry-hash> : Get the content of a specific entry (by hash)
restore <value>  : Restore previous state of file stored in bucket. Value can be hash or filename/filepath
                 : Note: To restore to an alternate path you will need to provide the path via the -o flag
                 :
backup <file>    : Backup the file to the bucket
delete <hash>    : Delete the entry from the bucket

Restore Flags:
-o | --output-file <file> : Used to provide an alternate restore path for the restore function
search <term>    : Search for bucket entries matching a portion of the filepath
list             : List all Bucket entries
list-files       : List all files/paths that have been backed up to the bucket
get <entry-hash> : Get the content of a specific entry (by hash)
restore <value>  : Restore previous state of file stored in bucket. Value can be hash or filename/filepath

Global Flags:
-d | --debug               : Set debug flag
-h | --help                : This help message
-b | --bucket <bucket-dir> : User specified bucket directory

Listing Filter Flags:
--from-date <DATE> : Filter listings starting after this point (FORMAT: YYYY-MM-DD HH:MM:SS)
                   : EX:
                   : 1. --from-date 2023-05-10
                   : 2. --from-date "2023-05-10 13:23"
                   :
--to-date <DATE>   : Filter listings ending at this point (FORMAT: YYYY-MM-DD HH:MM:SS)
                   : EX:
                   : 1. --to-date 2024-05-10
                   : 2. --to-date "2024-05-10 05:27"
                   :

-d | --debug : Set debug flag
-h | --help  : This help message

Info Format Flags:
-i | --inline : Set the info format to inline (MTIME : HASH : FILENAME)
-l | --long   : Set the info format to long
              : Entry [HASH]:
              :   Paths: path1,path2,...,pathn
              :   MTIME: YYYY-MM-DD HH:MM:SS -####
              :
-c | --csv    : Set the info format to csv ( MTIME,HASH,FILENAME1[;FILENAMEn] )
-i | --inline : Set the info format to inline (MTIME : HASH : FILENAME)
-l | --long   : Set the info format to long
              : Entry [HASH]:
              :   Paths: path1,path2,...,pathn
              :   MTIME: YYYY-MM-DD HH:MM:SS -####
              :
-c | --csv    : Set the info format to csv ( MTIME,HASH,FILENAME1[;FILENAMEn] )

Author:
Name: Tristan Ancelet
Email: tristan.ancelet@acumera.com
Phone (Work) #: +1 (337) 965-1855

EOT
"
end

def log (message, level = 0)
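One visible change in this hunk is how the help text is emitted: the master side prints a heredoc (`puts <<EOT ... EOT`) while the feature side builds one long double-quoted string closed by a lone `"`. A rough sketch of the heredoc form with placeholder text, not the real help message:

```ruby
# Rough sketch of the heredoc style used on the master side (placeholder text
# only, not the real help message). The squiggly heredoc <<~ strips leading
# indentation (Ruby 2.3+); the plain <<EOT in the diff needs its terminator at
# column 0.
def usage
  puts <<~EOT
    #{__FILE__} [ACTION <arg>] [<flags>]

    Actions:
      search <term>    : Search for bucket entries matching part of a filepath
      get <entry-hash> : Print the contents of a specific entry
  EOT
end

usage
```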
@@ -204,19 +180,14 @@ end

$CONFIG = Hash.new
$CONFIG[:bucket_dir]=` #{puppet_exe} agent --configprint clientbucketdir `.strip()
$CONFIG[:puppet_version]=` #{puppet_exe} --version `
$CONFIG[:action]=""
$CONFIG[:search_term]=""
$CONFIG[:log_file]=""
$CONFIG[:alt_filepath]=""
$CONFIG[:info_format]="inline"
$CONFIG[:from_time]=Time.at(0) # Beginning of epoch time
$CONFIG[:to_time]=Time.now # Today
File.open('/etc/hostname') do |file|
  $HOSTNAME=file.read().strip()
end
FLAG_REGEX=/^\-+\S+/
DATE_REGEX=/^(?<year>[0-9]{4})-(?<month>[0-9]{1,2})-(?<day>[0-9]{1,2})[[:space:]]*((?<hour>[0-9]{1,2}):(?<minute>[0-9]{1,2}):(?<second>[0-9]{1,2}))?$/
FLAG_REGEX=/\-+\S+/

# END: Variables
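Both sides resolve the bucket directory by shelling out to the Puppet agent, and they default the listing window to all of epoch time. A small standalone sketch of that lookup; it assumes a `puppet` binary on PATH, whereas the script itself uses a `puppet_exe` variable set in an earlier, unshown hunk:

```ruby
# Minimal sketch: resolve the client bucket directory by shelling out to the
# agent, the same way both sides of this hunk do. Assumes a `puppet` binary on
# PATH; the script itself uses a puppet_exe variable set in an earlier hunk.
def puppet_configprint(setting, puppet_exe: 'puppet')
  `#{puppet_exe} agent --configprint #{setting}`.strip
rescue Errno::ENOENT
  '' # puppet binary not found on PATH
end

bucket_dir = puppet_configprint('clientbucketdir')
puts bucket_dir.empty? ? 'clientbucketdir could not be determined' : bucket_dir

# Default listing window used by the script: epoch start through "now".
from_time = Time.at(0)
to_time   = Time.now
puts "listing window: #{from_time} .. #{to_time}"
```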
@@ -236,7 +207,7 @@ if not (ARGV & ['-d', '--debug']).empty?
end
i=0
case ARGV[i]
when 'search', 'get', 'restore', 'backup', 'delete'
when 'search', 'get', 'restore'
  $CONFIG[:action]=ARGV[i]
  log "$CONFIG[:action] was set to #{ARGV[i]}"
  log "user provided search action ARGV[i.next] == #{ARGV[i.next]}"
@@ -275,57 +246,6 @@ while i < ARGV.count
    $CONFIG[:info_format]='inline'
    log "$CONFIG[:info_format] was set to #{$CONFIG[:info_format]}"
    i+=1
  when '-o', '--output-file'
    log "user provided ARGV[i.next] == #{ARGV[i.next]}"
    if ARGV[i.next] != "" and not ARGV[i.next] =~ FLAG_REGEX
      $CONFIG[:alt_filepath]=File.expand_path(ARGV[i.next])
      log "$CONFIG[:alt_filepath] was set to #{$CONFIG[:alt_filepath]}"
      i+=2
    else
      puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
      usage
      exit
    end
  when '-d', '--debug'
    log "#{ARGV[i]} was specified, and the $CONFIG[:debug] flag was already enabled"
    i+=1

  when '-b', '--bucket'
    log "user provided ARGV[i.next] == #{ARGV[i.next]}"
    if ARGV[i.next] != "" and not ARGV[i.next] =~ FLAG_REGEX
      $CONFIG[:bucket_dir]=ARGV[i.next]
      log "$CONFIG[:bucket_dir] was set to #{ARGV[i.next]}"
      i+=2
    else
      puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
      usage
      exit
    end

  when '--to-date', '--from-date'
    if ARGV[i.next] != "" and ARGV[i.next] =~ DATE_REGEX
      date_matches = DATE_REGEX.match(ARGV[i.next]).named_captures.each_value.select{|val| not val.nil?}.map(&:to_i)
      case ARGV[i]
      when '--to-date'
        $CONFIG[:to_time]=Time.new(*date_matches)
        log "$CONFIG[:to_time] was set to #{$CONFIG[:to_time]}"
      when '--from-date'
        $CONFIG[:from_time]=Time.new(*date_matches)
        log "$CONFIG[:from_time] was set to #{$CONFIG[:from_time]}"
      end
      i+=2
    else
      puts "Flag[#{ARGV[i]}] : Argument[#{ARGV[i.next]}] : Either the argument was not provided or it was a flag"
      usage
      exit
    end

  when FLAG_REGEX
    # Catch-all to prevent the user from specifying an unaccounted-for flag
    puts "#{ARGV[i]} is not a valid flag."
    usage
    exit

  else
    i+=1
  end
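The `--from-date`/`--to-date` handling above turns the named captures of DATE_REGEX into the positional arguments `Time.new` expects. A reduced sketch of that parsing step, using the same regex; the sample dates are made up:

```ruby
# Sketch of the --from-date/--to-date parsing technique from this hunk:
# named captures from DATE_REGEX are collapsed into the positional arguments
# Time.new expects (year, month, day[, hour, minute, second]).
DATE_REGEX = /^(?<year>[0-9]{4})-(?<month>[0-9]{1,2})-(?<day>[0-9]{1,2})[[:space:]]*((?<hour>[0-9]{1,2}):(?<minute>[0-9]{1,2}):(?<second>[0-9]{1,2}))?$/

def parse_bucket_date(arg)
  match = DATE_REGEX.match(arg)
  return nil unless match

  # named_captures preserves group order; missing time-of-day groups are nil
  # and get dropped, so "2023-05-10" becomes Time.new(2023, 5, 10).
  parts = match.named_captures.values.compact.map(&:to_i)
  Time.new(*parts)
end

p parse_bucket_date('2023-05-10')           # midnight local time on that day
p parse_bucket_date('2024-05-10 05:27:00')  # date plus time of day
p parse_bucket_date('not-a-date')           # => nil
```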
@@ -337,11 +257,6 @@ if $CONFIG[:action] == ""
  puts "Action was not provided"
end

if not Dir.exist? $CONFIG[:bucket_dir]
  puts "BucketDir[#{$CONFIG[:bucket_dir]}] Does not exist. Please check to make sure configuration is correct"
  exit
end

case $CONFIG[:action]
when 'search', 'get'
  if $CONFIG[:search_term] == ""
@@ -350,13 +265,6 @@ case $CONFIG[:action]
    exit
  end

when 'backup'
  if not File.exist? $CONFIG[:search_term]
    puts "File #{$CONFIG[:search_term]} does not exist. Please check to make sure that there are no typos and attempt the run again."
    exit
  else
    $CONFIG[:search_term] = File.expand_path($CONFIG[:search_term])
  end
end

## END: Checks
@@ -417,54 +325,6 @@ class BucketEntry
    end
  end

  def delete
    returncode = true
    log "User has chosen to delete this BucketEntry"

    Dir.chdir(@entry_dir){
      log "Changed to #{@entry_dir} to delete children files"
      Dir.children(@entry_dir).each{|file|
        log "Deleting #{file}"
        if File.unlink(file)
          log "#{file} deleted"
        else
          puts "There was an issue deleting #{File.expand_path(file)}"
          exit
        end
      }
    }

    log "Deleting #{@entry_dir}"
    if Dir.delete(@entry_dir)
      log "Deleted #{@entry_dir}"
    else
      puts "There was an issue when attempting to delete #{@entry_dir}"
    end

    dir = @entry_dir

    log "Beginning to delete trailing dirs unless one has children other than those that make up #{File.dirname(@entry_dir)}"
    for i in 1..8
      dir = File.dirname(dir)
      log "Beginning to delete #{dir}"
      children = Dir.children(dir)
      log "Dir[#{dir}] children found to be #{children.join(',')}"
      if children.empty?
        if Dir.delete(dir)
          log "Deleted #{dir}"
        else
          puts "There was an issue when attempting to delete #{dir}"
          exit
        end
      else
        log "#{dir} showed to contain another child directory. Not deleting and breaking loop"
        break
      end
    end

    returncode
  end

end

class Bucket
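`BucketEntry#delete` removes the entry's files, then walks back up the single-character hash directories (up to eight levels) and deletes each parent that is left empty. A compact sketch of that pruning idea using `FileUtils`; the entry path in the comment is hypothetical:

```ruby
require 'fileutils'

# Sketch of the delete-and-prune idea from BucketEntry#delete: remove the
# entry directory, then walk up the single-character hash directories and
# delete each parent that is now empty.
def prune_entry(entry_dir, levels: 8)
  FileUtils.rm_rf(entry_dir)   # removes contents, paths, and the entry dir itself

  dir = entry_dir
  levels.times do
    dir = File.dirname(dir)
    break unless Dir.exist?(dir) && Dir.children(dir).empty?
    Dir.delete(dir)            # Dir.delete only succeeds on empty directories
  end
end

# prune_entry('/var/lib/puppet/clientbucket/a/b/c/d/e/f/0/1/abcdef01...')
```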
@@ -490,12 +350,8 @@ class Bucket
    Dir["#{@bucketdir}/**/paths"].each.map{|path| File.dirname(path)}.each do |directory|
      log "\"#{directory}\" was grabbed from bucket directory. Making new BucketEntry"
      entry = BucketEntry.new(directory)
      if entry.mtime <= $CONFIG[:to_time] and entry.mtime >= $CONFIG[:from_time]
        @entries[entry.hash]=entry
        log "BucketEntry[#{entry.hash}] was added to @entries Size=#{@entries.count()}"
      else
        log "Entry[#{entry.hash}] was filtered out by the user provided time constraints"
      end
      @entries[entry.hash]=entry
      log "BucketEntry[#{entry.hash}] was added to @entries Size=#{@entries.count()}"
    end
    log "Bucket[#{@bucketdir}] was loaded"
  end
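`Bucket` discovers entries by globbing for every `paths` file under the bucket directory and treating its parent directory as the entry; the mtime window check is the part master adds in this hunk. A small sketch of that discovery step with a hypothetical bucket path; here the entry mtime is approximated by the `paths` file's mtime, which may differ from how BucketEntry computes it:

```ruby
# Sketch of the entry-discovery step in Bucket: every "paths" file under the
# bucket marks one entry, and its parent directory is the entry directory.
# The mtime window is the filter master adds in this hunk.
bucket_dir = '/opt/puppetlabs/puppet/cache/clientbucket'  # hypothetical
from_time  = Time.at(0)
to_time    = Time.now

entries = {}
Dir["#{bucket_dir}/**/paths"].map { |p| File.dirname(p) }.each do |entry_dir|
  hash  = File.basename(entry_dir)
  mtime = File.mtime(File.join(entry_dir, 'paths'))
  next unless mtime.between?(from_time, to_time)  # the --from-date/--to-date window
  entries[hash] = entry_dir
end

puts "#{entries.count} entries in the listing window"
```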
@@ -568,10 +424,6 @@ def get_entry_by_file (bucket, filenames)
    end
  end

  if $CONFIG[:alt_filepath] != ""
    return entry, $CONFIG[:alt_filepath]
  end

  if filename[0] != '/'
    filename = "/#{filename}"
  end
@@ -582,12 +434,6 @@ end

def get_entry_by_hash (bucket)
  if bucket.entries.has_key? $CONFIG[:search_term]
    entry = bucket.entries[$CONFIG[:search_term]]

    if $CONFIG[:alt_filepath] != ""
      log "$CONFIG[:alt_filepath] was set. Skipping prompts asking for filepaths"
      return entry, $CONFIG[:alt_filepath]
    end

    filepath = ""
    if entry.filepaths.count == 1
      filepath = entry.filepaths[0]
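`get_entry_by_hash` looks the entry up directly by hash and then decides where a restore should go: an explicit `-o`/`--output-file` path wins, a single stored path is used as-is, and multiple stored paths fall through to a prompt. A reduced sketch of that selection logic with made-up data:

```ruby
# Reduced sketch of the path-selection logic in get_entry_by_hash:
# -o/--output-file overrides everything, a single stored path is used as-is,
# and several stored paths would need a prompt (omitted here). Data is made up.
def choose_restore_path(filepaths, alt_filepath = '')
  return alt_filepath unless alt_filepath.empty?
  return filepaths.first if filepaths.length == 1
  nil # more than one stored path: the script prompts the user at this point
end

p choose_restore_path(['/etc/motd'])                        # => "/etc/motd"
p choose_restore_path(['/etc/motd'], '/tmp/motd.restored')  # => "/tmp/motd.restored"
p choose_restore_path(['/etc/motd', '/etc/issue'])          # => nil (prompt)
```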
@@ -624,108 +470,6 @@ def restore_entry (bucket)
  end
end

def backup_file (bucket)
  hash=""
  if Gem::Version.new($CONFIG[:puppet_version]) >= Gem::Version.new("7.0.0")
    log "Puppet Version is +7.* so hashing algo will be SHA256"
    File.open($CONFIG[:search_term],'r') do |file|
      hash = Digest::SHA2.hexdigest file.read()
    end

  else
    log "Puppet Version is not 7.* so hashing algo will be MD5"
    File.open($CONFIG[:search_term],'r') do |file|
      hash = Digest::MD5.hexdigest file.read()
    end
  end

  log "Hash for #{$CONFIG[:search_term]} was generated to be #{hash}"

  if bucket.entries.has_key? hash
    log "Hash was found to already be backed up to bucket"
    puts "This file (hash: #{hash}) has already been backed up in the bucket"
    puts bucket.entries[hash].info
    exit
  end

  puppet_uid=0
  puppet_gid=0
  File.open('/etc/passwd','r') do |file|
    log "Getting puppet UID & GID from /etc/passwd"
    passd = file.read.split(/\n/).select{|line| line =~ /puppet.+/}.first
    puppet_uid=Integer(passd.split(':')[2])
    log "Retrieved Puppet UID as #{puppet_uid}"
    puppet_gid=Integer(passd.split(':')[3])
    log "Retrieved Puppet GID as #{puppet_gid}"
  end

  log "Creating preceding directories to hash directory"
  dir=$CONFIG[:bucket_dir]
  hash.chars[0,8].each{|char|
    dir+="/#{char}"
    log "Checking to make sure that #{dir} doesn't exist"
    if not Dir.exist? dir
      log "#{dir} didn't exist so creating the directory & changing the UID to #{puppet_uid} & GID #{puppet_gid}"
      Dir.mkdir dir
      File.chown(puppet_uid, puppet_gid, dir)
    end
  }

  entry_dir="#{dir}/#{hash}"
  if not Dir.exist? entry_dir
    log "#{entry_dir} didn't exist so creating the directory & changing the UID to #{puppet_uid} & GID #{puppet_gid}"
    Dir.mkdir entry_dir
    File.chown(puppet_uid, puppet_gid, entry_dir)
  end

  contents_path="#{entry_dir}/contents"
  log "#{contents_path} will be created"
  paths_path="#{entry_dir}/paths"
  log "#{paths_path} will be created"

  log "Creating #{contents_path}"
  File.open(contents_path, 'w') do |contents_file|
    log "Opened #{contents_path} to be written to"
    File.open($CONFIG[:search_term], 'r') do |source_file|
      log "Opened #{$CONFIG[:search_term]} to be read from"
      contents_file.write(source_file.read)
      contents_file.chown(puppet_uid, puppet_gid)
      log "#{contents_path} was created & was chowned to UID #{puppet_uid} & GID #{puppet_gid}"
    end
  end

  log "Creating #{paths_path}"
  File.open(paths_path, 'w') do |paths_file|
    log "Opened #{paths_path} to be written to"
    paths_file.write($CONFIG[:search_term])
    log "Just wrote #{$CONFIG[:search_term]} to #{paths_path}"
    paths_file.chown(puppet_uid, puppet_gid)
    log "#{paths_path} was created & was chowned to UID #{puppet_uid} & GID #{puppet_gid}"
  end

  puts "File #{$CONFIG[:search_term]} was backed up to #{entry_dir}"
end

def delete_entry (bucket)
  if bucket.entries.has_key? $CONFIG[:search_term]
    entry = bucket.entries[$CONFIG[:search_term]]
  else
    puts "BucketEntry[#{$CONFIG[:search_term]}] Does not exist. Please make sure you provided the correct hash value"
    exit
  end

  puts "Corresponding Entry: #{entry.info}"
  if get_verification "Are you sure you want to delete BucketEntry[#{entry.hash}]? "
    if get_verification "This cannot be undone. Are you sure you want to continue?: "
      if entry.delete
        puts "Ok. BucketEntry[#{entry.hash}] Has been deleted"
      end
    end
  end

end

# END: Work Functions
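`backup_file` shows two techniques worth calling out: the checksum is chosen to match the agent (SHA-256 on Puppet 7+, MD5 before that), and the entry directory fans the hash out one character per level for its first eight characters. A condensed sketch of both steps; the sample contents, version string, and bucket path are examples, not values from a real node:

```ruby
require 'digest'

# Condensed sketch of two techniques from backup_file. The sample contents,
# version string, and bucket path are examples, not values from a real node.

# 1. The checksum mirrors the agent: SHA-256 on Puppet 7+, MD5 before that.
def bucket_checksum(data, puppet_version)
  if Gem::Version.new(puppet_version) >= Gem::Version.new('7.0.0')
    Digest::SHA2.hexdigest(data)
  else
    Digest::MD5.hexdigest(data)
  end
end

# 2. The entry directory fans the hash out one character per level for the
#    first eight characters, then appends the full hash.
def entry_dir_for(bucket_dir, hash)
  File.join(bucket_dir, *hash.chars[0, 8], hash)
end

data = "example file contents\n"        # stands in for reading the source file
hash = bucket_checksum(data, '6.28.0')  # a 6.x agent, so this is an MD5 hex digest
puts entry_dir_for('/var/lib/puppet/clientbucket', hash)
```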
@@ -750,11 +494,6 @@ if __FILE__ == $0
  when 'restore'
    restore_entry bucket

  when 'backup'
    backup_file bucket

  when 'delete'
    delete_entry bucket
  end

end
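The entry point is a plain `case` on the parsed action. A stripped-down sketch of that dispatch, with stub handlers standing in for the real work functions:

```ruby
# Stripped-down sketch of the top-level dispatch: the parsed action picks one
# work function. The handlers here are stubs standing in for the real ones.
def restore_entry(bucket)
  puts "would restore from #{bucket}"
end

def backup_file(bucket)
  puts "would back up into #{bucket}"
end

def delete_entry(bucket)
  puts "would delete from #{bucket}"
end

if __FILE__ == $0
  action = ARGV.first || 'restore'  # the script stores this in $CONFIG[:action]
  bucket = 'example-bucket'         # placeholder for the Bucket object

  case action
  when 'restore' then restore_entry bucket
  when 'backup'  then backup_file bucket
  when 'delete'  then delete_entry bucket
  else puts "unknown action: #{action}"
  end
end
```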