Had to change 'md5' mentions to 'hash': depending on the version of Puppet, the filebucket will use either MD5 or SHA256.
parent 76f04b44ac
commit 2125a2f9d5
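For context on that hash switch (this sketch is not part of the commit): a bucket entry directory is named after the hex digest of the file it stores, so one rough way to tell which algorithm produced an entry is the length of its name, 32 hex characters for MD5 versus 64 for SHA-256. A minimal illustrative sketch, assuming that naming scheme:

```ruby
# Illustrative only: infer the digest algorithm from the length of the
# entry directory name, assuming entries are named by their hex digest
# (32 hex chars for MD5, 64 for SHA-256).
def digest_type(entry_dir)
  case File.basename(entry_dir).length
  when 32 then :md5
  when 64 then :sha256
  else :unknown
  end
end

# Hypothetical path; only the basename length matters here.
digest_type('/var/lib/puppet/clientbucket/d/4/1/d41d8cd98f00b204e9800998ecf8427e')  # => :md5
```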
@@ -14,7 +14,7 @@ This project is meant to handle & query the filebucket locally on the device of
 This will search for a file by a substring (or full path) of the file you are attempting to view or restore.
 
 ### List
-This will simply list out every file that has been backed up to the bucket in `<TIME> : <MD5> : <FILEPATH>` format.
+This will simply list out every file that has been backed up to the bucket in `<TIME> : <HASH> : <FILEPATH>` format.
 
 ### List-Files
 This will allow you to just list the specific (unique) files/paths that were backed up to the bucket during puppet runs before they get changed on disk.
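To make the `<TIME> : <HASH> : <FILEPATH>` format above concrete, here is a hypothetical sketch of how one such line could be built; the timestamp, path, content, and use of SHA-256 are assumptions, not output from a real bucket:

```ruby
require 'digest'
require 'time'

# Hypothetical example of a single `list` line; all values are made up.
mtime = Time.parse('2023-06-01 12:34:56 UTC')
hash  = Digest::SHA256.hexdigest("PermitRootLogin no\n")
path  = '/etc/ssh/sshd_config'
puts "#{mtime} : #{hash} : #{path}"
```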
bucket-tool (30 lines changed)
@@ -17,7 +17,7 @@ Actions:
 search <term> : Search for bucket entries matching a portion of the filepath
 list : List all Bucket entries
 list-files : List all files/paths that have been backed up to the bucket
-get <entry-md5> : Get the content of a specific entry (by md5)
+get <entry-hash> : Get the content of a specific entry (by hash)
 
 Global Flags:
 -d | --debug : Set debug flag
@@ -174,14 +174,14 @@ end
 # BEGIN: Classes
 
 class BucketEntry
-  attr_reader :md5, :filepaths, :mtime
+  attr_reader :hash, :filepaths, :mtime
   def initialize (entry_dir)
     @entry_dir = entry_dir
-    @md5 = File.basename(entry_dir)
+    @hash = File.basename(entry_dir)
     @filepaths = Array.new
     File.open("#{entry_dir}/paths") do |file|
       file.read().split(/\n/).each do |path|
-        log "BucketEntry[#{@md5}] adding #{path} to @filepaths"
+        log "BucketEntry[#{@hash}] adding #{path} to @filepaths"
         @filepaths.push(path)
       end
     end
@@ -190,12 +190,12 @@ class BucketEntry
   end
 
   def path_include? (path_string)
-    log "BucketEntry[#{md5}] was called with #{path_string}"
+    log "BucketEntry[#{hash}] was called with #{path_string}"
     @filepaths.each.any? {|path| path.include? path_string}
   end
 
   def infostring
-    "Entry [#{@md5}]:
+    "Entry [#{@hash}]:
     Paths: #{@filepaths.join(',')}
     MTIME: #{@mtime}
 
@@ -203,11 +203,11 @@ class BucketEntry
   end
 
   def inline_info
-    "#{@mtime} : #{@md5} : #{@filepaths.join(',')}"
+    "#{@mtime} : #{@hash} : #{@filepaths.join(',')}"
   end
 
   def content
-    log "BucketEntry[#{@md5}] getting contents"
+    log "BucketEntry[#{@hash}] getting contents"
     File.open("#{@entry_dir}/contents",'r') do |file|
       file.read()
     end
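The BucketEntry hunks above read two files per entry directory: `paths` (one original filepath per line) and `contents` (the backed-up data). A self-contained sketch of that layout, built in a temp directory and read back the same way the class does; the file data and path are hypothetical:

```ruby
require 'digest'
require 'fileutils'
require 'tmpdir'

data = "PermitRootLogin no\n"                  # hypothetical backed-up content
Dir.mktmpdir do |bucket|
  digest    = Digest::SHA256.hexdigest(data)   # older Puppet agents would use Digest::MD5
  entry_dir = File.join(bucket, digest)
  FileUtils.mkdir_p(entry_dir)
  File.write(File.join(entry_dir, 'contents'), data)
  File.write(File.join(entry_dir, 'paths'), "/etc/ssh/sshd_config\n")

  # Read it back the same way BucketEntry#initialize and #content do.
  hash  = File.basename(entry_dir)
  paths = File.read(File.join(entry_dir, 'paths')).split(/\n/)
  puts "#{hash} : #{paths.join(',')}"
  puts File.read(File.join(entry_dir, 'contents'))
end
```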
@@ -230,7 +230,7 @@ class Bucket
       log "\"#{directory}\" was grabbed from bucket directory. Making new BucketEntry"
       entry = BucketEntry.new(directory)
       @entries.push(entry)
-      log "BucketEntry[#{entry.md5}] was added to @entries Size=#{@entries.count()}"
+      log "BucketEntry[#{entry.hash}] was added to @entries Size=#{@entries.count()}"
     end
     log "Bucket[#{@bucketdir}] was loaded"
   end
@@ -245,19 +245,19 @@ end
 def search_entries_paths (bucket)
   log "user entered"
   bucket.entries.each do |entry|
-    log "checking Entry[#{entry.md5}]"
+    log "checking Entry[#{entry.hash}]"
     if entry.path_include? $CONFIG[:search_term]
       puts entry.inline_info
     end
   end
 end
 
-def get_content_of_entry_md5 (bucket)
+def get_content_of_entry_hash (bucket)
   log "user entered"
   bucket.entries.each do |entry|
-    log "checking Entry[#{entry.md5}]"
-    if entry.md5 == $CONFIG[:search_term]
-      log "BucketEntry[#{entry.md5}] Matched. Getting contents"
+    log "checking Entry[#{entry.hash}]"
+    if entry.hash == $CONFIG[:search_term]
+      log "BucketEntry[#{entry.hash}] Matched. Getting contents"
       puts entry.content
       exit
     end
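One behavioural detail worth noting in the functions above: `search` does a substring match against the stored filepaths, while `get` compares the search term against the full entry hash, so `get` needs the complete digest as printed by `list` or `search`. A tiny standalone illustration (all values hypothetical):

```ruby
require 'digest'

# Substring match, as search_entries_paths does via path_include?.
paths = ['/etc/ssh/sshd_config', '/etc/hosts']
puts paths.any? { |p| p.include?('sshd') }   # => true

# Exact match against the whole digest, as get_content_of_entry_hash does.
entry_hash = Digest::SHA256.hexdigest("PermitRootLogin no\n")
term       = entry_hash[0, 8]
puts entry_hash == term                      # => false; a prefix is not enough
puts entry_hash == entry_hash                # => true only with the full digest
```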
@@ -293,7 +293,7 @@ if __FILE__ == $0
     search_entries_paths bucket
 
   when 'get'
-    get_content_of_entry_md5 bucket
+    get_content_of_entry_hash bucket
 
   when 'list'
     list_all_entries bucket