Compare commits

...

7 Commits

1 changed file with 41 additions and 26 deletions

@@ -1,6 +1,6 @@
#!/usr/bin/python3
import sys, hashlib, os, sqlite3, shutil
import sys, hashlib, os, sqlite3, shutil, json
################################################################################
# program wide constants
@@ -126,7 +126,7 @@ def get_hashes_by_tag(storage_directory, tag):
return file_hashes
def file_is_in_storage(storage_directory, file_hash_or_path, compress):
def file_hash_or_path_is_known_hash(storage_directory, file_hash_or_path, compress):
suffix=""
if compress:
suffix=".xz"
@@ -141,6 +141,7 @@ def load_container_settings(storage_directory):
cursor = db.cursor()
cursor.execute("SELECT option, value FROM settings")
#TODO: check container settings properly instead of just assuming default values for things that aren't found
# set default values and then read the db, just in case...
parity = False
parity_bytes = 512
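
The new comment documents the defaults-then-override approach: parity and parity_bytes start at their defaults and the settings table only overrides what it actually contains. A minimal sketch of that pattern, assuming the database filename and that option/value pairs are stored as text (neither is visible in this hunk):

import os, sqlite3

def load_container_settings_sketch(storage_directory):
    parity = False        # default, as in the diff
    parity_bytes = 512    # default, as in the diff
    db = sqlite3.connect(os.path.join(storage_directory, "container.db"))  # filename is an assumption
    cursor = db.cursor()
    cursor.execute("SELECT option, value FROM settings")
    for option, value in cursor.fetchall():
        # only overwrite a default when the settings table has a row for it
        if option == "parity":
            parity = value in ("1", "true", "True")
        elif option == "parity_bytes":
            parity_bytes = int(value)
    db.close()
    return parity, parity_bytes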
@@ -301,19 +302,19 @@ if __name__ == "__main__":
hash_allowed=True
if 'file' in command:
hash_allowed=False
if not any([hash_allowed and file_is_in_storage(storage_directory, file_hash_or_path, compress), os.path.isfile(file_hash_or_path)]):
if not any([hash_allowed and file_hash_or_path_is_known_hash(storage_directory, file_hash_or_path, compress), os.path.isfile(file_hash_or_path)]):
print("Unknown file!", file=sys.stderr)
print(USAGE, file=sys.stderr)
sys.exit(PATH_ERROR)
tags = sys.argv[4:]
if hash_allowed and file_is_in_storage(storage_directory, file_hash_or_path, compress):
if hash_allowed and file_hash_or_path_is_known_hash(storage_directory, file_hash_or_path, compress):
file_hash = file_hash_or_path
print("File already in storage.")
print("Hash for file in storage: "+file_hash)
else:
file_hash = calculate_file_hash(checksum_algorithm, file_hash_or_path)
if file_is_in_storage(storage_directory, file_hash_or_path, compress):
if file_hash_or_path_is_known_hash(storage_directory, file_hash, compress):
print("File already in storage.")
#this assumes that the storage directory has not been tampered with or corrupted, FIXME!
if 'move' in command:
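
calculate_file_hash() is called in this hunk but its body lies outside the diff. A minimal sketch of a chunked implementation, assuming checksum_algorithm is a name hashlib accepts (e.g. "sha256"):

import hashlib

def calculate_file_hash_sketch(checksum_algorithm, path, chunk_size=1024*1024):
    # hash the file in chunks so large files do not have to fit in memory
    hasher = hashlib.new(checksum_algorithm)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            hasher.update(chunk)
    return hasher.hexdigest()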
@@ -344,9 +345,11 @@ if __name__ == "__main__":
# lookup subcommand: return hash and tags of found files
# arguments: <storage directory> <hash|exact tag|set of exact tags>
# modifiers:
# first - only return one file
# first - only return one file
# unique - return error if not found or multiple found
#TODO: modifiers
# hash - perform lookup by hash
# tags - perform lookup by tag or set of tags
#TODO: modifiers: first unique
if command[0] == "lookup":
if len(sys.argv)<4:
print("Too few arguments!", file=sys.stderr)
@@ -364,27 +367,39 @@ if __name__ == "__main__":
sys.exit(status)
file_tags_or_hash = sys.argv[3:]
if file_is_in_storage(storage_directory, file_tags_or_hash[0], compress):
tags = get_tags_by_hash(storage_directory, file_tags_or_hash[0])
print("Tags for file:")
print(tags)
lookup_results = {}
if not 'tags' in command:
if file_hash_or_path_is_known_hash(storage_directory, file_tags_or_hash[0], compress):
lookup_results[file_tags_or_hash[0]] = get_tags_by_hash(storage_directory, file_tags_or_hash[0])
if not 'hash' in command:
# create a two dimensional array of all the files associated with each individual tag
file_hash_lists = []
for tag in file_tags_or_hash:
file_hash_lists = file_hash_lists + [get_hashes_by_tag(storage_directory, tag)]
# take the first of the arrays in the two dimensional array
common_file_hashes = file_hash_lists[0]
# iterate over the two dimensional array
for file_hash_list in file_hash_lists:
# check each element in common_file_hashes to ensure it is also in all other arrays in the two dimensional array, remove if it isn't
for file_hash in common_file_hashes:
if not file_hash in file_hash_list:
common_file_hashes.remove(file_hash)
# create a two dimensional array of all the files associated with each individual tag
file_hash_lists = []
for tag in file_tags_or_hash:
file_hash_lists = file_hash_lists + [get_hashes_by_tag(storage_directory, tag)]
# take the first of the arrays in the two dimensional array
common_file_hashes = file_hash_lists[0]
# iterate over the two dimensional array
for file_hash_list in file_hash_lists:
# check each element in common_file_hashes to ensure it is also in all other arrays in the two dimensional array, remove if it isn't
for file_hash in common_file_hashes:
if not file_hash in file_hash_list:
common_file_hashes.remove(file_hash)
lookup_results[file_hash] = get_tags_by_hash(storage_directory, file_hash)
if not common_file_hashes == []:
print("Files for tag(s):")
print(common_file_hashes)
if 'unique' in command:
if(len(lookup_results)==1):
print(json.dumps(lookup_results))
else:
print("More than one matching file found.", file=sys.stderr)
sys.exit(GENERAL_ERROR)
elif 'first' in command:
file_hash, tags = list(lookup_results.items())[0]
print(json.dumps({file_hash : tags}))
else:
print(json.dumps(lookup_results))
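
The tag intersection above removes entries from common_file_hashes while iterating over that same list, a pattern that can skip elements in Python. A sketch of the same lookup built on set intersection instead, assuming get_hashes_by_tag() returns a list of hashes and get_tags_by_hash() a list of tags, as this diff uses them; the output shape matches the JSON printed above ({file_hash: [tags, ...]}):

import json

def lookup_by_tags_sketch(storage_directory, tags):
    # intersect the per-tag hash lists as sets instead of pruning a list in place
    hash_sets = [set(get_hashes_by_tag(storage_directory, tag)) for tag in tags]
    common_file_hashes = set.intersection(*hash_sets) if hash_sets else set()
    return {file_hash: get_tags_by_hash(storage_directory, file_hash)
            for file_hash in common_file_hashes}

# lookup_results = lookup_by_tags_sketch(storage_directory, file_tags_or_hash)
# print(json.dumps(lookup_results))

The 'first' modifier picks list(lookup_results.items())[0], which relies on dictionaries preserving insertion order; that holds on Python 3.7 and later.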