more code cleanup & added 'exclude filenames' from hashing

2023-03-02 23:22:49 +00:00
parent 4c9f55db45
commit 669ab50930
2 changed files with 48 additions and 35 deletions


@@ -1,7 +1,5 @@
 import os, sys
-import pandas as pd
-from datetime import datetime
-from utils.inspector import hash_submissions, suspicious_by_hash
+from utils.inspector import hash_submissions, inspect_for_duplicate_hashes
 CSV_DIR = os.path.join(os.getcwd(), 'csv')
@@ -9,17 +7,10 @@ def main():
     submissions_dir_name = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else exit(f'\nNo submissions dir name given. Provide the name as an argument.\n\nUsage: python {sys.argv[0]} [submissions dir name]\nExample: python {sys.argv[0]} AssignmentX\n')
     submissions_dir_path = os.path.join('BB_submissions', submissions_dir_name)
     if not os.path.isdir(submissions_dir_path):
-        exit(f'Directory {submissions_dir_path} does not exist.\nMake sure "{submissions_dir_name}" exists in "BB_submissions".')
+        exit(f'Directory {submissions_dir_path} does not exist.\nMake sure "{submissions_dir_name}" exists in "BB_submissions".\n')
     else:
-        hashes_csv_file_path = hash_submissions(submissions_dir_path) # generate hashes for all files and return output csv file to load & find duplicate/suspicious hashes
-        csv = pd.read_csv(hashes_csv_file_path)
-        df = pd.DataFrame(csv) # df with all files and their hashes
-        df_suspicious = suspicious_by_hash(df) # df with all files with duplicate/suspicious hash, excludes files from the same student id
-        csv_name = f'{submissions_dir_name}_suspicious_{datetime.now().strftime("%Y%m%d-%H%M%S")}.csv'
-        csv_out = os.path.join(CSV_DIR, csv_name)
-        df_suspicious.to_csv(csv_out, index=False)
-        print(f'[INFO] Created CSV file with duplicate/suspicious hashes in {submissions_dir_name}\nCSV file: {csv_out}')
+        hashes_csv_file_path = hash_submissions(submissions_dir_path) # generate CSV file with hashes for all files (except for any 'excluded') & return path to CSV file for finding duplicate/suspicious hashes
+        inspect_for_duplicate_hashes(hashes_csv_file_path) # generate CSV file with files having duplicate/suspicious hashes
 if __name__ == '__main__':
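
The second changed file (utils/inspector.py) is not shown in this view, so the new inspect_for_duplicate_hashes and the "exclude filenames" behaviour of hash_submissions can only be inferred from the call sites above. The following is a minimal sketch of what those two helpers might look like after this commit, assuming hash_submissions takes an optional set of filenames to skip and inspect_for_duplicate_hashes now does the CSV loading/writing that was removed from main(); the names EXCLUDED_FILENAMES and the function bodies are illustrative, not the committed code.

# Illustrative sketch only -- utils/inspector.py is not part of this diff view,
# so signatures and behaviour below are assumptions based on the call sites in main().
import os, hashlib
from datetime import datetime
import pandas as pd

CSV_DIR = os.path.join(os.getcwd(), 'csv')
EXCLUDED_FILENAMES = {'.DS_Store', 'Thumbs.db'}  # hypothetical 'exclude filenames' set

def hash_submissions(submissions_dir_path, excluded=EXCLUDED_FILENAMES):
    # Walk the submissions dir, hash every file except excluded names,
    # and write a CSV of (file, hash) pairs; return the CSV path.
    os.makedirs(CSV_DIR, exist_ok=True)
    rows = []
    for root, _, files in os.walk(submissions_dir_path):
        for name in files:
            if name in excluded:
                continue  # skip excluded filenames from hashing
            path = os.path.join(root, name)
            with open(path, 'rb') as f:
                digest = hashlib.md5(f.read()).hexdigest()
            rows.append({'file': path, 'hash': digest})
    out_path = os.path.join(CSV_DIR, f'hashes_{datetime.now().strftime("%Y%m%d-%H%M%S")}.csv')
    pd.DataFrame(rows).to_csv(out_path, index=False)
    return out_path

def inspect_for_duplicate_hashes(hashes_csv_file_path):
    # Load the hashes CSV, keep rows whose hash appears more than once,
    # and write a 'suspicious' CSV. The real helper likely also ignores
    # matches between files from the same student id, as the old comment noted.
    df = pd.read_csv(hashes_csv_file_path)
    df_suspicious = df[df.duplicated(subset='hash', keep=False)]
    out_path = hashes_csv_file_path.replace('.csv', '_suspicious.csv')
    df_suspicious.to_csv(out_path, index=False)
    print(f'[INFO] Created CSV file with duplicate/suspicious hashes\nCSV file: {out_path}')
    return out_path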