[Scummvm-git-logs] scummvm-sites integrity -> 3d9c8267cee99005fadcda606d3018e953ef8291
sev-
noreply at scummvm.org
Thu Aug 21 09:04:39 UTC 2025
This automated email contains information about 14 new commits which have been
pushed to the 'scummvm-sites' repo located at https://api.github.com/repos/scummvm/scummvm-sites .
Summary:
3684fff5e8 INTEGRITY: Re-raise the exception for parent to handle.
b75f69e100 INTEGRITY: Fix file update logic for scan and auto merge fileset even when matched with detection fileset.
cea8e38c9d INTEGRITY: Update the site url to gamesdb.scummvm.org
e8e43f5d60 INTEGRITY: Report files with mismatched checksums in integrity check.
1f92448469 INTEGRITY: Skip fileset deletion after manual merge for user fileset.
c505c9ab5a INTEGRITY: Fix single quote issue in search filters.
d90afd5924 INTEGRITY: Fix the SQL query to delete files from the UI.
9eb4571ea5 INTEGRITY: Add relative path in scan dat
4fd450b00e INTEGRITY: Add metadata updates from UI
e876778276 INTEGRITY: Add file updates from UI
4c025f7b07 INTEGRITY: Add checks for status='ReadyForReview'
cf8c8158bb INTEGRITY: Add confirmation window for fileset actions
d24b682a84 INTEGRITY: Add button for deleting fileset
3d9c8267ce INTEGRITY: Create endpoint for mail notification
Commit: 3684fff5e879e44069460329af4864c8b0373ca9
https://github.com/scummvm/scummvm-sites/commit/3684fff5e879e44069460329af4864c8b0373ca9
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Re-raise the exception for parent to handle.
Changed paths:
src/scripts/dat_parser.py
src/scripts/db_functions.py
diff --git a/src/scripts/dat_parser.py b/src/scripts/dat_parser.py
index 3ffd683..dff7b85 100644
--- a/src/scripts/dat_parser.py
+++ b/src/scripts/dat_parser.py
@@ -200,8 +200,8 @@ def main():
else:
print(f"Error: Failed to parse file for upload: {filepath}")
except Exception as e:
- print(f"Error uploading {filepath}: {e}")
- continue
+ print(f"Error uploading {filepath}.")
+ raise e
if args.match:
for filepath in args.match:
@@ -212,8 +212,8 @@ def main():
else:
print(f"Error: Failed to parse file for matching: {filepath}")
except Exception as e:
- print(f"Error matching {filepath}: {e}")
- continue
+ print(f"Error matching {filepath}:")
+ raise e
except KeyboardInterrupt:
print("Operation cancelled by user")
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index 95c8aa4..a111370 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -580,7 +580,7 @@ def db_insert(data_arr, username=None, skiplog=False):
conn.commit()
except Exception as e:
conn.rollback()
- print(f"Transaction failed: {e}")
+ raise e
finally:
conn.close()
@@ -661,7 +661,7 @@ def match_fileset(data_arr, username=None, skiplog=False):
conn.commit()
except Exception as e:
conn.rollback()
- print(f"Transaction failed: {e}")
+ raise e
finally:
conn.close()
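
The pattern introduced here, as a minimal standalone sketch (the helper and file names are illustrative, not from the repo): errors are still logged at the point of failure, but the exception propagates so the caller can roll back or abort instead of silently continuing.

import sys

def upload_dat(filepath):
    """Hypothetical stand-in for the parse-and-upload step."""
    raise ValueError(f"bad header in {filepath}")

def process(paths):
    for filepath in paths:
        try:
            upload_dat(filepath)
        except Exception:
            # Log locally, then re-raise for the parent to handle.
            print(f"Error uploading {filepath}.")
            raise

if __name__ == "__main__":
    try:
        process(["broken.dat"])
    except Exception as e:
        print(f"Aborted: {e}")
        sys.exit(1)

The commit spells this as `raise e`; inside an `except Exception as e` block a bare `raise` re-raises the same exception object and is the more common idiom.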
Commit: b75f69e10019f432f8e99b5d4fccedeb484edabc
https://github.com/scummvm/scummvm-sites/commit/b75f69e10019f432f8e99b5d4fccedeb484edabc
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Fix file update logic for scan and auto merge fileset even when matched with detection fileset.
Changed paths:
src/scripts/db_functions.py
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index a111370..3358b40 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -804,45 +804,55 @@ def pre_update_files(rom, transaction_id, conn, filesets_check_for_full=None):
if filesets_check_for_full is None:
filesets_check_for_full = set()
with conn.cursor() as cursor:
- checksums = defaultdict(str)
+ full_checksums = defaultdict(str)
+ all_checksums = defaultdict(str)
for key in rom:
+ if key in ["md5-r", "md5-d", "md5"]:
+ if rom[key] != "d41d8cd98f00b204e9800998ecf8427e":
+ full_checksums[key] = rom[key]
if key not in ["name", "size", "size-r", "size-rd", "modification-time"]:
- checksums[key] = rom[key]
-
+ all_checksums[key] = rom[key]
files_to_update = set()
size = rom["size"] if "size" in rom else 0
size_r = rom["size-r"] if "size-r" in rom else 0
size_rd = rom["size-rd"] if "size-rd" in rom else 0
- for _, checksum in checksums.items():
- query = """
+ main_size = size
+ main_size_name = "size"
+ if size_rd != 0:
+ main_size = size_rd
+ main_size_name = "`size-rd`"
+ if main_size == 0:
+ return
+
+ for _, checksum in full_checksums.items():
+ query = f"""
SELECT f.id as file_id, fs.id as fileset_id
FROM file f
JOIN filechecksum fc ON fc.file = f.id
JOIN fileset fs ON fs.id = f.fileset
JOIN transactions t ON t.fileset = fs.id
WHERE fc.checksum = %s
- AND f.size = %s
- AND f.`size-r` = %s
- AND f.`size-rd` = %s
+ AND (f.{main_size_name} = %s OR f.{main_size_name} = -1)
+ AND f.name = %s
AND t.transaction != %s
"""
- cursor.execute(query, (checksum, size, size_r, size_rd, transaction_id))
+ cursor.execute(query, (checksum, main_size, rom["name"], transaction_id))
result = cursor.fetchall()
if result:
for file in result:
filesets_check_for_full.add(file["fileset_id"])
files_to_update.add(file["file_id"])
-
for file_id in files_to_update:
query = """
DELETE FROM filechecksum
WHERE file = %s
+ AND checksize IN ('0', '5000', '1M', '1048576')
"""
cursor.execute(query, (file_id,))
# Update checksums
- for check, checksum in checksums.items():
+ for check, checksum in all_checksums.items():
checksize, checktype, checksum = get_checksum_props(check, checksum)
query = "INSERT INTO filechecksum (file, checksize, checktype, checksum) VALUES (%s, %s, %s, %s)"
cursor.execute(query, (file_id, checksize, checktype, checksum))
@@ -851,13 +861,10 @@ def pre_update_files(rom, transaction_id, conn, filesets_check_for_full=None):
UPDATE file
SET size = %s,
`size-r` = %s,
- `size-rd` = %s,
- name = %s
+ `size-rd` = %s
WHERE id = %s
"""
- cursor.execute(
- query, (size, size_r, size_rd, normalised_path(rom["name"]), file_id)
- )
+ cursor.execute(query, (size, size_r, size_rd, file_id))
def scan_perform_match(
@@ -939,45 +946,19 @@ def scan_perform_match(
# Detection filests can be turned full if the number of files are equal,
# otherwise we do manual merge to remove extra files.
elif status == "detection":
- if total_fileset_files(fileset) == total_files(
- matched_fileset_id, conn, detection_only=True
- ):
- update_all_files(fileset, matched_fileset_id, True, conn)
- update_fileset_status(cursor, matched_fileset_id, "full")
- if not skiplog:
- log_matched_fileset(
- src,
- fileset_id,
- matched_fileset_id,
- "full",
- user,
- conn,
- )
- delete_original_fileset(fileset_id, conn)
- automatic_merged_filesets += 1
-
- else:
- log_text = f"Created Fileset:{fileset_id}. Name: {fileset_name} Description: {fileset_description}"
- category_text = "Uploaded from scan."
- create_log(
- category_text,
- user,
- log_text,
- conn,
- )
- console_log(log_text)
- category_text = "Manual Merge - Detection found"
- log_text = f"Matched with detection. Merge Fileset:{fileset_id} manually with Fileset:{matched_fileset_id}."
- add_manual_merge(
- candidate_filesets,
+ scan_populate_file(fileset, matched_fileset_id, conn, detection)
+ update_fileset_status(cursor, matched_fileset_id, "full")
+ if not skiplog:
+ log_matched_fileset(
+ src,
fileset_id,
- category_text,
- log_text,
+ matched_fileset_id,
+ "full",
user,
conn,
- log_text,
)
- manual_merged_with_detection += 1
+ delete_original_fileset(fileset_id, conn)
+ automatic_merged_filesets += 1
# Drop the fileset, note down the file differences
elif status == "full":
@@ -1032,6 +1013,108 @@ def scan_perform_match(
)
+def scan_populate_file(fileset, fileset_id, conn, detection):
+ """
+ Updates the detection fileset with the new scan files.
+ """
+ with conn.cursor() as cursor:
+ # Extracting the filename from the filepath.
+ cursor.execute(
+ "SELECT id, name, size,`size-rd` AS size_rd FROM file WHERE fileset = %s",
+ (fileset_id,),
+ )
+ candidate_files = defaultdict(list)
+ candidate_basename_size_set = set()
+ candidate_name_size_to_path_map = defaultdict(str)
+ target_files = cursor.fetchall()
+ for target_file in target_files:
+ size = target_file["size"]
+ size_name = "size"
+ if target_file["size_rd"] != 0:
+ path = target_file["name"].lower()
+ filename = os.path.basename(normalised_path(path))
+ size = target_file["size_rd"]
+ size_name = "size-rd"
+ candidate_files[filename] = [target_file["id"], size, size_name]
+ candidate_basename_size_set.add((filename, size))
+ candidate_name_size_to_path_map[(filename, size)] = path
+
+ seen_detection_files = set()
+
+ for file in fileset["rom"]:
+ all_checksums = defaultdict(str)
+ checksum = ""
+ for key in file:
+ if key not in [
+ "name",
+ "size",
+ "size-r",
+ "size-rd",
+ "modification-time",
+ ]:
+ all_checksums[key] = file[key]
+ if (
+ key in ["md5", "md5-r", "md5-d"]
+ and file[key] != "d41d8cd98f00b204e9800998ecf8427e"
+ ):
+ checksum = file[key]
+
+ filename = os.path.basename(normalised_path(file["name"])).lower()
+ size = file["size"]
+ size_rd = file["size-rd"]
+ detection_file = False
+ if (filename, size) in candidate_basename_size_set or (
+ filename,
+ size_rd,
+ ) in candidate_basename_size_set:
+ detection_file = True
+ if (filename, size) in candidate_basename_size_set:
+ main_size = size
+ cfile_id = candidate_name_size_to_path_map[(filename, size)]
+ else:
+ main_size = size_rd
+ cfile_id = candidate_name_size_to_path_map[(filename, size)]
+
+ if (
+ (filename, size) in seen_detection_files
+ or (filename, size_rd) in seen_detection_files
+ or not detection_file
+ ):
+ values = [file["name"]]
+ values.append(file["size"] if "size" in file else "0")
+ values.append(file["size-r"] if "size-r" in file else "0")
+ values.append(file["size-rd"] if "size-rd" in file else "0")
+ values.extend([checksum, fileset_id, detection, "None"])
+
+ query = "INSERT INTO file ( name, size, `size-r`, `size-rd`, checksum, fileset, detection, detection_type, `timestamp` ) VALUES ( %s, %s, %s, %s, %s, %s, %s, %s, NOW())"
+
+ cursor.execute(query, values)
+ cursor.execute("SET @file_last = LAST_INSERT_ID()")
+ cursor.execute("SELECT @file_last AS file_id")
+
+ file_id = cursor.fetchone()["file_id"]
+
+ for check, checksum in all_checksums.items():
+ checksize, checktype, checksum = get_checksum_props(check, checksum)
+ query = "INSERT INTO filechecksum (file, checksize, checktype, checksum) VALUES (%s, %s, %s, %s)"
+ cursor.execute(query, (file_id, checksize, checktype, checksum))
+
+ else:
+ query = """
+ UPDATE file
+ SET name = %s,
+ `timestamp` = NOW()
+ WHERE id = %s
+ """
+
+ cursor.execute(
+ query,
+ (normalised_path(file["name"]), cfile_id),
+ )
+
+ seen_detection_files.add((filename.lower(), main_size))
+
+
def update_all_files(fileset, candidate_fileset_id, is_candidate_detection, conn):
"""
Updates all the files, if they were missed out earlier due to missing size.
@@ -1101,6 +1184,7 @@ def update_all_files(fileset, candidate_fileset_id, is_candidate_detection, conn
query = """
DELETE FROM filechecksum
WHERE file = %s
+ AND checksize IN ('0', '5000', '1M', '1048576')
"""
cursor.execute(query, (file_id,))
# Update the checksums
@@ -1201,8 +1285,8 @@ def filter_candidate_filesets(roms, transaction_id, conn):
"file_id": row["file_id"],
"name": os.path.basename(normalised_path(row["name"])).lower(),
"size": row["size"] if "size" in row else 0,
- "size-r": row["size_r"] if "size-r" in row else 0,
- "size-rd": row["size_rd"] if "size-rd" in row else 0,
+ "size-r": row["size_r"] if "size_r" in row else 0,
+ "size-rd": row["size_rd"] if "size_rd" in row else 0,
}
)
for id, files in candidate_map.items():
@@ -1214,20 +1298,22 @@ def filter_candidate_filesets(roms, transaction_id, conn):
name = os.path.basename(normalised_path(file["name"]))
for key in file:
if key.startswith("md5"):
- set_checksums.add(
- (
- file[key],
- name.lower(),
- int(file["size"]),
+ if int(file["size"]) != 0:
+ set_checksums.add(
+ (
+ file[key],
+ name.lower(),
+ int(file["size"]),
+ )
)
- )
- set_checksums.add(
- (
- file[key],
- name.lower(),
- int(file["size-rd"]),
+ if int(file["size-rd"]) != 0:
+ set_checksums.add(
+ (
+ file[key],
+ name.lower(),
+ int(file["size-rd"]),
+ )
)
- )
set_checksums.add(
(
file[key],
@@ -1236,8 +1322,10 @@ def filter_candidate_filesets(roms, transaction_id, conn):
)
)
set_file_name_size.add((name.lower(), -1))
- set_file_name_size.add((name.lower(), int(file["size-rd"])))
- set_file_name_size.add((name.lower(), int(file["size"])))
+ if int(file["size-rd"]) != 0:
+ set_file_name_size.add((name.lower(), int(file["size-rd"])))
+ if int(file["size"]) != 0:
+ set_file_name_size.add((name.lower(), int(file["size"])))
# Filter candidates by detection filename and file size (including -1) and increase matched file count
# if filesize = -1,
@@ -1249,43 +1337,42 @@ def filter_candidate_filesets(roms, transaction_id, conn):
with conn.cursor() as cursor:
for f in files:
filename = os.path.basename(f["name"]).lower()
- sizes = [f["size"], f["size-rd"]]
- for size in sizes:
- if (filename, size) in set_file_name_size:
- if size == -1:
+ size = f["size-rd"] if f["size-rd"] != 0 else f["size"]
+ if (filename, size) in set_file_name_size:
+ if size == -1:
+ count += 1
+ else:
+ cursor.execute(
+ """
+ SELECT checksum, checksize, checktype
+ FROM filechecksum
+ WHERE file = %s
+ """,
+ (f["file_id"],),
+ )
+ checksums = cursor.fetchall()
+ not_inc_count = False
+ for c in checksums:
+ filesize = size
+ checksum = c["checksum"]
+ checksize = c["checksize"]
+
+ if checksize == "1M":
+ checksize = 1048576
+ elif checksize == "0":
+ checksize = filesize
+ if filesize <= int(checksize):
+ if (
+ checksum,
+ filename,
+ size,
+ ) in set_checksums:
+ count += 1
+ not_inc_count = True
+ # if it was a true match, checksum should be present
+ break
+ if not not_inc_count:
count += 1
- else:
- cursor.execute(
- """
- SELECT checksum, checksize, checktype
- FROM filechecksum
- WHERE file = %s
- """,
- (f["file_id"],),
- )
- checksums = cursor.fetchall()
- not_inc_count = False
- for c in checksums:
- filesize = size
- checksum = c["checksum"]
- checksize = c["checksize"]
-
- if checksize == "1M":
- checksize = 1048576
- elif checksize == "0":
- checksize = filesize
- if filesize <= int(checksize):
- if (
- checksum,
- filename,
- size,
- ) in set_checksums:
- count += 1
- not_inc_count = True
- # if it was a true match, checksum should be present
- break
- if not not_inc_count:
- count += 1
if count > 0 and total_detection_files_map[fileset_id] <= count:
match_counts[fileset_id] = count
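
Two of the changes above are easy to miss in the diff: full-file checksums are now separated from size-limited ones (the md5 of zero-length input, d41d8cd98f00b204e9800998ecf8427e, is ignored), and the size used for matching prefers `size-rd` when it is non-zero. A minimal restatement of that selection logic (field names follow the DAT format; this is a sketch, not the full function):

from collections import defaultdict

EMPTY_MD5 = "d41d8cd98f00b204e9800998ecf8427e"  # md5 of zero-length input

def split_checksums(rom):
    # Full-file md5 variants drive candidate matching; every checksum
    # key is still written back to the filechecksum table afterwards.
    full_checksums = defaultdict(str)
    all_checksums = defaultdict(str)
    for key, value in rom.items():
        if key in ("md5", "md5-r", "md5-d") and value != EMPTY_MD5:
            full_checksums[key] = value
        if key not in ("name", "size", "size-r", "size-rd", "modification-time"):
            all_checksums[key] = value
    return full_checksums, all_checksums

def main_size(rom):
    # Prefer the resource-fork data size when present; 0 means there is
    # nothing usable to match on, and pre_update_files returns early.
    size_rd = rom.get("size-rd", 0)
    if size_rd != 0:
        return size_rd, "`size-rd`"
    return rom.get("size", 0), "size"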
Commit: cea8e38c9de5d5b5abfdd45ab60bc2e71edde062
https://github.com/scummvm/scummvm-sites/commit/cea8e38c9de5d5b5abfdd45ab60bc2e71edde062
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Update the site url to gamesdb.scummvm.org
Changed paths:
apache2-config/gamesdb.sev.zone.conf
diff --git a/apache2-config/gamesdb.sev.zone.conf b/apache2-config/gamesdb.sev.zone.conf
index 342101e..27fb6c6 100644
--- a/apache2-config/gamesdb.sev.zone.conf
+++ b/apache2-config/gamesdb.sev.zone.conf
@@ -1,6 +1,6 @@
<VirtualHost *:80>
- ServerName gamesdb.sev.zone
- ServerAlias www.gamesdb.sev.zone
+ ServerName gamesdb.scummvm.org
+ ServerAlias www.gamesdb.scummvm.org
ServerAdmin webmaster@localhost
CustomLog ${APACHE_LOG_DIR}/integrity-access.log combined
ErrorLog ${APACHE_LOG_DIR}/integrity-error.log
Commit: e8e43f5d60890cbc0d30bb8208ee754ad3e757a4
https://github.com/scummvm/scummvm-sites/commit/e8e43f5d60890cbc0d30bb8208ee754ad3e757a4
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Report files with mismatched checksums in integrity check.
Changed paths:
src/app/fileset.py
src/scripts/db_functions.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index bdc78c8..c6ee408 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -1327,6 +1327,8 @@ def validate():
matched_user_files,
unmatched_full_files,
unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
) = user_integrity_check(json_object, ip, game_metadata)
except Exception as e:
json_response["error"] = -1
@@ -1355,16 +1357,20 @@ def validate():
return jsonify(json_response)
# If match was with full
+ json_response["fileset"] = str(fileset_id)
for file in matched_user_files:
json_response["files"].append(
{"status": "ok", "fileset_id": fileset_id, "name": file}
)
for file in unmatched_full_files:
json_response["files"].append(
- {"status": "missing/unmatched", "fileset_id": fileset_id, "name": file}
+ {"status": "missing", "fileset_id": fileset_id, "name": file}
)
-
- for file in unmatched_user_files:
+ for file in mismatched_user_files:
+ json_response["files"].append(
+ {"status": "checksum_mismatch", "fileset_id": fileset_id, "name": file}
+ )
+ for file in additional_user_files:
json_response["files"].append(
{"status": "unknown_file", "fileset_id": fileset_id, "name": file}
)
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index 3358b40..bbeab60 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -83,6 +83,7 @@ def insert_fileset(
ip="",
username=None,
skiplog=None,
+ note="",
):
status = "detection" if detection else src
game = "NULL"
@@ -148,15 +149,14 @@ def insert_fileset(
log_text = f"Created Fileset:{fileset_last}, {log_text}"
if src == "user":
query = """
- INSERT INTO queue (time, fileset, ip)
- VALUES (FROM_UNIXTIME(@fileset_time_last), %s, %s)
+ INSERT INTO queue (time, fileset, ip, notes)
+ VALUES (FROM_UNIXTIME(@fileset_time_last), %s, %s, %s)
"""
- cursor.execute(query, (fileset_id, ip))
+ cursor.execute(query, (fileset_id, ip, note))
cursor.execute(
"UPDATE fileset SET user_count = COALESCE(user_count, 0) + 1 WHERE id = %s",
(fileset_id,),
)
- cursor.execute(query, (fileset_id, ip))
log_text = f"Created Fileset:{fileset_last}, from user: IP {ip}."
user = f"cli:{getpass.getuser()}" if username is None else username
@@ -962,7 +962,7 @@ def scan_perform_match(
# Drop the fileset, note down the file differences
elif status == "full":
- (_, unmatched_candidate_files, unmatched_scan_files) = (
+ (_, unmatched_candidate_files, unmatched_scan_files, _, _) = (
get_unmatched_files(matched_fileset_id, fileset, conn)
)
fully_matched = (
@@ -1403,13 +1403,27 @@ def get_unmatched_files(candidate_fileset, fileset, conn):
"""
with conn.cursor() as cursor:
cursor.execute(
- "SELECT id, name FROM file WHERE fileset = %s", (candidate_fileset,)
+ "SELECT id, name, size, `size-rd` AS size_rd FROM file WHERE fileset = %s",
+ (candidate_fileset,),
)
candidate_file_rows = cursor.fetchall()
candidate_files = {row["id"]: row["name"] for row in candidate_file_rows}
+ candidate_sizes = set()
+ candidate_name_by_size = {}
+ for candidate_file in candidate_file_rows:
+ base_name = os.path.basename(
+ normalised_path(candidate_file["name"])
+ ).lower()
+ size = candidate_file["size"]
+ if candidate_file["size_rd"] != 0:
+ size = candidate_file["size_rd"]
+ candidate_sizes.add((base_name, size))
+ candidate_name_by_size[(base_name, size)] = candidate_file["name"]
+
dat_checksums = set()
dat_names_by_checksum = {}
+ dat_sizes_by_name = {}
for file in fileset["rom"]:
base_name = os.path.basename(normalised_path(file["name"])).lower()
@@ -1417,6 +1431,12 @@ def get_unmatched_files(candidate_fileset, fileset, conn):
if key.startswith("md5"):
dat_checksums.add((file[key], base_name))
dat_names_by_checksum[(file[key], base_name)] = file["name"]
+ file_sizes = []
+ if "size" in file:
+ file_sizes.append((base_name, int(file["size"])))
+ if "size-rd" in file:
+ file_sizes.append((base_name, int(file["size-rd"])))
+ dat_sizes_by_name[file["name"]] = file_sizes
unmatched_candidate_files = []
matched_dat_pairs = set()
@@ -1444,14 +1464,37 @@ def get_unmatched_files(candidate_fileset, fileset, conn):
for key in dat_checksums
if key not in matched_dat_pairs
}
- matched_dat_files = {
+ all_matched_dat_files = {
dat_names_by_checksum[key]
for key in dat_checksums
if key in matched_dat_pairs
}
+
+ partially_matched_dat_files = all_matched_dat_files & unmatched_dat_files
+ matched_dat_files = all_matched_dat_files - partially_matched_dat_files
+
unmatched_dat_files = list(unmatched_dat_files)
- return (matched_dat_files, unmatched_candidate_files, unmatched_dat_files)
+ mismatched_dat_files = []
+ additional_dat_files = []
+
+ # Mismatched file
+ for unmatched_dat_file in unmatched_dat_files:
+ mismatch = False
+ for file_size in dat_sizes_by_name[unmatched_dat_file]:
+ if file_size in candidate_sizes:
+ mismatched_dat_files.append(unmatched_dat_file)
+ mismatch = True
+ if not mismatch:
+ additional_dat_files.append(unmatched_dat_file)
+
+ return (
+ matched_dat_files,
+ unmatched_candidate_files,
+ unmatched_dat_files,
+ mismatched_dat_files,
+ additional_dat_files,
+ )
def is_full_detection_checksum_match(candidate_fileset, files, conn):
@@ -1856,7 +1899,7 @@ def set_perform_match(
matched_fileset_id, manual_merge_map, set_to_candidate_dict, conn
)
elif status == "partial" or status == "full":
- (_, unmatched_candidate_files, unmatched_dat_files) = (
+ (_, unmatched_candidate_files, unmatched_dat_files, _, _) = (
get_unmatched_files(matched_fileset_id, fileset, conn)
)
is_match = (
@@ -2473,13 +2516,15 @@ def log_user_match_with_full(
unmatched_user_files,
matched_user_files,
fully_matched,
+ mismatched_user_files,
+ additional_user_files,
user,
conn,
):
- category_text = "User fileset mismatch"
+ category_text = "User fileset report"
if fully_matched:
- category_text = "User fileset matched"
- log_text = f"""Candidate Full Fileset:{candidate_id}. Total matched user files = {len(matched_user_files)}. Missing/mismatch Files = {len(unmatched_full_files)}. Unknown Files = {len(unmatched_user_files)}. List of Missing/mismatch files : {", ".join(scan_file for scan_file in unmatched_full_files)}, List of unknown files : {", ".join(scan_file for scan_file in unmatched_user_files)}"""
+ category_text = "User fileset report"
+ log_text = f"""Matched with Full Fileset:{candidate_id}. Total matched user files = {len(matched_user_files)}. Missing Files = {len(unmatched_full_files)} Mismatched Files = {len(mismatched_user_files)}. Extra Files = {len(additional_user_files)}. List of Missing Files: {", ".join(user_file for user_file in unmatched_full_files)}, List of Mismatched Files: {", ".join(user_file for user_file in mismatched_user_files)} List of extra files: {", ".join(user_file for user_file in additional_user_files)}"""
create_log(category_text, user, log_text, conn)
@@ -2496,6 +2541,12 @@ def finalize_fileset_insertion(
if src != "user":
log_text = f"Completed loading DAT file, filename {filepath}, size {os.path.getsize(filepath)}, author {author}, version {version}. State {source_status}. Number of filesets: {fileset_insertion_count}. Transaction: {transaction_id}"
create_log(category_text, user, log_text, conn)
+ else:
+ cursor.execute("SELECT MAX(`transaction`) FROM transactions")
+ old_transaction_id = cursor.fetchone()["MAX(`transaction`)"]
+ if old_transaction_id == transaction_id:
+ log_text = f"Completed loading user data. Transaction: {transaction_id}"
+ create_log(category_text, user, log_text, conn)
def user_perform_match(
@@ -2507,6 +2558,8 @@ def user_perform_match(
transaction_id,
conn,
ip,
+ mismatched,
+ user_fileset_id=-1,
):
with conn.cursor() as cursor:
single_candidate_id = candidate_filesets[0]
@@ -2517,16 +2570,39 @@ def user_perform_match(
if len(candidate_filesets) == 1 and status == "full":
if status == "full":
# Checks how many files match
- (matched_dat_files, unmatched_full_files, unmatched_user_files) = (
- get_unmatched_files(single_candidate_id, fileset, conn)
- )
+ (
+ matched_dat_files,
+ unmatched_full_files,
+ unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
+ ) = get_unmatched_files(single_candidate_id, fileset, conn)
+ if len(mismatched_user_files) != 0 and not mismatched:
+ note = "mismatch"
+ print(note)
+ user_fileset_id = create_user_fileset(
+ fileset,
+ game_metadata,
+ src,
+ transaction_id,
+ user,
+ conn,
+ ip,
+ note,
+ )
+ category_text = "New User Fileset"
+ log_text = f"New User Fileset:{user_fileset_id} created. Matched with full Fileset:{single_candidate_id} with mismatched files."
+ create_log(category_text, user, log_text, conn)
+
return (
"full",
- -1,
+ user_fileset_id,
single_candidate_id,
matched_dat_files,
unmatched_full_files,
unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
)
# Includes cases for
# - single candidate with detection or partial status
@@ -2545,10 +2621,12 @@ def user_perform_match(
user,
conn,
)
- return ("multiple", fileset_id, -1, [], [], [])
+ return ("multiple", fileset_id, -1, [], [], [], [], [])
-def create_user_fileset(fileset, game_metadata, src, transaction_id, user, conn, ip):
+def create_user_fileset(
+ fileset, game_metadata, src, transaction_id, user, conn, ip, note=""
+):
with conn.cursor() as cursor:
key = calc_key(fileset)
try:
@@ -2564,7 +2642,7 @@ def create_user_fileset(fileset, game_metadata, src, transaction_id, user, conn,
return
(fileset_id, _) = insert_fileset(
- src, False, key, "", transaction_id, None, conn, ip=ip
+ src, False, key, "", transaction_id, None, conn, ip=ip, note=note
)
insert_game(engine_name, engineid, title, gameid, extra, platform, lang, conn)
@@ -2586,6 +2664,7 @@ def user_integrity_check(data, ip, game_metadata=None):
src = "user"
source_status = src
new_files = []
+ user = ip
for file in data["files"]:
new_file = {
@@ -2619,13 +2698,6 @@ def user_integrity_check(data, ip, game_metadata=None):
transaction_id = 0
transaction_id += 1
- category_text = f"Uploaded from {src}"
- log_text = f"Started loading file, State {source_status}. Transaction: {transaction_id}"
-
- user = f"cli:{getpass.getuser()}"
-
- create_log(category_text, user, log_text, conn)
-
# Check if the key already exists in the db
query = """
SELECT id
@@ -2635,16 +2707,42 @@ def user_integrity_check(data, ip, game_metadata=None):
"""
cursor.execute(query, (key,))
existing_entry = cursor.fetchone()
+ mismatched = False
+ existing_fileset_id = -1
if existing_entry is not None:
match_type = "no_candidate"
existing_fileset_id = existing_entry["id"]
- add_usercount(existing_fileset_id, ip, conn)
- conn.commit()
- return (match_type, existing_fileset_id, [], [], [])
+ query = """
+ SELECT id
+ FROM queue
+ WHERE fileset = %s
+ AND notes = 'mismatch'
+ """
+ cursor.execute(query, (existing_fileset_id,))
+ result = cursor.fetchall()
+ if not result:
+ add_usercount(existing_fileset_id, ip, conn)
+ finalize_fileset_insertion(
+ conn, transaction_id, src, None, user, 0, source_status, user
+ )
+ conn.commit()
+ return (match_type, existing_fileset_id, [], [], [], [], [])
+ else:
+ mismatched = True
- candidate_filesets = filter_candidate_filesets(
+ all_candidate_filesets = filter_candidate_filesets(
data["rom"], transaction_id, conn
)
+ candidate_filesets = []
+ # Filter by key
+ query = """
+ SELECT * FROM fileset WHERE id = %s AND `KEY` != %s
+ """
+ for candidate_fileset in all_candidate_filesets:
+ cursor.execute(query, (candidate_fileset, key))
+ result = cursor.fetchall()
+ if result:
+ candidate_filesets.append(candidate_fileset)
if len(candidate_filesets) == 0:
(user_fileset_id, _) = insert_new_fileset(
@@ -2655,26 +2753,21 @@ def user_integrity_check(data, ip, game_metadata=None):
key,
None,
transaction_id,
- log_text,
+ "",
user,
ip=ip,
)
match_type = "no_candidate"
category_text = "New User Fileset"
- engineid = (
- game_metadata["engineid"] if "engineid" in game_metadata else ""
- )
- gameid = game_metadata["gameid"] if "gameid" in game_metadata else ""
- platform = (
- game_metadata["platform"] if "platform" in game_metadata else ""
- )
- language = (
- game_metadata["language"] if "language" in game_metadata else ""
+ log_text = (
+ f"New User Fileset:{user_fileset_id} with no matching candidates."
)
- log_text = f"New User Fileset:{user_fileset_id} with no matching candidates. Engine: {engineid} Name: {gameid}-{platform}-{language}"
create_log(category_text, user, log_text, conn)
+ finalize_fileset_insertion(
+ conn, transaction_id, src, None, user, 0, source_status, user
+ )
conn.commit()
- return (match_type, user_fileset_id, [], [], [])
+ return (match_type, user_fileset_id, [], [], [], [], [])
else:
(
@@ -2684,6 +2777,8 @@ def user_integrity_check(data, ip, game_metadata=None):
matched_user_files,
unmatched_full_files,
unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
) = user_perform_match(
data,
src,
@@ -2693,6 +2788,8 @@ def user_integrity_check(data, ip, game_metadata=None):
transaction_id,
conn,
ip,
+ mismatched,
+ existing_fileset_id,
)
if match_type == "multiple":
# If multiple candidates matched, we will do manual review and ask user for more details.
@@ -2704,6 +2801,9 @@ def user_integrity_check(data, ip, game_metadata=None):
log_text,
conn,
)
+ finalize_fileset_insertion(
+ conn, transaction_id, src, None, user, 0, source_status, user
+ )
conn.commit()
return (
match_type,
@@ -2711,6 +2811,8 @@ def user_integrity_check(data, ip, game_metadata=None):
matched_user_files,
unmatched_full_files,
unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
)
if match_type == "full":
fully_matched = (
@@ -2725,28 +2827,29 @@ def user_integrity_check(data, ip, game_metadata=None):
unmatched_user_files,
matched_user_files,
fully_matched,
+ mismatched_user_files,
+ additional_user_files,
user,
conn,
)
+
+ finalize_fileset_insertion(
+ conn, transaction_id, src, None, user, 0, source_status, user
+ )
conn.commit()
return (
match_type,
- matched_id,
+ user_fileset_id,
matched_user_files,
unmatched_full_files,
unmatched_user_files,
+ mismatched_user_files,
+ additional_user_files,
)
-
- finalize_fileset_insertion(
- conn, transaction_id, src, None, user, 0, source_status, user
- )
except Exception as e:
conn.rollback()
print(f"Error processing user data: {e}")
finally:
- category_text = f"Uploaded from {src}"
- log_text = f"Completed loading file, State {source_status}. Transaction: {transaction_id}"
- create_log(category_text, user, log_text, conn)
conn.close()
@@ -2800,7 +2903,6 @@ def add_usercount(fileset, ip, conn):
"""
cursor.execute(query, (fileset, ip))
duplicate = True if cursor.fetchone()["count"] != 0 else False
- print("dupe ", duplicate)
if not duplicate:
cursor.execute(
"UPDATE fileset SET user_count = COALESCE(user_count, 0) + 1 WHERE id = %s",
Commit: 1f92448469b015b57823fa853d5cb77c9334b412
https://github.com/scummvm/scummvm-sites/commit/1f92448469b015b57823fa853d5cb77c9334b412
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Skip fileset deletion after manual merge for user fileset.
Changed paths:
src/app/fileset.py
src/scripts/db_functions.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index c6ee408..e14fb7b 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -710,9 +710,9 @@ def confirm_merge(id):
)
source_fileset = cursor.fetchone()
- # Select all filesw
+ # Select all files
file_query = """
- SELECT f.name, f.size, f.`size-r`, f.`size-rd`,
+ SELECT f.name, f.size, f.`size-r`, f.`size-rd`, f.detection_type,
fc.checksum, fc.checksize, fc.checktype, f.detection
FROM file f
LEFT JOIN filechecksum fc ON fc.file = f.id
@@ -762,6 +762,9 @@ def confirm_merge(id):
matched_files = get_file_status(
target_id, source_fileset_with_files, connection
)
+ source_to_target_matched_map = {
+ s.lower(): t.lower() for (t, s) in matched_files
+ }
def highlight_differences(source, target):
diff = difflib.ndiff(source, target)
@@ -846,6 +849,7 @@ def confirm_merge(id):
size = file["size"]
size_r = file["size-r"]
size_rd = file["size-rd"]
+ detection_type = file["detection_type"]
if file["checksum"] is None:
checksum = ""
checksize = ""
@@ -864,6 +868,9 @@ def confirm_merge(id):
source_files_map[file["name"].lower()]["size"] = size
source_files_map[file["name"].lower()]["size-r"] = size_r
source_files_map[file["name"].lower()]["size-rd"] = size_rd
+ source_files_map[file["name"].lower()]["detection_type"] = (
+ detection_type
+ )
if target_files:
for file in target_files:
@@ -877,11 +884,18 @@ def confirm_merge(id):
target_files_map[file["name"].lower()]["size"] = file["size"]
target_files_map[file["name"].lower()]["size-r"] = file["size-r"]
target_files_map[file["name"].lower()]["size-rd"] = file["size-rd"]
+ target_files_map[file["name"].lower()]["detection_type"] = file[
+ "detection_type"
+ ]
if file["detection"] == 1:
detection_files_set.add(file["name"].lower())
html += """<tr><th>Files</th><td colspan='2'><label><input type="checkbox" id="toggle-common-files"> Show Only Common Files</label><label style='margin-left: 50px;' ><input type="checkbox" id="toggle-all-fields"> Show All Fields</label></td></tr>"""
+ for candidate_file_name, dat_file_name in matched_files:
+ if candidate_file_name in detection_files_set:
+ detection_files_set.add(dat_file_name)
+
all_source_unmatched_filenames = sorted(set(source_files_map.keys()))
all_target_unmatched_filenames = sorted(set(target_files_map.keys()))
@@ -934,6 +948,8 @@ def confirm_merge(id):
</tr>"""
for key in keys:
+ if key == "detection_type":
+ continue
source_value = str(source_dict.get(key, ""))
target_value = str(target_dict.get(key, ""))
@@ -952,12 +968,27 @@ def confirm_merge(id):
("source", source_checksum),
("target", target_checksum),
]:
+ detection_type = ""
is_detection = "0"
if (
side == "target"
and target_filename.lower() in detection_files_set
):
is_detection = "1"
+ detection_type = target_files_map[
+ target_filename.lower()
+ ].get("detection_type", "")
+ if (
+ side == "source"
+ and source_filename.lower() in detection_files_set
+ ):
+ is_detection = "1"
+ fname = source_to_target_matched_map[
+ source_filename.lower()
+ ]
+ detection_type = target_files_map[fname].get(
+ "detection_type", ""
+ )
vals[side] = html_lib.escape(
json.dumps(
@@ -969,6 +1000,7 @@ def confirm_merge(id):
"prop": key,
"value": checksum,
"detection": is_detection,
+ "detection_type": detection_type,
}
)
)
@@ -1051,16 +1083,18 @@ def confirm_merge(id):
@app.route("/fileset/<int:id>/merge/execute", methods=["POST"])
def execute_merge(id):
- data = request.get_json()
- source_id = data.get("source_id")
- target_id = data.get("target_id")
- options = data.get("options")
- matched_dict = json.loads(data.get("matched_files"))
-
connection = db_connect()
+ with connection.cursor() as cursor:
+ data = request.get_json()
+ source_id = data.get("source_id")
+ target_id = data.get("target_id")
+ options = data.get("options")
+ matched_dict = json.loads(data.get("matched_files"))
- try:
- with connection.cursor() as cursor:
+ cursor.execute("SELECT status FROM fileset WHERE id = %s", (source_id))
+ source_status = cursor.fetchone()["status"]
+
+ try:
cursor.execute("SELECT * FROM fileset WHERE id = %s", (source_id,))
source_fileset = cursor.fetchone()
@@ -1087,6 +1121,7 @@ def execute_merge(id):
for file in options:
filename = file["filename"].lower()
+ detection_type = file.get("detection_type", "")
if filename in matched_dict:
filename = matched_dict[filename]
file_details_map[filename]["name"] = filename
@@ -1096,7 +1131,7 @@ def execute_merge(id):
and file_details_map[filename]["detection"] != "1"
):
file_details_map[filename]["detection"] = file["detection"]
- file_details_map[filename]["detection_type"] = file["prop"]
+ file_details_map[filename]["detection_type"] = detection_type
if file["prop"].startswith("md5"):
file_details_map[filename][file["prop"]] = file["value"]
if file["prop"].startswith("size"):
@@ -1104,14 +1139,23 @@ def execute_merge(id):
query = "DELETE FROM file WHERE fileset = %s"
cursor.execute(query, (target_id,))
- query = "DELETE FROM fileset WHERE id = %s"
- cursor.execute(query, (source_id,))
+
+ if source_status != "user":
+ query = "DELETE FROM fileset WHERE id = %s"
+ cursor.execute(query, (source_id,))
for filename, details in file_details_map.items():
detection = (
details["detection"] == "1" if "detection" in details else False
)
- insert_file(details, detection, "", connection, target_id)
+ insert_file(
+ details,
+ detection,
+ "",
+ connection,
+ target_id,
+ details["detection_type"],
+ )
cursor.execute("SELECT @file_last AS file_id")
file_id = cursor.fetchone()["file_id"]
for key in details:
@@ -1125,15 +1169,16 @@ def execute_merge(id):
]:
insert_filechecksum(details, key, file_id, connection)
- cursor.execute(
- """
- INSERT INTO history (`timestamp`, fileset, oldfileset)
- VALUES (NOW(), %s, %s)
- """,
- (target_id, source_id),
- )
+ if source_status != "user":
+ cursor.execute(
+ """
+ INSERT INTO history (`timestamp`, fileset, oldfileset)
+ VALUES (NOW(), %s, %s)
+ """,
+ (target_id, source_id),
+ )
+ delete_original_fileset(source_id, connection)
- delete_original_fileset(source_id, connection)
category_text = "Manually Merged"
user = f"cli:{getpass.getuser()}"
log_text = f"Manually merged Fileset:{source_id} with Fileset:{target_id} by user: {user}."
@@ -1149,8 +1194,8 @@ def execute_merge(id):
return redirect(url_for("fileset", id=target_id))
- finally:
- connection.close()
+ finally:
+ connection.close()
@app.route("/fileset/<int:id>/mark_full", methods=["POST"])
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index bbeab60..1151c76 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -40,6 +40,12 @@ def get_checksum_props(checkcode, checksum):
checktype += "-" + prefix
checksum = checksum.split(":")[1]
+ if checktype == "md5-full":
+ checktype = "md5"
+ if checktype == "md5-r-full":
+ checktype = "md5-r"
+ if checktype == "md5-d-full":
+ checktype = "md5-d"
return checksize, checktype, checksum
@@ -182,7 +188,7 @@ def normalised_path(name):
return "/".join(path_list)
-def insert_file(file, detection, src, conn, fileset_id=None):
+def insert_file(file, detection, src, conn, fileset_id=None, detection_type=""):
# Find full md5, or else use first checksum value
checksum = ""
checksize = 5000
@@ -204,9 +210,14 @@ def insert_file(file, detection, src, conn, fileset_id=None):
if not detection:
checktype = "None"
detection = 0
- detection_type = (
- f"{checktype}-{checksize}" if checktype != "None" else f"{checktype}"
- )
+
+ if detection_type != "":
+ checksum = file[detection_type]
+ checksize, checktype, checksum = get_checksum_props(detection_type, checksum)
+ else:
+ detection_type = (
+ f"{checktype}-{checksize}" if checktype != "None" else f"{checktype}"
+ )
name = normalised_path(file["name"])
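
The guard this commit threads through execute_merge can be summarised as: user filesets survive a manual merge, so the original submission (and its queue entry) is kept, while every other source is deleted and redirected via the history table. A condensed sketch, assuming the PyMySQL-style connection used elsewhere in the repo:

def finalize_manual_merge(conn, source_id, target_id, source_status):
    # User filesets are preserved; other sources are recorded in
    # `history` (so old links redirect to the merge target) and removed.
    with conn.cursor() as cursor:
        if source_status != "user":
            cursor.execute(
                "INSERT INTO history (`timestamp`, fileset, oldfileset) "
                "VALUES (NOW(), %s, %s)",
                (target_id, source_id),
            )
            cursor.execute("DELETE FROM fileset WHERE id = %s", (source_id,))
    conn.commit()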
Commit: c505c9ab5a40bf00907efdf4109d096e0cc34fec
https://github.com/scummvm/scummvm-sites/commit/c505c9ab5a40bf00907efdf4109d096e0cc34fec
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Fix single quote issue in search filters.
Changed paths:
src/app/pagination.py
diff --git a/src/app/pagination.py b/src/app/pagination.py
index 75915c8..cd3bfde 100644
--- a/src/app/pagination.py
+++ b/src/app/pagination.py
@@ -20,9 +20,12 @@ def get_join_columns(table1, table2, mapping):
def build_search_condition(value, column):
+ def sql_escape(s):
+ return s.replace("'", "''")
+
phrases = re.findall(r'"([^"]+)"', value)
if phrases:
- conditions = [f"{column} REGEXP '{re.escape(p)}'" for p in phrases]
+ conditions = [f"{column} REGEXP '{sql_escape(p)}'" for p in phrases]
return " AND ".join(conditions)
if "+" in value:
@@ -32,15 +35,17 @@ def build_search_condition(value, column):
or_terms = term.strip().split()
if len(or_terms) > 1:
or_cond = " OR ".join(
- [f"{column} REGEXP '{re.escape(t)}'" for t in or_terms if t]
+ [f"{column} REGEXP '{sql_escape(t)}'" for t in or_terms if t]
)
and_conditions.append(f"({or_cond})")
else:
- and_conditions.append(f"{column} REGEXP '{re.escape(term.strip())}'")
+ and_conditions.append(f"{column} REGEXP '{sql_escape(term.strip())}'")
return " AND ".join(and_conditions)
else:
or_terms = value.split()
- return " OR ".join([f"{column} REGEXP '{re.escape(t)}'" for t in or_terms if t])
+ return " OR ".join(
+ [f"{column} REGEXP '{sql_escape(t)}'" for t in or_terms if t]
+ )
def create_page(
Commit: d90afd5924a437edb01dbc5ed5091a19c5cad97a
https://github.com/scummvm/scummvm-sites/commit/d90afd5924a437edb01dbc5ed5091a19c5cad97a
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Fix the SQL query to delete files from the UI.
-Instead of 'DELETE ... WHERE id IN ('1, 2, 3')', it should be 'DELETE ... WHERE id IN ('1', '2', '3')'.
-Remove separate deletion query for filechecksums, as 'ON DELETE CASCADE' is already setup.
Changed paths:
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index e14fb7b..0ddec69 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -1507,16 +1507,11 @@ def fileset_search():
def delete_files(id):
file_ids = request.form.getlist("file_ids")
if file_ids:
- # Convert the list to comma-separated string for SQL
- ids_to_delete = ",".join(file_ids)
connection = db_connect()
with connection.cursor() as cursor:
# SQL statements to delete related records
- cursor.execute(
- "DELETE FROM filechecksum WHERE file IN (%s)", (ids_to_delete,)
- )
- cursor.execute("DELETE FROM file WHERE id IN (%s)", (ids_to_delete,))
-
+ placeholders = ",".join(["%s"] * len(file_ids))
+ cursor.execute(f"DELETE FROM file WHERE id IN ({placeholders})", file_ids)
# Commit the deletions
connection.commit()
return redirect(url_for("fileset", id=id))
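
The old code bound the comma-joined string to a single %s, so MySQL compared id against the literal string '1,2,3' and matched at most the first id. The fix generates one placeholder per value, and the filechecksum rows disappear via ON DELETE CASCADE. The same pattern in isolation, assuming a DB-API cursor with the %s paramstyle:

def delete_files_by_id(cursor, file_ids):
    # One %s per id keeps every value individually quoted by the driver.
    if not file_ids:
        return
    placeholders = ",".join(["%s"] * len(file_ids))
    cursor.execute(f"DELETE FROM file WHERE id IN ({placeholders})", file_ids)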
Commit: 9eb4571ea5e9273dc984ecaf0aab02e47518fba3
https://github.com/scummvm/scummvm-sites/commit/9eb4571ea5e9273dc984ecaf0aab02e47518fba3
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add relative path in scan dat
A 'data_path' field has been added to the scan dat; it stores the relative path of the fileset. This provides additional log information when multiple filesets are processed in a single scan.
Changed paths:
src/scripts/compute_hash.py
src/scripts/db_functions.py
diff --git a/src/scripts/compute_hash.py b/src/scripts/compute_hash.py
index a3597fd..584aa48 100644
--- a/src/scripts/compute_hash.py
+++ b/src/scripts/compute_hash.py
@@ -688,7 +688,7 @@ def compute_hash_of_dirs(
hash_of_dir[relative_path] = file_checksum(
file_path, alg, size, file_info
- ) + (filtered_file_map[file_path],)
+ ) + (filtered_file_map[file_path], os.path.basename(directory))
res.append(hash_of_dir)
except Exception:
@@ -828,13 +828,18 @@ def create_dat_file(hash_of_dirs, path, checksum_size=0):
# Game files
for hash_of_dir in hash_of_dirs:
file.write("game (\n")
+ path_added = False
for filename, (
hashes,
size,
size_r,
size_rd,
timestamp,
+ relative_path,
) in hash_of_dir.items():
+ if not path_added:
+ file.write(f"\tdata_path {relative_path}\n")
+ path_added = True
filename = encode_path_components(filename)
data = f"""name "{filename}" size {size} size-r {size_r} size-rd {size_rd} modification-time {timestamp}"""
for key, value in hashes:
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index 1151c76..bf7fb77 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -703,7 +703,6 @@ def scan_process(
match_with_full_fileset = 0
mismatch_with_full_fileset = 0
dropped_early_no_candidate = 0
- manual_merged_with_detection = 0
filesets_with_missing_files = 0
duplicate_or_existing_entry = 0
@@ -733,6 +732,11 @@ def scan_process(
)
if existing:
duplicate_or_existing_entry += 1
+ category_text = "Skip fileset"
+ relative_path = fileset["data_path"]
+ log_text = f"Existing or duplicate fileset. data_path: {relative_path} Existing Fileset:{fileset_id}"
+ create_log(category_text, user, log_text, conn)
+ console_log(f"Existing or duplicate fileset. data_path: {relative_path}")
continue
id_to_fileset_mapping[fileset_id] = fileset
@@ -754,11 +758,10 @@ def scan_process(
)
if len(candidate_filesets) == 0:
category_text = "Drop fileset - No Candidates"
- fileset_name = fileset["name"] if "name" in fileset else ""
- fileset_description = (
- fileset["description"] if "description" in fileset else ""
+ relative_path = fileset["data_path"]
+ log_text = (
+ f"Drop fileset as no matching candidates. data_path: {relative_path}"
)
- log_text = f"Drop fileset as no matching candidates. Name: {fileset_name} Description: {fileset_description}."
create_log(category_text, user, log_text, conn)
dropped_early_no_candidate += 1
delete_original_fileset(fileset_id, conn)
@@ -769,7 +772,6 @@ def scan_process(
manual_merged_filesets,
match_with_full_fileset,
mismatch_with_full_fileset,
- manual_merged_with_detection,
filesets_with_missing_files,
) = scan_perform_match(
fileset,
@@ -782,7 +784,6 @@ def scan_process(
manual_merged_filesets,
match_with_full_fileset,
mismatch_with_full_fileset,
- manual_merged_with_detection,
filesets_with_missing_files,
conn,
skiplog,
@@ -803,7 +804,7 @@ def scan_process(
log_text = f"Completed loading DAT file, filename {filepath}, size {os.path.getsize(filepath)}. State {source_status}. Number of filesets: {fileset_insertion_count}. Transaction: {transaction_id}"
create_log(category_text, user, log_text, conn)
category_text = "Upload information"
- log_text = f"Number of filesets: {fileset_insertion_count}. Duplicate or existing filesets: {duplicate_or_existing_entry}. Filesets automatically merged: {automatic_merged_filesets}. Filesets requiring manual merge (multiple candidates): {manual_merged_filesets}. Filesets requiring manual merge (matched with detection): {manual_merged_with_detection}. Filesets dropped, no candidate: {dropped_early_no_candidate}. Filesets matched with existing Full fileset: {match_with_full_fileset}. Filesets with mismatched files with Full fileset: {mismatch_with_full_fileset}. Filesets missing files compared to partial fileset candidate: {filesets_with_missing_files}."
+ log_text = f"Number of filesets: {fileset_insertion_count}. Duplicate or existing filesets: {duplicate_or_existing_entry}. Filesets automatically merged: {automatic_merged_filesets}. Filesets requiring manual merge (multiple candidates): {manual_merged_filesets}. Filesets dropped, no candidate: {dropped_early_no_candidate}. Filesets matched with existing Full fileset: {match_with_full_fileset}. Filesets with mismatched files with Full fileset: {mismatch_with_full_fileset}. Filesets missing files compared to partial fileset candidate: {filesets_with_missing_files}."
console_log(log_text)
create_log(category_text, user, log_text, conn)
@@ -889,7 +890,6 @@ def scan_perform_match(
manual_merged_filesets,
match_with_full_fileset,
mismatch_with_full_fileset,
- manual_merged_with_detection,
filesets_with_missing_files,
conn,
skiplog,
@@ -904,8 +904,7 @@ def scan_perform_match(
Put them for manual merge.
"""
with conn.cursor() as cursor:
- fileset_name = fileset["name"] if "name" in fileset else ""
- fileset_description = fileset["description"] if "description" in fileset else ""
+ relative_path = fileset["data_path"]
if len(candidate_filesets) == 1:
matched_fileset_id = candidate_filesets[0]
cursor.execute(
@@ -917,7 +916,9 @@ def scan_perform_match(
if status == "partial":
# Partial filesets contain all the files, so does the scanned filesets, so this case should not ideally happen.
if total_files(matched_fileset_id, conn) > total_fileset_files(fileset):
- log_text = f"Created Fileset:{fileset_id}. Name: {fileset_name} Description: {fileset_description}"
+ log_text = (
+ f"Created Fileset:{fileset_id}. data_path: {relative_path}"
+ )
category_text = "Uploaded from scan."
create_log(
category_text,
@@ -991,13 +992,14 @@ def scan_perform_match(
unmatched_candidate_files,
unmatched_scan_files,
fully_matched,
+ relative_path,
user,
conn,
)
delete_original_fileset(fileset_id, conn)
elif len(candidate_filesets) > 1:
- log_text = f"Created Fileset:{fileset_id}. Name: {fileset_name} Description: {fileset_description}"
+ log_text = f"Created Fileset:{fileset_id}. data_path: {relative_path}"
category_text = "Uploaded from scan."
create_log(category_text, user, log_text, conn)
console_log(log_text)
@@ -1019,7 +1021,6 @@ def scan_perform_match(
manual_merged_filesets,
match_with_full_fileset,
mismatch_with_full_fileset,
- manual_merged_with_detection,
filesets_with_missing_files,
)
@@ -2506,13 +2507,14 @@ def log_match_with_full(
unmatched_candidate_files,
unmatched_scan_files,
fully_matched,
+ relative_path,
user,
conn,
):
category_text = "Mismatch with Full set"
if fully_matched:
category_text = "Existing as Full set."
- log_text = f"""Files mismatched with Full Fileset:{candidate_id}. Unmatched Files in scan fileset = {len(unmatched_scan_files)}. Unmatched Files in full fileset = {len(unmatched_candidate_files)}. List of unmatched files scan.dat : {", ".join(scan_file for scan_file in unmatched_scan_files)}, List of unmatched files full fileset : {", ".join(scan_file for scan_file in unmatched_candidate_files)}"""
+ log_text = f"""Files mismatched with Full Fileset:{candidate_id}. data_path: {relative_path}.Unmatched Files in scan fileset = {len(unmatched_scan_files)}. Unmatched Files in full fileset = {len(unmatched_candidate_files)}. List of unmatched files scan.dat : {", ".join(scan_file for scan_file in unmatched_scan_files)}, List of unmatched files full fileset : {", ".join(scan_file for scan_file in unmatched_candidate_files)}"""
if fully_matched:
log_text = (
f"Fileset matched completely with Full Fileset:{candidate_id}. Dropping."
Commit: 4fd450b00e8208589178e4a6455cf09c011b443d
https://github.com/scummvm/scummvm-sites/commit/4fd450b00e8208589178e4a6455cf09c011b443d
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add metadata updates from UI
- Allow updating metadata directly from the UI
- Support adding metadata for new user filesets without engine/game relations
Changed paths:
A static/js/track_metadata_update.js
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index 0ddec69..4ba99f0 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -15,6 +15,7 @@ import getpass
from src.app.pagination import create_page
import difflib
from src.scripts.db_functions import (
+ insert_game,
get_all_related_filesets,
convert_log_text_to_links,
user_integrity_check,
@@ -149,7 +150,6 @@ def fileset():
if old_id is not None:
html += f"""<h3><u>Redirected from Fileset: {old_id}</u></h3>"""
html += f"<button type='button' onclick=\"location.href='/fileset/{id}/merge'\">Manual Merge</button>"
- # html += f"<button type='button' onclick=\"location.href='/fileset/{id}/possible_merge'\">Possible Merges</button>"
html += f"""
<form action="/fileset/{id}/mark_full" method="post" style="display:inline;">
<button type='submit'>Mark as full</button>
@@ -168,29 +168,153 @@ def fileset():
if status == "dat":
cursor.execute(
- """SELECT id, game, status, src, `key`, megakey, `delete`, timestamp, set_dat_metadata FROM fileset WHERE id = %s""",
+ """SELECT id, game, status, src, `key`, timestamp, set_dat_metadata FROM fileset WHERE id = %s""",
+ (id,),
+ )
+ elif status == "user":
+ cursor.execute(
+ """SELECT id, game, status, src, `key`, timestamp, user_count FROM fileset WHERE id = %s""",
(id,),
)
else:
cursor.execute(
- """SELECT id, game, status, src, `key`, megakey, `delete`, timestamp, detection_size, user_count FROM fileset WHERE id = %s""",
+ """SELECT id, game, status, src, `key`, megakey, timestamp FROM fileset WHERE id = %s""",
(id,),
)
result = cursor.fetchone()
html += "<h3>Fileset details</h3>"
- html += "<table>\n"
+ html += f"<form method='POST' action='/fileset/{id}/update'>"
+ html += "<table'>\n"
+
if result["game"]:
if status == "dat":
- query = """SELECT game.name as 'game name', engineid, gameid, extra, platform, language, fileset.set_dat_metadata FROM fileset JOIN game ON game.id = fileset.game JOIN engine ON engine.id = game.engine WHERE fileset.id = %s"""
+ query = """
+ SELECT game.name AS 'game name', engineid, gameid, extra, platform, language, fileset.set_dat_metadata
+ FROM fileset
+ JOIN game ON game.id = fileset.game
+ JOIN engine ON engine.id = game.engine
+ WHERE fileset.id = %s
+ """
else:
- query = """SELECT game.name as 'game name', engineid, gameid, extra, platform, language FROM fileset JOIN game ON game.id = fileset.game JOIN engine ON engine.id = game.engine WHERE fileset.id = %s"""
+ query = """
+ SELECT game.name AS 'game name', engineid, gameid, extra, platform, language
+ FROM fileset
+ JOIN game ON game.id = fileset.game
+ JOIN engine ON engine.id = game.engine
+ WHERE fileset.id = %s
+ """
cursor.execute(query, (id,))
result = {**result, **cursor.fetchone()}
else:
- # result.pop('key', None)
- # result.pop('status', None)
- result.pop("delete", None)
+ if status == "user":
+ html += "<h4>Add additional metadata</h4>"
+
+ cursor.execute(
+ "SELECT DISTINCT engineid FROM engine WHERE engineid IS NOT NULL"
+ )
+ engine_ids = [row["engineid"] for row in cursor.fetchall()]
+
+ cursor.execute(
+ "SELECT DISTINCT name FROM engine WHERE name IS NOT NULL"
+ )
+ engine_names = [row["name"] for row in cursor.fetchall()]
+
+ cursor.execute(
+ "SELECT DISTINCT gameid FROM game WHERE gameid IS NOT NULL"
+ )
+ game_ids = [row["gameid"] for row in cursor.fetchall()]
+
+ cursor.execute(
+ "SELECT DISTINCT name FROM game WHERE name IS NOT NULL"
+ )
+ titles = [row["name"] for row in cursor.fetchall()]
+
+ cursor.execute(
+ "SELECT DISTINCT platform FROM game WHERE platform IS NOT NULL"
+ )
+ platforms = [row["platform"] for row in cursor.fetchall()]
+
+ cursor.execute(
+ "SELECT DISTINCT language FROM game WHERE language IS NOT NULL"
+ )
+ languages = [row["language"] for row in cursor.fetchall()]
+
+ db_options = {
+ "engine_ids": engine_ids,
+ "game_ids": game_ids,
+ "platforms": platforms,
+ "languages": languages,
+ "engine_names": engine_names,
+ "titles": titles,
+ }
+
+ datalist_html = ""
+
+ if "engine_ids" in db_options:
+ datalist_html += "<datalist id='engine-options'>"
+ for engine in db_options["engine_ids"]:
+ datalist_html += f"<option value='{engine}'></option>"
+ datalist_html += "</datalist>"
+
+ if "game_ids" in db_options:
+ datalist_html += "<datalist id='game-id-options'>"
+ for gameid in db_options["game_ids"]:
+ datalist_html += f"<option value='{gameid}'></option>"
+ datalist_html += "</datalist>"
+
+ if "titles" in db_options:
+ datalist_html += "<datalist id='title-options'>"
+ for title in db_options["titles"]:
+ datalist_html += f"<option value='{title}'></option>"
+ datalist_html += "</datalist>"
+
+ if "engine_names" in db_options:
+ datalist_html += "<datalist id='engine-name-options'>"
+ for name in db_options["engine_names"]:
+ datalist_html += f"<option value='{name}'>"
+ datalist_html += "</datalist>"
+
+ if "languages" in db_options:
+ datalist_html += "<datalist id='language-options'>"
+ for lang in db_options["languages"]:
+ datalist_html += f"<option value='{lang}'>"
+ datalist_html += "</datalist>"
+
+ if "platforms" in db_options:
+ datalist_html += "<datalist id='platform-options'>"
+ for platform in db_options["platforms"]:
+ datalist_html += f"<option value='{platform}'>"
+ datalist_html += "</datalist>"
+
+ html += datalist_html
+
+ html += """
+ <div style='display: grid; grid-template-columns: 150px 1fr; gap: 8px 12px; margin-bottom: 1em;'>
+ <label for="engineid">Engine ID:</label>
+ <input required type="text" id="engineid" name="engineid" list="engine-options" placeholder="Required: Type or select...">
+
+ <label for="gameid">Game ID:</label>
+ <input required type="text" id="gameid" name="gameid" list="game-id-options" placeholder="Required: Type or select...">
+
+ <label for="title">Title:</label>
+ <input type="text" id="title" name="title" list="title-options" placeholder="Optional: Type or select...">
+
+ <label for="engine_name">Engine Name:</label>
+ <input type="text" id="engine_name" name="engine_name" list="engine-name-options" placeholder="Optional: Type or select...">
+
+ <label for="language">Language:</label>
+ <input type="text" id="language" name="language" list="language-options" placeholder="Optional: Type or select...">
+
+ <label for="platform">Platform:</label>
+ <input type="text" id="platform" name="platform" list="platform-options" placeholder="Optional: Type or select...">
+
+ <label for="extra">Extra:</label>
+ <input type="text" id="extra" name="extra" placeholder="Optional: Type">
+ </div>
+ """
+
+ html += "<button style='margin-bottom: 10px;' type='submit' name='action' value='add_metadata'>Add metadata</button>"
for column in result.keys():
if column != "id" and column != "game":
@@ -199,9 +323,17 @@ def fileset():
html += "<tr>\n"
for column, value in result.items():
if column != "id" and column != "game":
- html += f"<td>{value}</td>"
+ if not result["game"] and status == "user":
+ html += f"<td>{value}</td>"
+ else:
+ html += f"""<td><input class='track-update' style='all: unset;' type="text" name="{column}" value="{value if value is not None else ""}" /></td>"""
html += "</tr>\n"
+
html += "</table>\n"
+ html += "<div id='updateNotice' style='display:none; color:red;'>Updates pending...</div>"
+ if not (not result["game"] and status == "user"):
+ html += "<button type='submit' name='action' value='update_metadata'>Update metadata</button>"
+ html += "</form>"
# Files in the fileset
html += "<h3>Files in the fileset</h3>"
@@ -442,12 +574,116 @@ def fileset():
</tr>
"""
html += "</table>\n"
-
+ html += "<script src='{{ url_for('static', filename='js/track_metadata_update.js') }}'></script>"
return render_template_string(html)
finally:
connection.close()
+ at app.route("/fileset/<int:id>/update", methods=["POST"])
+def update_fileset(id):
+ connection = db_connect()
+ try:
+ with connection.cursor() as cursor:
+ if request.form.get("action") == "update_metadata":
+ allowed_columns = [
+ "status",
+ "src",
+ "key",
+ "megakey",
+ "game name",
+ "engineid",
+ "gameid",
+ "extra",
+ "platform",
+ "language",
+ ]
+
+ table_map = {
+ "status": "fileset",
+ "src": "fileset",
+ "key": "fileset",
+ "megakey": "fileset",
+ "timestamp": "fileset",
+ "game name": "game",
+ "engineid": "engine",
+ "gameid": "game",
+ "extra": "game",
+ "platform": "game",
+ "language": "game",
+ }
+
+ updates_by_table = {"fileset": [], "game": [], "engine": []}
+ values_by_table = {"fileset": [], "game": [], "engine": []}
+ for col in allowed_columns:
+ if col in request.form:
+ table = table_map[col]
+ db_col = col
+ if col == "game name":
+ db_col = "name"
+ updates_by_table[table].append(f"`{db_col}` = %s")
+ values_by_table[table].append(request.form[col])
+
+ if updates_by_table["fileset"]:
+ query = f"UPDATE fileset SET {', '.join(updates_by_table['fileset'])} WHERE id = %s"
+ values = values_by_table["fileset"] + [id]
+ cursor.execute(query, values)
+
+ if updates_by_table["game"]:
+ cursor.execute("SELECT game FROM fileset WHERE id = %s", (id,))
+ game_id = cursor.fetchone()["game"]
+ query = f"UPDATE game SET {', '.join(updates_by_table['game'])} WHERE id = %s"
+ values = values_by_table["game"] + [game_id]
+ cursor.execute(query, values)
+
+ if updates_by_table["engine"]:
+ cursor.execute(
+ "SELECT engine.id AS engine_id FROM engine "
+ "JOIN game ON game.engine = engine.id "
+ "JOIN fileset ON fileset.game = game.id "
+ "WHERE fileset.id = %s",
+ (id,),
+ )
+ engine_id = cursor.fetchone()["engine_id"]
+ query = f"UPDATE engine SET {', '.join(updates_by_table['engine'])} WHERE id = %s"
+ values = values_by_table["engine"] + [engine_id]
+ cursor.execute(query, values)
+ print(f"Fileset:{id} updated successfully.")
+ connection.commit()
+ elif request.form.get("action") == "add_metadata":
+ engine_name = request.form.get("engine_name", "")
+ engine_id = request.form.get("engineid")
+ title = request.form.get("title", "")
+ gameid = request.form.get("gameid")
+ extra = request.form.get("extra", "")
+ platform = request.form.get("platform", "")
+ lang = request.form.get("lang", "")
+
+ insert_game(
+ engine_name,
+ engine_id,
+ title,
+ gameid,
+ extra,
+ platform,
+ lang,
+ connection,
+ )
+ cursor.execute("SELECT @game_last")
+ row = cursor.fetchone()
+ game_pk_id = row["@game_last"]
+
+ cursor.execute(
+ "UPDATE fileset SET game = %s WHERE id = %s", (game_pk_id, id)
+ )
+ print(f"Fileset:{id} added additional metadata.")
+ connection.commit()
+ finally:
+ connection.close()
+
+ return redirect(url_for("fileset", id=id))
+
+
@app.route("/fileset/<int:id>/merge", methods=["GET", "POST"])
def merge_fileset(id):
if request.method == "POST":
@@ -692,7 +928,7 @@ def confirm_merge(id):
SELECT
fs.id, fs.status, fs.src, fs.`key`, fs.megakey,
fs.timestamp, fs.detection_size, fs.set_dat_metadata,
- g.name AS game_name,
+ g.name AS game_name,
e.name AS game_engine,
g.platform AS game_platform,
g.language AS game_language,
diff --git a/static/js/track_metadata_update.js b/static/js/track_metadata_update.js
new file mode 100644
index 0000000..5bd7a76
--- /dev/null
+++ b/static/js/track_metadata_update.js
@@ -0,0 +1,9 @@
+
+const notice = document.getElementById("updateNotice");
+const trackedInputs = document.querySelectorAll(".track-update");
+
+trackedInputs.forEach(input => {
+input.addEventListener("input", () => {
+ notice.style.display = "block";
+});
+});
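The update_metadata branch above routes each whitelisted form field to the table that owns it (fileset, game, or engine) and keeps every value behind a %s placeholder, so only vetted column names are ever interpolated into the SQL string. A minimal standalone sketch of that pattern follows; the two-entry whitelist and the id 42 are illustrative assumptions, not the production schema:

    # Sketch of the column-whitelist + table-map UPDATE pattern used above.
    # ALLOWED maps each permitted form field to its owning table; the entries
    # here are illustrative, not the real schema.
    ALLOWED = {"status": "fileset", "platform": "game"}

    def build_updates(form):
        """Collect "`col` = %s" fragments and their values per target table."""
        updates, values = {}, {}
        for col, table in ALLOWED.items():
            if col in form:
                updates.setdefault(table, []).append(f"`{col}` = %s")
                values.setdefault(table, []).append(form[col])
        return updates, values

    updates, values = build_updates({"status": "full", "platform": "DOS"})
    for table, fragments in updates.items():
        query = f"UPDATE {table} SET {', '.join(fragments)} WHERE id = %s"
        print(query, values[table] + [42])  # would be cursor.execute(query, ...)

Because only whitelisted names reach the query string and user input travels exclusively through the parameter list, the SET clause can be assembled dynamically without opening the endpoint to SQL injection.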
Commit: e8767782767903f8e4daeb2f92f50687d6b8d9a7
https://github.com/scummvm/scummvm-sites/commit/e8767782767903f8e4daeb2f92f50687d6b8d9a7
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add file updates from UI
Files can now be updated from the web app, in addition to being deleted.
Changed paths:
src/app/fileset.py
src/scripts/db_functions.py
static/js/track_metadata_update.js
diff --git a/src/app/fileset.py b/src/app/fileset.py
index 4ba99f0..35e0c4f 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -326,15 +326,18 @@ def fileset():
if not result["game"] and status == "user":
html += f"<td>{value}</td>"
else:
- html += f"""<td><input class='track-update' style='all: unset;' type="text" name="{column}" value="{value if value is not None else ""}" /></td>"""
+ html += f"""<td><input style='all: unset;' type="text" name="{column}" value="{value if value is not None else ""}" /></td>"""
html += "</tr>\n"
html += "</table>\n"
- html += "<div id='updateNotice' style='display:none; color:red;'>Updates pending...</div>"
if not (not result["game"] and status == "user"):
html += "<button type='submit' name='action' value='update_metadata'>Update metadata</button>"
html += "</form>"
+ # -------------------------------------------------------------------------------------------------
+ # Files
+ # -------------------------------------------------------------------------------------------------
+
# Files in the fileset
html += "<h3>Files in the fileset</h3>"
html += "<form>"
@@ -349,9 +352,7 @@ def fileset():
html += "<input type='submit' value='Hide extra checksums' />"
html += "</form>"
- html += (
- f"""<form method="POST" action="{url_for("delete_files", id=id)}">"""
- )
+ html += f"""<form id="file_action_form" method="POST" action="{url_for("files_action", id=id)}">"""
# Table
html += "<table>\n"
@@ -414,7 +415,7 @@ def fileset():
# Generate table header
html += "<tr>\n"
html += "<th/>" # Numbering column
- html += "<th>Select</th>" # Checkbox column
+ html += "<th>delete</th>" # Checkbox column
sortable_columns = share_columns + list(temp_set)
for column in sortable_columns:
@@ -436,22 +437,26 @@ def fileset():
for column in all_columns:
if column != "id":
value = row.get(column, "")
+ input_name = f"files[{row['id']}][{column}]"
if (
column == row.get("detection_type")
and row.get("detection") == 1
):
- html += (
- f"<td style='background-color: yellow;'>{value}</td>\n"
- )
+ html += f"""<td><input style='all: unset; background-color: yellow;' type="text" name="{input_name}" value="{value if value is not None else ""}" /></td>\n"""
else:
- html += f"<td>{value}</td>\n"
+ html += f"""<td><input style='all: unset;' type="text" name="{input_name}" value="{value if value is not None else ""}" /></td>\n"""
html += "</tr>\n"
counter += 1
html += "</table>\n"
- html += "<input type='submit' value='Delete Selected Files' />"
+ html += """<input type="submit" name="action" value="Update Files">"""
+ html += """<input style="margin-left: 10px;" type="submit" name="action" value="Delete Selected Files">"""
html += "</form>\n"
+ # -------------------------------------------------------------------------------------------------
+ # developer actions
+ # -------------------------------------------------------------------------------------------------
+
# Generate the HTML for the developer actions
html += "<h3>Developer Actions</h3>"
html += f"<button id='delete-button' type='button' onclick='delete_id({id})'>Mark Fileset for Deletion</button>"
@@ -464,6 +469,10 @@ def fileset():
connection.commit()
html += "<p id='delete-confirm'>Fileset marked for deletion</p>"
+ # -------------------------------------------------------------------------------------------------
+ # logs
+ # -------------------------------------------------------------------------------------------------
+
# Generate the HTML for the fileset history
cursor.execute(
"SELECT `timestamp`, category, `text`, id FROM log WHERE `text` REGEXP 'Fileset:%s' ORDER BY `timestamp` DESC, id DESC",
@@ -536,6 +545,10 @@ def fileset():
html += "</table>\n"
+ # -------------------------------------------------------------------------------------------------
+ # manual merge
+ # -------------------------------------------------------------------------------------------------
+
# Manual merge final candidates
query = """
SELECT
@@ -580,6 +593,119 @@ def fileset():
connection.close()
+ at app.route("/files_action/<int:id>", methods=["POST"])
+def files_action(id):
+ action = request.form.get("action")
+ if action == "Delete Selected Files":
+ file_ids = request.form.getlist("file_ids")
+ if file_ids:
+ connection = db_connect()
+ with connection.cursor() as cursor:
+ placeholders = ",".join(["%s"] * len(file_ids))
+ cursor.execute(
+ f"DELETE FROM file WHERE id IN ({placeholders})", file_ids
+ )
+ connection.commit()
+
+ user = f"cli:{getpass.getuser()}"
+ log_text = (
+ f"{len(file_ids)} file(s) of Fileset:{id} deleted by moderator: {user}."
+ )
+ create_log("Files Deleted", user, log_text, connection)
+ connection.commit()
+
+ elif action == "Update Files":
+ connection = db_connect()
+ with connection.cursor() as cursor:
+ # Mapping from file id to a dictionary with field: value of any changes
+ changes_map = defaultdict(dict)
+ for k, v in request.form.items():
+ # e.g files[18704][detection] : 1
+ if k.startswith("files["):
+ file_id = k.split("[")[1].split("]")[0]
+ column = k.split("[")[2].split("]")[0]
+ changes_map[file_id][column] = v
+
+ allowed_columns = [
+ "name",
+ "size",
+ "size-r",
+ "size-rd",
+ "checksum",
+ "detection",
+ "detection_type",
+ "timestampmodification-time",
+ "language",
+ "md5-0",
+ "md5-1M",
+ "md5-1048576",
+ "md5-5000",
+ "md5-t-5000",
+ "md5-r-0",
+ "md5-r-1M",
+ "md5-r-1048576",
+ "md5-r-5000",
+ "md5-rt-5000",
+ "md5-d-0",
+ "md5-d-1M",
+ "md5-d-1048576",
+ "md5-d-5000",
+ "md5-dt-5000",
+ ]
+
+ table_map = {
+ "name": "file",
+ "size": "file",
+ "size-r": "file",
+ "size-rd": "file",
+ "checksum": "file",
+ "detection": "file",
+ "detection_type": "file",
+ "timestamp": "file",
+ "modification-time": "file",
+ "language": "file",
+ "md5-0": "filechecksum",
+ "md5-1M": "filechecksum",
+ "md5-1048576": "filechecksum",
+ "md5-5000": "filechecksum",
+ "md5-t-5000": "filechecksum",
+ "md5-r-0": "filechecksum",
+ "md5-r-1M": "filechecksum",
+ "md5-r-1048576": "filechecksum",
+ "md5-r-5000": "filechecksum",
+ "md5-rt-5000": "filechecksum",
+ "md5-d-0": "filechecksum",
+ "md5-d-1M": "filechecksum",
+ "md5-d-1048576": "filechecksum",
+ "md5-d-5000": "filechecksum",
+ "md5-dt-5000": "filechecksum",
+ }
+ for file, changes in changes_map.items():
+ updates_by_table = {"file": [], "filechecksum": []}
+ values_by_table = {"file": [], "filechecksum": []}
+ for col in allowed_columns:
+ if col in changes:
+ table = table_map[col]
+ updates_by_table[table].append(f"`{col}` = %s")
+ values_by_table[table].append(changes[col])
+
+ if updates_by_table["file"]:
+ query = f"UPDATE file SET {', '.join(updates_by_table['file'])} WHERE id = %s"
+ values = values_by_table["file"] + [file]
+ cursor.execute(query, values)
+
+ if updates_by_table["filechecksum"]:
+ query = f"UPDATE filechecksum SET {', '.join(updates_by_table['filechecksum'])} WHERE file = %s"
+ values = values_by_table["filechecksum"] + [file]
+ cursor.execute(query, values)
+ print(f"File:{file} for Fileset:{id} updated successfully.")
+ user = f"cli:{getpass.getuser()}"
+ log_text = f"{len(changes_map)} file(s) of Fileset:{id} updated by moderator: {user}."
+ create_log("Files Updated", user, log_text, connection)
+ connection.commit()
+ return redirect(url_for("fileset", id=id))
+
+
@app.route("/fileset/<int:id>/update", methods=["POST"])
def update_fileset(id):
connection = db_connect()
@@ -648,6 +774,9 @@ def update_fileset(id):
query = f"UPDATE engine SET {', '.join(updates_by_table['engine'])} WHERE id = %s"
values = values_by_table["engine"] + [engine_id]
cursor.execute(query, values)
+ user = f"cli:{getpass.getuser()}"
+ log_text = f"Fileset:{id} metadata updated by moderator: {user}."
+ create_log("Metadata Updated", user, log_text, connection)
print(f"Fileset:{id} updated successfully.")
connection.commit()
elif request.form.get("action") == "add_metadata":
@@ -676,6 +805,11 @@ def update_fileset(id):
cursor.execute(
"UPDATE fileset SET game = %s WHERE id = %s", (game_pk_id, id)
)
+ user = f"cli:{getpass.getuser()}"
+ log_text = (
+ f"Fileset:{id} additional metadata added by moderator: {user}."
+ )
+ create_log("Metadata Added", user, log_text, connection)
print(f"Fileset:{id} added additional metadata.")
connection.commit()
finally:
@@ -1417,8 +1551,8 @@ def execute_merge(id):
category_text = "Manually Merged"
user = f"cli:{getpass.getuser()}"
- log_text = f"Manually merged Fileset:{source_id} with Fileset:{target_id} by user: {user}."
- create_log(category_text, "Moderator", log_text, connection)
+ log_text = f"Manually merged Fileset:{source_id} with Fileset:{target_id} by moderator: {user}."
+ create_log(category_text, user, log_text, connection)
query = """
DELETE FROM possible_merges
@@ -1739,20 +1873,6 @@ def fileset_search():
)
- at app.route("/delete_files/<int:id>", methods=["POST"])
-def delete_files(id):
- file_ids = request.form.getlist("file_ids")
- if file_ids:
- connection = db_connect()
- with connection.cursor() as cursor:
- # SQL statements to delete related records
- placeholders = ",".join(["%s"] * len(file_ids))
- cursor.execute(f"DELETE FROM file WHERE id IN ({placeholders})", file_ids)
- # Commit the deletions
- connection.commit()
- return redirect(url_for("fileset", id=id))
-
-
if __name__ == "__main__":
app.secret_key = secret_key
app.run(port=5001, debug=True, host="0.0.0.0")
diff --git a/src/scripts/db_functions.py b/src/scripts/db_functions.py
index bf7fb77..3495fa9 100644
--- a/src/scripts/db_functions.py
+++ b/src/scripts/db_functions.py
@@ -2807,7 +2807,10 @@ def user_integrity_check(data, ip, game_metadata=None):
if match_type == "multiple":
# If multiple candidates matched, we will do manual review and ask user for more details.
category_text = "User fileset - Multiple candidates"
- log_text = f"Possible new variant Fileset:{user_fileset_id} from user. Multiple filesets candidates {', '.join(f'Fileset:{id}' for id in candidate_filesets)}"
+ match_text = f"Candidates {', '.join(f'Fileset:{id}' for id in candidate_filesets)}"
+ if len(candidate_filesets) == 1:
+ match_text = f"Matched Fileset:{candidate_filesets[0]}"
+ log_text = f"Possible new variant Fileset:{user_fileset_id} from user. {match_text}"
create_log(
category_text,
user,
diff --git a/static/js/track_metadata_update.js b/static/js/track_metadata_update.js
index 5bd7a76..2ea1010 100644
--- a/static/js/track_metadata_update.js
+++ b/static/js/track_metadata_update.js
@@ -7,3 +7,26 @@ input.addEventListener("input", () => {
notice.style.display = "block";
});
});
+
+document.addEventListener("DOMContentLoaded", function () {
+ document.querySelectorAll("input[type='text']").forEach(function (input) {
+ input.addEventListener("input", function () {
+ if (input.value !== input.defaultValue) {
+ input.style.backgroundColor = "lightyellow";
+ } else {
+ input.style.backgroundColor = "";
+ }
+ });
+ });
+
+ const form = document.querySelector("#file_action_form");
+ if (form) {
+ form.addEventListener("submit", () => {
+ form.querySelectorAll("input[type='text']").forEach(input => {
+ if (input.value === input.defaultValue) {
+ input.disabled = true;
+ }
+ });
+ });
+ }
+});
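The Update Files branch decodes form keys shaped like files[<file_id>][<column>] into a per-file change map before building the per-table updates, while the new JavaScript disables unchanged inputs on submit so only edited fields are posted. A self-contained sketch of that key parsing (the sample form data is made up):

    from collections import defaultdict

    def parse_file_changes(form_items):
        """Turn keys like files[18704][detection] into {file_id: {column: value}}."""
        changes_map = defaultdict(dict)
        for key, value in form_items:
            if key.startswith("files["):
                # "files[18704][detection]" -> file id "18704", column "detection"
                file_id = key.split("[")[1].split("]")[0]
                column = key.split("[")[2].split("]")[0]
                changes_map[file_id][column] = value
        return changes_map

    # Made-up sample data, mirroring what the form would post:
    sample = [("files[18704][detection]", "1"), ("files[18704][name]", "intro.dat")]
    print(dict(parse_file_changes(sample)))
    # -> {'18704': {'detection': '1', 'name': 'intro.dat'}}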
Commit: 4c025f7b070786eba87449603c6afba90b733597
https://github.com/scummvm/scummvm-sites/commit/4c025f7b070786eba87449603c6afba90b733597
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add checks for status='ReadyForReview'
All conditional checks for status='user' also need to check for status='ReadyForReview', which marks a user fileset that has been submitted a threshold number of times (currently set to 3).
Changed paths:
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index 35e0c4f..4f70dc0 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -171,7 +171,7 @@ def fileset():
"""SELECT id, game, status, src, `key`, timestamp, set_dat_metadata FROM fileset WHERE id = %s""",
(id,),
)
- elif status == "user":
+ elif status == "user" or status == "ReadyForReview":
cursor.execute(
"""SELECT id, game, status, src, `key`, timestamp, user_count FROM fileset WHERE id = %s""",
(id,),
@@ -207,7 +207,7 @@ def fileset():
cursor.execute(query, (id,))
result = {**result, **cursor.fetchone()}
else:
- if status == "user":
+ if status == "user" or status == "ReadyForReview":
html += "<h4>Add additional metadata</h4>"
cursor.execute(
@@ -323,14 +323,18 @@ def fileset():
html += "<tr>\n"
for column, value in result.items():
if column != "id" and column != "game":
- if not result["game"] and status == "user":
+ if not result["game"] and (
+ status == "user" or status == "ReadyForReview"
+ ):
html += f"<td>{value}</td>"
else:
html += f"""<td><input style='all: unset;' type="text" name="{column}" value="{value if value is not None else ""}" /></td>"""
html += "</tr>\n"
html += "</table>\n"
- if not (not result["game"] and status == "user"):
+ if not (
+ not result["game"] and (status == "user" or status == "ReadyForReview")
+ ):
html += "<button type='submit' name='action' value='update_metadata'>Update metadata</button>"
html += "</form>"
@@ -1510,7 +1514,7 @@ def execute_merge(id):
query = "DELETE FROM file WHERE fileset = %s"
cursor.execute(query, (target_id,))
- if source_status != "user":
+ if source_status != "user" and source_status != "ReadyForReview":
query = "DELETE FROM fileset WHERE id = %s"
cursor.execute(query, (source_id,))
@@ -1539,7 +1543,7 @@ def execute_merge(id):
]:
insert_filechecksum(details, key, file_id, connection)
- if source_status != "user":
+ if source_status != "user" and source_status != "ReadyForReview":
cursor.execute(
"""
INSERT INTO history (`timestamp`, fileset, oldfileset)
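The status == "user" or status == "ReadyForReview" test now recurs in several branches; a small predicate (not part of this commit, just a possible tidy-up) would keep the list of user-like statuses in one place:

    # Hypothetical helper, not in the commit: one place to extend if another
    # review status is ever added.
    USER_STATUSES = ("user", "ReadyForReview")

    def is_user_fileset(status):
        return status in USER_STATUSES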
Commit: cf8c8158bb192eb914c3551229c5e601fcc2099e
https://github.com/scummvm/scummvm-sites/commit/cf8c8158bb192eb914c3551229c5e601fcc2099e
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add confirmation window for fileset actions
Add confirmation windows before adding or updating the metadata, and before updating or deleting the files.
Changed paths:
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index 4f70dc0..fe58b96 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -184,7 +184,7 @@ def fileset():
result = cursor.fetchone()
html += "<h3>Fileset details</h3>"
- html += f"<form method='POST' action='/fileset/{id}/update'>"
+ html += f"""<form method='POST' action='/fileset/{id}/update' onsubmit="return confirm('Are you sure you want to perform this action on the metadata?');">"""
html += "<table'>\n"
if result["game"]:
@@ -356,7 +356,7 @@ def fileset():
html += "<input type='submit' value='Hide extra checksums' />"
html += "</form>"
- html += f"""<form id="file_action_form" method="POST" action="{url_for("files_action", id=id)}">"""
+ html += f"""<form id="file_action_form" method="POST" action="{url_for("files_action", id=id)}" onsubmit="return confirm('Are you sure you want to perform this action on the files?');">"""
# Table
html += "<table>\n"
Commit: d24b682a8432975516e41d21fd0b5728f8ee9560
https://github.com/scummvm/scummvm-sites/commit/d24b682a8432975516e41d21fd0b5728f8ee9560
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Add button for deleting fileset
A button for deleting the fileset has been added to the fileset dashboard, with a confirmation window.
Changed paths:
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index fe58b96..612f2e1 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -45,6 +45,11 @@ limiter = Limiter(
secret_key = os.urandom(24)
+def get_current_user():
+ user = f"cli:{getpass.getuser()}"
+ return user
+
+
@app.route("/")
def index():
return redirect(url_for("logs"))
@@ -463,15 +468,9 @@ def fileset():
# Generate the HTML for the developer actions
html += "<h3>Developer Actions</h3>"
- html += f"<button id='delete-button' type='button' onclick='delete_id({id})'>Mark Fileset for Deletion</button>"
-
- if "delete" in request.form:
- cursor.execute(
- "UPDATE fileset SET `delete` = TRUE WHERE id = %s",
- (request.form["delete"],),
- )
- connection.commit()
- html += "<p id='delete-confirm'>Fileset marked for deletion</p>"
+ html += f"""<form action="{url_for("delete_fileset", id=id)}" method="POST" onsubmit="return confirm('Are you sure you want to delete the fileset?');">"""
+ html += "<button type='submit'>Delete the Fileset</button>"
+ html += "</form>"
# -------------------------------------------------------------------------------------------------
# logs
@@ -597,6 +596,19 @@ def fileset():
connection.close()
+ at app.route("/fileset/delete/<int:id>", methods=["POST"])
+def delete_fileset(id):
+ connection = db_connect()
+ with connection.cursor() as cursor:
+ query = "DELETE FROM fileset WHERE id = %s"
+ cursor.execute(query, (id,))
+ user = get_current_user()
+ log_text = f"Fileset deleted by moderator: {user} id:{id}"
+ create_log("Filset Deleted", user, log_text, connection)
+ connection.commit()
+ return redirect(url_for("logs"))
+
+
@app.route("/files_action/<int:id>", methods=["POST"])
def files_action(id):
action = request.form.get("action")
Commit: 3d9c8267cee99005fadcda606d3018e953ef8291
https://github.com/scummvm/scummvm-sites/commit/3d9c8267cee99005fadcda606d3018e953ef8291
Author: ShivangNagta (shivangnag at gmail.com)
Date: 2025-08-21T11:04:32+02:00
Commit Message:
INTEGRITY: Create endpoint for mail notification
Changed paths:
src/app/fileset.py
diff --git a/src/app/fileset.py b/src/app/fileset.py
index 612f2e1..5c66776 100644
--- a/src/app/fileset.py
+++ b/src/app/fileset.py
@@ -1889,6 +1889,21 @@ def fileset_search():
)
+ at app.route("/email_notification/<int:fileset_id>", methods=["GET"])
+def email_notification(fileset_id):
+ connection = db_connect()
+ log_text = f"User email received for Fileset:{fileset_id}"
+ create_log("Email Received", "Mail Server", log_text, connection)
+ connection.commit()
+ return jsonify(
+ {
+ "status": "success",
+ "fileset_id": fileset_id,
+ "message": "Email notification logged",
+ }
+ ), 200
+
+
if __name__ == "__main__":
app.secret_key = secret_key
app.run(port=5001, debug=True, host="0.0.0.0")
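Since the notification endpoint is a plain GET returning JSON, the mail server can call it with any HTTP client. A minimal Python example against the development server configured above (the fileset id 42 is arbitrary):

    import requests  # assumes the requests package is installed

    # Port 5001 comes from app.run() above; adjust host/port for production.
    resp = requests.get("http://localhost:5001/email_notification/42")
    print(resp.status_code)  # expected: 200
    print(resp.json())       # {'status': 'success', 'fileset_id': 42, ...}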