#!/usr/bin/env python3
"""
CSV Management Script for CoffeeAtHomeFlutter

Usage: python csv_manager.py [command] [options]
"""
import csv
import json
import os
import shutil
import sys
from collections import Counter
from datetime import datetime
# Directory that holds the app's CSV data files.
CSV_DIR = "lib/database"

# Directory under which timestamped backups are created.
BACKUP_DIR = "csv_backups"
def backup_csv_files():
    """Copy all known CSV files into a timestamped folder under BACKUP_DIR.

    Files missing from CSV_DIR are skipped silently; the backup folder is
    created even if it ends up empty.
    """
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = f"{BACKUP_DIR}/backup_{timestamp}"
    # exist_ok creates BACKUP_DIR and the timestamped subfolder in one call,
    # replacing the original's two separate existence-check + makedirs steps.
    os.makedirs(backup_path, exist_ok=True)

    csv_files = ["Coffee_Beans.csv", "Coffee_Machines.csv", "Brew_Recipes.csv", "Origin_Countries.csv"]

    for csv_file in csv_files:
        src = f"{CSV_DIR}/{csv_file}"
        dst = f"{backup_path}/{csv_file}"
        if os.path.exists(src):
            # shutil.copy2 preserves file metadata and is cross-platform;
            # the import is hoisted to the top of the file instead of being
            # re-executed inside this loop as the original did.
            shutil.copy2(src, dst)
            print(f"Backed up {csv_file}")

    print(f"Backup created: {backup_path}")
def add_bean(bean_data):
    """Append a new bean row to Coffee_Beans.csv.

    Args:
        bean_data: dict mapping column names to values. Must contain the
            required fields ("id", "name", "origin", "varietal") and an
            "id" not already present in the file.

    Returns:
        True on success; False (with a printed error message) on any
        validation or I/O failure.
    """
    csv_file = f"{CSV_DIR}/Coffee_Beans.csv"

    # Validate required fields before touching the file.
    required_fields = ["id", "name", "origin", "varietal"]
    for field in required_fields:
        if field not in bean_data:
            print(f"Error: Missing required field '{field}'")
            return False

    # Single read pass: capture the header AND the existing ids. The
    # original re-opened the file for reading while it was already open
    # for append, just to recover the fieldnames.
    try:
        with open(csv_file, 'r', newline='', encoding='utf-8') as f:
            reader = csv.DictReader(f)
            fieldnames = reader.fieldnames
            existing_ids = {row['id'] for row in reader}
    except FileNotFoundError:
        print(f"Error: CSV file not found: {csv_file}")
        return False

    if bean_data['id'] in existing_ids:
        print(f"Error: Bean ID '{bean_data['id']}' already exists")
        return False

    # An empty file has no header (fieldnames is None): adopt the new
    # row's keys as the schema and emit a header row.
    write_header = not fieldnames
    if write_header:
        fieldnames = list(bean_data.keys())
    else:
        # Reject keys absent from the existing header up front, instead of
        # letting DictWriter raise after the file is opened for append.
        extra = set(bean_data) - set(fieldnames)
        if extra:
            print(f"Error adding bean: unknown fields {sorted(extra)}")
            return False

    try:
        with open(csv_file, 'a', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            if write_header:
                writer.writeheader()
            writer.writerow(bean_data)
        print(f"Added bean: {bean_data['name']}")
        return True
    except Exception as e:
        print(f"Error adding bean: {e}")
        return False
def validate_csv_format():
    """Validate the CSV data files for required columns and unique ids.

    Checks that each configured file exists, contains its required
    columns, and has no duplicate values in its unique columns.

    Returns:
        True when every file passes all checks, False otherwise.
    """
    csv_configs = {
        "Coffee_Beans.csv": {
            "required_columns": ["id", "name", "origin", "varietal", "roastLevel"],
            "unique_columns": ["id"]
        },
        "Coffee_Machines.csv": {
            "required_columns": ["id", "manufacturer", "model", "type"],
            "unique_columns": ["id"]
        },
        "Brew_Recipes.csv": {
            "required_columns": ["id", "name", "brewMethod", "grindSize"],
            "unique_columns": ["id"]
        }
    }

    all_valid = True

    for csv_file, config in csv_configs.items():
        file_path = f"{CSV_DIR}/{csv_file}"
        if not os.path.exists(file_path):
            print(f"❌ Missing file: {csv_file}")
            all_valid = False
            continue

        try:
            with open(file_path, 'r', newline='', encoding='utf-8') as f:
                reader = csv.DictReader(f)
                rows = list(reader)

            # Check required columns.
            missing_cols = set(config["required_columns"]) - set(reader.fieldnames or [])
            if missing_cols:
                print(f"❌ {csv_file}: Missing columns: {missing_cols}")
                all_valid = False
                continue

            # Check unique constraints. Counter makes this O(n) instead of
            # the original's quadratic values.count() scan.
            file_valid = True
            for unique_col in config["unique_columns"]:
                counts = Counter(row[unique_col] for row in rows if row[unique_col])
                duplicates = {value for value, n in counts.items() if n > 1}
                if duplicates:
                    print(f"❌ {csv_file}: Duplicate {unique_col} values: {duplicates}")
                    all_valid = False
                    file_valid = False

            # Bug fix: the original printed the ✅ line unconditionally, so a
            # file with duplicates reported both ❌ and ✅.
            if file_valid:
                print(f"✅ {csv_file}: Valid ({len(rows)} rows)")

        except Exception as e:
            print(f"❌ {csv_file}: Error reading file: {e}")
            all_valid = False

    return all_valid
def export_to_json():
    """Export all CSV data to JSON for easy inspection."""
    output_dir = "csv_exports"
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    source_names = ["Coffee_Beans.csv", "Coffee_Machines.csv", "Brew_Recipes.csv", "Origin_Countries.csv"]

    for name in source_names:
        src_path = f"{CSV_DIR}/{name}"
        if not os.path.exists(src_path):
            continue

        try:
            # Read every row into a list of dicts keyed by the CSV header.
            with open(src_path, 'r', newline='', encoding='utf-8') as src:
                rows = list(csv.DictReader(src))

            dst_path = f"{output_dir}/{name.replace('.csv', '.json')}"
            with open(dst_path, 'w', encoding='utf-8') as dst:
                json.dump(rows, dst, indent=2, ensure_ascii=False)

            print(f"Exported {name} -> {dst_path}")
        except Exception as exc:
            print(f"Error exporting {name}: {exc}")
if __name__ == "__main__":
    # A command name is mandatory.
    if len(sys.argv) < 2:
        print("Usage: python csv_manager.py [backup|validate|export|add_bean]")
        sys.exit(1)

    cmd = sys.argv[1]

    if cmd == "backup":
        backup_csv_files()
    elif cmd == "validate":
        if validate_csv_format():
            print("✅ All CSV files are valid")
        else:
            print("❌ Some CSV files have issues")
            sys.exit(1)
    elif cmd == "export":
        export_to_json()
    elif cmd == "add_bean":
        # Example:
        #   python csv_manager.py add_bean '{"id":"bean_new","name":"New Bean","origin":"Colombia","varietal":"Arabica","roastLevel":"Medium"}'
        if len(sys.argv) < 3:
            print("Usage: python csv_manager.py add_bean '{json_data}'")
            sys.exit(1)

        try:
            payload = json.loads(sys.argv[2])
        except json.JSONDecodeError:
            print("Error: Invalid JSON data")
            sys.exit(1)
        else:
            add_bean(payload)
    else:
        print(f"Unknown command: {cmd}")
        print("Available commands: backup, validate, export, add_bean")
        sys.exit(1)