diff --git a/src/app/Database/ProcessCSV.tsx b/src/app/Database/ProcessCSV.tsx
index 1cd472e..9cf7350 100644
--- a/src/app/Database/ProcessCSV.tsx
+++ b/src/app/Database/ProcessCSV.tsx
@@ -1,82 +1,99 @@
+// File: components/ProcessCSV.tsx
+
 'use client';
 
-import React, { useState, useEffect } from 'react';
-import { FaCloudUploadAlt, FaCheckCircle, FaTimesCircle, FaSpinner, FaTrashAlt } from 'react-icons/fa';
+import React, { useState, useEffect, useRef } from 'react';
+import {
+  FaCloudUploadAlt,
+  FaCheckCircle,
+  FaTimesCircle,
+  FaSpinner,
+  FaTrashAlt,
+  FaList,
+  FaChartBar,
+} from 'react-icons/fa';
 import { Button } from '@/components/ui/button';
 import { toast } from 'sonner';
 
-const ProcessCSV: React.FC = () => {
+interface ProcessCSVProps {
+  clearOptions: () => void;
+}
+
+const ProcessCSV: React.FC<ProcessCSVProps> = ({ clearOptions }) => {
   const [uploading, setUploading] = useState(false);
-  const [uploadStatus, setUploadStatus] = useState<'idle' | 'loading' | 'success' | 'error'>('idle');
-  const [progress, setProgress] = useState('');
+  const [uploadStatus, setUploadStatus] = useState<'idle' | 'loading' | 'success' | 'error'>(
+    'idle'
+  );
+  const [currentLog, setCurrentLog] = useState('');
+  const [logs, setLogs] = useState<string[]>([]);
   const [totalBatches, setTotalBatches] = useState(0);
   const [processedBatches, setProcessedBatches] = useState(0);
   const [showModal, setShowModal] = useState(false);
-  const [estimatedTotalBatches, setEstimatedTotalBatches] = useState(0);
-
-  // New state to track if we're in the browser
   const [isBrowser, setIsBrowser] = useState(false);
+  const [showLogs, setShowLogs] = useState(false);
+  const [progress, setProgress] = useState(0);
+  const [totalRows, setTotalRows] = useState(0);
+  const [processedRows, setProcessedRows] = useState(0);
+
+  const eventSourceRef = useRef<EventSource | null>(null);
 
-  // Use useEffect to set isBrowser to true after component mounts
   useEffect(() => {
     setIsBrowser(true);
+
+    // Cleanup EventSource on component unmount
+    return () => {
+      if (eventSourceRef.current) {
+        eventSourceRef.current.close();
+      }
+    };
   }, []);
 
   const resetUI = () => {
     setUploading(false);
     setUploadStatus('idle');
-    setProgress('');
+    setCurrentLog('');
+    setLogs([]);
     setTotalBatches(0);
     setProcessedBatches(0);
-    setEstimatedTotalBatches(0);
+    setProgress(0);
+    setTotalRows(0);
+    setProcessedRows(0);
   };
 
-  const handleUpload = async () => {
-    if (!isBrowser) return; // Prevent execution during SSR
+  const handleUpload = () => {
+    if (!isBrowser) return;
 
     setUploading(true);
     setUploadStatus('loading');
-    setProgress('');
+    setCurrentLog('');
+    setLogs([]);
    setTotalBatches(0);
    setProcessedBatches(0);
-    setEstimatedTotalBatches(0);
+    setProgress(0);
+    setTotalRows(0);
+    setProcessedRows(0);
 
     try {
-      const response = await fetch('/api/redis/process');
-      const contentType = response.headers.get('Content-Type');
-
-      if (contentType?.includes('application/json')) {
-        // Handle JSON response (build time or unsupported environment)
-        const data = await response.json();
-        if (data.message) {
-          toast.info(data.message);
-          setUploading(false);
-          resetUI();
-          return;
-        }
-      } else if (contentType?.includes('text/event-stream')) {
-        // Handle SSE for actual processing
-        const eventSource = new EventSource('/api/redis/process');
-
-        eventSource.onmessage = (event) => {
-          const message = event.data;
-          handleEventMessage(message, eventSource);
-        };
-
-        eventSource.onerror = (error) => {
-          console.error('EventSource failed:', error);
-          setUploadStatus('error');
-          toast.error('An error occurred while processing CSV files');
-          eventSource.close();
-          setUploading(false);
-        };
-
-        eventSource.onopen = () => {
-          console.log('Connection to server opened.');
-        };
-      } else {
-        throw new Error('Unexpected response type');
-      }
+      // Open EventSource to start processing
+      const eventSource = new EventSource('/api/redis/process');
+      eventSourceRef.current = eventSource;
+
+      eventSource.onmessage = (event) => {
+        const message = event.data;
+        handleEventMessage(message, eventSource);
+      };
+
+      eventSource.onerror = (error) => {
+        console.error('EventSource failed:', error);
+        setUploadStatus('error');
+        toast.error('An error occurred while processing CSV files');
+        eventSource.close();
+        setUploading(false);
+      };
+
+      eventSource.onopen = () => {
+        console.log('Connection to server opened.');
+      };
     } catch (error: any) {
       setUploadStatus('error');
       toast.error(error.message || 'An unexpected error occurred');
@@ -85,28 +102,45 @@ const ProcessCSV: React.FC = () => {
   };
 
   const handleEventMessage = (message: string, eventSource: EventSource) => {
-    setProgress(prevProgress => prevProgress + '\n' + message);
+    setCurrentLog(message);
+    setLogs((prevLogs) => [...prevLogs, message]);
 
-    if (message.includes('Estimated total batches:')) {
-      const matches = message.match(/Estimated total batches: (\d+)/);
+    if (message.startsWith('Total rows:')) {
+      const matches = message.match(/Total rows: (\d+), Total batches: (\d+)/);
       if (matches) {
-        const estimated = parseInt(matches[1], 10);
-        setEstimatedTotalBatches(estimated);
-        setTotalBatches(estimated);
+        const total = parseInt(matches[1], 10);
+        const batches = parseInt(matches[2], 10);
+        setTotalRows(total);
+        setTotalBatches(batches);
       }
-    } else if (message.includes('Processed and uploaded batch')) {
-      const matches = message.match(/Processed and uploaded batch (\d+)\. Total rows: (\d+)/);
+    } else if (message.startsWith('Processed batch')) {
+      const matches = message.match(
+        /Processed batch (\d+)\/(\d+)\. Rows: (\d+)\/(\d+)\. Progress: ([\d.]+)%/
+      );
       if (matches) {
         const processed = parseInt(matches[1], 10);
+        const total = parseInt(matches[2], 10);
+        const rows = parseInt(matches[3], 10);
+        const totalRows = parseInt(matches[4], 10);
+        const progressPercentage = parseFloat(matches[5]);
+
         setProcessedBatches(processed);
+        setTotalBatches(total);
+        setProcessedRows(rows);
+        setTotalRows(totalRows);
+        setProgress(progressPercentage);
       }
-    } else if (message.includes('Finished processing')) {
+    } else if (
+      message.startsWith('Successfully processed') ||
+      message.startsWith('Finished processing')
+    ) {
       const matches = message.match(/Finished processing (\d+) total rows in (\d+) batches/);
       if (matches) {
         const totalRows = parseInt(matches[1], 10);
         const actualTotalBatches = parseInt(matches[2], 10);
         setTotalBatches(actualTotalBatches);
         setProcessedBatches(actualTotalBatches);
+        setProgress(100);
       }
       setUploadStatus('success');
       toast.success('CSV files processed and data stored in Redis');
@@ -121,7 +155,7 @@ const ProcessCSV: React.FC = () => {
   };
 
   const handleCleanDatabase = async () => {
-    if (!isBrowser) return; // Prevent execution during SSR
+    if (!isBrowser) return;
 
     try {
       const response = await fetch('/api/redis', { method: 'DELETE' });
@@ -130,6 +164,8 @@ const ProcessCSV: React.FC = () => {
         throw new Error(errorData.error || 'Network response was not ok');
       }
       toast.success('Database cleaned successfully');
+      clearOptions();
+      resetUI();
     } catch (error: any) {
       console.error('Clean database error:', error);
       toast.error(error.message || 'Failed to clean the database');
@@ -138,7 +174,6 @@ const ProcessCSV: React.FC = () => {
     }
   };
 
-  // Render null during SSR
   if (!isBrowser) {
     return null;
   }
@@ -172,24 +207,45 @@ const ProcessCSV: React.FC = () => {
         {uploadStatus === 'loading' && (
-          <div>
-            <h3>Progress:</h3>
-            <pre>
-              {progress}
-            </pre>
-          </div>
+          <div>
+            <h3>Current Progress:</h3>
+            <pre>
+              {currentLog}
+            </pre>
+          </div>
         )}
 
-        {estimatedTotalBatches > 0 && (
+        {totalBatches > 0 && (
           <div>
             <h3>Total Progress:</h3>
-            <p>
-              {processedBatches} batches processed of approximately {estimatedTotalBatches} total batches
-            </p>
+            <div>
+              <p>
+                {processedBatches} of {totalBatches} batches processed, {progress.toFixed(2)}% complete
+                ({processedRows.toLocaleString()} of {totalRows.toLocaleString()} rows)
+              </p>
+
+              <Button onClick={() => setShowLogs(!showLogs)}>
+                {showLogs ? <FaChartBar /> : <FaList />} {showLogs ? 'Hide Logs' : 'Show Logs'}
+              </Button>
+
+              {showLogs && (
+                <div>
+                  {logs.map((log, index) => (
+                    <p key={index}>
+                      {log}
+                    </p>
+                  ))}
+                </div>
+              )}
+            </div>
          </div>
        )}
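
Note: the regexes in handleEventMessage above are coupled to the exact strings that sendProgress emits in src/app/api/redis/process/route.ts further down this diff; if either side drifts, the progress UI silently stops updating. As a sketch, that contract could be factored into a single shared helper. The parseProgress function and its module name are hypothetical, not part of this change:

    // progress-protocol.ts (hypothetical shared module; mirrors the messages in this diff)
    export type ProgressEvent =
      | { kind: 'totals'; totalRows: number; totalBatches: number }
      | { kind: 'batch'; batch: number; totalBatches: number; rows: number; totalRows: number; percent: number }
      | { kind: 'done' }
      | { kind: 'log' };

    export function parseProgress(message: string): ProgressEvent {
      let m = message.match(/^Total rows: (\d+), Total batches: (\d+)/);
      if (m) return { kind: 'totals', totalRows: +m[1], totalBatches: +m[2] };

      m = message.match(/^Processed batch (\d+)\/(\d+)\. Rows: (\d+)\/(\d+)\. Progress: ([\d.]+)%/);
      if (m) {
        return {
          kind: 'batch',
          batch: +m[1],
          totalBatches: +m[2],
          rows: +m[3],
          totalRows: +m[4],
          percent: parseFloat(m[5]),
        };
      }

      if (message.startsWith('Successfully processed') || message.startsWith('Finished processing')) {
        return { kind: 'done' };
      }
      return { kind: 'log' };
    }
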
diff --git a/src/app/Database/SearchDatabase.tsx b/src/app/Database/SearchDatabase.tsx
index 8cbc85c..046a334 100644
--- a/src/app/Database/SearchDatabase.tsx
+++ b/src/app/Database/SearchDatabase.tsx
@@ -14,7 +14,7 @@ import {
   VisibilityState,
 } from "@tanstack/react-table";
 import { ArrowUpDown, ChevronDown, MoreHorizontal } from "lucide-react";
-import { FaSearch } from "react-icons/fa";
+import { FaSearch, FaTrashAlt } from "react-icons/fa";
 import { toast } from "sonner";
 
 import { Button } from "@/components/ui/button";
@@ -37,7 +37,7 @@ import {
   TableHeader,
   TableRow,
 } from "@/components/ui/table";
-import { DataTableColumnHeader } from "./components/AdvancedDataTable/DataTableColumnHeader"; // Ensure this path is correct
+import { DataTableColumnHeader } from "./components/AdvancedDataTable/DataTableColumnHeader";
 
 // Define the Payment type based on your data structure
 export type Payment = {
@@ -124,11 +124,26 @@ export const columns: ColumnDef<Payment>[] = [
   },
 ];
 
-// Main Component
-const SearchDatabase: React.FC = () => {
-  const [clients, setClients] = useState<string[]>([]);
-  const [warehouses, setWarehouses] = useState<string[]>([]);
-  const [products, setProducts] = useState<string[]>([]);
+// New: Interface for component props
+interface SearchDatabaseProps {
+  options: {
+    clients: string[];
+    warehouses: string[];
+    products: string[];
+  };
+  updateOptions: (newOptions: {
+    clients: string[];
+    warehouses: string[];
+    products: string[];
+  }) => void;
+  clearOptions: () => void;
+}
+
+// Updated: Component definition to accept props
+const SearchDatabase: React.FC<SearchDatabaseProps> = ({ options, updateOptions, clearOptions }) => {
+  // Updated: Use options from props instead of state
+  const { clients, warehouses, products } = options;
+
   const [selectedClient, setSelectedClient] = useState("");
   const [selectedWarehouse, setSelectedWarehouse] = useState("");
   const [selectedProduct, setSelectedProduct] = useState("");
@@ -142,24 +157,25 @@ const SearchDatabase: React.FC = () => {
   const [columnVisibility, setColumnVisibility] = useState<VisibilityState>({});
   const [rowSelection, setRowSelection] = useState({});
 
-  useEffect(() => {
-    // Fetch available clients, warehouses, and products from the database
-    const fetchOptions = async () => {
-      try {
-        const response = await fetch("/api/redis/options");
-        if (!response.ok) {
-          throw new Error("Failed to fetch options");
-        }
-        const data = await response.json();
-        setClients(data.clients);
-        setWarehouses(data.warehouses);
-        setProducts(data.products);
-      } catch (error: any) {
-        console.error("Error fetching options:", error);
-        toast.error(error.message || "Failed to fetch options");
+  const [showModal, setShowModal] = useState(false);
+
+  // Updated: fetchOptions now uses updateOptions
+  const fetchOptions = async () => {
+    try {
+      const timestamp = new Date().getTime();
+      const response = await fetch(`/api/redis/options?t=${timestamp}`);
+      if (!response.ok) {
+        throw new Error('Failed to fetch options');
       }
-    };
+      const data = await response.json();
+      updateOptions(data);
+    } catch (error: any) {
+      console.error('Error fetching options:', error);
+      toast.error(error.message || 'Failed to fetch options');
+    }
+  };
 
+  useEffect(() => {
     fetchOptions();
   }, []);
@@ -256,6 +272,25 @@ const SearchDatabase: React.FC = () => {
     debugTable: false,
   });
 
+  // Updated: handleCleanDatabase now uses clearOptions prop
+  const handleCleanDatabase = async () => {
+    try {
+      const response = await fetch('/api/redis', { method: 'DELETE' });
+      if (!response.ok) {
+        const errorData = await response.json();
+        throw new Error(errorData.error || 'Network response was not ok');
+      }
+      toast.success('Database cleaned successfully');
+      clearOptions();
+      await fetchOptions(); // Re-fetch options (which should now be empty)
+    } catch (error: any) {
+      console.error('Clean database error:', error);
+      toast.error(error.message || 'Failed to clean the database');
+    } finally {
+      setShowModal(false);
+    }
+  };
+
   return (
     <div>
       <h2>Search in Database</h2>
 
@@ -316,6 +351,15 @@ const SearchDatabase: React.FC = () => {
           {isSearching ? "Searching..." : "Search"}
         </Button>
+        {/* New Clean Database button */}
+        <Button
+          variant="destructive"
+          onClick={() => setShowModal(true)}
+        >
+          <FaTrashAlt />
+          Clean Database
+        </Button>
       </div>
 
       {/* Advanced Table Controls */}
@@ -429,6 +473,24 @@ const SearchDatabase: React.FC = () => {
       </div>
+
+      {/* New Confirmation Modal */}
+      {showModal && (
+        <div>
+          <div>
+            <h2>Confirm Database Cleaning</h2>
+            <p>
+              Are you sure you want to clean the database? This action cannot be undone.
+            </p>
+            <div>
+              <Button onClick={() => setShowModal(false)}>Cancel</Button>
+              <Button variant="destructive" onClick={handleCleanDatabase}>
+                Clean Database
+              </Button>
+            </div>
+          </div>
+        </div>
+      )}
     </div>
   );
 };
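
Note: the { clients, warehouses, products } shape is spelled out inline in SearchDatabaseProps above and again in the page component below. A sketch of a shared type that both files could import follows; the DatabaseOptions name and the types.ts location are assumptions, not part of this change:

    // src/app/Database/types.ts (hypothetical shared module)
    export interface DatabaseOptions {
      clients: string[];
      warehouses: string[];
      products: string[];
    }

    export const EMPTY_OPTIONS: DatabaseOptions = {
      clients: [],
      warehouses: [],
      products: [],
    };

With such a type in place, SearchDatabaseProps could shrink to options: DatabaseOptions and updateOptions: (newOptions: DatabaseOptions) => void, and the page below could initialize its state from EMPTY_OPTIONS.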

diff --git a/src/app/Database/page.tsx b/src/app/Database/page.tsx
index d80beb3..58a5bc1 100644
--- a/src/app/Database/page.tsx
+++ b/src/app/Database/page.tsx
@@ -1,21 +1,37 @@
-// pages/database-manager/page.tsx
-
 'use client';
 
-import React from 'react';
+import React, { useState } from 'react';
 import ProcessCSV from './ProcessCSV';
 import SearchDatabase from './SearchDatabase';
 
 const DatabaseManagerPage: React.FC = () => {
+  const [options, setOptions] = useState<{
+    clients: string[];
+    warehouses: string[];
+    products: string[];
+  }>({
+    clients: [],
+    warehouses: [],
+    products: [],
+  });
+
+  const clearOptions = () => {
+    setOptions({
+      clients: [],
+      warehouses: [],
+      products: [],
+    });
+  };
+
+  const updateOptions = (newOptions: {
+    clients: string[];
+    warehouses: string[];
+    products: string[];
+  }) => {
+    setOptions(newOptions);
+  };
+
   return (
     <div>
       <h1>Database Manager</h1>
 
       {/* Process CSV Files Component */}
-      <ProcessCSV />
+      <ProcessCSV clearOptions={clearOptions} />
 
       {/* Search in Database Component */}
-      <SearchDatabase />
+      <SearchDatabase
+        options={options}
+        updateOptions={updateOptions}
+        clearOptions={clearOptions}
+      />
     </div>
   );
 };
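
Note: fetchOptions in SearchDatabase busts the browser cache with a ?t=${timestamp} query string, and the options route below additionally disables caching on the server. If a single mechanism is preferred, fetch's own cache mode can replace the timestamp. A sketch, assuming the options payload shape used above (fetchOptionsNoStore is a hypothetical name):

    // Hypothetical alternative to the timestamp cache-buster in fetchOptions.
    async function fetchOptionsNoStore(): Promise<{
      clients: string[];
      warehouses: string[];
      products: string[];
    }> {
      const response = await fetch('/api/redis/options', { cache: 'no-store' });
      if (!response.ok) {
        throw new Error('Failed to fetch options');
      }
      return response.json();
    }
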
diff --git a/src/app/api/redis/options/route.ts b/src/app/api/redis/options/route.ts
index 9ffd2db..8775f10 100644
--- a/src/app/api/redis/options/route.ts
+++ b/src/app/api/redis/options/route.ts
@@ -1,6 +1,8 @@
 import { NextRequest, NextResponse } from 'next/server';
 import { getRedisClient } from '@/lib/redis';
 
+export const dynamic = 'force-dynamic'; // This tells Next.js not to cache this route
+
 export async function GET(request: NextRequest) {
   const redis = getRedisClient();
 
@@ -20,11 +22,16 @@ export async function GET(request: NextRequest) {
       }
     });
 
-    return NextResponse.json({
+    const response = NextResponse.json({
       clients: Array.from(clients),
       warehouses: Array.from(warehouses),
       products: Array.from(products),
     });
+
+    // Set cache control headers
+    response.headers.set('Cache-Control', 'no-store, max-age=0');
+
+    return response;
   } catch (error) {
     console.error('Error fetching options from Redis:', error);
     return NextResponse.json({ error: 'Failed to fetch options' }, { status: 500 });
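
Note: the process route below speaks Server-Sent Events: it responds with Content-Type: text/event-stream and frames each update as a data: line followed by a blank line, which is exactly what the EventSource in ProcessCSV consumes. Assuming a dev server on localhost:3000, the raw stream can be inspected with curl's no-buffering flag:

    curl -N http://localhost:3000/api/redis/process
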
diff --git a/src/app/api/redis/process/route.ts b/src/app/api/redis/process/route.ts
index 9c3a54e..a7b0f67 100644
--- a/src/app/api/redis/process/route.ts
+++ b/src/app/api/redis/process/route.ts
@@ -1,3 +1,5 @@
+// File: app/api/redis/process/route.ts
+
 import { NextRequest, NextResponse } from 'next/server';
 import Redis from 'ioredis';
 import { z } from 'zod';
@@ -6,15 +8,20 @@
 import { parse } from 'csv-parse';
 import path from 'path';
 import { promises as fs } from 'fs';
 
+// Ensure the Node.js server runtime
+export const runtime = 'nodejs';
+
+// Disable caching and set maximum duration
 export const dynamic = 'force-dynamic';
 export const maxDuration = 300;
 
-// Environment variable schema and validation
-const env = z.object({
+// Define and validate environment variables
+const envSchema = z.object({
   REDIS_URL: z.string().url(),
   REDIS_PASSWORD: z.string().min(1),
   NEXT_PUBLIC_IS_BUILD: z.string().optional(),
-}).parse(process.env);
+});
+const env = envSchema.parse(process.env);
 
 console.log('Environment variables validated');
 
@@ -22,20 +29,24 @@ console.log('Environment variables validated');
 const redis = new Redis(env.REDIS_URL, { password: env.REDIS_PASSWORD });
 console.log('Redis client initialized');
 
-// Batch size for Redis operations
+// Define batch size for Redis operations
 const BATCH_SIZE = 1000;
 console.log(`Batch size set to ${BATCH_SIZE}`);
 
-// Process combined data in batches
-async function processBatch(batch: Record<string, any>, sendProgress: (message: string) => void) {
-  console.log(`Starting to process batch of ${Object.keys(batch).length} items`);
+// Function to process a single batch and store data in Redis
+async function processBatch(
+  batch: Record<string, Record<string, string>>,
+  sendProgress: (message: string) => void
+) {
+  const batchSize = Object.keys(batch).length;
+  console.log(`Starting to process batch of ${batchSize} items`);
   const pipeline = redis.pipeline();
 
   for (const [key, data] of Object.entries(batch)) {
     for (const [field, value] of Object.entries(data)) {
-      pipeline.hset(key, field, String(value));
+      pipeline.hset(key, field, value);
     }
-
+
     const [Client, Warehouse, Product] = key.split(':');
     pipeline.sadd(`index:client:${Client}`, key);
     pipeline.sadd(`index:warehouse:${Warehouse}`, key);
@@ -45,13 +56,14 @@ async function processBatch(batch: Record<string, any>, sendProgress: (message:
   console.log('Executing Redis pipeline');
   const results = await pipeline.exec();
   console.log(`Redis pipeline executed with ${results?.length} operations`);
-  sendProgress(`Uploaded batch of ${Object.keys(batch).length} items`);
+  sendProgress(`Uploaded batch of ${batchSize} items`);
 }
 
-// Stream and process CSV files
+// Function to stream and process CSV files
 async function streamCSVs(sendProgress: (message: string) => void) {
   sendProgress('Starting CSV processing');
   console.log('Starting CSV streaming and processing');
+
   const dataDir = path.join(process.cwd(), 'public', 'data');
   const salesPath = path.join(dataDir, 'Sales.csv');
   const pricePath = path.join(dataDir, 'Price.csv');
@@ -62,14 +74,16 @@ async function streamCSVs(sendProgress: (message: string) => void) {
   // Calculate total number of rows and batches
   const [salesRows, priceRows] = await Promise.all([
     getLineCount(salesPath),
-    getLineCount(pricePath)
+    getLineCount(pricePath),
   ]);
+
+  // Adjust totalRows based on headers
   const totalRows = salesRows + priceRows - 2; // Subtract 2 for headers
   const totalBatches = Math.ceil(totalRows / BATCH_SIZE);
-  sendProgress(`Total rows: ${totalRows}, Estimated total batches: ${totalBatches}`);
+  sendProgress(`Total rows: ${totalRows}, Total batches: ${totalBatches}`);
 
-  let combinedBatch: Record<string, any> = {};
+  let combinedBatch: Record<string, Record<string, string>> = {};
   let totalProcessed = 0;
   let batchCount = 0;
 
@@ -88,19 +102,17 @@ async function streamCSVs(sendProgress: (message: string) => void) {
     }
 
     for (const [date, value] of Object.entries(dates)) {
-      if (value !== undefined) {
+      if (value !== undefined && value !== '') {
         combinedBatch[key][`${type}:${date}`] = String(value);
       }
     }
-
-    totalProcessed++;
   };
 
   console.log('Starting to process Sales and Price CSVs');
   let salesDone = false;
   let priceDone = false;
-  let salesIter = salesParser[Symbol.asyncIterator]();
-  let priceIter = priceParser[Symbol.asyncIterator]();
+  const salesIter = salesParser[Symbol.asyncIterator]();
+  const priceIter = priceParser[Symbol.asyncIterator]();
 
   while (!salesDone || !priceDone) {
     if (!salesDone) {
@@ -121,15 +133,24 @@ async function streamCSVs(sendProgress: (message: string) => void) {
       }
     }
 
-    if (Object.keys(combinedBatch).length >= BATCH_SIZE || (salesDone && priceDone)) {
+    // Check if batch size reached or processing is done
+    if (
+      Object.keys(combinedBatch).length >= BATCH_SIZE ||
+      (salesDone && priceDone && Object.keys(combinedBatch).length > 0)
+    ) {
       await processBatch(combinedBatch, sendProgress);
       batchCount++;
-      sendProgress(`Processed and uploaded batch ${batchCount}. Total rows: ${totalProcessed}`);
-      combinedBatch = {};
-    }
 
-    if (totalProcessed % 10000 === 0) {
-      sendProgress(`Processed ${totalProcessed} rows`);
+      // Increment totalProcessed by the number of items in the batch
+      totalProcessed += Object.keys(combinedBatch).length;
+
+      const progressPercentage = ((totalProcessed / totalRows) * 100).toFixed(2);
+      sendProgress(
+        `Processed batch ${batchCount}/${totalBatches}. Rows: ${totalProcessed}/${totalRows}. Progress: ${progressPercentage}%`
+      );
+
+      // Reset combinedBatch for the next batch
+      combinedBatch = {};
     }
   }
 
@@ -137,13 +158,14 @@ async function streamCSVs(sendProgress: (message: string) => void) {
 
   sendProgress(`Finished processing ${totalProcessed} total rows in ${batchCount} batches`);
 }
 
+// Helper function to count the number of lines in a file
 async function getLineCount(filePath: string): Promise<number> {
   const fileBuffer = await fs.readFile(filePath);
   const fileContent = fileBuffer.toString();
   return fileContent.split('\n').length;
 }
 
-// GET handler
+// GET handler to initiate CSV processing and stream progress updates
 export async function GET(request: NextRequest) {
   console.log('GET request received');
@@ -165,12 +187,18 @@ export async function GET(request: NextRequest) {
       await streamCSVs(sendProgress);
       console.log('CSV processing completed successfully');
       controller.enqueue(`data: Successfully processed CSV files and stored data in Redis\n\n`);
-    } catch (error) {
+    } catch (error: any) {
       console.error('Error processing CSV files:', error);
-      controller.enqueue(`data: Error processing CSV files: ${error instanceof Error ? error.message : 'Unknown error'}\n\n`);
+      controller.enqueue(
+        `data: Error processing CSV files: ${
+          error instanceof Error ? error.message : 'Unknown error'
+        }\n\n`
+      );
     } finally {
       console.log('Closing ReadableStream controller');
       controller.close();
+      // Gracefully close Redis connection
+      redis.quit();
     }
   },
 });
@@ -180,7 +208,7 @@ export async function GET(request: NextRequest) {
     headers: {
       'Content-Type': 'text/event-stream',
       'Cache-Control': 'no-cache',
-      'Connection': 'keep-alive',
+      Connection: 'keep-alive',
     },
   });
 }
\ No newline at end of file
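
Note: getLineCount above reads each CSV fully into memory just to count lines, which works against the streaming design of the rest of this route. A constant-memory sketch using fs.createReadStream; the getLineCountStreaming name is an assumption, not part of this change:

    import { createReadStream } from 'fs';

    // Count '\n' bytes without buffering the whole file.
    async function getLineCountStreaming(filePath: string): Promise<number> {
      let newlines = 0;
      for await (const chunk of createReadStream(filePath)) {
        for (const byte of chunk as Buffer) {
          if (byte === 0x0a) newlines++;
        }
      }
      // split('\n').length === newlines + 1, matching the behavior above
      return newlines + 1;
    }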