changeset 202:b9b184b3303c

[Notes] Images get processed and are properly fetched. Thank you.
author MrJuneJune <me@mrjunejune.com>
date Sun, 15 Feb 2026 09:12:57 -0800
parents 6cdee35a7ba9
children 92a57bd716c1
files mrjunejune/BUILD mrjunejune/data/.gitkeep mrjunejune/main.c mrjunejune/src/notes/editor.js mrjunejune/src/notes/index.html mrjunejune/src/public/editor.js
diffstat 5 files changed, 393 insertions(+), 267 deletions(-) [+]
line wrap: on
line diff
--- a/mrjunejune/BUILD	Sun Feb 15 07:07:50 2026 -0800
+++ b/mrjunejune/BUILD	Sun Feb 15 09:12:57 2026 -0800
@@ -99,6 +99,12 @@
   visibility = ["//visibility:public"],
 )
 
+filegroup(
+  name = "data_dir",
+  srcs = glob(["data/*"]),
+  visibility = ["//visibility:public"],
+)
+
 # Run this to create html files
 cc_binary(
   name = "create_html_from_md",
--- a/mrjunejune/main.c	Sun Feb 15 07:07:50 2026 -0800
+++ b/mrjunejune/main.c	Sun Feb 15 09:12:57 2026 -0800
@@ -21,6 +21,7 @@
   char     s3_key_processed[512];
   char     content_type[128];
   char     access_token[256];
+  char     db_path[256];
   S3_Config s3_config;
 } Media_Processing_Context;
 
@@ -668,15 +669,6 @@
   return resp;
 }
 
-Seobeo_Request_Entry *GetEditor(Seobeo_Request_Entry *req, Dowa_Arena *arena)
-{
-  Seobeo_Request_Entry *resp = NULL;
-  char *final_body = Dowa_Arena_Allocate(arena, 50 * 1024);
-  Seobeo_Render_Html_FilePath(final_body, "/editor/index.html", arena);
-  Dowa_HashMap_Push_Arena(resp, "body", final_body, arena);
-  return resp;
-}
-
 Seobeo_Request_Entry *GetNotesLogin(Seobeo_Request_Entry *req, Dowa_Arena *arena)
 {
   Seobeo_Request_Entry *resp = NULL;
@@ -1268,11 +1260,16 @@
 {
   Media_Processing_Context *ctx = (Media_Processing_Context *)arg;
 
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Background thread started for media_id=%lld\n", (long long)ctx->media_id);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] S3 key original: %s\n", ctx->s3_key_original);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] S3 key processed: %s\n", ctx->s3_key_processed);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] DB path: %s\n", ctx->db_path);
+
   // Open thread-local DB connection
-  Deita_Connection *db_conn = Deita_Connection_Create(DEITA_DATABASE_TYPE_SQLITE3, g_db_path);
+  Deita_Connection *db_conn = Deita_Connection_Create(DEITA_DATABASE_TYPE_SQLITE3, ctx->db_path);
   if (!db_conn || !Deita_Connection_Is_Open(db_conn))
   {
-    printf("[MEDIA] Thread ERROR: Failed to open database for media_id=%lld\n", (long long)ctx->media_id);
+    Seobeo_Log(SEOBEO_ERROR, "[MEDIA] Thread ERROR: Failed to open database for media_id=%lld\n", (long long)ctx->media_id);
     free(ctx);
     return NULL;
   }
@@ -1283,24 +1280,27 @@
   char media_id_str[32];
   snprintf(media_id_str, sizeof(media_id_str), "%lld", (long long)ctx->media_id);
   const char *params[] = { media_id_str };
-  Deita_Query_Execute_Update_Prepared(db_conn, update_processing, 1, params);
-
-  printf("[MEDIA] Processing media_id=%lld\n", (long long)ctx->media_id);
+  int32 update_result = Deita_Query_Execute_Update_Prepared(db_conn, update_processing, 1, params);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Updated status to 'processing' for media_id=%lld (result=%d)\n", (long long)ctx->media_id, update_result);
 
   // Generate presigned GET URL for download (10 min expiry)
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Generating presigned GET URL for media_id=%lld\n", (long long)ctx->media_id);
   S3_Presigned_URL download_url = S3_Presign_Get(&ctx->s3_config, ctx->s3_key_original, 600);
   if (!download_url.success)
   {
+    const char *error_msg = download_url.error_message ? download_url.error_message : "Failed to generate download URL";
+    Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: Failed to generate download URL for media_id=%lld: %s\n",
+               (long long)ctx->media_id, error_msg);
     const char *update_error =
       "UPDATE media_uploads SET status='error', error_message=?, updated_at=strftime('%s','now') WHERE id=?";
-    const char *error_params[] = { "Failed to generate download URL", media_id_str };
+    const char *error_params[] = { error_msg, media_id_str };
     Deita_Query_Execute_Update_Prepared(db_conn, update_error, 2, error_params);
-    printf("[MEDIA] ERROR: Failed to generate download URL for media_id=%lld\n", (long long)ctx->media_id);
     S3_Presigned_URL_Destroy(&download_url);
     Deita_Connection_Close(db_conn);
     free(ctx);
     return NULL;
   }
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Generated presigned URL: %.100s...\n", download_url.url);
 
   // Generate temp file paths
   char tmp_input[256];
@@ -1317,6 +1317,7 @@
   free(uuid_output);
 
   // Download from S3
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Downloading from S3 to %s for media_id=%lld\n", tmp_input, (long long)ctx->media_id);
   Seobeo_Client_Request *download_req = Seobeo_Client_Request_Create(download_url.url);
   Seobeo_Client_Request_Set_Download_Path(download_req, tmp_input);
   Seobeo_Client_Response *download_resp = Seobeo_Client_Request_Execute(download_req);
@@ -1325,11 +1326,13 @@
 
   if (!download_resp || download_resp->status_code != 200)
   {
+    int status = download_resp ? download_resp->status_code : 0;
+    Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: Failed to download from S3 for media_id=%lld (status=%d)\n",
+               (long long)ctx->media_id, status);
     const char *update_error =
       "UPDATE media_uploads SET status='error', error_message=?, updated_at=strftime('%s','now') WHERE id=?";
     const char *error_params[] = { "Failed to download from S3", media_id_str };
     Deita_Query_Execute_Update_Prepared(db_conn, update_error, 2, error_params);
-    printf("[MEDIA] ERROR: Failed to download from S3 for media_id=%lld\n", (long long)ctx->media_id);
     if (download_req) Seobeo_Client_Request_Destroy(download_req);
     if (download_resp) Seobeo_Client_Response_Destroy(download_resp);
     unlink(tmp_input);
@@ -1338,6 +1341,7 @@
     return NULL;
   }
 
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Successfully downloaded file to %s\n", tmp_input);
   Seobeo_Client_Request_Destroy(download_req);
   Seobeo_Client_Response_Destroy(download_resp);
 
@@ -1348,14 +1352,40 @@
   snprintf(cmd, sizeof(cmd), "ffmpeg -y -i %s -quality 80 %s 2>%s",
            tmp_input, tmp_output, log_file);
 
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Running FFmpeg: %s\n", cmd);
   int ffmpeg_result = system(cmd);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] FFmpeg result: %d for media_id=%lld\n", ffmpeg_result, (long long)ctx->media_id);
+
   if (ffmpeg_result != 0)
   {
+    Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: FFmpeg conversion failed for media_id=%lld (exit code %d). Check log: %s\n",
+               (long long)ctx->media_id, ffmpeg_result, log_file);
     const char *update_error =
       "UPDATE media_uploads SET status='error', error_message=?, updated_at=strftime('%s','now') WHERE id=?";
     const char *error_params[] = { "Image conversion failed", media_id_str };
     Deita_Query_Execute_Update_Prepared(db_conn, update_error, 2, error_params);
-    printf("[MEDIA] ERROR: FFmpeg conversion failed for media_id=%lld\n", (long long)ctx->media_id);
+    unlink(tmp_input);
+    unlink(tmp_output);
+    Deita_Connection_Close(db_conn);
+    free(ctx);
+    return NULL;
+  }
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Successfully converted to webp: %s\n", tmp_output);
+
+  // Upload processed file to S3
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Uploading processed file to S3: %s -> %s\n", tmp_output, ctx->s3_key_processed);
+  S3_Result upload_result = S3_Upload_File_With_Content_Type(
+    &ctx->s3_config, tmp_output, ctx->s3_key_processed, "image/webp");
+
+  if (!upload_result.success)
+  {
+    const char *error_msg = upload_result.error_message ? upload_result.error_message : "Failed to upload processed file";
+    Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: Failed to upload processed file for media_id=%lld: %s\n",
+               (long long)ctx->media_id, error_msg);
+    const char *update_error =
+      "UPDATE media_uploads SET status='error', error_message=?, updated_at=strftime('%s','now') WHERE id=?";
+    const char *error_params[] = { error_msg, media_id_str };
+    Deita_Query_Execute_Update_Prepared(db_conn, update_error, 2, error_params);
     unlink(tmp_input);
     unlink(tmp_output);
     Deita_Connection_Close(db_conn);
@@ -1363,30 +1393,14 @@
     return NULL;
   }
 
-  // Upload processed file to S3
-  S3_Result upload_result = S3_Upload_File_With_Content_Type(
-    &ctx->s3_config, tmp_output, ctx->s3_key_processed, "image/webp");
-
-  if (!upload_result.success)
-  {
-    const char *update_error =
-      "UPDATE media_uploads SET status='error', error_message=?, updated_at=strftime('%s','now') WHERE id=?";
-    const char *error_params[] = { "Failed to upload processed file", media_id_str };
-    Deita_Query_Execute_Update_Prepared(db_conn, update_error, 2, error_params);
-    printf("[MEDIA] ERROR: Failed to upload processed file for media_id=%lld\n", (long long)ctx->media_id);
-    unlink(tmp_input);
-    unlink(tmp_output);
-    Deita_Connection_Close(db_conn);
-    free(ctx);
-    return NULL;
-  }
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Successfully uploaded processed file to S3\n");
 
   // Update status to 'finished'
   const char *update_finished =
     "UPDATE media_uploads SET status='finished', updated_at=strftime('%s','now') WHERE id=?";
   Deita_Query_Execute_Update_Prepared(db_conn, update_finished, 1, params);
 
-  printf("[MEDIA] Successfully processed media_id=%lld\n", (long long)ctx->media_id);
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Successfully processed media_id=%lld - COMPLETE\n", (long long)ctx->media_id);
 
   // Cleanup
   unlink(tmp_input);
@@ -1500,9 +1514,13 @@
     return resp;
   }
 
+  Seobeo_Log(SEOBEO_INFO, "[MEDIA] Content type for media_id=%lld: '%s'\n", (long long)media_id, content_type_copy);
+
   // If content_type starts with "image/", spawn background processing thread
   if (strncmp(content_type_copy, "image/", 6) == 0)
   {
+    Seobeo_Log(SEOBEO_INFO, "[MEDIA] Detected image type, preparing to spawn background thread for media_id=%lld\n", (long long)media_id);
+
     // Create context for background thread (heap allocated)
     Media_Processing_Context *ctx = malloc(sizeof(Media_Processing_Context));
     ctx->media_id = media_id;
@@ -1510,28 +1528,36 @@
     strncpy(ctx->s3_key_processed, s3_key_processed_copy, sizeof(ctx->s3_key_processed) - 1);
     strncpy(ctx->content_type, content_type_copy, sizeof(ctx->content_type) - 1);
     strncpy(ctx->access_token, token, sizeof(ctx->access_token) - 1);
+    strncpy(ctx->db_path, g_db_path, sizeof(ctx->db_path) - 1);
     ctx->s3_key_original[sizeof(ctx->s3_key_original) - 1] = '\0';
     ctx->s3_key_processed[sizeof(ctx->s3_key_processed) - 1] = '\0';
     ctx->content_type[sizeof(ctx->content_type) - 1] = '\0';
     ctx->access_token[sizeof(ctx->access_token) - 1] = '\0';
+    ctx->db_path[sizeof(ctx->db_path) - 1] = '\0';
     ctx->s3_config = g_s3_config;
 
+    Seobeo_Log(SEOBEO_INFO, "[MEDIA] Creating pthread for media_id=%lld\n", (long long)media_id);
+
     // Spawn detached thread
     pthread_t thread_id;
     int thread_result = pthread_create(&thread_id, NULL, Media_Process_Background, ctx);
 
     if (thread_result != 0)
     {
-      Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: Failed to spawn processing thread for media_id=%lld\n", (long long)media_id);
+      Seobeo_Log(SEOBEO_ERROR, "[MEDIA] ERROR: pthread_create failed with result=%d for media_id=%lld\n", thread_result, (long long)media_id);
       free(ctx);
     }
     else
     {
       // Detach thread so it cleans up automatically when done
       pthread_detach(thread_id);
-      Seobeo_Log(SEOBEO_INFO, "[MEDIA] Spawned processing thread for media_id=%lld\n", (long long)media_id);
+      Seobeo_Log(SEOBEO_INFO, "[MEDIA] Successfully spawned and detached thread for media_id=%lld\n", (long long)media_id);
     }
   }
+  else
+  {
+    Seobeo_Log(SEOBEO_INFO, "[MEDIA] Non-image file, skipping background processing for media_id=%lld\n", (long long)media_id);
+  }
 
   Dowa_HashMap_Push_Arena(resp, "status", "200", arena);
   Dowa_HashMap_Push_Arena(resp, "content-type", "application/json", arena);
@@ -1600,7 +1626,7 @@
 
   // Query media status
   const char *select_query =
-    "SELECT id, status, s3_key_processed, error_message FROM media_uploads WHERE id = ? AND access_token = ?";
+    "SELECT id, status, s3_key_original, s3_key_processed, error_message FROM media_uploads WHERE id = ? AND access_token = ?";
   const char *select_params[] = { media_id_str, token };
 
   Deita_Result_Set *p_result = Deita_Query_Execute_Prepared(g_db_connection, select_query, 2, select_params, arena);
@@ -1616,10 +1642,11 @@
 
   int64 id = Deita_Result_Set_Get_Integer(p_result, 0);
   const char *status = Deita_Result_Set_Get_Text(p_result, 1);
-  const char *s3_key_processed = Deita_Result_Set_Get_Text(p_result, 2);
-  const char *error_message = Deita_Result_Set_Get_Text(p_result, 3);
+  const char *s3_key_original = Deita_Result_Set_Get_Text(p_result, 2);
+  const char *s3_key_processed = Deita_Result_Set_Get_Text(p_result, 3);
+  const char *error_message = Deita_Result_Set_Get_Text(p_result, 4);
 
-  // Build CloudFront URL if status is 'finished' and s3_key_processed exists
+  // Build CloudFront URL for processed file if status is 'finished'
   char processed_url[1024] = {0};
   if (strcmp(status, "finished") == 0 && s3_key_processed && strlen(s3_key_processed) > 0)
   {
@@ -1634,32 +1661,63 @@
     }
   }
 
-  // Build JSON response
-  char *response_body = Dowa_Arena_Allocate(arena, 2048);
+  // Build CloudFront URL for original file (for non-images or before processing completes)
+  char original_url[1024] = {0};
+  if (s3_key_original && strlen(s3_key_original) > 0)
+  {
+    if (g_s3_cloudfront_url[0])
+    {
+      snprintf(original_url, sizeof(original_url), "%s/%s", g_s3_cloudfront_url, s3_key_original);
+    }
+    else
+    {
+      snprintf(original_url, sizeof(original_url), "https://%s.s3.%s.amazonaws.com/%s",
+               g_s3_bucket, g_s3_region, s3_key_original);
+    }
+  }
+
+  // Build JSON response with both processed_url and original_url
+  char *response_body = Dowa_Arena_Allocate(arena, 3072);
+
+  // Build the base response
+  int offset = snprintf(response_body, 3072,
+                        "{\"id\":%lld,\"status\":\"%s\",",
+                        (long long)id, status);
+
+  // Add processed_url
   if (strlen(processed_url) > 0)
   {
-    snprintf(response_body, 2048,
-             "{\"id\":%lld,\"status\":\"%s\",\"processed_url\":\"%s\",\"error_message\":%s}",
-             (long long)id, status, processed_url,
-             error_message ? "\"" : "null");
-    if (error_message)
-    {
-      // Append error message if exists
-      size_t len = strlen(response_body);
-      snprintf(response_body + len - 1, 2048 - len + 1, "%s\"}", error_message);
-    }
+    offset += snprintf(response_body + offset, 3072 - offset,
+                       "\"processed_url\":\"%s\",", processed_url);
   }
   else
   {
-    snprintf(response_body, 2048,
-             "{\"id\":%lld,\"status\":\"%s\",\"processed_url\":null,\"error_message\":%s}",
-             (long long)id, status,
-             error_message ? "\"" : "null");
-    if (error_message)
-    {
-      size_t len = strlen(response_body);
-      snprintf(response_body + len - 1, 2048 - len + 1, "%s\"}", error_message);
-    }
+    offset += snprintf(response_body + offset, 3072 - offset,
+                       "\"processed_url\":null,");
+  }
+
+  // Add original_url
+  if (strlen(original_url) > 0)
+  {
+    offset += snprintf(response_body + offset, 3072 - offset,
+                       "\"original_url\":\"%s\",", original_url);
+  }
+  else
+  {
+    offset += snprintf(response_body + offset, 3072 - offset,
+                       "\"original_url\":null,");
+  }
+
+  // Add error_message
+  if (error_message && strlen(error_message) > 0)
+  {
+    snprintf(response_body + offset, 3072 - offset,
+             "\"error_message\":\"%s\"}", error_message);
+  }
+  else
+  {
+    snprintf(response_body + offset, 3072 - offset,
+             "\"error_message\":null}");
   }
 
   Deita_Result_Set_Free(p_result);
@@ -1715,6 +1773,14 @@
   printf("[S3] Configured: region=%s, bucket=%s, key=%s...\n",
          g_s3_region, g_s3_bucket, s3_access_key[0] ? "***" : "(missing)");
 
+  // Show current working directory
+  char cwd[1024];
+  if (getcwd(cwd, sizeof(cwd)) != NULL)
+  {
+    printf("[STARTUP] Current working directory: %s\n", cwd);
+    printf("[STARTUP] Database path (relative): %s\n", g_db_path);
+  }
+
   // Initialize database
   init_database();
 
@@ -1760,10 +1826,6 @@
   Seobeo_Router_Register("GET", "/talk", GetTalk);
   Seobeo_Router_Register("GET", "/talk/index.html", GetRedirectTalk);
 
-  // -- Editor (legacy) --/
-  Seobeo_Router_Register("GET", "/editor", GetEditor);
-  Seobeo_Router_Register("GET", "/editor/index.html", GetRedirectEditor);
-
   // -- Notes --/
   Seobeo_Router_Register("GET", "/notes", GetNotes);
   Seobeo_Router_Register("GET", "/notes/", GetNotes);
--- a/mrjunejune/src/notes/editor.js	Sun Feb 15 07:07:50 2026 -0800
+++ b/mrjunejune/src/notes/editor.js	Sun Feb 15 09:12:57 2026 -0800
@@ -1,5 +1,3 @@
-console.log("june");
-
 let editor = null;
 let currentNoteId = 'index';
 
@@ -76,7 +74,7 @@
   }
 
   // 1. Create media record
-  const createResp = await fetch('/api/media/create', {
+  const createResponse = await fetch('/api/media/create', {
     method: 'POST',
     headers: {
       'Authorization': 'Bearer ' + token,
@@ -88,60 +86,72 @@
     })
   });
 
-  if (!createResp.ok) {
-    const error = await createResp.json();
+  if (!createResponse.ok) {
+    const error = await createResponse.json().catch(() => ({}));
     throw new Error(error.error || 'Failed to create media record');
   }
 
-  const { media_id, upload_url } = await createResp.json();
+  const data = await createResponse.json();
 
-  // 2. Upload to S3
-  const uploadResp = await fetch(upload_url, {
+  // 2. Upload file directly to S3
+  const uploadResponse = await fetch(data.upload_url, {
     method: 'PUT',
-    headers: { 'Content-Type': file.type },
+    headers: {
+      'Content-Type': file.type
+    },
     body: file
   });
 
-  if (!uploadResp.ok) {
-    throw new Error('S3 upload failed');
+  if (!uploadResponse.ok) {
+    throw new Error('Failed to upload file to S3');
   }
 
-  // 3. Mark uploaded
-  await fetch(`/api/media/${media_id}/uploaded`, {
+  // 3. Mark as uploaded (triggers processing for images)
+  await fetch(`/api/media/${data.media_id}/uploaded`, {
     method: 'POST',
-    headers: { 'Authorization': 'Bearer ' + token }
+    headers: {
+      'Authorization': 'Bearer ' + token
+    }
   });
 
-  // 4. Poll for images, immediate return for non-images
+  // 4. Poll for images, return immediately for non-images
   if (file.type.startsWith('image/')) {
-    return await pollForProcessedImage(media_id);
+    return await pollForProcessedImage(data.media_id, token);
   } else {
-    // For non-images, return the original S3 URL
-    const s3_url = upload_url.split('?')[0];
-    return { url: s3_url };
+    // For non-images, construct the public URL
+    const publicUrl = data.upload_url.split('?')[0]; // Remove query params
+    return { url: publicUrl };
   }
 }
 
-async function pollForProcessedImage(mediaId) {
-  const token = getAuthToken();
-  const maxAttempts = 60; // 2 minutes max
+async function pollForProcessedImage(mediaId, token) {
+  const maxAttempts = 60; // 2 minutes max (60 * 2 seconds)
 
   for (let i = 0; i < maxAttempts; i++) {
-    await new Promise(r => setTimeout(r, 2000)); // 2 sec interval
+    await new Promise(resolve => setTimeout(resolve, 2000)); // 2 second interval
 
-    const resp = await fetch(`/api/media/${mediaId}/status`, {
-      headers: { 'Authorization': 'Bearer ' + token }
+    const statusResponse = await fetch(`/api/media/${mediaId}/status`, {
+      headers: {
+        'Authorization': 'Bearer ' + token
+      }
     });
 
-    if (!resp.ok) continue;
+    if (!statusResponse.ok) {
+      console.warn('Status check failed, retrying...');
+      continue;
+    }
+
+    const statusData = await statusResponse.json();
 
-    const { status, processed_url, error_message } = await resp.json();
-
-    if (status === 'finished') return { url: processed_url };
-    if (status === 'error') throw new Error(error_message || 'Processing failed');
+    if (statusData.status === 'finished') {
+      return { url: statusData.processed_url };
+    } else if (statusData.status === 'error') {
+      throw new Error(statusData.error_message || 'Processing failed');
+    }
+    // Status is 'uploaded' or 'processing', continue polling
   }
 
-  throw new Error('Processing timeout');
+  throw new Error('Processing timeout after 2 minutes');
 }
 
 async function saveContent(content) {
@@ -202,3 +212,4 @@
 
   loadNote(currentNoteId);
 });
+
--- a/mrjunejune/src/notes/index.html	Sun Feb 15 07:07:50 2026 -0800
+++ b/mrjunejune/src/notes/index.html	Sun Feb 15 09:12:57 2026 -0800
@@ -158,174 +158,6 @@
   {{/parts/footer.html}}
 
   <script src="/public/js/rich_editor.js"></script>
-  <script>
-
-    let editor = null;
-    let currentNoteId = 'index';
-
-    function getAuthToken() {
-      return localStorage.getItem('notes-auth-token');
-    }
-
-    function requireAuth() {
-      if (!getAuthToken()) {
-        const returnUrl = encodeURIComponent(window.location.pathname);
-        window.location.href = '/notes/login?return=' + returnUrl;
-        return false;
-      }
-      return true;
-    }
-
-    function logout() {
-      localStorage.removeItem('notes-auth-token');
-      window.location.href = '/notes/login';
-    }
-
-    function getNoteIdFromPath() {
-      const path = window.location.pathname;
-      const match = path.match(/^\/notes\/(.+)$/);
-      if (match && match[1] && match[1] !== 'login') {
-        return decodeURIComponent(match[1]);
-      }
-      return 'index';
-    }
-
-    function showNewNoteDialog() {
-      document.getElementById('new-note-dialog').classList.add('show');
-      document.getElementById('new-note-id').focus();
-    }
-
-    function hideNewNoteDialog() {
-      document.getElementById('new-note-dialog').classList.remove('show');
-      document.getElementById('new-note-id').value = '';
-    }
-
-    function createNewNote() {
-      let noteId = document.getElementById('new-note-id').value.trim();
-      if (!noteId) return;
-
-      // Sanitize note ID
-      noteId = noteId.toLowerCase().replace(/[^a-z0-9-]/g, '-').replace(/-+/g, '-');
-
-      hideNewNoteDialog();
-      window.location.href = '/notes/' + encodeURIComponent(noteId);
-    }
-
-    // Handle Enter key in new note dialog
-    document.getElementById('new-note-id').addEventListener('keydown', function(e) {
-      if (e.key === 'Enter') {
-        e.preventDefault();
-        createNewNote();
-      }
-      if (e.key === 'Escape') {
-        hideNewNoteDialog();
-      }
-    });
-
-    // Close dialog on backdrop click
-    document.getElementById('new-note-dialog').addEventListener('click', function(e) {
-      if (e.target === this) {
-        hideNewNoteDialog();
-      }
-    });
-
-    async function uploadFile(file) {
-      const token = getAuthToken();
-      if (!token) {
-        throw new Error('Not authenticated');
-      }
-
-      //  Get s3 bucket URL
-      const response = await fetch('/api/s3/upload-url', {
-        method: 'POST',
-        headers: {
-          'Authorization': 'Bearer ' + token,
-          'Content-Type': 'application/json'
-        },
-        body: JSON.stringify({
-          filename: file.name,
-          content_type: file.type
-        })
-      });
-
-      if (!response.ok) {
-        const error = await response.json();
-        throw new Error(error.error || 'Failed to get upload URL');
-      }
-
-      const data = await response.json();
-
-      const uploadResponse = await fetch(data.upload_url, {
-        method: 'PUT',
-        headers: { 'Content-Type': file.type },
-        body: file
-      });
-
-      if (!uploadResponse.ok) {
-        throw new Error('Failed to upload file to S3');
-      }
-
-      return { url: data.public_url, key: data.key };
-    }
-
-    async function saveContent(content) {
-      const token = getAuthToken();
-      if (!token) return;
-
-      const response = await fetch('/api/editor/save', {
-        method: 'POST',
-        headers: {
-          'Authorization': 'Bearer ' + token,
-          'Content-Type': 'application/json'
-        },
-        body: JSON.stringify({
-          doc_id: currentNoteId,
-          content: content
-        })
-      });
-
-      if (!response.ok) {
-        throw new Error('Failed to save');
-      }
-    }
-
-    async function loadNote(noteId) {
-      const token = getAuthToken();
-      if (!token) return;
-
-      try {
-        const response = await fetch('/api/editor/load/' + encodeURIComponent(noteId), {
-          headers: { 'Authorization': 'Bearer ' + token }
-        });
-
-        if (response.ok) {
-          const data = await response.json();
-          editor.setContent(data.content || '');
-        }
-      } catch (error) {
-        console.error('Failed to load note:', error);
-      }
-    }
-
-    // Initialize
-    document.addEventListener('DOMContentLoaded', function() {
-      if (!requireAuth()) return;
-
-      currentNoteId = getNoteIdFromPath();
-      document.getElementById('note-id-display').textContent = currentNoteId;
-
-      // Update page title
-      document.title = currentNoteId + ' | Notes';
-
-      editor = RichEditor.init('editor-container', {
-        uploadCallback: uploadFile,
-        saveCallback: saveContent,
-        debounceMs: 1500,
-        placeholder: 'Start writing... (paste images, drag files, or use /upload)\n\nTip: Click "+ New Note" to create linked notes.'
-      });
-
-      loadNote(currentNoteId);
-    });
-  </script>
+  <script src="/public/editor.js"></script>
 </body>
 </html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mrjunejune/src/public/editor.js	Sun Feb 15 09:12:57 2026 -0800
@@ -0,0 +1,215 @@
+let editor = null;
+let currentNoteId = 'index';
+
+function getAuthToken() {
+  return localStorage.getItem('notes-auth-token');
+}
+
+function requireAuth() {
+  if (!getAuthToken()) {
+    const returnUrl = encodeURIComponent(window.location.pathname);
+    window.location.href = '/notes/login?return=' + returnUrl;
+    return false;
+  }
+  return true;
+}
+
+function logout() {
+  localStorage.removeItem('notes-auth-token');
+  window.location.href = '/notes/login';
+}
+
+function getNoteIdFromPath() {
+  const path = window.location.pathname;
+  const match = path.match(/^\/notes\/(.+)$/);
+  if (match && match[1] && match[1] !== 'login') {
+    return decodeURIComponent(match[1]);
+  }
+  return 'index';
+}
+
+function showNewNoteDialog() {
+  document.getElementById('new-note-dialog').classList.add('show');
+  document.getElementById('new-note-id').focus();
+}
+
+function hideNewNoteDialog() {
+  document.getElementById('new-note-dialog').classList.remove('show');
+  document.getElementById('new-note-id').value = '';
+}
+
+function createNewNote() {
+  let noteId = document.getElementById('new-note-id').value.trim();
+  if (!noteId) return;
+
+  // Sanitize note ID
+  noteId = noteId.toLowerCase().replace(/[^a-z0-9-]/g, '-').replace(/-+/g, '-');
+
+  hideNewNoteDialog();
+  window.location.href = '/notes/' + encodeURIComponent(noteId);
+}
+
+// Handle Enter key in new note dialog
+document.getElementById('new-note-id').addEventListener('keydown', function(e) {
+  if (e.key === 'Enter') {
+    e.preventDefault();
+    createNewNote();
+  }
+  if (e.key === 'Escape') {
+    hideNewNoteDialog();
+  }
+});
+
+// Close dialog on backdrop click
+document.getElementById('new-note-dialog').addEventListener('click', function(e) {
+  if (e.target === this) {
+    hideNewNoteDialog();
+  }
+});
+
+async function uploadFile(file) {
+  const token = getAuthToken();
+  if (!token) {
+    throw new Error('Not authenticated');
+  }
+
+  // 1. Create media record
+  const createResponse = await fetch('/api/media/create', {
+    method: 'POST',
+    headers: {
+      'Authorization': 'Bearer ' + token,
+      'Content-Type': 'application/json'
+    },
+    body: JSON.stringify({
+      filename: file.name,
+      content_type: file.type
+    })
+  });
+
+  if (!createResponse.ok) {
+    const error = await createResponse.json().catch(() => ({}));
+    throw new Error(error.error || 'Failed to create media record');
+  }
+
+  const data = await createResponse.json();
+
+  // 2. Upload file directly to S3
+  const uploadResponse = await fetch(data.upload_url, {
+    method: 'PUT',
+    headers: {
+      'Content-Type': file.type
+    },
+    body: file
+  });
+
+  if (!uploadResponse.ok) {
+    throw new Error('Failed to upload file to S3');
+  }
+
+  // 3. Mark as uploaded (triggers processing for images)
+  await fetch(`/api/media/${data.media_id}/uploaded`, {
+    method: 'POST',
+    headers: {
+      'Authorization': 'Bearer ' + token
+    }
+  });
+
+  // 4. Poll for images, return immediately for non-images
+  if (file.type.startsWith('image/')) {
+    return await pollForProcessedImage(data.media_id, token);
+  } else {
+    // For non-images, construct the public URL
+    const publicUrl = data.upload_url.split('?')[0]; // Remove query params
+    return { url: publicUrl };
+  }
+}
+
+async function pollForProcessedImage(mediaId, token) {
+  const maxAttempts = 60; // 2 minutes max (60 * 2 seconds)
+
+  for (let i = 0; i < maxAttempts; i++) {
+    await new Promise(resolve => setTimeout(resolve, 2000)); // 2 second interval
+
+    const statusResponse = await fetch(`/api/media/${mediaId}/status`, {
+      headers: {
+        'Authorization': 'Bearer ' + token
+      }
+    });
+
+    if (!statusResponse.ok) {
+      console.warn('Status check failed, retrying...');
+      continue;
+    }
+
+    const statusData = await statusResponse.json();
+
+    if (statusData.status === 'finished') {
+      return { url: statusData.processed_url };
+    } else if (statusData.status === 'error') {
+      throw new Error(statusData.error_message || 'Processing failed');
+    }
+    // Status is 'uploaded' or 'processing', continue polling
+  }
+
+  throw new Error('Processing timeout after 2 minutes');
+}
+
+async function saveContent(content) {
+  const token = getAuthToken();
+  if (!token) return;
+
+  const response = await fetch('/api/editor/save', {
+    method: 'POST',
+    headers: {
+      'Authorization': 'Bearer ' + token,
+      'Content-Type': 'application/json'
+    },
+    body: JSON.stringify({
+      doc_id: currentNoteId,
+      content: content
+    })
+  });
+
+  if (!response.ok) {
+    throw new Error('Failed to save');
+  }
+}
+
+async function loadNote(noteId) {
+  const token = getAuthToken();
+  if (!token) return;
+
+  try {
+    const response = await fetch('/api/editor/load/' + encodeURIComponent(noteId), {
+      headers: { 'Authorization': 'Bearer ' + token }
+    });
+
+    if (response.ok) {
+      const data = await response.json();
+      editor.setContent(data.content || '');
+    }
+  } catch (error) {
+    console.error('Failed to load note:', error);
+  }
+}
+
+// Initialize
+document.addEventListener('DOMContentLoaded', function() {
+  if (!requireAuth()) return;
+
+  currentNoteId = getNoteIdFromPath();
+  document.getElementById('note-id-display').textContent = currentNoteId;
+
+  // Update page title
+  document.title = currentNoteId + ' | Notes';
+
+  editor = RichEditor.init('editor-container', {
+    uploadCallback: uploadFile,
+    saveCallback: saveContent,
+    debounceMs: 1500,
+    placeholder: 'Start writing... (paste images, drag files, or use /upload)\n\nTip: Click "+ New Note" to create linked notes.'
+  });
+
+  loadNote(currentNoteId);
+});
+