# HG changeset patch
# User June Park
# Date 1767406412 28800
# Node ID 092afa5957642e09776f284bdf39d524229c6a95
# Parent be91a73d801aab23817f22f0aabda07a6dd35eff
[MrJuneJune] Added Integration tests.
diff -r be91a73d801a -r 092afa595764 mrjunejune/BUILD
--- a/mrjunejune/BUILD Fri Jan 02 18:02:22 2026 -0800
+++ b/mrjunejune/BUILD Fri Jan 02 18:13:32 2026 -0800
@@ -37,24 +37,6 @@
binary = ":mrjunejune_server",
)
-cc_library(
- name = "mrjunejune_server_lib",
- srcs = ["server_entry.c"],
- deps = ["//seobeo:seobeo_server"], # Use server-only target (no OpenSSL)
- linkstatic = False, # ensures dynamic linking
- visibility = ["//visibility:public"],
-)
-
-# py_binary(
-# name = "python_server",
-# srcs = ["python_server.py"],
-# deps = [
-# ":mrjunejune_server_lib",
-# "@pip_deps//:cffi",
-# ],
-# data = [":mrjunejune_server_lib"],
-# )
-
cc_test(
name = "integration_test",
srcs = ["test/integration_test.c"],
@@ -63,9 +45,10 @@
"//mrjunejune:mrjunejune_server",
"//mrjunejune:src_files",
"//mrjunejune:test_snapshots",
+ "//mrjunejune:test_files",
],
- size = "medium",
- timeout = "moderate",
+ size = "large",
+ timeout = "long",
args = ["$(location //mrjunejune:mrjunejune_server)"],
)
@@ -84,3 +67,32 @@
name = "test_snapshots",
srcs = glob(["test/snapshots/**"]),
)
+
+filegroup(
+ name = "test_files",
+ srcs = [
+ "test/shiba.webp",
+ "test/test_avi.avi",
+ ],
+)
+
+# Kept for reference: earlier attempt at exposing the server through Python FFI (cffi).
+# cc_library(
+# name = "mrjunejune_server_lib",
+# srcs = ["server_entry.c"],
+# deps = ["//seobeo:seobeo_server"],
+# linkstatic = False,
+# visibility = ["//visibility:public"],
+# )
+
+# py_binary(
+# name = "python_server",
+# srcs = ["python_server.py"],
+# deps = [
+# ":mrjunejune_server_lib",
+# "@pip_deps//:cffi",
+# ],
+# data = [":mrjunejune_server_lib"],
+# )
+
+
diff -r be91a73d801a -r 092afa595764 mrjunejune/test/README.md
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mrjunejune/test/README.md Fri Jan 02 18:13:32 2026 -0800
@@ -0,0 +1,115 @@
+# MrJuneJune Integration Tests
+
+This directory contains comprehensive integration tests for all mrjunejune endpoints.
+
+## Test Structure
+
+### Test Files
+- `integration_test.c` - Main integration test suite
+- `create_snapshots.c` - Utility to generate/update snapshot files
+- `shiba.webp` - Test image for image-to-webp conversion
+- `test_avi.avi` - Test video for video-to-mp4 conversion
+
+### Snapshot Directory
+- `snapshots/` - Contains expected HTTP responses for GET endpoints
+
+## Endpoints Tested
+
+### GET Endpoints (200 OK with Snapshot Verification)
+- `/` - Home page
+- `/resume` - Resume page
+- `/tools` - Tools page
+- `/tools/markdown_to_html` - Markdown to HTML converter
+- `/tools/file_converter` - File converter tool
+
+### GET Endpoints (301 Redirects)
+- `/index.html` → `/`
+- `/resume/index.html` → `/resume`
+- `/tools/index.html` → `/tools`
+- `/tools/markdown_to_html/index.html` → `/tools/markdown_to_html`
+- `/tools/file_converter/index.html` → `/tools/file_converter`
+
+### GET Endpoints (404 Not Found)
+- `/nonexistent`
+- `/does/not/exist`
+- `/missing.html`
+
+### POST Endpoints
+- `/api/convert/image-to-webp` - Converts images to WebP format
+ - Tests file upload, conversion, and download
+ - Verifies Content-Type: image/webp
+- `/api/convert/video-to-mp4` - Converts videos to MP4 format
+ - Tests file upload, conversion, and download
+ - Verifies Content-Type: video/mp4
+
+### Download Endpoint
+- `/api/download/:filename` - Download converted files
+ - Tested automatically as part of POST conversion tests
+
+## Running Tests
+
+### First Time Setup - Create Snapshots
+Before running tests for the first time, generate snapshots:
+
+```bash
+bazel run //mrjunejune:create_snapshots
+```
+
+This will:
+1. Start the mrjunejune server
+2. Make HTTP requests to all GET endpoints
+3. Save responses to `snapshots/` directory
+4. Stop the server
+
+### Run Integration Tests
+```bash
+bazel test //mrjunejune:integration_test
+```
+
+This will:
+1. Start the mrjunejune server on port 6969
+2. Test all GET endpoints against their snapshots
+3. Test POST conversion endpoints with real file uploads
+4. Verify downloads work correctly
+5. Report pass/fail for each test
+
+### View Test Output
+```bash
+bazel test //mrjunejune:integration_test --test_output=all
+```
+
+## Test Coverage
+
+✓ All 10 registered endpoints are tested
+✓ Snapshot testing for HTML responses
+✓ File upload and conversion testing
+✓ Download functionality testing
+✓ Error handling (404 responses)
+✓ Redirect testing (301 responses)
+
+## Updating Tests
+
+### When HTML Changes
+If you modify any HTML templates:
+
+```bash
+# Regenerate snapshots
+bazel run //mrjunejune:create_snapshots
+
+# Run tests to verify
+bazel test //mrjunejune:integration_test
+```
+
+### When Adding New Endpoints
+1. Update `main.c` with new route
+2. Add test case to `integration_test.c`
+3. Add snapshot config to `create_snapshots.c` (for GET)
+4. Regenerate snapshots
+5. Run tests
+
+## Notes
+
+- Tests require FFmpeg to be installed for video/image conversion tests
+- Server runs on port 6969 during tests
+- Test files are cleaned up automatically after download
+- Converted files are stored in `/tmp/` during tests
diff -r be91a73d801a -r 092afa595764 mrjunejune/test/create_snapshots.c
--- a/mrjunejune/test/create_snapshots.c Fri Jan 02 18:02:22 2026 -0800
+++ b/mrjunejune/test/create_snapshots.c Fri Jan 02 18:13:32 2026 -0800
@@ -106,11 +106,23 @@
// Define snapshots to create - paths that should succeed (200 OK)
SnapshotConfig success_snapshots[] = {
{"/", 200, snapshot_path, TEST_HOST, TEST_PORT},
- {"/index.html", 200, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/resume", 200, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools", 200, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools/markdown_to_html", 200, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools/file_converter", 200, snapshot_path, TEST_HOST, TEST_PORT},
};
int num_success = sizeof(success_snapshots) / sizeof(success_snapshots[0]);
- // Define snapshots for error paths (404)
+ // Define snapshots for redirect endpoints (301)
+ SnapshotConfig redirect_snapshots[] = {
+ {"/index.html", 301, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/resume/index.html", 301, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools/index.html", 301, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools/markdown_to_html/index.html", 301, snapshot_path, TEST_HOST, TEST_PORT},
+ {"/tools/file_converter/index.html", 301, snapshot_path, TEST_HOST, TEST_PORT},
+ };
+ int num_redirects = sizeof(redirect_snapshots) / sizeof(redirect_snapshots[0]);
+ // Define snapshots for error paths (404)
SnapshotConfig error_snapshots[] = {
{"/nonexistent", 404, snapshot_path, TEST_HOST, TEST_PORT},
{"/does/not/exist", 404, snapshot_path, TEST_HOST, TEST_PORT},
@@ -135,8 +147,22 @@
}
}
+ // Create redirect snapshots
+ printf("\nCreating snapshots for redirect paths:\n\n");
+ for (int i = 0; i < num_redirects; i++)
+ {
+ if (Seobeo_Snapshot_Create(&redirect_snapshots[i]) == 0)
+ {
+ total_passed++;
+ }
+ else
+ {
+ total_failed++;
+ }
+ }
+
// Create error snapshots
- printf("Creating snapshots for error paths:\n\n");
+ printf("\nCreating snapshots for error paths:\n\n");
for (int i = 0; i < num_errors; i++)
{
if (Seobeo_Snapshot_Create(&error_snapshots[i]) == 0)
diff -r be91a73d801a -r 092afa595764 mrjunejune/test/integration_test.c
--- a/mrjunejune/test/integration_test.c Fri Jan 02 18:02:22 2026 -0800
+++ b/mrjunejune/test/integration_test.c Fri Jan 02 18:13:32 2026 -0800
@@ -380,6 +380,229 @@
return 0;
}
+// Helper: Send POST request with file data
+int send_post_file(Seobeo_Handle *client, const char *path, const char *file_data, size_t file_size)
+{
+ char request_buffer[8192];
+ int header_len = snprintf(
+ request_buffer, sizeof(request_buffer),
+ "POST %s HTTP/1.1\r\n"
+ "Host: %s\r\n"
+ "Content-Type: application/octet-stream\r\n"
+ "Content-Length: %zu\r\n"
+ "Connection: close\r\n"
+ "\r\n",
+ path, TEST_HOST, file_size
+ );
+
+ if (header_len < 0 || header_len >= sizeof(request_buffer))
+ {
+ fprintf(stderr, "Request header too large\n");
+ return -1;
+ }
+
+ // Send headers
+ Seobeo_Handle_Queue(client, (uint8*)request_buffer, (uint32)header_len);
+
+ // Send file data in chunks if needed
+ size_t remaining = file_size;
+ const char *ptr = file_data;
+ while (remaining > 0)
+ {
+ size_t chunk_size = remaining > 4096 ? 4096 : remaining;
+ Seobeo_Handle_Queue(client, (uint8*)ptr, (uint32)chunk_size);
+ ptr += chunk_size;
+ remaining -= chunk_size;
+ }
+
+ return Seobeo_Handle_Flush(client);
+}
+
+// Helper: Extract JSON field value from response body
+char* extract_json_field(const char *json, const char *field, char *buffer, size_t buffer_size)
+{
+ char search_pattern[256];
+ snprintf(search_pattern, sizeof(search_pattern), "\"%s\":\"", field);
+
+ const char *start = strstr(json, search_pattern);
+ if (!start)
+ {
+ return NULL;
+ }
+
+ start += strlen(search_pattern);
+ const char *end = strchr(start, '"');
+ if (!end)
+ {
+ return NULL;
+ }
+
+ size_t len = end - start;
+ if (len >= buffer_size)
+ {
+ len = buffer_size - 1;
+ }
+
+ memcpy(buffer, start, len);
+ buffer[len] = '\0';
+
+ return buffer;
+}
+
+// Helper: Test POST file conversion
+int test_file_conversion(const char *endpoint, const char *test_file_path,
+ const char *expected_format, pid_t server_pid)
+{
+ printf(" Testing: POST %s\n", endpoint);
+
+ // Read test file
+ size_t file_size;
+ char *file_data = read_file(test_file_path, &file_size);
+ if (!file_data)
+ {
+ printf(" ✗ Failed to read test file: %s\n", test_file_path);
+ return -1;
+ }
+
+ printf(" → Loaded test file (%zu bytes)\n", file_size);
+
+ // Create client and send request
+ Seobeo_Handle *client = create_test_client();
+ if (!client)
+ {
+ printf(" ✗ Failed to create client connection\n");
+ free(file_data);
+ return -1;
+ }
+
+ if (send_post_file(client, endpoint, file_data, file_size) < 0)
+ {
+ printf(" ✗ Failed to send POST request\n");
+ free(file_data);
+ Seobeo_Handle_Destroy(client);
+ return -1;
+ }
+
+ free(file_data);
+
+ // Read response
+ char *response = NULL;
+ size_t response_len = 0;
+ if (read_http_response(client, &response, &response_len) < 0)
+ {
+ printf(" ✗ Failed to read response\n");
+ Seobeo_Handle_Destroy(client);
+ return -1;
+ }
+
+ Seobeo_Handle_Destroy(client);
+
+ // Parse status
+ int status = parse_http_status(response);
+ if (status != 200)
+ {
+ printf(" ✗ Conversion failed with status: %d\n", status);
+ printf(" Response: %s\n", response);
+ free(response);
+ return -1;
+ }
+
+ printf(" ✓ Status code: 200\n");
+
+ // Extract download URL from JSON response
+ const char *body = strstr(response, "\r\n\r\n");
+ if (!body)
+ {
+ printf(" ✗ No response body found\n");
+ free(response);
+ return -1;
+ }
+ body += 4;
+
+ char download_url[512];
+ if (!extract_json_field(body, "download_url", download_url, sizeof(download_url)))
+ {
+ printf(" ✗ Failed to extract download_url from response\n");
+ printf(" Response body: %s\n", body);
+ free(response);
+ return -1;
+ }
+
+ printf(" ✓ Conversion succeeded\n");
+ printf(" ✓ Download URL: %s\n", download_url);
+ free(response);
+
+ // Test downloading the converted file
+ printf(" → Testing download: GET %s\n", download_url);
+
+ client = create_test_client();
+ if (!client)
+ {
+ printf(" ✗ Failed to create client for download\n");
+ return -1;
+ }
+
+ if (send_http_request(client, download_url, NULL) < 0)
+ {
+ printf(" ✗ Failed to send download request\n");
+ Seobeo_Handle_Destroy(client);
+ return -1;
+ }
+
+ response = NULL;
+ response_len = 0;
+ if (read_http_response(client, &response, &response_len) < 0)
+ {
+ printf(" ✗ Failed to read download response\n");
+ Seobeo_Handle_Destroy(client);
+ return -1;
+ }
+
+ Seobeo_Handle_Destroy(client);
+
+ status = parse_http_status(response);
+ if (status != 200)
+ {
+ printf(" ✗ Download failed with status: %d\n", status);
+ free(response);
+ return -1;
+ }
+
+ // Find body in download response
+ body = strstr(response, "\r\n\r\n");
+ if (!body)
+ {
+ printf(" ✗ No file data in download response\n");
+ free(response);
+ return -1;
+ }
+ body += 4;
+
+ size_t downloaded_size = response_len - (body - response);
+
+ // Verify content type in response headers
+ const char *content_type = strstr(response, "Content-Type: ");
+ if (!content_type)
+ {
+ printf(" ✗ No Content-Type header in download\n");
+ free(response);
+ return -1;
+ }
+
+ if (strstr(content_type, expected_format) == NULL)
+ {
+ printf(" ✗ Wrong content type (expected %s)\n", expected_format);
+ free(response);
+ return -1;
+ }
+
+ printf(" ✓ Downloaded converted file (%zu bytes)\n", downloaded_size);
+ printf(" ✓ Content-Type: %s\n", expected_format);
+
+ free(response);
+ return 0;
+}
+
// Helper: Start test server
pid_t start_test_server(const char *server_binary)
{
@@ -498,10 +721,23 @@
// Define test cases - paths that should succeed (200 OK)
TestCase success_tests[] = {
{"/", 200, NULL, NULL, NULL, 0},
- {"/index.html", 200, NULL, NULL, NULL, 0},
+ {"/resume", 200, NULL, NULL, NULL, 0},
+ {"/tools", 200, NULL, NULL, NULL, 0},
+ {"/tools/markdown_to_html", 200, NULL, NULL, NULL, 0},
+ {"/tools/file_converter", 200, NULL, NULL, NULL, 0},
};
int num_success_tests = sizeof(success_tests) / sizeof(success_tests[0]);
+ // Define test cases - paths that should redirect (301)
+ TestCase redirect_tests[] = {
+ {"/index.html", 301, NULL, NULL, NULL, 0},
+ {"/resume/index.html", 301, NULL, NULL, NULL, 0},
+ {"/tools/index.html", 301, NULL, NULL, NULL, 0},
+ {"/tools/markdown_to_html/index.html", 301, NULL, NULL, NULL, 0},
+ {"/tools/file_converter/index.html", 301, NULL, NULL, NULL, 0},
+ };
+ int num_redirect_tests = sizeof(redirect_tests) / sizeof(redirect_tests[0]);
+
// Define test cases - paths that should fail (404)
TestCase failure_tests[] = {
{"/nonexistent", 404, NULL, NULL, NULL, 0},
@@ -515,6 +751,10 @@
{
init_test_case(&success_tests[i]);
}
+ for (int i = 0; i < num_redirect_tests; i++)
+ {
+ init_test_case(&redirect_tests[i]);
+ }
for (int i = 0; i < num_failure_tests; i++)
{
init_test_case(&failure_tests[i]);
@@ -536,6 +776,22 @@
printf("\n");
+ // Run redirect tests
+ printf("Running tests for paths that should redirect:\n");
+ for (int i = 0; i < num_redirect_tests; i++)
+ {
+ if (execute_test_case(&redirect_tests[i], server_pid) == 0)
+ {
+ passed_tests++;
+ }
+ else
+ {
+ failed_tests++;
+ }
+ }
+
+ printf("\n");
+
// Run failure tests
printf("Running tests for paths that should fail:\n");
for (int i = 0; i < num_failure_tests; i++)
@@ -572,11 +828,48 @@
failed_tests++;
}
+ printf("\n");
+
+ // Test POST endpoints
+ printf("Running tests for POST conversion endpoints:\n");
+
+ // Test image-to-webp conversion
+ if (test_file_conversion("/api/convert/image-to-webp",
+ "mrjunejune/test/shiba.webp",
+ "image/webp",
+ server_pid) == 0)
+ {
+ passed_tests++;
+ }
+ else
+ {
+ failed_tests++;
+ }
+
+ printf("\n");
+
+ // Test video-to-mp4 conversion
+ if (test_file_conversion("/api/convert/video-to-mp4",
+ "mrjunejune/test/test_avi.avi",
+ "video/mp4",
+ server_pid) == 0)
+ {
+ passed_tests++;
+ }
+ else
+ {
+ failed_tests++;
+ }
+
// Cleanup test cases
for (int i = 0; i < num_success_tests; i++)
{
cleanup_test_case(&success_tests[i]);
}
+ for (int i = 0; i < num_redirect_tests; i++)
+ {
+ cleanup_test_case(&redirect_tests[i]);
+ }
for (int i = 0; i < num_failure_tests; i++)
{
cleanup_test_case(&failure_tests[i]);
diff -r be91a73d801a -r 092afa595764 mrjunejune/test/shiba.webp
Binary file mrjunejune/test/shiba.webp has changed
diff -r be91a73d801a -r 092afa595764 mrjunejune/test/snapshots/index.html.snapshot
--- a/mrjunejune/test/snapshots/index.html.snapshot Fri Jan 02 18:02:22 2026 -0800
+++ b/mrjunejune/test/snapshots/index.html.snapshot Fri Jan 02 18:13:32 2026 -0800
@@ -1,245 +1,202 @@
-HTTP/1.1 200 OK
-Content-Type: text/html; charset=utf-8
-Content-Length: 12120
+HTTP/1.1 301 Moved Permanently
+Content-Type: text/plain
+Content-Length: 0
Connection: close
+Body:
-
-
-
-
-
-
+t/otf" crossorigin>
+
+
+
+
+
+
+
+
-
-
-
Hi, my name is Juntae, but most people call me June.
-
I am a software engineer with experience spanning a wide range of companies, from small startups to FAANG.
-
Feel free to check out my resume below, and if you're interested, don’t hesitate to contact me for contract work ranging from web/app development to embedded programming.
- Implements bazel structure for the company for TypeScript and JavaScript code base for hermiticity and stablishing standards for JavaScript and
-
-
- TypeScript testing and code structures.
-
-
- Led a team of five engineers in building GraphQL endpoints for client-facing applications using Apollo and AppSync, supporting over 2000 RPS and auto scaling depending on request values.
-
-
- Improved application response times by up to 85% for graphQL response by updating database schema and SQL queries, eliminating N+1 queries and lack of indexes.
-
-
- Developed CI/CD pipelines for backend structures.
-
-
- Designed infrastructure for pub/sub, caching, and media processing logic.
-
-
+
During my free time, I like to write codes mostly in C, Python, and Typescript. All in mono repo styles using mercurial and bazel. (I know that is mentally ill...)
- Maintained Amazon amplify apps to create and deploy React web applications for companies such as NBA, Tinder, and other companies for COVID-19 at-home test kits.
-
- Implemented a script that helps accurately access and refund unused covid test kits; helping company save up to 200,000 USD.
-
- Created several Rails controllers for internal purposes; mocking end to end user experience for QA, mass refund features for CX department, and more, ultimately reducing support tickets amount by 50 percent.
-
- Implemented an audit table to help debug problems and logged which process was responsible for the change of the record using PaperTrail gems
- Constructed RESTful API endpoints in multiple different frameworks such as Django, Ruby on Rails, and Flask and automated API documentation process using swagger.
-
-
- Designed custom rake tasks for importing production data into newly updated data structure to meet client's needs.
-
-
- Maintained or updated staging/productions servers. Debugged problems in production postgres database using ssh and postgres console on Heroku or AWS servers
-
-
- Collaborated in creating automation python scripts for websites and application using selenium covering for QA eliminating 80% of QA's manual work
-
- Developed custom Shapley value regression model to calculate importance of independent variables of data sets using sklearn, pandas, and numpy.
-
-
- Created custom image uploader to Amazon s3 bucket using boto3 library.
-
-
- Built RESTful API application using Flask framework and automated extensive API documentation pages using flask-restplus, pytest, and swagger, covering 95% of the code base.
-
-
- Created an interactive graph using D3.js in Vue.js with data from Flask backend API.
-
Software Engineer with 9 years of hands-on experience across diverse tech stacks, from early-stage startups to FANG-scale systems. Adept in designing and delivering robust software solutions using modern languages, frameworks, and cloud platforms.
+ Implements bazel structure for the company for TypeScript and JavaScript code base for hermiticity and stablishing standards for JavaScript and
+
+
+ TypeScript testing and code structures.
+
+
+ Led a team of five engineers in building GraphQL endpoints for client-facing applications using Apollo and AppSync, supporting over 2000 RPS and auto scaling depending on request values.
+
+
+ Improved application response times by up to 85% for graphQL response by updating database schema and SQL queries, eliminating N+1 queries and lack of indexes.
+
+
+ Developed CI/CD pipelines for backend structures.
+
+
+ Designed infrastructure for pub/sub, caching, and media processing logic.
+
+ Maintained Amazon amplify apps to create and deploy React web applications for companies such as NBA, Tinder, and other companies for COVID-19 at-home test kits.
+
+ Implemented a script that helps accurately access and refund unused covid test kits; helping company save up to 200,000 USD.
+
+ Created several Rails controllers for internal purposes; mocking end to end user experience for QA, mass refund features for CX department, and more, ultimately reducing support tickets amount by 50 percent.
+
+ Implemented an audit table to help debug problems and logged which process was responsible for the change of the record using PaperTrail gems
+ Constructed RESTful API endpoints in multiple different frameworks such as Django, Ruby on Rails, and Flask and automated API documentation process using swagger.
+
+
+ Designed custom rake tasks for importing production data into newly updated data structure to meet client's needs.
+
+
+ Maintained or updated staging/productions servers. Debugged problems in production postgres database using ssh and postgres console on Heroku or AWS servers
+
+
+ Collaborated in creating automation python scripts for websites and application using selenium covering for QA eliminating 80% of QA's manual work
+
+ Developed custom Shapley value regression model to calculate importance of independent variables of data sets using sklearn, pandas, and numpy.
+
+
+ Created custom image uploader to Amazon s3 bucket using boto3 library.
+
+
+ Built RESTful API application using Flask framework and automated extensive API documentation pages using flask-restplus, pytest, and swagger, covering 95% of the code base.
+
+
+ Created an interactive graph using D3.js in Vue.js with data from Flask backend API.
+
Hi, my name is Juntae, but most people call me June or MrJuneJune.
+
+
I am a software engineer with experience spanning a wide range of companies, from small startups to FAANGs....
+
I know it is lame to work for them, but I have a dog so I need to put foods on my table.
+
+
+
+
+
+
During my free time, I like to write codes mostly in C, Python, and Typescript. All in mono repo styles using mercurial and bazel. (I know that is mentally ill...)
Hi, my name is Juntae, but most people call me June.
-
I am a software engineer with experience spanning a wide range of companies, from small startups to FAANG.
-
Feel free to check out my resume below, and if you're interested, don’t hesitate to contact me for contract work ranging from web/app development to embedded programming.
- Implements bazel structure for the company for TypeScript and JavaScript code base for hermiticity and stablishing standards for JavaScript and
-
-
- TypeScript testing and code structures.
-
-
- Led a team of five engineers in building GraphQL endpoints for client-facing applications using Apollo and AppSync, supporting over 2000 RPS and auto scaling depending on request values.
-
-
- Improved application response times by up to 85% for graphQL response by updating database schema and SQL queries, eliminating N+1 queries and lack of indexes.
-
-
- Developed CI/CD pipelines for backend structures.
-
-
- Designed infrastructure for pub/sub, caching, and media processing logic.
-
-
+
During my free time, I like to write codes mostly in C, Python, and Typescript. All in mono repo styles using mercurial and bazel. (I know that is mentally ill...)
- Maintained Amazon amplify apps to create and deploy React web applications for companies such as NBA, Tinder, and other companies for COVID-19 at-home test kits.
-
- Implemented a script that helps accurately access and refund unused covid test kits; helping company save up to 200,000 USD.
-
- Created several Rails controllers for internal purposes; mocking end to end user experience for QA, mass refund features for CX department, and more, ultimately reducing support tickets amount by 50 percent.
-
- Implemented an audit table to help debug problems and logged which process was responsible for the change of the record using PaperTrail gems
- Constructed RESTful API endpoints in multiple different frameworks such as Django, Ruby on Rails, and Flask and automated API documentation process using swagger.
-
-
- Designed custom rake tasks for importing production data into newly updated data structure to meet client's needs.
-
-
- Maintained or updated staging/productions servers. Debugged problems in production postgres database using ssh and postgres console on Heroku or AWS servers
-
-
- Collaborated in creating automation python scripts for websites and application using selenium covering for QA eliminating 80% of QA's manual work
-
- Developed custom Shapley value regression model to calculate importance of independent variables of data sets using sklearn, pandas, and numpy.
-
-
- Created custom image uploader to Amazon s3 bucket using boto3 library.
-
-
- Built RESTful API application using Flask framework and automated extensive API documentation pages using flask-restplus, pytest, and swagger, covering 95% of the code base.
-
-
- Created an interactive graph using D3.js in Vue.js with data from Flask backend API.
-
+ Implements bazel structure for the company for TypeScript and JavaScript code base for hermiticity and stablishing standards for JavaScript and
+
+
+ TypeScript testing and code structures.
+
+
+ Led a team of five engineers in building GraphQL endpoints for client-facing applications using Apollo and AppSync, supporting over 2000 RPS and auto scaling depending on request values.
+
+
+ Improved application response times by up to 85% for graphQL response by updating database schema and SQL queries, eliminating N+1 queries and lack of indexes.
+
+
+ Developed CI/CD pipelines for backend structures.
+
+
+ Designed infrastructure for pub/sub, caching, and media processing logic.
+
+ Maintained Amazon amplify apps to create and deploy React web applications for companies such as NBA, Tinder, and other companies for COVID-19 at-home test kits.
+
+ Implemented a script that helps accurately access and refund unused covid test kits; helping company save up to 200,000 USD.
+
+ Created several Rails controllers for internal purposes; mocking end to end user experience for QA, mass refund features for CX department, and more, ultimately reducing support tickets amount by 50 percent.
+
+ Implemented an audit table to help debug problems and logged which process was responsible for the change of the record using PaperTrail gems
+ Constructed RESTful API endpoints in multiple different frameworks such as Django, Ruby on Rails, and Flask and automated API documentation process using swagger.
+
+
+ Designed custom rake tasks for importing production data into newly updated data structure to meet client's needs.
+
+
+ Maintained or updated staging/productions servers. Debugged problems in production postgres database using ssh and postgres console on Heroku or AWS servers
+
+
+ Collaborated in creating automation python scripts for websites and application using selenium covering for QA eliminating 80% of QA's manual work
+
+ Developed custom Shapley value regression model to calculate importance of independent variables of data sets using sklearn, pandas, and numpy.
+
+
+ Created custom image uploader to Amazon s3 bucket using boto3 library.
+
+
+ Built RESTful API application using Flask framework and automated extensive API documentation pages using flask-restplus, pytest, and swagger, covering 95% of the code base.
+
+
+ Created an interactive graph using D3.js in Vue.js with data from Flask backend API.
+