Upload a Resume
Resumes are uploaded directly to the RAISE S3 bucket. Once an object lands in the bucket, S3 notifies the ingestion queue and the pipeline starts automatically.
Supported formats: PDF, DOCX, DOC.
The bucket name is printed as a CDK stack output (BucketName) after deployment.
Secure uploads from browsers
For web and mobile applications, use AWS Cognito Identity Pools to issue short-lived, scoped credentials to the client. This avoids exposing long-lived AWS keys in your frontend code. See Authentication & Authorization for the setup guide.
Backend Uploads
Use these examples when uploading from a server-side application or script.
- Node.js
- Python
- Java
- Go
- C#
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
import { readFileSync } from "fs";

// Push a resume into the RAISE ingestion bucket; the S3 event
// notification starts the processing pipeline automatically.
const s3 = new S3Client({ region: "us-east-1" });

const command = new PutObjectCommand({
  Bucket: "YOUR_BUCKET_NAME", // CDK stack output: BucketName
  Key: "resumes/candidate.pdf",
  Body: readFileSync("./candidate.pdf"),
  ContentType: "application/pdf",
});
await s3.send(command);

console.log("Resume uploaded successfully.");
import boto3

# Push a resume into the RAISE ingestion bucket; the S3 event
# notification starts the processing pipeline automatically.
client = boto3.client("s3", region_name="us-east-1")

with open("./candidate.pdf", "rb") as resume:
    client.put_object(
        Bucket="YOUR_BUCKET_NAME",  # CDK stack output: BucketName
        Key="resumes/candidate.pdf",
        Body=resume,
        ContentType="application/pdf",
    )

print("Resume uploaded successfully.")
// Push a resume into the RAISE ingestion bucket; the S3 event
// notification starts the processing pipeline automatically.
S3Client s3 = S3Client.builder().region(Region.US_EAST_1).build();

PutObjectRequest request = PutObjectRequest.builder()
        .bucket("YOUR_BUCKET_NAME") // CDK stack output: BucketName
        .key("resumes/candidate.pdf")
        .contentType("application/pdf")
        .build();

// The Path overload streams the file from disk.
s3.putObject(request, Path.of("./candidate.pdf"));

System.out.println("Resume uploaded successfully.");
package main
import (
"context"
"os"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/s3"
)
func main() {
cfg, _ := config.LoadDefaultConfig(context.TODO(),
config.WithRegion("us-east-1"),
)
client := s3.NewFromConfig(cfg)
file, _ := os.Open("./candidate.pdf")
defer file.Close()
_, err := client.PutObject(context.TODO(), &s3.PutObjectInput{
Bucket: aws.String("YOUR_BUCKET_NAME"),
Key: aws.String("resumes/candidate.pdf"),
Body: file,
ContentType: aws.String("application/pdf"),
})
if err != nil {
panic(err)
}
println("Resume uploaded successfully.")
}
using Amazon.S3;
using Amazon.S3.Model;

// Push a resume into the RAISE ingestion bucket; the S3 event
// notification starts the processing pipeline automatically.
var s3 = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);

using var resume = File.OpenRead("./candidate.pdf");
var request = new PutObjectRequest
{
    BucketName = "YOUR_BUCKET_NAME", // CDK stack output: BucketName
    Key = "resumes/candidate.pdf",
    InputStream = resume,
    ContentType = "application/pdf",
};
await s3.PutObjectAsync(request);

Console.WriteLine("Resume uploaded successfully.");
Frontend Uploads
Use these examples when uploading from a web application using temporary AWS credentials from a Cognito Identity Pool.
- React
- Angular
- Vue
- Vanilla JS
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

// Shape of the temporary credentials issued by a Cognito Identity Pool.
// Typing this explicitly (instead of `any`) lets the compiler catch
// callers that pass the wrong object.
interface AwsCredentials {
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string;
}

// Props was referenced but never defined in the original example,
// so the snippet did not compile when copied as-is.
interface Props {
  bucketName: string;
  credentials: AwsCredentials;
}

/**
 * Upload a resume to the RAISE ingestion bucket.
 * The key gets a timestamp prefix so repeated uploads of the same
 * file name do not overwrite each other.
 */
async function uploadResume(
  file: File,
  bucketName: string,
  credentials: AwsCredentials,
): Promise<void> {
  const client = new S3Client({ region: "us-east-1", credentials });
  await client.send(new PutObjectCommand({
    Bucket: bucketName,
    Key: `resumes/${Date.now()}-${file.name}`,
    Body: file,
    ContentType: file.type,
  }));
}

// Usage in a component
export function ResumeUploader({ bucketName, credentials }: Props) {
  const handleChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;
    await uploadResume(file, bucketName, credentials);
    alert("Resume uploaded successfully.");
  };
  return (
    <input
      type="file"
      accept=".pdf,.docx,.doc"
      onChange={handleChange}
    />
  );
}
import { Injectable } from "@angular/core";
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

// Shape of the temporary credentials issued by a Cognito Identity Pool.
// Typing this explicitly (instead of `any`) lets the compiler catch
// callers that pass the wrong object.
export interface AwsCredentials {
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string;
}

/**
 * Uploads resumes to the RAISE ingestion bucket using short-lived
 * Cognito credentials supplied by the caller.
 */
@Injectable({ providedIn: "root" })
export class ResumeUploadService {
  private readonly region = "us-east-1";
  private readonly bucket = "YOUR_BUCKET_NAME"; // CDK stack output: BucketName

  async upload(file: File, credentials: AwsCredentials): Promise<void> {
    const client = new S3Client({ region: this.region, credentials });
    await client.send(new PutObjectCommand({
      Bucket: this.bucket,
      // Timestamp prefix prevents same-named files from overwriting each other.
      Key: `resumes/${Date.now()}-${file.name}`,
      Body: file,
      ContentType: file.type,
    }));
  }
}
// composables/useResumeUpload.ts
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

// Shape of the temporary credentials issued by a Cognito Identity Pool.
// Typing this explicitly (instead of `any`) lets the compiler catch
// callers that pass the wrong object.
export interface AwsCredentials {
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string;
}

/**
 * Composable that uploads resumes to the RAISE ingestion bucket.
 * The key gets a timestamp prefix so repeated uploads of the same
 * file name do not overwrite each other.
 */
export function useResumeUpload(bucketName: string, credentials: AwsCredentials) {
  async function upload(file: File): Promise<void> {
    const client = new S3Client({ region: "us-east-1", credentials });
    await client.send(new PutObjectCommand({
      Bucket: bucketName,
      Key: `resumes/${Date.now()}-${file.name}`,
      Body: file,
      ContentType: file.type,
    }));
  }
  return { upload };
}
<!-- ResumeUploader.vue -->
<template>
  <input type="file" accept=".pdf,.docx,.doc" @change="onFileSelected" />
</template>

<script setup lang="ts">
import { useResumeUpload } from "@/composables/useResumeUpload";

// Bucket name and Cognito credentials are supplied by the parent component.
const props = defineProps<{ bucketName: string; credentials: any }>();

const { upload } = useResumeUpload(props.bucketName, props.credentials);

// Upload the chosen file and confirm; does nothing if the selection is empty.
async function onFileSelected(event: Event) {
  const input = event.target as HTMLInputElement;
  const file = input.files?.[0];
  if (!file) return;
  await upload(file);
  alert("Resume uploaded successfully.");
}
</script>
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

/**
 * Upload a resume to the given bucket using temporary Cognito credentials.
 * A timestamp prefix keeps same-named files from overwriting each other.
 */
async function uploadResume(file, bucketName, credentials) {
  const s3 = new S3Client({ region: "us-east-1", credentials });
  const command = new PutObjectCommand({
    Bucket: bucketName,
    Key: `resumes/${Date.now()}-${file.name}`,
    Body: file,
    ContentType: file.type,
  });
  await s3.send(command);
}

document.getElementById("file-input").addEventListener("change", async (e) => {
  const [file] = e.target.files;
  if (!file) return;
  // credentials obtained from Cognito Identity Pool
  await uploadResume(file, "YOUR_BUCKET_NAME", credentials);
  alert("Resume uploaded successfully.");
});