
Commit 18851c7

updates mover
1 parent c7378c0 commit 18851c7

File tree

2 files changed (+65, -57 lines)


apps/infrastructure-migrator/blob-mover.ts

Lines changed: 0 additions & 49 deletions
This file was deleted.

apps/infrastructure-migrator/driver.ts

Lines changed: 65 additions & 8 deletions
@@ -3,15 +3,26 @@ import { drizzle as pgDrizzle } from "drizzle-orm/vercel-postgres";
 import { drizzle } from "drizzle-orm/libsql";
 import * as pgSchema from "./schema";
 import { createClient } from "@libsql/client";
-import { migrateBlob } from "./blob-mover";
 export * from "drizzle-orm";
 import dotenv from "dotenv";
 import * as schema from "db/schema";
+import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
+import c, { staticUploads } from "config";
+import { eq } from "drizzle-orm";
 
 dotenv.config({
 	path: "../../.env",
 });
 
+export const S3 = new S3Client({
+	region: "auto",
+	endpoint: `https://${process.env.CLOUDFLARE_ACCOUNT_ID!}.r2.cloudflarestorage.com`,
+	credentials: {
+		accessKeyId: process.env.R2_ACCESS_KEY_ID!,
+		secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
+	},
+});
+
 const dbPostgres = pgDrizzle(sql, { schema: pgSchema });
 
 const allUserCommonDataPromise = dbPostgres.query.userCommonData.findMany();
@@ -33,6 +44,12 @@ const allChatsToUsersPromise = dbPostgres.query.chatsToUsers.findMany();
 async function migratePostgresSqLite() {
 	console.log("Starting Migration 🚀");
 	console.log("Fetching Postgres Data 🐘");
+	const turso = createClient({
+		url: process.env.TURSO_DATABASE_URL!,
+		authToken: process.env.TURSO_AUTH_TOKEN,
+	});
+	const db = drizzle(turso, { schema });
+
 	const [
 		allUserCommonData,
 		allUserHackerData,
@@ -66,12 +83,6 @@ async function migratePostgresSqLite() {
 	]);
 	console.log("Postgres data fetched 📦");
 
-	const turso = createClient({
-		url: process.env.TURSO_DATABASE_URL!,
-		authToken: process.env.TURSO_AUTH_TOKEN,
-	});
-	const db = drizzle(turso, { schema });
-
 	console.log("Migrating Users 👥");
 
 	if (allUserCommonData.length > 0) {
@@ -198,7 +209,53 @@ async function migratePostgresSqLite() {
 
 	console.log("Migrating Vercel Blob Files To R2");
 
-	migrateBlob();
+	const resumeData = await db.query.userHackerData.findMany({
+		columns: { resume: true, clerkID: true },
+	});
+
+	for (let resumeEntry of resumeData) {
+		const { resume: resumeUrlAsString, clerkID: userID } = resumeEntry;
+		if (
+			!resumeUrlAsString.length ||
+			resumeUrlAsString === c.noResumeProvidedURL ||
+			resumeUrlAsString.startsWith("/api")
+		)
+			continue;
+
+		const resumeUrl = new URL(resumeUrlAsString);
+		const resumeFetchResponse = await fetch(resumeUrl);
+
+		if (!resumeFetchResponse.ok) {
+			console.log("resume fetch failed");
+		}
+		const resumeBlob = await resumeFetchResponse.blob();
+
+		let key = decodeURIComponent(resumeUrl.pathname);
+		// if the first character is a slash, remove it
+		if (key.charAt(0) === "/") {
+			key = key.slice(1);
+		}
+
+		const buffer = await resumeBlob.arrayBuffer();
+
+		const cmd = new PutObjectCommand({
+			Key: key,
+			Bucket: staticUploads.bucketName,
+			ContentType: "application/pdf",
+			///@ts-expect-error
+			Body: buffer,
+		});
+
+		await S3.send(cmd);
+
+		// New url to correspond to an api route
+		const newResumeUrl = `/api/upload/resume/view?key=${key}`;
+
+		await db
+			.update(schema.userHackerData)
+			.set({ resume: newResumeUrl.toString() })
+			.where(eq(schema.userHackerData.clerkID, userID));
+	}
 
 	console.log("Migrated Vercel Blob Files To R2");
 
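Note on the rewritten URLs: the migration repoints each resume at /api/upload/resume/view?key=..., a route that is not included in this commit. Below is a minimal, hypothetical sketch of what such a route could look like, assuming a fetch-style GET handler (e.g. a Next.js App Router route) and the same R2 client configuration and "config" module used in driver.ts; the real handler may differ.

// Hypothetical sketch only -- the actual view route is not part of this commit.
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import { staticUploads } from "config";

const S3 = new S3Client({
	region: "auto",
	endpoint: `https://${process.env.CLOUDFLARE_ACCOUNT_ID!}.r2.cloudflarestorage.com`,
	credentials: {
		accessKeyId: process.env.R2_ACCESS_KEY_ID!,
		secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
	},
});

export async function GET(request: Request) {
	// The migration stored each object under the decoded pathname of its old blob URL.
	const key = new URL(request.url).searchParams.get("key");
	if (!key) return new Response("Missing key", { status: 400 });

	const object = await S3.send(
		new GetObjectCommand({ Bucket: staticUploads.bucketName, Key: key }),
	);

	// Stream the stored PDF back to the caller.
	return new Response(object.Body?.transformToWebStream(), {
		headers: { "Content-Type": "application/pdf" },
	});
}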
