Commit adfb50d

rename bucket

1 parent 6c8b43e commit adfb50d

6 files changed: 29 additions & 18 deletions

apps/proxy/.env.example
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-BUCKET_NAME=test
+BUCKET_NAME=s3fs
 AWS_REGION=us-east-1
 S3FS_MOUNT=/mnt/s3
 DATA_MOUNT=/mnt/data

apps/proxy/tools/certbot/.env.example
Lines changed: 1 addition & 1 deletion

@@ -5,5 +5,5 @@ AWS_ACCESS_KEY_ID=minioadmin
 AWS_ENDPOINT_URL_S3=http://minio:9000
 AWS_REGION=us-east-1
 AWS_SECRET_ACCESS_KEY=minioadmin
-BUCKET_NAME=test
+BUCKET_NAME=s3fs
 S3FS_MOUNT=/mnt/s3

apps/web/.env.example
Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@ NEXT_PUBLIC_IS_PREVIEW=true
 NEXT_PUBLIC_WILDCARD_DOMAIN=db.example.com
 
 OPENAI_API_KEY="<openai-api-key>"
-S3_BUCKET=test
+S3_BUCKET=s3fs
 S3_ENDPOINT=http://localhost:54321/storage/v1/s3
 AWS_ACCESS_KEY_ID=625729a08b95bf1b7ff351a663f3a23c
 AWS_SECRET_ACCESS_KEY=850181e4652dd023b7a98c58ae0d2d34bd487ee0cc3254aed6eda37307425907

apps/web/app/api/databases/[id]/upload/route.ts
Lines changed: 18 additions & 14 deletions

@@ -1,5 +1,5 @@
-import { S3Client } from '@aws-sdk/client-s3'
-import { Upload } from '@aws-sdk/lib-storage'
+// import { S3Client } from '@aws-sdk/client-s3'
+// import { Upload } from '@aws-sdk/lib-storage'
 import { NextRequest, NextResponse } from 'next/server'
 import { createGzip } from 'zlib'
 import { Readable } from 'stream'
@@ -8,7 +8,7 @@ import { createScramSha256Data } from 'pg-gateway'
 import { generateDatabasePassword } from '~/utils/generate-database-password'
 
 const wildcardDomain = process.env.NEXT_PUBLIC_WILDCARD_DOMAIN ?? 'db.example.com'
-const s3Client = new S3Client({ endpoint: process.env.S3_ENDPOINT, forcePathStyle: true })
+// const s3Client = new S3Client({ endpoint: process.env.S3_ENDPOINT, forcePathStyle: true })
 
 export type DatabaseUploadResponse =
   | {
@@ -79,17 +79,21 @@ export async function POST(
   const gzip = createGzip()
   const body = Readable.from(streamToAsyncIterable(dump.stream()))
 
-  const upload = new Upload({
-    client: s3Client,
-    params: {
-      Bucket: process.env.S3_BUCKET,
-      Key: key,
-      Body: body.pipe(gzip),
-    },
-  })
-
-  await upload.done()
-
+  // const upload = new Upload({
+  //   client: s3Client,
+  //   params: {
+  //     Bucket: process.env.S3_BUCKET,
+  //     Key: key,
+  //     Body: body.pipe(gzip),
+  //   },
+  // })
+
+  // await upload.done()
+
+  const { data: storageData, error: storageError } = await supabase.storage
+    .from(process.env.S3_BUCKET!)
+    .upload(key, body.pipe(gzip), { upsert: true, duplex: 'half' })
+  console.log(storageData, storageError)
   const { data: existingDeployedDatabase } = await supabase
     .from('deployed_databases')
     .select('id')
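
The substantive change in this file swaps the AWS SDK multipart upload (`@aws-sdk/lib-storage`) for a direct Supabase Storage upload of the gzipped dump. A minimal standalone sketch of the new path, assuming Node 18+ and an already-configured supabase-js client; `uploadDump` and its parameters are hypothetical names for illustration:

// Sketch only: mirrors the new upload path above. `uploadDump` is a
// hypothetical helper name, not part of the commit.
import { createGzip } from 'zlib'
import { Readable } from 'stream'
import { SupabaseClient } from '@supabase/supabase-js'

async function uploadDump(supabase: SupabaseClient, key: string, dump: Blob) {
  // Bridge the web ReadableStream from Blob#stream() into a Node stream,
  // then gzip it on the fly as it uploads.
  const body = Readable.fromWeb(dump.stream() as any)
  const gzip = createGzip()

  const { data, error } = await supabase.storage
    .from(process.env.S3_BUCKET!) // 's3fs' after this commit
    .upload(key, body.pipe(gzip), { upsert: true, duplex: 'half' })

  if (error) throw error
  return data
}

The `duplex: 'half'` option is required by undici (Node's fetch implementation) whenever a request body is a stream, which is why the diff passes it alongside `upsert: true`.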

supabase/config.toml
Lines changed: 1 addition & 1 deletion

@@ -73,7 +73,7 @@ file_size_limit = "50MiB"
 [storage.image_transformation]
 enabled = true
 
-[storage.buckets.test]
+[storage.buckets.s3fs]
 public = false
 
 [auth]
Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
+create policy "Users can upload files to the s3fs bucket"
+on storage.objects
+for insert to authenticated
+with check (
+  bucket_id = 's3fs' and
+  (storage.foldername(name))[1] = 'dbs'
+);
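
This new policy limits authenticated uploads to keys under a top-level dbs/ folder of the s3fs bucket: `storage.foldername(name)` splits the object key into its folder components, so element `[1]` must be 'dbs'. A hypothetical illustration with supabase-js (the project URL, keys, object names, and file contents are made up, and the client must be signed in so requests run as the `authenticated` role):

import { createClient } from '@supabase/supabase-js'

// Hypothetical project URL and anon key, for illustration only.
const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_ANON_KEY!)
const file = new Blob(['dump contents'])

// storage.foldername('dbs/db-123.tar.gz') = {dbs}, so element [1] is 'dbs'.
const ok = await supabase.storage
  .from('s3fs')
  .upload('dbs/db-123.tar.gz', file) // passes the with-check clause

const denied = await supabase.storage
  .from('s3fs')
  .upload('backups/db-123.tar.gz', file) // rejected: first folder is not 'dbs'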
