@upload 데코레이터는 파일 업로드를 처리하는 API를 생성합니다. Multipart form-data 요청을 자동으로 파싱하고, 파일 객체를 제공합니다. @api 데코레이터 없이도 독립적으로 사용할 수 있으며, 자동으로 POST 메서드와 multipart 클라이언트(axios-multipart, tanstack-mutation-multipart)를 설정합니다.
업로드 모드
Sonamu는 두 가지 파일 업로드 모드를 제공합니다:
모드 / 옵션 / Context 속성 / 파일 타입 / 특징
Buffer (기본) — consume: "buffer" 또는 생략 / bufferedFiles / BufferedFile[] / 메모리에 로드, MD5 계산·이미지 처리 등 유연한 작업 가능
Stream — consume: "stream" / uploadedFiles / UploadedFile[] / 즉시 저장소로 스트리밍, 대용량 파일에 적합
Buffer 모드 (기본)
Buffer 모드는 파일을 메모리에 로드한 후 처리합니다. MD5 해시 계산, 이미지 리사이징 등 파일 내용을 직접 다뤄야 할 때 사용합니다.
import { BaseModelClass , upload , Sonamu } from "sonamu" ;
class FileModelClass extends BaseModelClass {
  // Default consume mode is "buffer": the file is fully loaded into memory.
  @upload()
  async uploadAvatar() {
    const { bufferedFiles } = Sonamu.getContext();
    const [file] = bufferedFiles ?? [];
    if (!file) {
      throw new Error("No file uploaded");
    }
    // Use the MD5 hash as the storage key so identical uploads deduplicate.
    const hash = await file.md5();
    const storageKey = `avatars/${hash}.${file.extname}`;
    const url = await file.saveToDisk("fs", storageKey);
    return { url, filename: file.filename, size: file.size };
  }
}
BufferedFile ๊ฐ์ฒด
class BufferedFile {
// Original file name as sent by the client
get filename () : string ;
// MIME type reported for the multipart part
get mimetype () : string ;
// File size in bytes
get size () : number ;
// Extension without the leading dot (e.g. "jpg", "png"); false when undetectable
get extname () : string | false ;
// File contents, fully loaded into memory
get buffer () : Buffer ;
// Unsigned URL, available after saveToDisk
get url () : string ;
// Signed URL, available after saveToDisk
get signedUrl () : string ;
// Access to the underlying Fastify MultipartFile
get raw () : MultipartFile ;
// Computes the MD5 hash of the buffer
async md5 () : Promise < string >;
// Persists the file to a disk and returns the stored URL; note the (diskName, key) parameter order
async saveToDisk ( diskName : DriverKey , key : string ) : Promise < string >;
}
saveToDisk의 파라미터 순서는 (diskName, key)입니다.
Stream 모드
Stream 모드는 파일을 메모리에 로드하지 않고 즉시 저장소로 스트리밍합니다. 대용량 파일 업로드에 적합합니다.
// Stream mode: files are piped straight to the "s3" disk without being
// buffered in memory — suited to large uploads.
@upload({
  consume: "stream",
  destination: "s3", // disk to stream into
  keyGenerator: (file) => `uploads/${Date.now()}-${file.filename}`, // storage key per file
  limits: { files: 5 },
})
async uploadLargeFiles() {
  const { uploadedFiles } = Sonamu.getContext();
  if (!uploadedFiles?.length) {
    throw new Error("No files uploaded");
  }
  // By the time the handler runs, every file is already in storage;
  // only metadata is returned here.
  const files = uploadedFiles.map(({ filename, url, key, size }) => ({
    filename,
    url,
    key,
    size,
  }));
  return { files };
}
UploadedFile 객체
Stream 모드에서는 파일이 이미 저장소에 업로드된 상태로, 메타데이터만 접근할 수 있습니다.
class UploadedFile {
// Original file name as sent by the client
get filename () : string ;
// MIME type reported for the multipart part
get mimetype () : string ;
// File size in bytes
get size () : number ;
// Extension without the leading dot (e.g. "jpg", "png"); false when undetectable
get extname () : string | false ;
// Unsigned URL of the stored object
get url () : string ;
// Signed URL of the stored object
get signedUrl () : string ;
// Key within the storage disk
get key () : string ;
// Name of the disk the file was streamed to
get diskName () : DriverKey ;
// Downloads the stored file back into memory (for deferred processing)
async download () : Promise < Buffer >;
}
guards
업로드 API에 적용할 가드를 지정합니다. 인증이 필요한 업로드 API에서 사용합니다.
@ upload ({ guards: [ "user" ] })
async uploadAvatar () {
const { user , bufferedFiles } = Sonamu . getContext ();
// The "user" guard is applied, so only authenticated users reach this handler
// ...
}
description
API 설명을 지정합니다. 자동 생성되는 API 문서에 표시됩니다.
// The description option is surfaced in the auto-generated API docs.
// NOTE(review): the Korean description string below appears encoding-corrupted — verify against the original source.
@ upload ({ description: "์ฌ์ฉ์ ํ๋กํ ์ด๋ฏธ์ง ์
๋ก๋" })
async uploadAvatar () {
// ...
}
limits
파일 업로드 제한을 설정합니다. Fastify multipart의 limits 옵션을 그대로 받습니다.
type UploadDecoratorOptions = {
// Common options
guards ?: GuardKey []; // guards to apply (e.g. ["user", "admin"])
description ?: string ; // API description shown in generated docs
limits ?: {
fileSize ?: number ; // max size per file (bytes)
files ?: number ; // max number of files
fields ?: number ; // max number of non-file fields
fieldSize ?: number ; // max field size
parts ?: number ; // max number of multipart parts
};
} & (
// Buffer mode (default)
| { consume ?: "buffer" }
// Stream mode
| {
consume : "stream" ;
destination : DriverKey ;
keyGenerator ?: ( file : { filename : string ; mimetype : string }) => string ;
}
);
// Enforce upload limits via Fastify multipart's `limits` option.
@upload({
  limits: {
    fileSize: 10 * 1024 * 1024, // 10 MB per file
    files: 5, // at most five files per request
  },
})
async uploadWithLimits() {
  const { bufferedFiles } = Sonamu.getContext();
  // ...
}
๋จ์ผ ํ์ผ (Buffer)
๋ค์ค ํ์ผ (Buffer)
Stream ๋ชจ๋
// Single-file upload in buffer mode (the default).
@upload()
async uploadAvatar() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const md5 = await file.md5();
  return { filename: file.filename, size: file.size, md5 };
}
// Multi-file upload in buffer mode; caps the batch at ten files.
@upload({ limits: { files: 10 } })
async uploadDocuments() {
  const { bufferedFiles } = Sonamu.getContext();
  if (!bufferedFiles?.length) {
    throw new Error("No files");
  }
  const results = [];
  for (const file of bufferedFiles) {
    // Name each stored file after its MD5 hash.
    const hash = await file.md5();
    const url = await file.saveToDisk("fs", `documents/${hash}.${file.extname}`);
    results.push({ filename: file.filename, url });
  }
  return results;
}
// Stream mode: files are piped directly to S3 as they are received.
@upload({
  consume: "stream",
  destination: "s3",
  keyGenerator: (file) => `uploads/${Date.now()}-${file.filename}`,
  limits: { files: 5 },
})
async uploadToCloud() {
  const { uploadedFiles } = Sonamu.getContext();
  if (!uploadedFiles?.length) {
    throw new Error("No files");
  }
  // Already persisted to S3 — only metadata is reported.
  return uploadedFiles.map(({ filename, url, key }) => ({ filename, url, key }));
}
ํ์ผ ์ ๋ณด ํ์ธ
// Inspect the metadata exposed on an uploaded file.
@upload()
async uploadFile() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  console.log("Filename:", file.filename);
  console.log("MIME type:", file.mimetype);
  console.log("Size:", file.size);
  console.log("Extension:", file.extname);
  return { uploaded: true };
}
ํ์ผ ์ฒ๋ฆฌ (Buffer ๋ชจ๋)
Buffer ์ ๊ทผ
// Work on the raw Buffer directly (buffer mode only).
@upload()
async processImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  // Resize the in-memory image with sharp.
  const processed = await sharp(file.buffer)
    .resize(300, 300)
    .toBuffer();
  return { size: processed.length };
}
ํ์ผ ์ ์ฅ
// Save to disk, then read the stored URLs back off the file object.
@upload()
async uploadDocument() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  // saveToDisk takes (diskName, key) in that order.
  const hash = await file.md5();
  const url = await file.saveToDisk("fs", `uploads/${hash}.${file.extname}`);
  // After saveToDisk, url/signedUrl are populated on the file.
  console.log("URL:", file.url);
  console.log("Signed URL:", file.signedUrl);
  return { url, filename: file.filename, size: file.size };
}
MD5 ํด์ ๊ณ์ฐ
// Content-addressed storage: the MD5 hash becomes the file name, so
// re-uploading identical content lands on the same key.
@upload()
async uploadWithHash() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const hash = await file.md5();
  // extname may be false; omit the dot suffix in that case.
  const suffix = file.extname ? `.${file.extname}` : "";
  const url = await file.saveToDisk("fs", `uploads/${hash}${suffix}`);
  return { url, hash };
}
์คํ ๋ฆฌ์ง ๋๋ผ์ด๋ฒ ์ฌ์ฉ
Sonamu๋ ์ฌ๋ฌ ์คํ ๋ฆฌ์ง ๋๋ผ์ด๋ฒ๋ฅผ ์ ๊ณตํฉ๋๋ค.
// Persist to the "s3" disk configured in sonamu.config.ts.
@upload()
async uploadToS3() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const hash = await file.md5();
  const url = await file.saveToDisk("s3", `avatars/${hash}.${file.extname}`);
  return { url };
}
์คํ ๋ฆฌ์ง ๋๋ผ์ด๋ฒ๋ sonamu.config.ts์์ ์ค์ ํฉ๋๋ค. ์ฒซ ๋ฒ์งธ ํ๋ผ๋ฏธํฐ๋ก ๋์คํฌ ์ด๋ฆ์ ์ ๋ฌํฉ๋๋ค.
๋ค๋ฅธ ๋ฐ์ฝ๋ ์ดํฐ์ ํจ๊ป ์ฌ์ฉ
@transactional๊ณผ ํจ๊ป
// Combine @upload with @transactional so the file save and the DB insert
// commit (or roll back) together.
@upload()
@transactional()
async uploadAndSave() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const wdb = this.getDB("w");
  const hash = await file.md5();
  const url = await file.saveToDisk("fs", `documents/${hash}.${file.extname}`);
  await wdb.table("documents").insert({
    filename: file.filename,
    url,
    size: file.size,
    created_at: new Date(),
  });
  return { url };
}
클라이언트 사용 (Web)
Sonamu는 자동으로 파일 업로드 클라이언트 코드를 생성합니다.
Axios (๋จ์ผ ํ์ผ)
import { FileService } from "@/services/FileService" ;
// Append the file under the "file" field and call the generated
// axios-multipart client method.
const formData = new FormData ();
formData . append ( "file" , file );
const result = await FileService . uploadAvatar ( formData );
Axios (์ฌ๋ฌ ํ์ผ)
const formData = new FormData ();
files . forEach ( file => {
formData . append ( "files" , file );
});
const result = await FileService . uploadDocuments ( formData );
React ์์
import { useState } from "react" ;
import { FileService } from "@/services/FileService" ;
// Minimal uploader: posts the chosen file through the generated
// FileService client and disables the input while the request is in flight.
function FileUploader() {
  const [uploading, setUploading] = useState(false);

  const handleChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const picked = e.target.files?.[0];
    if (!picked) return;
    setUploading(true);
    try {
      const formData = new FormData();
      formData.append("file", picked);
      const result = await FileService.uploadAvatar(formData);
      console.log("Uploaded:", result.url);
    } catch (error) {
      console.error("Upload failed:", error);
    } finally {
      setUploading(false);
    }
  };

  return (
    <div>
      <input type="file" onChange={handleChange} disabled={uploading} />
      {uploading && <p>Uploading...</p>}
    </div>
  );
}
TanStack Query ์์
import { useMutation } from "@tanstack/react-query" ;
import { FileService } from "@/services/FileService" ;
// Wrap the generated multipart client in a TanStack Query mutation.
function useUploadFile() {
  return useMutation({
    mutationFn: (file: File) => {
      const body = new FormData();
      body.append("file", file);
      return FileService.uploadAvatar(body);
    },
    onSuccess: (data) => {
      console.log("Upload success:", data);
    },
    onError: (error) => {
      console.error("Upload failed:", error);
    },
  });
}
// Uploader component driven by the useUploadFile mutation's state flags.
function FileUploader() {
  const upload = useUploadFile();

  const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const picked = e.target.files?.[0];
    if (picked) {
      upload.mutate(picked);
    }
  };

  return (
    <div>
      <input type="file" onChange={handleChange} disabled={upload.isPending} />
      {upload.isPending && <p>Uploading...</p>}
      {upload.isSuccess && <p>Success: {upload.data.url}</p>}
      {upload.isError && <p>Error: {upload.error.message}</p>}
    </div>
  );
}
ํ์ผ ๊ฒ์ฆ
MIME ํ์
๊ฒ์ฆ
// Reject anything that is not a JPEG/PNG/GIF image.
@upload()
async uploadImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const allowedTypes = ["image/jpeg", "image/png", "image/gif"];
  if (!allowedTypes.includes(file.mimetype)) {
    throw new Error(`Invalid file type: ${file.mimetype}`);
  }
  return await this.processImage(file);
}
ํ์ผ ํฌ๊ธฐ ๊ฒ์ฆ
// Reject files larger than 10 MB.
@upload()
async uploadDocument() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const MAX_SIZE = 10 * 1024 * 1024; // 10 MB
  if (file.size > MAX_SIZE) {
    throw new Error("File too large");
  }
  return await this.saveDocument(file);
}
ํ์ผ ํ์ฅ์ ๊ฒ์ฆ
// Allow only jpg/png/pdf extensions (extname may be false).
@upload()
async uploadFile() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const allowedExtensions = ["jpg", "png", "pdf"];
  if (!file.extname || !allowedExtensions.includes(file.extname)) {
    throw new Error(`Invalid file extension: ${file.extname}`);
  }
  return await this.saveFile(file);
}
์ด๋ฏธ์ง ์ฒ๋ฆฌ
Sharp ์ฌ์ฉ
import sharp from "sharp" ;
// Resize the upload and build a thumbnail with sharp, then push both
// processed buffers to S3 through the storage driver (bypassing saveToDisk).
@upload()
async uploadAndResizeImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const [file] = bufferedFiles ?? [];
  if (!file) {
    throw new Error("No file");
  }
  const source = file.buffer;

  // Full-size variant, bounded to 800x600.
  const resized = await sharp(source)
    .resize(800, 600, { fit: "inside" })
    .jpeg({ quality: 80 })
    .toBuffer();

  // Square thumbnail.
  const thumbnail = await sharp(source)
    .resize(200, 200, { fit: "cover" })
    .jpeg({ quality: 70 })
    .toBuffer();

  const imageKey = `images/${Date.now()}.jpg`;
  await Sonamu.storage.use("s3").put(imageKey, resized);
  const imageUrl = await Sonamu.storage.use("s3").getUrl(imageKey);

  const thumbKey = `thumbnails/${Date.now()}.jpg`;
  await Sonamu.storage.use("s3").put(thumbKey, thumbnail);
  const thumbUrl = await Sonamu.storage.use("s3").getUrl(thumbKey);

  return { imageUrl, thumbUrl };
}
제약사항
1. @api 데코레이터 없이 독립 사용
@upload는 @api 데코레이터 없이 독립적으로 사용합니다:
// Correct usage
@ upload ()
async uploadFile () {}
// Unnecessary — @upload configures the API endpoint on its own
@ api ({ httpMethod: "POST" })
@ upload ()
async uploadFile () {}
2. httpMethod는 POST
@upload를 사용하면 자동으로 httpMethod: "POST"가 설정됩니다.
3. clients 자동 설정
@upload를 사용하면 clients 옵션이 자동으로 ["axios-multipart", "tanstack-mutation-multipart"]로 설정됩니다:
@ upload ()
async uploadFile () {
// Auto-configured: clients = ["axios-multipart", "tanstack-mutation-multipart"]
}
์์ ๋ชจ์
ํ๋กํ ์ด๋ฏธ์ง
์ฌ๋ฌ ํ์ผ ์
๋ก๋
CSV ํ์ผ ์ฒ๋ฆฌ
MD5 ํด์ ๊ธฐ๋ฐ ์ ์ฅ
Stream ๋ชจ๋ ๋์ฉ๋
class UserModelClass extends BaseModelClass {
@ upload ()
@ transactional ()
async uploadAvatar () {
const { user , bufferedFiles } = Sonamu . getContext ();
const file = bufferedFiles ?.[ 0 ];
if ( ! file ) {
throw new Error ( "No file uploaded" );
}
// ์ด๋ฏธ์ง ํ์
๊ฒ์ฆ
const allowedTypes = [ "image/jpeg" , "image/png" , "image/webp" ];
if ( ! allowedTypes . includes ( file . mimetype )) {
throw new Error ( "Invalid image type" );
}
// ์ด๋ฏธ์ง ์ฒ๋ฆฌ
const buffer = file . buffer ;
const processed = await sharp ( buffer )
. resize ( 300 , 300 , { fit: "cover" })
. jpeg ({ quality: 85 })
. toBuffer ();
// S3 ์
๋ก๋
const key = `avatars/ ${ user . id } / ${ Date . now () } .jpg` ;
await Sonamu . storage . use ( "s3" ). put ( key , processed );
const url = await Sonamu . storage . use ( "s3" ). getUrl ( key );
// DB ์
๋ฐ์ดํธ
const wdb = this . getDB ( "w" );
await wdb . table ( "users" )
. where ( "id" , user . id )
. update ({ avatar_url: url });
return { url };
}
}
class DocumentModelClass extends BaseModelClass {
@ upload ({ limits: { files: 10 } })
@ transactional ()
async uploadDocuments ( params : { projectId : number }) {
const { bufferedFiles } = Sonamu . getContext ();
const { projectId } = params ;
if ( ! bufferedFiles || bufferedFiles . length === 0 ) {
throw new Error ( "No files uploaded" );
}
const wdb = this . getDB ( "w" );
const results = [];
for ( const file of bufferedFiles ) {
// ํ์ผ ํ์
๊ฒ์ฆ
const allowedTypes = [
"application/pdf" ,
"application/msword" ,
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
];
if ( ! allowedTypes . includes ( file . mimetype )) {
throw new Error ( `Invalid file type: ${ file . filename } ` );
}
// ํ์ผ ์ ์ฅ
const md5 = await file . md5 ();
const key = `documents/ ${ projectId } / ${ md5 } . ${ file . extname } ` ;
const url = await file . saveToDisk ( "fs" , key );
// DB์ ๊ธฐ๋ก
const doc = await wdb . table ( "documents" ). insert ({
project_id: projectId ,
filename: file . filename ,
url ,
size: file . size ,
mimetype: file . mimetype ,
created_at: new Date ()
}). returning ( "*" );
results . push ( doc [ 0 ]);
}
return results ;
}
}
import Papa from "papaparse" ;
class ImportModelClass extends BaseModelClass {
  /**
   * Imports users from an uploaded CSV file. The whole import runs in a
   * transaction, so a single invalid row rolls everything back.
   */
  @upload()
  @transactional()
  async importUsers() {
    const { bufferedFiles } = Sonamu.getContext();
    const [file] = bufferedFiles ?? [];
    if (!file) {
      throw new Error("No file");
    }
    if (file.mimetype !== "text/csv") {
      throw new Error("CSV file required");
    }

    // Parse straight from the in-memory buffer.
    const csvText = file.buffer.toString("utf-8");
    const parsed = Papa.parse<UserImportRow>(csvText, {
      header: true,
      skipEmptyLines: true
    });
    if (parsed.errors.length > 0) {
      throw new Error("CSV parsing failed");
    }

    // Validate and persist row by row.
    const wdb = this.getDB("w");
    const results = [];
    for (const row of parsed.data) {
      // email and name are mandatory.
      if (!row.email || !row.name) {
        throw new Error(`Invalid row: ${JSON.stringify(row)}`);
      }
      const inserted = await wdb.table("users")
        .insert({
          email: row.email,
          name: row.name,
          phone: row.phone || null,
          created_at: new Date()
        })
        .returning("*");
      results.push(inserted[0]);
    }

    return { imported: results.length, users: results };
  }
}
class FileModelClass extends BaseModelClass {
  /**
   * Stores up to ten files keyed by their MD5 hash, so identical content
   * always lands on the same storage key (deduplication).
   */
  @upload({ limits: { files: 10 } })
  async uploadWithDeduplication() {
    const { bufferedFiles } = Sonamu.getContext();
    if (!bufferedFiles?.length) {
      throw new Error("No files");
    }

    const results = [];
    for (const file of bufferedFiles) {
      const hash = await file.md5();
      // Fall back to a generic extension when none could be detected.
      const ext = file.extname || "bin";
      const url = await file.saveToDisk("fs", `uploads/${hash}.${ext}`);
      results.push({ filename: file.filename, hash, url, size: file.size });
    }
    return results;
  }
}
class FileModelClass extends BaseModelClass {
@ upload ({
consume: "stream" ,
destination: "s3" ,
keyGenerator : ( file ) => `large-files/ ${ Date . now () } - ${ file . filename } ` ,
limits: { files: 3 , fileSize: 100 * 1024 * 1024 } // 100MB
})
async uploadLargeFiles () {
const { uploadedFiles } = Sonamu . getContext ();
if ( ! uploadedFiles || uploadedFiles . length === 0 ) {
throw new Error ( "No files" );
}
// ํ์ผ์ ์ด๋ฏธ S3์ ์
๋ก๋๋ ์ํ
return {
files: uploadedFiles . map (( file ) => ({
filename: file . filename ,
url: file . url ,
signedUrl: file . signedUrl ,
key: file . key ,
size: file . size ,
})),
};
}
}
참고사항
@api 데코레이터와의 관계
@upload 데코레이터는 내부적으로 자동으로 API 엔드포인트를 생성합니다. 따라서 @api 데코레이터를 별도로 사용할 필요가 없습니다.
자동 설정되는 값:
httpMethod: "POST" (고정)
clients: ["axios-multipart", "tanstack-mutation-multipart"] (multipart 전용 클라이언트)
guards: @upload 옵션의 guards 값이 API에 전달됨
description: @upload 옵션의 description 값이 API에 전달됨
// Using @upload alone (recommended)
@ upload ()
async uploadFile () {
// ...
}
// Unnecessary — @api adds nothing on top of @upload
@ api ({ httpMethod: "POST" }) // unnecessary
@ upload ()
async uploadFile () {
// ...
}
@upload는 파일 업로드에 최적화된 설정을 자동으로 적용하므로, @api 데코레이터를 추가로 사용할 필요가 없습니다.
๋ค์ ๋จ๊ณ