@upload 데코레이터는 파일 업로드를 처리하는 API를 작성합니다. Multipart form-data 요청을 자동으로 파싱하고, 파일 객체를 제공합니다. @api 데코레이터 없이도 독립적으로 사용할 수 있으며, 자동으로 POST 메서드와 multipart 클라이언트(axios-multipart, tanstack-mutation-multipart)를 설정합니다.
업로드 모드
Sonamu는 두 가지 파일 업로드 모드를 제공합니다:
| 모드 | 옵션 | Context 속성 | 파일 타입 | 특징 |
|---|---|---|---|---|
| Buffer (기본) | `consume: "buffer"` 또는 생략 | `bufferedFiles` | `BufferedFile[]` | 메모리에 로드, MD5 계산/이미지 처리 등 유연한 작업 가능 |
| Stream | `consume: "stream"` | `uploadedFiles` | `UploadedFile[]` | 즉시 저장소로 스트리밍, 대용량 파일에 적합 |
Buffer 모드 (기본)
Buffer 모드는 파일을 메모리에 로드한 후 처리합니다. MD5 해시 계산, 이미지 리사이징 등 파일 내용을 직접 다뤄야 할 때 사용합니다.
import { BaseModelClass, upload, Sonamu } from "sonamu";

class FileModelClass extends BaseModelClass {
  // Handles a single-file avatar upload in Buffer mode: the file is held in
  // memory so its content (hash, bytes) can be inspected before saving.
  @upload() // Default: consume: "buffer"
  async uploadAvatar() {
    const { bufferedFiles } = Sonamu.getContext();
    const file = bufferedFiles?.[0];
    if (!file) {
      throw new Error("No file uploaded");
    }
    // Use the MD5 hash as the storage key to deduplicate identical uploads
    const md5 = await file.md5();
    const key = `avatars/${md5}.${file.extname}`;
    const url = await file.saveToDisk("fs", key);
    return { url, filename: file.filename, size: file.size };
  }
}
BufferedFile 객체
// API reference shape for a file parsed in Buffer mode (contents held in memory).
class BufferedFile {
  // Original file name as sent by the client
  get filename(): string;
  // MIME type
  get mimetype(): string;
  // File size (bytes)
  get size(): number;
  // Extension in lowercase (e.g. "jpg", "png"); false when none could be derived
  get extname(): string | false;
  // Raw file contents loaded into memory
  get buffer(): Buffer;
  // URL after saveToDisk (unsigned)
  get url(): string;
  // Signed URL after saveToDisk
  get signedUrl(): string;
  // Access to the underlying Fastify MultipartFile
  get raw(): MultipartFile;
  // Compute the MD5 hash of the contents
  async md5(): Promise<string>;
  // Persist the file to a disk; returns the stored URL
  async saveToDisk(diskName: DriverKey, key: string): Promise<string>;
}
saveToDisk의 파라미터 순서는 (diskName, key)입니다.
Stream 모드
Stream 모드는 파일을 메모리에 로드하지 않고 즉시 저장소로 스트리밍합니다. 대용량 파일 업로드에 적합합니다.
@upload({
  consume: "stream",
  destination: "s3", // Name of the disk to stream files to
  keyGenerator: (file) => `uploads/${Date.now()}-${file.filename}`, // Storage-key generator
  limits: { files: 5 }
})
async uploadLargeFiles() {
  const { uploadedFiles } = Sonamu.getContext();
  if (!uploadedFiles || uploadedFiles.length === 0) {
    throw new Error("No files uploaded");
  }
  // Files have already been streamed to storage at this point;
  // only metadata is available here.
  return {
    files: uploadedFiles.map((file) => ({
      filename: file.filename,
      url: file.url,
      key: file.key,
      size: file.size,
    })),
  };
}
UploadedFile 객체
Stream 모드에서는 파일이 이미 저장소에 업로드된 상태로, 메타데이터에만 접근할 수 있습니다.
// API reference shape for a file handled in Stream mode: the file is already
// in storage, so only metadata (and an explicit download) is available.
class UploadedFile {
  // Original file name as sent by the client
  get filename(): string;
  // MIME type
  get mimetype(): string;
  // File size (bytes)
  get size(): number;
  // Extension in lowercase (e.g. "jpg", "png"); false when none could be derived
  get extname(): string | false;
  // Stored URL (unsigned)
  get url(): string;
  // Stored signed URL
  get signedUrl(): string;
  // Key within the storage disk
  get key(): string;
  // Name of the disk the file was stored on
  get diskName(): DriverKey;
  // Download the file back from storage (for deferred processing)
  async download(): Promise<Buffer>;
}
업로드 API에 적용할 가드를 지정합니다. 인증이 필요한 업로드 API에서 사용합니다.
@upload({ guards: ["user"] })
async uploadAvatar() {
  const { user, bufferedFiles } = Sonamu.getContext();
  // The "user" guard is applied, so only authenticated users can reach this
  // ...
}
description
API 설명을 지정합니다. 자동 생성되는 API 문서에 표시됩니다.
@ upload ({ description: "μ¬μ©μ νλ‘ν μ΄λ―Έμ§ μ
λ‘λ" })
async uploadAvatar () {
// ...
}
파일 업로드 제한을 설정합니다. Fastify multipart의 limits 옵션을 그대로 받습니다.
type UploadDecoratorOptions = {
  // Common options
  guards?: GuardKey[]; // Guards to apply (e.g. ["user", "admin"])
  description?: string; // API description
  limits?: {
    fileSize?: number; // Maximum file size (bytes)
    files?: number; // Maximum number of files
    fields?: number; // Maximum number of fields
    fieldSize?: number; // Maximum field size
    parts?: number; // Maximum number of parts
  };
} & (
  // Buffer mode
  | { consume?: "buffer" }
  // Stream mode
  | {
      consume: "stream";
      destination: DriverKey;
      keyGenerator?: (file: { filename: string; mimetype: string }) => string;
    }
);
@upload({
  limits: {
    fileSize: 10 * 1024 * 1024, // 10MB
    files: 5, // at most 5 files
  }
})
async uploadWithLimits() {
  const { bufferedFiles } = Sonamu.getContext();
  // ...
}
단일 파일 (Buffer)
다중 파일 (Buffer)
Stream 모드
@upload()
async uploadAvatar() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  const md5 = await file.md5();
  return {
    filename: file.filename,
    size: file.size,
    md5
  };
}
@upload({ limits: { files: 10 } })
async uploadDocuments() {
  const { bufferedFiles } = Sonamu.getContext();
  if (!bufferedFiles || bufferedFiles.length === 0) {
    throw new Error("No files");
  }
  const results = [];
  for (const file of bufferedFiles) {
    // Key each file by its MD5 hash to avoid storing duplicates
    const md5 = await file.md5();
    const key = `documents/${md5}.${file.extname}`;
    const url = await file.saveToDisk("fs", key);
    results.push({
      filename: file.filename,
      url
    });
  }
  return results;
}
@upload({
  consume: "stream",
  destination: "s3",
  keyGenerator: (file) => `uploads/${Date.now()}-${file.filename}`,
  limits: { files: 5 }
})
async uploadToCloud() {
  const { uploadedFiles } = Sonamu.getContext();
  if (!uploadedFiles || uploadedFiles.length === 0) {
    throw new Error("No files");
  }
  // Files have already been streamed to S3; return their metadata
  return uploadedFiles.map((file) => ({
    filename: file.filename,
    url: file.url,
    key: file.key
  }));
}
파일 정보 확인
@upload()
async uploadFile() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  console.log("Filename:", file.filename);
  console.log("MIME type:", file.mimetype);
  console.log("Size:", file.size);
  console.log("Extension:", file.extname);
  return { uploaded: true };
}
파일 처리 (Buffer 모드)
Buffer 접근
@upload()
async processImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // Direct access to the in-memory buffer
  const buffer = file.buffer;
  // Image processing
  const processed = await sharp(buffer)
    .resize(300, 300)
    .toBuffer();
  return { size: processed.length };
}
파일 저장
@upload()
async uploadDocument() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // Save to disk — parameter order is (diskName, key)
  const md5 = await file.md5();
  const key = `uploads/${md5}.${file.extname}`;
  const url = await file.saveToDisk("fs", key);
  // Stored URLs become available after saveToDisk
  console.log("URL:", file.url);
  console.log("Signed URL:", file.signedUrl);
  return { url, filename: file.filename, size: file.size };
}
MD5 해시 계산
@upload()
async uploadWithHash() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // Compute the MD5 hash (useful for duplicate detection)
  const hash = await file.md5();
  // Use the hash as the file name; extname may be false, so append it conditionally
  const key = `uploads/${hash}${file.extname ? `.${file.extname}` : ''}`;
  const url = await file.saveToDisk("fs", key);
  return { url, hash };
}
스토리지 드라이버 사용
Sonamu는 여러 스토리지 드라이버를 제공합니다.
@upload()
async uploadToS3() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // Save to the S3 disk (configured in sonamu.config.ts)
  const md5 = await file.md5();
  const key = `avatars/${md5}.${file.extname}`;
  const url = await file.saveToDisk("s3", key);
  return { url };
}
스토리지 드라이버는 sonamu.config.ts에서 설정합니다. 첫 번째 파라미터로 디스크 이름을 전달합니다.
다른 데코레이터와 함께 사용
@transactional과 함께
@upload()
@transactional()
async uploadAndSave() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  const wdb = this.getDB("w");
  // File save + DB insert run inside one transaction
  const md5 = await file.md5();
  const key = `documents/${md5}.${file.extname}`;
  const url = await file.saveToDisk("fs", key);
  await wdb.table("documents").insert({
    filename: file.filename,
    url,
    size: file.size,
    created_at: new Date()
  });
  return { url };
}
클라이언트 사용 (Web)
Sonamu는 자동으로 파일 업로드 클라이언트 코드를 생성합니다.
Axios (단일 파일)
import { FileService } from "@/services/FileService";

// Build a multipart body and call the generated single-file endpoint
const formData = new FormData();
formData.append("file", file);
const result = await FileService.uploadAvatar(formData);
Axios (여러 파일)
// Append every file under the same "files" field for a multi-file upload
const formData = new FormData();
files.forEach((file) => {
  formData.append("files", file);
});
const result = await FileService.uploadDocuments(formData);
React 예제
import { useState } from "react";
import { FileService } from "@/services/FileService";

// Minimal controlled file input that uploads on selection and
// disables itself while the request is in flight.
function FileUploader() {
  const [uploading, setUploading] = useState(false);

  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;
    setUploading(true);
    try {
      const formData = new FormData();
      formData.append("file", file);
      const result = await FileService.uploadAvatar(formData);
      console.log("Uploaded:", result.url);
    } catch (error) {
      console.error("Upload failed:", error);
    } finally {
      setUploading(false);
    }
  };

  return (
    <div>
      <input
        type="file"
        onChange={handleFileChange}
        disabled={uploading}
      />
      {uploading && <p>Uploading...</p>}
    </div>
  );
}
TanStack Query 예제
import { useMutation } from "@tanstack/react-query";
import { FileService } from "@/services/FileService";

// Wraps the generated upload call in a TanStack Query mutation.
function useUploadFile() {
  return useMutation({
    mutationFn: (file: File) => {
      const formData = new FormData();
      formData.append("file", file);
      return FileService.uploadAvatar(formData);
    },
    onSuccess: (data) => {
      console.log("Upload success:", data);
    },
    onError: (error) => {
      console.error("Upload failed:", error);
    }
  });
}
// File input driven by the useUploadFile mutation; renders
// pending/success/error states from the mutation object.
function FileUploader() {
  const upload = useUploadFile();

  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (file) {
      upload.mutate(file);
    }
  };

  return (
    <div>
      <input
        type="file"
        onChange={handleFileChange}
        disabled={upload.isPending}
      />
      {upload.isPending && <p>Uploading...</p>}
      {upload.isSuccess && <p>Success: {upload.data.url}</p>}
      {upload.isError && <p>Error: {upload.error.message}</p>}
    </div>
  );
}
파일 검증
MIME 타입 검증
@upload()
async uploadImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // Reject anything that is not a known image MIME type
  const allowedTypes = ["image/jpeg", "image/png", "image/gif"];
  if (!allowedTypes.includes(file.mimetype)) {
    throw new Error(`Invalid file type: ${file.mimetype}`);
  }
  return await this.processImage(file);
}
파일 크기 검증
@upload()
async uploadDocument() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  const maxSize = 10 * 1024 * 1024; // 10MB
  if (file.size > maxSize) {
    throw new Error("File too large");
  }
  return await this.saveDocument(file);
}
파일 확장자 검증
@upload()
async uploadFile() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  // extname can be false, so check presence before whitelisting
  const allowedExtensions = ["jpg", "png", "pdf"];
  if (!file.extname || !allowedExtensions.includes(file.extname)) {
    throw new Error(`Invalid file extension: ${file.extname}`);
  }
  return await this.saveFile(file);
}
이미지 처리
Sharp 사용
import sharp from "sharp";

@upload()
async uploadAndResizeImage() {
  const { bufferedFiles } = Sonamu.getContext();
  const file = bufferedFiles?.[0];
  if (!file) {
    throw new Error("No file");
  }
  const buffer = file.buffer;
  // Resize the main image
  const resized = await sharp(buffer)
    .resize(800, 600, { fit: "inside" })
    .jpeg({ quality: 80 })
    .toBuffer();
  // Generate a thumbnail
  const thumbnail = await sharp(buffer)
    .resize(200, 200, { fit: "cover" })
    .jpeg({ quality: 70 })
    .toBuffer();
  // Upload to S3 by writing the processed Buffers directly
  const imageKey = `images/${Date.now()}.jpg`;
  await Sonamu.storage.use("s3").put(imageKey, resized);
  const imageUrl = await Sonamu.storage.use("s3").getUrl(imageKey);
  const thumbKey = `thumbnails/${Date.now()}.jpg`;
  await Sonamu.storage.use("s3").put(thumbKey, thumbnail);
  const thumbUrl = await Sonamu.storage.use("s3").getUrl(thumbKey);
  return { imageUrl, thumbUrl };
}
제약사항
1. @api 데코레이터 없이 독립 사용
@upload는 @api 데코레이터 없이 독립적으로 사용합니다:
// μ¬λ°λ₯Έ μ¬μ©λ²
// Correct usage
@upload()
async uploadFile() {}

// Unnecessary — @upload configures the API automatically
@api({ httpMethod: "POST" })
@upload()
async uploadFile() {}
2. httpMethod는 POST
@upload를 사용하면 자동으로 httpMethod: "POST"가 설정됩니다.
3. clients 자동 설정
@upload를 사용하면 clients 옵션이 자동으로 ["axios-multipart", "tanstack-mutation-multipart"]로 설정됩니다:
@upload()
async uploadFile() {
  // clients: ["axios-multipart", "tanstack-mutation-multipart"]
}
예제 모음
프로필 이미지
여러 파일 업로드
CSV 파일 처리
MD5 해시 기반 저장
Stream 모드 대용량
class UserModelClass extends BaseModelClass {
@ upload ()
@ transactional ()
async uploadAvatar () {
const { user , bufferedFiles } = Sonamu . getContext ();
const file = bufferedFiles ?.[ 0 ];
if ( ! file ) {
throw new Error ( "No file uploaded" );
}
// μ΄λ―Έμ§ νμ
κ²μ¦
const allowedTypes = [ "image/jpeg" , "image/png" , "image/webp" ];
if ( ! allowedTypes . includes ( file . mimetype )) {
throw new Error ( "Invalid image type" );
}
// μ΄λ―Έμ§ μ²λ¦¬
const buffer = file . buffer ;
const processed = await sharp ( buffer )
. resize ( 300 , 300 , { fit: "cover" })
. jpeg ({ quality: 85 })
. toBuffer ();
// S3 μ
λ‘λ
const key = `avatars/ ${ user . id } / ${ Date . now () } .jpg` ;
await Sonamu . storage . use ( "s3" ). put ( key , processed );
const url = await Sonamu . storage . use ( "s3" ). getUrl ( key );
// DB μ
λ°μ΄νΈ
const wdb = this . getDB ( "w" );
await wdb . table ( "users" )
. where ( "id" , user . id )
. update ({ avatar_url: url });
return { url };
}
}
class DocumentModelClass extends BaseModelClass {
@ upload ({ limits: { files: 10 } })
@ transactional ()
async uploadDocuments ( params : { projectId : number }) {
const { bufferedFiles } = Sonamu . getContext ();
const { projectId } = params ;
if ( ! bufferedFiles || bufferedFiles . length === 0 ) {
throw new Error ( "No files uploaded" );
}
const wdb = this . getDB ( "w" );
const results = [];
for ( const file of bufferedFiles ) {
// νμΌ νμ
κ²μ¦
const allowedTypes = [
"application/pdf" ,
"application/msword" ,
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
];
if ( ! allowedTypes . includes ( file . mimetype )) {
throw new Error ( `Invalid file type: ${ file . filename } ` );
}
// νμΌ μ μ₯
const md5 = await file . md5 ();
const key = `documents/ ${ projectId } / ${ md5 } . ${ file . extname } ` ;
const url = await file . saveToDisk ( "fs" , key );
// DBμ κΈ°λ‘
const doc = await wdb . table ( "documents" ). insert ({
project_id: projectId ,
filename: file . filename ,
url ,
size: file . size ,
mimetype: file . mimetype ,
created_at: new Date ()
}). returning ( "*" );
results . push ( doc [ 0 ]);
}
return results ;
}
}
import Papa from "papaparse";

class ImportModelClass extends BaseModelClass {
  // Parses an uploaded CSV of users and inserts each validated row,
  // wrapped in a transaction so a bad row rolls everything back.
  @upload()
  @transactional()
  async importUsers() {
    const { bufferedFiles } = Sonamu.getContext();
    const file = bufferedFiles?.[0];
    if (!file) {
      throw new Error("No file");
    }
    if (file.mimetype !== "text/csv") {
      throw new Error("CSV file required");
    }
    // Parse the CSV straight from the in-memory buffer
    const text = file.buffer.toString("utf-8");
    const parsed = Papa.parse<UserImportRow>(text, {
      header: true,
      skipEmptyLines: true
    });
    if (parsed.errors.length > 0) {
      throw new Error("CSV parsing failed");
    }
    // Validate and persist each row
    const wdb = this.getDB("w");
    const results = [];
    for (const row of parsed.data) {
      // Validate required fields
      if (!row.email || !row.name) {
        throw new Error(`Invalid row: ${JSON.stringify(row)}`);
      }
      // Insert
      const user = await wdb.table("users")
        .insert({
          email: row.email,
          name: row.name,
          phone: row.phone || null,
          created_at: new Date()
        })
        .returning("*");
      results.push(user[0]);
    }
    return {
      imported: results.length,
      users: results
    };
  }
}
class FileModelClass extends BaseModelClass {
  // Stores each uploaded file under its MD5 hash so identical
  // content never occupies storage twice.
  @upload({ limits: { files: 10 } })
  async uploadWithDeduplication() {
    const { bufferedFiles } = Sonamu.getContext();
    if (!bufferedFiles || bufferedFiles.length === 0) {
      throw new Error("No files");
    }
    const results = [];
    for (const file of bufferedFiles) {
      // Compute the MD5 hash
      const hash = await file.md5();
      // Key by hash (deduplication); fall back to "bin" when no extension
      const ext = file.extname || "bin";
      const key = `uploads/${hash}.${ext}`;
      const url = await file.saveToDisk("fs", key);
      results.push({
        filename: file.filename,
        hash,
        url,
        size: file.size
      });
    }
    return results;
  }
}
class FileModelClass extends BaseModelClass {
@ upload ({
consume: "stream" ,
destination: "s3" ,
keyGenerator : ( file ) => `large-files/ ${ Date . now () } - ${ file . filename } ` ,
limits: { files: 3 , fileSize: 100 * 1024 * 1024 } // 100MB
})
async uploadLargeFiles () {
const { uploadedFiles } = Sonamu . getContext ();
if ( ! uploadedFiles || uploadedFiles . length === 0 ) {
throw new Error ( "No files" );
}
// νμΌμ μ΄λ―Έ S3μ μ
λ‘λλ μν
return {
files: uploadedFiles . map (( file ) => ({
filename: file . filename ,
url: file . url ,
signedUrl: file . signedUrl ,
key: file . key ,
size: file . size ,
})),
};
}
}
참고사항
@api 데코레이터와의 관계
@upload 데코레이터는 내부적으로 자동으로 API 엔드포인트를 생성합니다. 따라서 @api 데코레이터를 별도로 사용할 필요가 없습니다.
자동 설정되는 값:
- httpMethod: "POST" (고정)
- clients: ["axios-multipart", "tanstack-mutation-multipart"] (multipart 전용 클라이언트)
- guards: @upload 옵션의 guards 값이 API에 전달됨
- description: @upload 옵션의 description 값이 API에 전달됨
// Use @upload alone (recommended)
@upload()
async uploadFile() {
  // ...
}

// Unnecessary — no need to combine with @api
@api({ httpMethod: "POST" }) // unnecessary
@upload()
async uploadFile() {
  // ...
}
@upload는 파일 업로드에 최적화된 설정을 자동으로 적용하므로, @api 데코레이터를 추가로 사용할 필요가 없습니다.
다음 단계
- @api: API 엔드포인트 만들기
- @transactional: 트랜잭션으로 안전하게 저장하기
- Storage 드라이버: S3, Local 등 스토리지 사용하기
- 파일 처리: 이미지/문서 처리 가이드