File upload

The GraphQL package provides a way to upload files through GraphQL endpoints using GraphQL mutations. To define an endpoint with file upload capability, the `graphql:Upload` type can be used as an input parameter of a resolver function. The `graphql:Upload` type represents the details of the file to be uploaded and can be used only with remote functions. The value of the `graphql:Upload` type is extracted from the HTTP multipart request received by the GraphQL endpoint. This example shows how to implement a GraphQL endpoint that can be used to upload files.

For more information on the underlying package, see the GraphQL package.

import ballerina/graphql;
import ballerina/io;

service /fileUpload on new graphql:Listener(4000) {

    // Stores the file information that needs to be shared between the remote
    // and resource functions.
    string[] uploadedFiles = [];

    // Remote functions can use the `graphql:Upload` type as an input
    // parameter type.
    remote function singleFileUpload(graphql:Upload file)
        returns string|error {

        // Access the file name from the `graphql:Upload` type parameter.
        // Similarly, the MIME type can be accessed as `file.mimeType` and the
        // encoding as `file.encoding`. Except for the `byteStream` field, all
        // other fields of the `graphql:Upload` record are `string` values.
        string fileName = file.fileName;
        string path = string`./uploads/${fileName}`;

        // Access the byte stream of the file from the `graphql:Upload` type
        // parameter. The type of the `byteStream` field is
        // `stream<byte[], io:Error?>`.
        stream<byte[], io:Error?> byteStream = file.byteStream;

        // Store the received file using the Ballerina `io` package. If an
        // `error` occurs during the file write, it is returned as the
        // output of the resolver function.
        check io:fileWriteBlocksFromStream(path, byteStream);

        // Return a message if the upload is successful.
        return "Successfully Uploaded";
    }

    // Remote functions in GraphQL services can also use `graphql:Upload[]` as
    // an input parameter type. Therefore, remote functions can accept an array
    // of `graphql:Upload` values. This can be used to upload multiple files in
    // a single request.
    remote function multipleFileUpload(graphql:Upload[] files)
        returns string[]|error {

        // Iterate the `graphql:Upload` array to store the files.
        foreach int i in 0..< files.length() {
            graphql:Upload file = files[i];
            stream<byte[], io:Error?> byteStream = file.byteStream;
            string fileName = file.fileName;
            string path = string`./uploads/${fileName}`;
            check io:fileWriteBlocksFromStream(path, byteStream);
            self.uploadedFiles.push(file.fileName);
        }
        return self.uploadedFiles;
    }

    resource function get getUploadedFileNames() returns string[] {
        return self.uploadedFiles;
    }
}
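
For reference, the `graphql:Upload` input type used above is a record. The following is a sketch of its approximate shape, inferred from the fields this example accesses (`fileName`, `mimeType`, `encoding`, and `byteStream`) rather than copied from the package source; see the GraphQL package API docs for the authoritative definition.

import ballerina/io;

// Approximate shape of `graphql:Upload`, inferred from this example.
public type Upload record {|
    string fileName;   // Name of the uploaded file.
    string mimeType;   // MIME type of the file (e.g., `image/png`).
    string encoding;   // Content encoding of the file.
    stream<byte[], io:Error?> byteStream; // Byte stream of the file content.
|};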

bal run graphql_file_upload.bal
# Send an HTTP multipart request to upload a single file via the GraphQL endpoint using a cURL command.
# The first part of the request is `operations`, which includes a `JSON-encoded` map value.
# The `operations` map value is similar to a standard GraphQL POST request, in which all the variable values related to the file upload are `null`.
# `operations`: { "query": "mutation($file: Upload!) { singleFileUpload(file: $file) }", "variables": { "file": null } }
# The second part of the request is the `map` field, which includes a `JSON-encoded` map of the files that occur in the `operations`.
# Each `key` is a file field name and the `value` is an array of paths at which the file occurs in the `operations`.
# `map`: { "0": ["variables.file"] }
# A file can then be added as the next part of the request with a unique, arbitrary field name.
# 0=@file1.png
 curl localhost:4000/fileUpload \
  -F operations='{ "query": "mutation($file: Upload!) { singleFileUpload(file: $file) }", "variables": { "file": null } }' \
  -F map='{ "0": ["variables.file"] }' \
  -F 0=@file1.png
 {"data":{"singleFileUpload":"Successfully Uploaded"}}
# Now, send a request with multiple files.
# The variable value related to the files is an array of `null` values.
# `operations`: { "query": "mutation($file: [Upload!]!) { multipleFileUpload(files: $file) }", "variables": { "file": [null, null] } }
# As in the single file upload, the `map` value is a JSON-encoded map of the paths at which the files occur in the `operations`.
# Since the `operations` has an array of `null` values, an array index is included in the path value.
# E.g., in `variables.file.0`, `0` is the array index.
# `map`: { "0": ["variables.file.0"], "1": ["variables.file.1"] }
# Files can be added as the next fields of the request with unique, arbitrary field names.
# 0=@file1.png
# 1=@file2.png
 curl localhost:4000/fileUpload \
  -F operations='{ "query": "mutation($file: [Upload!]!) { multipleFileUpload(files: $file) }", "variables": { "file": [null, null] } }' \
  -F map='{ "0": ["variables.file.0"], "1": ["variables.file.1"]}' \
  -F 0=@file1.png \
  -F 1=@file2.png
  {"data":{"multipleFileUpload":["file1.png", "file2.png"]}}