Auto-Upload Files to Google Drive from Node.js

Prerequisites

  • Completed Lesson 1 — Google Cloud project with Drive API enabled
  • Completed Lesson 2 — OAuth authorization with saved token
  • Google Drive folder IDs for each target directory

Getting Google Drive Folder IDs

Each Google Drive folder has a unique ID in its URL:

https://drive.google.com/drive/folders/1aBcDeFgHiJkLmNoPqRsTuVwXyZ012345
                                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                                       This is the folder ID

Open each target folder in your browser and copy the ID from the URL.


The Upload Script

Create src/upload-to-gdrive.js:

const fs = require('fs')
const path = require('path')
const { getDriveClient } = require('./gdrive-auth')

// All watched directories live under output/cloud, relative to this script's
// parent directory (src/ → project root).
const CLOUD_DIR = path.join(__dirname, '..', 'output', 'cloud')

// Map local directory names to Google Drive folder IDs
// (copy each ID from the folder's URL: drive.google.com/drive/folders/<ID>)
const FOLDER_MAP = {
  reports: 'YOUR_FOLDER_ID_1',
  images: 'YOUR_FOLDER_ID_2',
  exports: 'YOUR_FOLDER_ID_3',
}

// How often the continuous loop re-scans the directories for new files.
const CHECK_INTERVAL_MS = 60_000 // 1 minute

async function uploadFile(drive, localPath, folderId, fileName) {
  const fileStream = fs.createReadStream(localPath)

  const res = await drive.files.create({
    requestBody: {
      name: fileName,
      parents: [folderId],
    },
    media: {
      mimeType: 'application/json',
      body: fileStream,
    },
    fields: 'id, name',
  })

  return res.data
}

/**
 * Upload every pending .json file found in one watched directory, deleting
 * each local copy only after its upload succeeds.
 *
 * @param {object} drive - Authenticated Drive API client.
 * @param {string} dirName - Directory name under output/cloud (key of FOLDER_MAP).
 * @param {string} folderId - Google Drive folder ID the files go into.
 * @returns {Promise<number>} Count of files uploaded successfully.
 */
async function processDirectory(drive, dirName, folderId) {
  const localDir = path.join(CLOUD_DIR, dirName)

  // Directory may not exist yet if nothing has produced output for it.
  if (!fs.existsSync(localDir)) {
    return 0
  }

  // Only visible .json files qualify; dotfiles (editor/OS artifacts) are skipped.
  const pending = fs
    .readdirSync(localDir)
    .filter((name) => !name.startsWith('.') && name.endsWith('.json'))

  let successCount = 0

  // Uploads run one at a time so a failure on one file never blocks the rest.
  for (const name of pending) {
    const fullPath = path.join(localDir, name)

    try {
      const uploaded = await uploadFile(drive, fullPath, folderId, name)
      fs.unlinkSync(fullPath) // Delete local file after successful upload
      console.log(`${dirName}/${name} → Drive (id: ${uploaded.id})`)
      successCount += 1
    } catch (err) {
      // Leave the file in place; the next cycle will retry it.
      console.error(`${dirName}/${name}: ${err.message}`)
    }
  }

  return successCount
}

/**
 * Run one scan over every configured directory, uploading whatever is pending.
 *
 * @param {object} drive - Authenticated Drive API client.
 * @returns {Promise<number>} Total number of files uploaded this pass.
 */
async function runOnce(drive) {
  let total = 0

  // Directories are scanned sequentially, in FOLDER_MAP declaration order.
  for (const dirName of Object.keys(FOLDER_MAP)) {
    total += await processDirectory(drive, dirName, FOLDER_MAP[dirName])
  }

  return total
}

/**
 * Entry point: verify Drive auth, upload anything already pending, then keep
 * re-scanning on a fixed interval forever.
 *
 * Exits the process with code 1 if the saved OAuth token no longer works.
 */
async function main() {
  console.log('\n☁️  Google Drive uploader started')
  console.log('   Watching: output/cloud/{reports,images,exports}')
  console.log(`   Interval: every ${CHECK_INTERVAL_MS / 1000}s\n`)

  const drive = getDriveClient()

  // Verify auth works before entering the loop
  try {
    await drive.about.get({ fields: 'user' })
    console.log('   ✅ Google Drive authentication successful\n')
  } catch (err) {
    console.error(`\n❌ Google Drive authentication failed: ${err.message}`)
    console.error('   Run: npm run gdrive-auth\n')
    process.exit(1)
  }

  // Initial run
  const initial = await runOnce(drive)
  if (initial > 0) {
    console.log(`\n📤 Uploaded ${initial} file(s) on startup\n`)
  } else {
    console.log('   No files to upload right now\n')
  }

  // Continuous loop. A self-rescheduling setTimeout is used instead of
  // setInterval: the next scan is only scheduled after the current one
  // finishes, so a batch of uploads that takes longer than the interval can
  // never overlap with a second scan (which would race on uploading and
  // unlinking the same files).
  const tick = async () => {
    try {
      const timestamp = new Date().toLocaleTimeString()
      const count = await runOnce(drive)
      if (count > 0) {
        console.log(`\n📤 [${timestamp}] Uploaded ${count} file(s)\n`)
      }
    } catch (err) {
      console.error(
        `\n❌ [${new Date().toLocaleTimeString()}] Error: ${err.message}\n`,
      )
    } finally {
      setTimeout(tick, CHECK_INTERVAL_MS)
    }
  }

  setTimeout(tick, CHECK_INTERVAL_MS)
}

// Surface unexpected failures in main() instead of leaving a floating
// promise whose rejection would be an unhandled-rejection warning/crash.
main().catch((err) => {
  console.error(`\n❌ Fatal: ${err.message}`)
  process.exit(1)
})

Replace YOUR_FOLDER_ID_1, YOUR_FOLDER_ID_2, YOUR_FOLDER_ID_3 with your actual Google Drive folder IDs.

Using .env instead of hardcoding

For projects with a single upload folder, it's better to store the folder ID in .env rather than hardcoding it:

# .env
GDRIVE_FOLDER_ID=1aBcDeFgHiJkLmNoPqRsTuVwXyZ012345

Then load it at the top of the script:

require('dotenv').config()
const FOLDER_ID = process.env.GDRIVE_FOLDER_ID

Important: Set the folder ID only, not the full URL. For example, from https://drive.google.com/drive/folders/1aBcDeFgHiJkLmNoPqRsTuVwXyZ012345 the value to store is 1aBcDeFgHiJkLmNoPqRsTuVwXyZ012345.

Environment variables reference

| Variable | Required | Set in | Description |
| --- | --- | --- | --- |
| GDRIVE_FOLDER_ID | Yes (single-folder projects) | .env | Google Drive folder ID to upload into |
| GDRIVE_AUTH_PORT | No | .env or inline | Port for OAuth callback (default: 3456) |

The FOLDER_MAP approach (hardcoded in the script) is better when you upload to multiple folders. The .env approach is better when you have a single target folder — it keeps secrets out of source code and makes it easy to change per environment.


Add npm Script

{
  "scripts": {
    "gdrive-auth": "node src/gdrive-authorize.js",
    "upload-gdrive": "node src/upload-to-gdrive.js"
  }
}

How It Works

  1. Startup: Authenticates with Google Drive using the saved OAuth token
  2. Initial scan: Checks all configured directories for .json files
  3. Upload: For each file found, uploads it to the corresponding Google Drive folder
  4. Cleanup: Deletes the local file after successful upload
  5. Loop: Repeats every 60 seconds (configurable via CHECK_INTERVAL_MS)

Flow diagram

Local directories              Google Drive
─────────────────              ────────────
output/cloud/
├── reports/
│   └── file_123.json  ──────→  reports/ (folder ID 1)
├── images/
│   └── img_456.json   ──────→  images/ (folder ID 2)
└── exports/
    └── map_789.json   ──────→  exports/ (folder ID 3)

After upload, local files are deleted. The directories remain empty until new files appear.


Testing

Create a test file and run the uploader:

# Create a test file
echo '{"test": true}' > output/cloud/reports/test_upload.json

# Run the uploader
npm run upload-gdrive

Expected output:

☁️  Google Drive uploader started
   Watching: output/cloud/{reports,images,exports}
   Interval: every 60s

   ✅ Google Drive authentication successful

reports/test_upload.json → Drive (id: 1zYxWvUtSrQpOnMlKjIhGfEdCbA987654)

📤 Uploaded 1 file(s) on startup

Check your Google Drive folder — the file should be there.


Running in Production

To keep the uploader running in the background while you work:

# Option 1: Run in a separate terminal tab
npm run upload-gdrive

# Option 2: Run with nohup (persists after closing terminal)
nohup npm run upload-gdrive > upload.log 2>&1 &

# Check the log
tail -f upload.log

# Stop it
kill $(pgrep -f upload-to-gdrive)

Customization

Change the check interval

Edit CHECK_INTERVAL_MS in the script:

const CHECK_INTERVAL_MS = 30_000 // 30 seconds
const CHECK_INTERVAL_MS = 300_000 // 5 minutes

Add more directories

Add entries to FOLDER_MAP:

const FOLDER_MAP = {
  reports: 'FOLDER_ID_1',
  images: 'FOLDER_ID_2',
  exports: 'FOLDER_ID_3',
  'new-directory': 'FOLDER_ID_4', // Add new mappings here
}

Upload non-JSON files

Change the filter in processDirectory:

// Upload all files (not just .json)
const files = fs.readdirSync(dirPath).filter((f) => !f.startsWith('.'))

And adjust the MIME type in uploadFile:

// Auto-detect MIME type
const mimeTypes = {
  '.json': 'application/json',
  '.csv': 'text/csv',
  '.png': 'image/png',
  '.jpg': 'image/jpeg',
}
const ext = path.extname(fileName)
const mimeType = mimeTypes[ext] || 'application/octet-stream'

Token Expiry

The OAuth refresh token is long-lived but can expire if:

  • it goes unused for an extended period (around six months)
  • you revoke the app's access in your Google Account security settings
  • the OAuth consent screen is still in "Testing" mode (refresh tokens then expire after 7 days)

If uploads start failing with authentication errors, re-run:

npm run gdrive-auth

Complete File Summary

| File | Purpose | Run |
| --- | --- | --- |
| credentials/gdrive-oauth.json | OAuth client credentials | Downloaded from GCP Console |
| credentials/gdrive-token.json | Saved access + refresh tokens | Auto-created by gdrive-auth |
| src/gdrive-auth.js | Shared authentication helper | Imported by other scripts |
| src/gdrive-authorize.js | One-time browser authorization | npm run gdrive-auth (once) |
| src/upload-to-gdrive.js | Continuous file uploader | npm run upload-gdrive |