如何使用Firebase Cloud Functions将FTP文件上传到Firebase存储?

3

在同一个Firebase项目中,使用云函数(以 Node.js 编写),首先下载FTP文件(使用npm ftp模块),然后尝试将其上传到Firebase存储。

目前所有的尝试都失败了,文档也没有帮助……如果有专家能给出建议/提示,将不胜感激。

以下代码使用了两种不同的方法:fs.createWriteStream()和bucket.file().createWriteStream()。但两者都失败了,原因各异(请参见代码中的错误消息)。

'use strict'

// [START import]
let admin = require('firebase-admin')
let functions = require('firebase-functions')
const gcpStorage = require('@google-cloud/storage')()
admin.initializeApp(functions.config().firebase)    
var FtpClient = require('ftp')
var fs = require('fs')
var os = require('os')
var path = require('path')
// [END import]

// [START Configs]
// Firebase Storage is configured with the following rules and grants read write access to everyone
/*
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write;
    }
  }
}
*/
// Replace this with your project id, will be used by: const bucket = gcpStorage.bucket(firebaseProjectID)
// NOTE(review): per the accepted answer below, the default Storage bucket is
// named '<project_id>.appspot.com' — the bare project id alone fails when
// opening a bucket write stream.
const firebaseProjectID = 'your_project_id'
// Public FTP server, uploaded files are removed after 48 hours ! Upload new ones when needed for testing
const CONFIG = {
  test_ftp: {
    // Directory on the FTP server that holds the short-lived test files.
    source_path: '/48_hour',
    ftp: {
      // Public anonymous FTP host used for testing.
      host: 'ftp.uconn.edu'
    }
  }
}
// Shorthand for the single FTP source used by the functions below.
const SOURCE_FTP  = CONFIG.test_ftp
// [END Configs]

// [START saveFTPFileWithFSCreateWriteStream]
/**
 * Downloads `file_name` from the configured FTP source and writes it to the
 * local filesystem with fs.createWriteStream().
 *
 * Fix: the Cloud Functions runtime filesystem is read-only except for the
 * temp directory, so writing to the current working directory failed with
 * "Error: EROFS: read-only file system". The file is now written under
 * os.tmpdir() instead.
 *
 * @param {string} file_name - File name under SOURCE_FTP.source_path on the FTP server.
 */
function saveFTPFileWithFSCreateWriteStream(file_name) {
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      // os.tmpdir() (/tmp) is the only writable location in Cloud Functions.
      const localPath = path.join(os.tmpdir(), file_name)
      stream.once('close', function() {
        ftpSource.end()
        // Log only once the download stream has fully closed.
        console.log('File downloaded: ', localPath)
      })
      stream.pipe(fs.createWriteStream(localPath))
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}
// [END saveFTPFileWithFSCreateWriteStream]

// [START saveFTPFileWithBucketUpload]    
/**
 * Downloads `file_name` from the configured FTP source and streams it
 * directly into the project's default Storage bucket.
 *
 * Fix: the default Firebase Storage bucket is named
 * '<project_id>.appspot.com', not just '<project_id>'. Referencing the bare
 * project id made the upload stream emit an unhandled 'error' event
 * (the duplexify stack trace in the original question). An explicit
 * 'error' handler is also attached so failures are logged instead of
 * crashing the function.
 *
 * @param {string} file_name - File name under SOURCE_FTP.source_path; also
 *   used as the destination object name in the bucket (it need not exist).
 */
function saveFTPFileWithBucketUpload(file_name) {
  const bucket = gcpStorage.bucket(firebaseProjectID + '.appspot.com')
  const file = bucket.file(file_name)
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      const bucketWriteStream = file.createWriteStream()
      // Surface upload failures instead of leaving an uncaught 'error' event.
      bucketWriteStream.on('error', function(uploadErr) {
        console.error('Upload failed for', file_name, uploadErr)
      })
      bucketWriteStream.on('finish', function() {
        console.log('File uploaded to bucket: ', file_name)
      })
      stream.pipe(bucketWriteStream)
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}    
// [END saveFTPFileWithBucketUpload]

// [START database triggers]
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream
// function to save the file in the default project storage bucket
exports.dbTriggersFSCreateWriteStream = functions.database
  .ref('/ftp_fs_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithFSCreateWriteStream(trigger.file_name)
    // This fails with the following error in firebase console:
    // Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
  })
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload
// function to save the file in the default project storage bucket
exports.dbTriggersBucketUpload = functions.database
  .ref('/ftp_bucket_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithBucketUpload(trigger.file_name)
    // This fails with the following error in firebase console:
    /*
    Error: Uncaught, unspecified "error" event. ([object Object])
    at Pumpify.emit (events.js:163:17)
    at Pumpify.onerror (_stream_readable.js:579:12)
    at emitOne (events.js:96:13)
    at Pumpify.emit (events.js:188:7)
    at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15)
    at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10
    at _combinedTickCallback (internal/process/next_tick.js:67:7)
    at process._tickDomainCallback (internal/process/next_tick.js:122:9)
    */
  })
// [END database triggers]

1
请编辑您的问题,包括相关功能的代码。 - Jen Person
抱歉,我已经添加了代码来测试两种不同的方法,但都没有成功。 - denisgmag
1个回答

4
我终于找到了正确的实现方法。
1)确保正确引用存储桶。最初我只使用了我的 project_id,没有在末尾加上 '.appspot.com'。
const bucket = gsc.bucket('<project_id>.appspot.com')

2) 首先创建一个存储桶流,然后将来自FTP获取调用的流传输到bucketWriteStream中。请注意,file_name将是保存文件的名称(此文件事先不必存在)。

// Fetch `filePath` from the FTP server and pipe the download stream straight
// into the bucket object named `fileName` (the object need not exist first).
ftpSource.get(filePath, function(err, stream) {
  if (err) throw err
  stream.once('close', function() { ftpSource.end() })

  // This didn't work ! (the Cloud Functions local filesystem is read-only,
  // so fs.createWriteStream in the CWD fails with EROFS)
  //stream.pipe(fs.createWriteStream(fileName))

  // This works... create the bucket write stream first, then pipe the FTP
  // download stream into it.
  let bucketWriteStream = bucket.file(fileName).createWriteStream()
  stream.pipe(bucketWriteStream)
})

看到了吗,就像魔法一样顺畅...


你能否把整个代码展示给我们以便更好地解释一下,谢谢。 - GILO
FTP服务器地址是什么? - GILO

网页内容由 Stack Overflow 提供。点击上面的「原文链接」可以查看英文原文。