Is there a way to upload to S3 from a URL using Node.js?

I'm not using knox but the official AWS SDK for JavaScript in Node.js. I issue a request for the URL with {encoding: null} so that the body comes back as a Buffer, which can be passed directly to the Body parameter of s3.putObject(). Below is an example of putting a remote image into a bucket with aws-sdk and request.

var AWS = require('aws-sdk');
var request = require('request');

AWS.config.loadFromPath('./config.json');
var s3 = new AWS.S3();

function put_from_url(url, bucket, key, callback) {
    request({
        url: url,
        encoding: null
    }, function(err, res, body) {
        if (err)
            return callback(err, res);

        s3.putObject({
            Bucket: bucket,
            Key: key,
            ContentType: res.headers['content-type'],
            ContentLength: res.headers['content-length'],
            Body: body // buffer
        }, callback);
    });
}

put_from_url('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'your_bucket', 'media/aws_logo.png', function(err, res) {
    if (err)
        throw err;

    console.log('Uploaded data successfully!');
});
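If you would rather not buffer the whole file in memory, note that s3.upload() (unlike s3.putObject()) accepts a readable stream as the Body and handles bodies of unknown length via multipart upload, so you can pipe the download straight through. A minimal sketch, reusing the same s3 client and request module as above (the function name is just illustrative):

// Streams the download into S3 instead of buffering it first.
// s3.upload() does not need ContentLength, so the response headers are not required.
function put_from_url_streaming(url, bucket, key, callback) {
    s3.upload({
        Bucket: bucket,
        Key: key,
        Body: request(url) // request() returns a readable stream
    }, callback);
}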

Building on @Yuri's post, here is a version for those who would like to use axios instead of request, with ES6 syntax for a more modern approach. It also adds the required Bucket property to the params, and it downloads any kind of file, not only images:

const AWS = require("aws-sdk");
const axios = require("axios");

const s3 = new AWS.S3(); // credentials/region configured as in the answer above

const uploadFileToS3 = (url, bucket, key) => {
  return axios.get(url, { responseType: "arraybuffer", responseEncoding: "binary" }).then((response) => {
    const params = {
      ContentType: response.headers["content-type"],
      ContentLength: response.data.length.toString(), // or response.headers["content-length"] if the server provides it
      Bucket: bucket,
      Body: response.data,
      Key: key,
    };
    return s3.putObject(params).promise();
  });
};

uploadFileToS3(<your_file_url>, <your_s3_bucket>, <your_s3_path>)
  .then(() => console.log("File saved!"))
  .catch((error) => console.log(error));

The same thing as the answer above, but with fetch (this example is in TypeScript):

async function upload(url: string, key: string, bucket: string) {
  const response = await fetch(url);
  const contentType = response.headers.get("content-type") ?? undefined;
  const contentLength =
    response.headers.get("content-length") != null
      ? Number(response.headers.get("content-length"))
      : undefined;

  return s3
    .putObject({
      Bucket: bucket,
      Key: key,
      ContentType: contentType,
      ContentLength: contentLength,
      Body: Buffer.from(await response.arrayBuffer()), // buffer the response body (response.body is a stream, not a buffer)
    })
    .promise();
}
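Calling it looks the same as the axios version; just note that this function takes (url, key, bucket) in that order, and the bucket and key below are placeholders:

upload("http://a0.awsstatic.com/main/images/logos/aws_logo.png", "media/aws_logo.png", "your_bucket")
  .then(() => console.log("Uploaded!"))
  .catch(console.error);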

For those who are looking for a solution that doesn't involve callbacks and prefer promises, here is an alternative based on @micmia's code:

var AWS = require('aws-sdk'),
    request = require('request');

const bucketName = 'yourBucketName';
const bucketOptions = { /* ...your S3 options */ };
var s3 = new AWS.S3(bucketOptions);

function UploadFromUrlToS3(url,destPath){
    return new Promise((resolve,reject)=> {            
        request({
            url: url,
            encoding: null
        }, function(err, res, body) {        
            if (err) {
                return reject(err);
            }
            var objectParams = {
                Bucket: bucketName,
                ContentType: res.headers['content-type'],
                ContentLength: res.headers['content-length'],
                Key: destPath,
                Body: body
            };
            resolve(s3.putObject(objectParams).promise());
        });
    });
}

UploadFromUrlToS3(
    'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
    'your/s3/path/aws_logo.png' )
    .then(function() {
        console.log('image was saved...');
    }).catch(function(err) {
        console.log('image was not saved!',err);
    });
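Finally, since request is deprecated and the v2 SDK is in maintenance mode, the same idea with the AWS SDK for JavaScript v3 and the fetch built into Node 18+ looks roughly like this. The region, bucket and key below are placeholders, so adjust them for your setup:

const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');

const s3 = new S3Client({ region: 'us-east-1' }); // placeholder region

async function uploadFromUrl(url, bucket, key) {
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error('Download failed: ' + response.status);
    }
    // Buffer the whole body; fine for small files such as images.
    const body = Buffer.from(await response.arrayBuffer());

    return s3.send(new PutObjectCommand({
        Bucket: bucket,
        Key: key,
        Body: body,
        ContentType: response.headers.get('content-type') ?? undefined
    }));
}

uploadFromUrl(
    'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
    'your_bucket',
    'media/aws_logo.png'
).then(() => console.log('Uploaded!'))
 .catch(console.error);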