diff --git a/Gemfile b/Gemfile
index a9906f3..4e8f058 100644
--- a/Gemfile
+++ b/Gemfile
@@ -42,7 +42,7 @@ gem 'uglifier'
 
 group :development do
   gem 'annotate', '~> 2.6.1'
   gem 'quiet_assets'
-  gem 'thin'
+  gem 'puma'
   gem 'i18n-tasks', '~> 0.2.10'
 end
diff --git a/Gemfile.lock b/Gemfile.lock
index 51e2ef2..f473234 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -203,6 +203,8 @@ GEM
     polyamorous (0.6.4)
       activerecord (>= 3.0)
     polyglot (0.3.4)
+    puma (2.8.1)
+      rack (>= 1.1, < 2.0)
     quiet_assets (1.0.2)
       railties (>= 3.1, < 5.0)
     rack (1.5.2)
@@ -297,10 +299,6 @@ GEM
     therubyracer (0.12.0)
      libv8 (~> 3.16.14.0)
       ref
-    thin (1.6.1)
-      daemons (>= 1.0.9)
-      eventmachine (>= 1.0.0)
-      rack (>= 1.0.0)
     thor (0.18.1)
     thread (0.1.3)
     thread_safe (0.1.3)
@@ -366,6 +364,7 @@ DEPENDENCIES
   nested_form
   newrelic_rpm
   nokogiri
+  puma
   quiet_assets
   rack-utf8_sanitizer
   rails (= 4.0.3)
@@ -379,7 +378,6 @@ DEPENDENCIES
   spork
   sqlite3-ruby (= 1.3.3)
   therubyracer
-  thin
   tweetstream
   twitter_oauth!
   uglifier
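
Note: Thin is swapped for Puma because the new StreamController below relies on
ActionController::Live, which holds a thread open per connection while it writes
to response.stream; that needs a multi-threaded server such as Puma, and Thin's
EventMachine model does not fit. Rack 1.5's handler auto-detection tries thin,
then puma, then webrick, so with thin gone from the bundle a plain `rails server`
should boot Puma with no further configuration.
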
+ response.stream.write "hel\n\n" + sleep 1 + } + response.stream.write "0\n\n" + ensure + response.stream.close + end + +end + diff --git a/app/views/channels/_import.html.erb b/app/views/channels/_import.html.erb index bc950e4..e90567f 100644 --- a/app/views/channels/_import.html.erb +++ b/app/views/channels/_import.html.erb @@ -1,4 +1,5 @@
diff --git a/app/views/channels/_import.html.erb b/app/views/channels/_import.html.erb
index bc950e4..e90567f 100644
--- a/app/views/channels/_import.html.erb
+++ b/app/views/channels/_import.html.erb
@@ -1,4 +1,5 @@
+<div>
 
 <%= t(:import) %>
 
 <%= t(:upload_select) %>
@@ -11,6 +12,12 @@
   <%= f.submit t(:upload), :disable_with => t(:uploading) %>
 <% end %>
+</div>
+
+<div>
+<%= t(:export) %>
+<%= t(:download_feeds) %>
+<%= button_to t(:download), "#{@ssl_api_domain}stream/channels/#{@channel.id}/feeds?api_key=#{@channel.write_api_key}" %>