explain (analyze) select unnest(tags) from (SELECT tags FROM t5 WHERE tags @> array['param-1'] group by 1) as t group by 1;
QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------
HashAggregate (cost=14655.30..14660.38 rows=1000 width=84) (actual time=490.721..490.722 rows=10 loops=1)
-> Subquery Scan on t (cost=14647.62..14652.80 rows=1000 width=84) (actual time=490.690..490.706 rows=55 loops=1)
-> HashAggregate (cost=14647.62..14647.73 rows=10 width=84) (actual time=490.676..490.676 rows=10 loops=1)
-> Seq Scan on t5 (cost=0.00..13459.00 rows=475450 width=84) (actual time=0.033..181.649 rows=475324 loops=1)
Filter: (tags @> '{param-1}'::text[])
Rows Removed by Filter: 24676
Total runtime: 490.843 ms
(7 rows)
explain (analyze) SELECT unnest(tags) FROM t5 WHERE tags @> array['param-1'] group by 1;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------
HashAggregate (cost=368857.88..368862.95 rows=1000 width=84) (actual time=1196.758..1196.759 rows=10 loops=1)
-> Seq Scan on t5 (cost=0.00..249995.38 rows=47545000 width=84) (actual time=0.020..680.547 rows=2501231 loops=1)
Filter: (tags @> '{param-1}'::text[])
Rows Removed by Filter: 24676
Total runtime: 1196.790 ms
(5 rows)
explain (analyze) select unnest(tags) from (SELECT tags FROM t5 WHERE tags @> array['param-1'] group by 1) as t group by 1;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------
HashAggregate (cost=19433.16..19438.24 rows=1000 width=84) (actual time=411.248..411.249 rows=10 loops=1)
-> Subquery Scan on t (cost=19425.49..19430.66 rows=1000 width=84) (actual time=411.220..411.233 rows=55 loops=1)
-> HashAggregate (cost=19425.49..19425.59 rows=10 width=84) (actual time=411.205..411.206 rows=10 loops=1)
-> Bitmap Heap Scan on t5 (cost=5084.74..18236.86 rows=475450 width=84) (actual time=74.696..126.809 rows=475324 loops=1)
Recheck Cond: (tags @> '{param-1}'::text[])
-> Bitmap Index Scan on t5_tagx_gist (cost=0.00..4965.87 rows=475450 width=0) (actual time=73.514..73.514 rows=475324 loops=1)
Index Cond: (tags @> '{param-1}'::text[])
Total runtime: 411.337 ms
(8 rows)
-- Demo fixture: 500k rows, each with a random-length text[] of 'param-N' tags.
-- NOTE(review): depends on rand_array(), which is defined later in this file —
-- the function must be created before this statement runs.
create table t5 as (select i id, rand_array() tags from generate_series(1,500000) as i);
-- GIN index to accelerate array-containment (@>) lookups on tags.
-- NOTE(review): named t5_tagx_gin here, but the plan above references
-- t5_tagx_gist — confirm which index definition was actually used.
create index t5_tagx_gin on t5 using gin (tags);
-- Returns a text[] of tags {'param-1', ..., 'param-N'} where N is random in 1..10.
-- Fixes vs. original:
--   * return type was text[][], but array_agg over text produces a
--     one-dimensional text[] (PostgreSQL does not enforce declared dims anyway);
--   * rand(10) is not a PostgreSQL function — use random() to pick N in 1..10;
--   * the language name should be an unquoted identifier (quoting is deprecated).
create or replace function rand_array() returns text[] as $$
begin
return (select array_agg(t1.col1)
        from (select format('param-%s', i) col1
              from generate_series(1, (1 + floor(random() * 10))::int) as i) as t1);
end;
$$ language plpgsql strict;
require 'fileutils'

# Stop a previously started Rails dev server (if its pid file exists), then
# start a fresh one on port 8090, replacing the current process via exec.
pid_file = 'tmp/pids/server.pid'
if File.exist?(pid_file)
  pid = File.read(pid_file).to_i
  begin
    Process.kill 'TERM', pid
    puts 'SERVER STOPPED'
  rescue Errno::ESRCH
    # FIX: a stale pid file (process already gone) made Process.kill raise
    # Errno::ESRCH and abort the script before exec. Clean the file up so
    # the new server can boot without complaining.
    FileUtils.rm_f(pid_file)
    puts 'SERVER NOT RUNNING (stale pid file removed)'
  end
else
  puts 'SERVER NOT STARTED'
end
Process.exec('/usr/bin/env rails s -p 8090')
# Cache key (the original filename) for a file uploaded during a failed save,
# so the user does not have to re-select it when the form re-renders.
attr_accessor :cache_file

after_initialize :fetch_upload

# On upload: stash the freshly received file body in the Rails cache under
# its original filename. On re-render: restore the attachment from the cache
# using +cache_file+ (round-tripped through a hidden form field).
def fetch_upload
  # FIX: the ActiveSupport cache option is +expires_in+, not +expire_in+ —
  # the misspelled key was silently ignored, so entries never expired.
  options = { namespace: 'upload_cache', expires_in: 10.minutes }
  file = attach.queued_for_write[:original]
  if file.present?
    self.cache_file = attach.original_filename
    Rails.cache.write(cache_file, File.read(file.path), options)
  elsif cache_file.present?
    # FIX: guard against a cache miss — StringIO.new(nil) raises TypeError,
    # so check the cached payload before wrapping it.
    data = Rails.cache.read(cache_file, options)
    if data.present?
      self.attach = StringIO.new(data)
      self.attach.instance_write(:file_name, cache_file)
    end
  end
end
<%= f.text_field :cache_file %>
<%= f.file_field :attach %>
Rails.application.config.session_store :cache_store, key: '_super_app_session_key'
where("i18n_field @> hstore(:lang, :data)", lang: lang, data: data)
WHERE to_tsvector('russian', i18n_field->'lang') @@ to_tsquery('russian', 'Запрос')
ORDER BY i18n_field->'lang' ASC
gem 'thin', github: 'macournoyer/thin', branch: 'streaming'
# GET /stream — pushes three 'notify' server-sent events to the client,
# one every three seconds, then closes the response stream.
def stream
  response.headers['Content-Type'] = 'text/event-stream'
  notifier = SSE.new(response.stream, retry: 5000, event: 'notify')
  tick = 0
  while tick < 3
    notifier.write({ class_id: __id__ })
    sleep 3
    tick += 1
  end
rescue IOError
  # The client dropped the connection mid-stream; just record it.
  logger.info('Client alive channel')
ensure
  response.stream.close
end
# Subscribe to the server-sent-events endpoint (served by the `stream`
# action above) and log the payload of each 'notify' event to the console.
# NOTE(review): the handler body appears to have lost its indentation in
# this snippet — in CoffeeScript the console.log line must be indented
# under the callback to belong to it.
event = new EventSource('/stream')
event.addEventListener 'notify', (e)->
console.log(e.data)
# Renders a navigation link, adding the 'active' CSS class when +path+
# resolves to the current controller/action pair.
def current_link_to label, path
  from_path = controller._routes.recognize_path(path).values rescue nil
  # FIX: when recognize_path fails, from_path is nil and `nil & [...]`
  # raised NoMethodError — short-circuit on nil before intersecting.
  # from_path ⊆ [controller_name, action_name] iff their intersection
  # equals from_path itself.
  active = from_path && (from_path & [controller_name, action_name]) == from_path
  link_to label, path, class: (active ? 'active' : nil)
end
get '/posts(/:locale)'
# or
scope "/:locale" do
...
-- Latest message per user, over messages the user either sent or received.
-- Fixes vs. original:
--   * hyphenated identifiers are invalid — "msg-id" parses as (msg - id);
--     use snake_case: msg_id, msg_text;
--   * msg_text was selected without appearing in GROUP BY, which PostgreSQL
--     rejects — DISTINCT ON picks the whole newest row per user instead;
--   * UNION ALL replaces UNION: DISTINCT ON dedups per user anyway.
SELECT DISTINCT ON (user_id)
    msg_id,
    user_id,
    msg_text
FROM (
    SELECT msg_id, msg_for  AS user_id, msg_text FROM messages
    UNION ALL
    SELECT msg_id, msg_from AS user_id, msg_text FROM messages
) AS t
ORDER BY user_id, msg_id DESC;
-- Latest message per user: find the max msg_id each user sent or received,
-- then join back to messages to fetch that message's text.
-- Fixes vs. original:
--   * the inner MAX(msg_id) had no alias, so the outer query depended on the
--     implicit default column name "max" — alias it explicitly;
--   * UNION forced a needless sort/dedup before the outer GROUP BY re-aggregates;
--     UNION ALL is equivalent here and cheaper.
SELECT a.msg_id, c.user_id, a.msg_text
FROM messages AS a
INNER JOIN (
    SELECT user_id, MAX(msg_id) AS msg_id
    FROM (
        SELECT msg_from AS user_id, msg_id FROM messages
        UNION ALL
        SELECT msg_for  AS user_id, msg_id FROM messages
    ) AS b
    GROUP BY user_id
) AS c ON a.msg_id = c.msg_id
ORDER BY c.user_id;
<div class="msg"></div>
<div class="form-wrap" style="display:block">
<%= form_tag('/feedback', remote: true, format: :json, id: 'feedback') do %>
....
# Handles the feedback form submission and answers the AJAX caller
# with a JSON status message.
# NOTE(review): `success` is assumed to be set by the elided code above —
# confirm against the full action body.
def send_email
  ....
  respond_to do |format|
    # FIX: the original ternary was split across lines with a leading `:`,
    # which is a Ruby syntax error — use an explicit if/else instead.
    # Also fixed the broken English in the success message.
    msg = if success
            'Your request has been processed successfully'
          else
            'Ooops. Error occurred. Try later.'
          end
    # `render json:` serializes non-String objects itself — the extra
    # .to_json call was redundant.
    format.json { render json: { msg: msg } }
  end
end
# Wire up the remote (ajax) feedback form: before the request fires, hide
# the form and show a progress message; on success, replace it with the
# server's JSON `msg` payload.
# NOTE(review): the chained .on calls and handler bodies appear to have lost
# their indentation in this snippet — in CoffeeScript they must be indented
# to chain off $('#feedback') and to belong to their callbacks.
$('#feedback')
.on 'ajax:before', ()->
$('.form-wrap').hide()
$('.msg').html('Sending...')
.on 'ajax:success', (e, data)->
$('.msg').html(data.msg)
memcached -m 64 -I 10M
config.cache_store = :dalli_store, {:value_max_bytes => 10.megabytes}
Rails.cache.fetch('10mb'){ 'x' * 10.megabytes }.size
Cache read: 10mb
Cache generate: 10mb
Cache write: 10mb
Value for 10mb over max size: 10485760 <= 10485773
Rails.cache.fetch('9mb'){ 'x' * 9.megabytes }.size
Cache read: 9mb
Cache fetch_hit: 9mb