class Sequel::Postgres::Dataset
Constants
- BindArgumentMethods
- PREPARED_ARG_PLACEHOLDER
- PreparedStatementMethods
Public Instance Methods
# File lib/sequel/adapters/postgres.rb
def bound_variable_modules
  [BindArgumentMethods]
end
# File lib/sequel/adapters/postgres.rb
def fetch_rows(sql)
  return cursor_fetch_rows(sql){|h| yield h} if @opts[:cursor]
  execute(sql){|res| yield_hash_rows(res, fetch_rows_set_cols(res)){|h| yield h}}
end
Use a cursor for paging.
# File lib/sequel/adapters/postgres.rb
def paged_each(opts=OPTS, &block)
  unless defined?(yield)
    return enum_for(:paged_each, opts)
  end
  use_cursor(opts).each(&block)
end
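A minimal usage sketch (the :huge_table dataset is hypothetical; options are passed through to use_cursor, so :rows_per_fetch and friends are accepted):

DB[:huge_table].paged_each{|row| p row}
DB[:huge_table].paged_each(rows_per_fetch: 100){|row| p row}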
PostgreSQL uses $N for placeholders instead of ?, so use a $ as the placeholder.
# File lib/sequel/adapters/postgres.rb
def prepared_arg_placeholder
  PREPARED_ARG_PLACEHOLDER
end
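As a rough illustration (the :items dataset and :by_id statement name are hypothetical), a prepared statement's generated SQL on PostgreSQL uses $1 rather than ?:

ds = DB[:items].where(id: :$i)
ps = ds.prepare(:select, :by_id)   # generated SQL contains ("id" = $1)
ps.call(i: 1)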
# File lib/sequel/adapters/postgres.rb
def prepared_statement_modules
  [PreparedStatementMethods]
end
Uses a cursor for fetching records, instead of fetching the entire result set at once. Note that this wraps the cursor usage in a transaction by default; that can be changed by passing `hold: true` as described below. Cursors can be used to process large datasets without holding all rows in memory (which is what the underlying drivers may do by default). Options:
- :cursor_name : The name assigned to the cursor (default 'sequel_cursor'). Nested cursors require different names.
- :hold : Declare the cursor WITH HOLD and don't use a transaction around the cursor usage.
- :rows_per_fetch : The number of rows per fetch (default 1000). Higher numbers result in fewer queries but greater memory use.
Usage:
DB[:huge_table].use_cursor.each{|row| p row}
DB[:huge_table].use_cursor(rows_per_fetch: 10000).each{|row| p row}
DB[:huge_table].use_cursor(cursor_name: 'my_cursor').each{|row| p row}
This is untested with the prepared statement/bound variable support, and unlikely to work with either.
# File lib/sequel/adapters/postgres.rb
def use_cursor(opts=OPTS)
  clone(:cursor=>{:rows_per_fetch=>1000}.merge!(opts))
end
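Building on the :hold option above, a cursor can also be declared WITH HOLD so that no transaction is used around it (a sketch; :huge_table is hypothetical):

DB[:huge_table].use_cursor(hold: true).each{|row| p row}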
Replace the WHERE clause with one that uses CURRENT OF with the given cursor name (or the default cursor name). This allows you to update a large dataset by updating individual rows while processing the dataset via a cursor:
DB[:huge_table].use_cursor(rows_per_fetch: 1).each do |row|
  DB[:huge_table].where_current_of.update(column: ruby_method(row))
end
# File lib/sequel/adapters/postgres.rb
def where_current_of(cursor_name='sequel_cursor')
  clone(:where=>Sequel.lit(['CURRENT OF '], Sequel.identifier(cursor_name)))
end
Private Instance Methods
Generate and execute a procedure call.
# File lib/sequel/adapters/postgres.rb
def call_procedure(name, args)
  sql = String.new
  sql << "CALL "
  identifier_append(sql, name)
  sql << "("
  expression_list_append(sql, args)
  sql << ")"
  with_sql_first(sql)
end
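For illustration, calling a procedure named update_totals (a hypothetical name) with the argument 10 builds the statement CALL update_totals(10). From user code this is typically reached through the Database#call_procedure method (a sketch, assuming that procedure exists):

DB.call_procedure(:update_totals, 10)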
Use a cursor to fetch groups of records at a time, yielding them to the block.
# File lib/sequel/adapters/postgres.rb
def cursor_fetch_rows(sql)
  server_opts = {:server=>@opts[:server] || :read_only}
  cursor = @opts[:cursor]
  hold = cursor[:hold]
  cursor_name = quote_identifier(cursor[:cursor_name] || 'sequel_cursor')
  rows_per_fetch = cursor[:rows_per_fetch].to_i

  db.public_send(*(hold ? [:synchronize, server_opts[:server]] : [:transaction, server_opts])) do
    begin
      execute_ddl("DECLARE #{cursor_name} NO SCROLL CURSOR WITH#{'OUT' unless hold} HOLD FOR #{sql}", server_opts)
      rows_per_fetch = 1000 if rows_per_fetch <= 0
      fetch_sql = "FETCH FORWARD #{rows_per_fetch} FROM #{cursor_name}"
      cols = nil
      # Load columns only in the first fetch, so subsequent fetches are faster
      execute(fetch_sql) do |res|
        cols = fetch_rows_set_cols(res)
        yield_hash_rows(res, cols){|h| yield h}
        return if res.ntuples < rows_per_fetch
      end
      while true
        execute(fetch_sql) do |res|
          yield_hash_rows(res, cols){|h| yield h}
          return if res.ntuples < rows_per_fetch
        end
      end
    rescue Exception => e
      raise
    ensure
      begin
        execute_ddl("CLOSE #{cursor_name}", server_opts)
      rescue
        raise e if e
        raise
      end
    end
  end
end
Set the columns based on the result set, and return an array of type conversion proc and column name symbol pairs, one per field.
# File lib/sequel/adapters/postgres.rb
def fetch_rows_set_cols(res)
  cols = []
  procs = db.conversion_procs
  res.nfields.times do |fieldnum|
    cols << [procs[res.ftype(fieldnum)], output_identifier(res.fname(fieldnum))]
  end
  self.columns = cols.map{|c| c[1]}
  cols
end
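As a purely hypothetical illustration, for a result set with an integer id column and a text name column, each pair combines the column's conversion proc (nil when db.conversion_procs has no entry for that type) with its name symbol:

# e.g. cols => [[#<Proc (integer conversion)>, :id], [nil, :name]]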
Use the driver's escape_bytea
# File lib/sequel/adapters/postgres.rb
def literal_blob_append(sql, v)
  sql << "'" << db.synchronize(@opts[:server]){|c| c.escape_bytea(v)} << "'"
end
Use the driver's escape_string
# File lib/sequel/adapters/postgres.rb
def literal_string_append(sql, v)
  sql << "'" << db.synchronize(@opts[:server]){|c| c.escape_string(v)} << "'"
end
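A rough sketch of the effect, assuming a hypothetical :people table (the exact escaping depends on the driver and the connection's settings):

DB[:people].where(name: "O'Brien").sql
# => SELECT * FROM "people" WHERE ("name" = 'O''Brien')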
For each row in the result set, yield a hash with column name symbol keys and typecasted values.
# File lib/sequel/adapters/postgres.rb
def yield_hash_rows(res, cols)
  ntuples = res.ntuples
  recnum = 0
  while recnum < ntuples
    fieldnum = 0
    nfields = cols.length
    converted_rec = {}
    while fieldnum < nfields
      type_proc, fieldsym = cols[fieldnum]
      value = res.getvalue(recnum, fieldnum)
      converted_rec[fieldsym] = (value && type_proc) ? type_proc.call(value) : value
      fieldnum += 1
    end
    yield converted_rec
    recnum += 1
  end
end
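For a hypothetical two-column result (an integer id and a text name), each hash yielded to the block would look like:

{id: 1, name: 'foo'}   # keys from output_identifier, values run through the column's conversion proc when one exists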