reportable - Fork of reportable required by WarVox, from hdm/reportable.
---
commit 75d16c9759dbcc0b7ba35e9ee7af5779cd2e9147
parent fa012ce490e894ff8b65eaf1a820536bfa2023b2
Author: marcoow <marco.otte-witte@simplabs.com>
Date: Tue, 16 Dec 2008 01:37:33 +0800
fixed duplicate key bug
Signed-off-by: Marco Otte-Witte <marco.otte-witte@simplabs.com>
Diffstat:
M lib/kvlr/reports_as_sparkline/grou… | 2 +-
M lib/kvlr/reports_as_sparkline/repo… | 24 ++++++++++++------------
M spec/other/report_cache_spec.rb | 14 ++++----------
3 files changed, 17 insertions(+), 23 deletions(-)
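
Reading the diff, the duplicate key bug appears to stem from caching a serialized ReportingPeriod object in the reporting_period column: existing rows were not matched on later runs, so a fresh row was inserted for a period that was already cached. The commit drops the serialize call, stores the period's plain date_time instead, reads the cache in ascending order, and trims the combined result to the requested limit. A minimal, self-contained sketch of the lookup-or-insert idea behind the fix (TinyCache, CacheRow and upsert are illustrative names, not part of the library):

    # Illustrative sketch: keying the cache on a plain, comparable value (a
    # date_time string) lets existing rows be found and updated, whereas keying
    # on a serialized object lets duplicates slip in. Not the library's code.
    require 'date'

    CacheRow = Struct.new(:reporting_period, :value)

    class TinyCache
      def initialize
        @rows = []
      end

      # Look up by the plain date_time key; insert only when no row exists yet.
      def upsert(date_time, value)
        row = @rows.find { |r| r.reporting_period == date_time }
        if row
          row.value = value
        else
          @rows << CacheRow.new(date_time, value)
        end
        @rows
      end
    end

    cache = TinyCache.new
    key = DateTime.new(2008, 12, 16).to_s
    cache.upsert(key, 1.0)
    cache.upsert(key, 2.0)   # updates the existing row instead of adding a duplicate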
---
diff --git a/lib/kvlr/reports_as_sparkline/grouping.rb b/lib/kvlr/reports_as_sparkline/grouping.rb
@@ -8,7 +8,7 @@ module Kvlr #:nodoc:
# ==== Parameters
# * <tt>identifier</tt> - The identifier of the grouping - one of :hour, :day, :week or :month
def initialize(identifier)
- raise ArgumentError.new("Invalid grouping #{grouping}") unless [:hour, :day, :week, :month].include?(identifier)
+ raise ArgumentError.new("Invalid grouping #{identifier}") unless [:hour, :day, :week, :month].include?(identifier)
@identifier = identifier
end
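
Before this change the error path itself raised a NameError, because grouping is not defined in that scope; only identifier is. A trimmed-down, self-contained stand-in for the corrected guard (not the library code, which lives under Kvlr::ReportsAsSparkline):

    # Stand-in class showing the corrected guard and the message it produces.
    class Grouping
      VALID = [:hour, :day, :week, :month]

      def initialize(identifier)
        # Interpolate the argument that actually exists in this scope.
        raise ArgumentError.new("Invalid grouping #{identifier}") unless VALID.include?(identifier)
        @identifier = identifier
      end
    end

    Grouping.new(:day)      # fine
    begin
      Grouping.new(:year)
    rescue ArgumentError => e
      puts e.message        # => "Invalid grouping year"
    end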
diff --git a/lib/kvlr/reports_as_sparkline/report_cache.rb b/lib/kvlr/reports_as_sparkline/report_cache.rb
@@ -4,8 +4,6 @@ module Kvlr #:nodoc:
class ReportCache < ActiveRecord::Base #:nodoc:
- serialize :reporting_period, Kvlr::ReportsAsSparkline::ReportingPeriod
-
def self.process(report, limit, no_cache = false, &block)
raise ArgumentError.new('A block must be given') unless block_given?
self.transaction do
@@ -21,12 +19,12 @@ module Kvlr #:nodoc:
:aggregation => report.aggregation.to_s
},
:limit => limit,
- :order => 'reporting_period DESC'
+ :order => 'reporting_period ASC'
)
- last_reporting_period_to_read = cached_data.last.reporting_period unless cached_data.empty?
+ last_reporting_period_to_read = ReportingPeriod.new(report.grouping, cached_data.last.reporting_period) unless cached_data.empty?
end
new_data = yield(last_reporting_period_to_read.date_time)
- prepare_result(new_data, cached_data, last_reporting_period_to_read, report, no_cache)
+ prepare_result(new_data, cached_data, last_reporting_period_to_read, report, no_cache)[0..(limit - 1)]
end
end
@@ -34,27 +32,29 @@ module Kvlr #:nodoc:
def self.prepare_result(new_data, cached_data, last_reporting_period_to_read, report, no_cache = false)
new_data.map! { |data| [ReportingPeriod.from_db_string(report.grouping, data[0]), data[1]] }
- reporting_period = ReportingPeriod.new(report.grouping)
result = []
- begin
+ reporting_period = ReportingPeriod.new(report.grouping)
+ while reporting_period != last_reporting_period_to_read
data = new_data.detect { |data| data[0] == reporting_period }
cached = self.new(
:model_name => report.klass.to_s,
:report_name => report.name.to_s,
:grouping => report.grouping.identifier.to_s,
:aggregation => report.aggregation.to_s,
- :reporting_period => reporting_period,
- :value => (data ? data[1] : 0)
+ :reporting_period => reporting_period.date_time,
+ :value => (data ? data[1] : 0.0)
)
cached.save! unless no_cache
- result << [cached.reporting_period.date_time, cached.value]
+ result << [reporting_period.date_time, cached.value]
reporting_period = reporting_period.previous
- end while reporting_period != last_reporting_period_to_read
+ end
+ data = (new_data.first && new_data.first[0] == last_reporting_period_to_read) ? new_data.first : nil
unless no_cache
cached = cached_data.last || nil
- data = (new_data.first && new_data.first[0] == last_reporting_period_to_read) ? new_data.first : nil
cached.update_attributes!(:value => data[1]) unless cached.nil? || data.nil?
end
+ result << [last_reporting_period_to_read.date_time, data ? data[1] : 0.0]
+ result += (cached_data.map { |cached| [cached.reporting_period, cached.value] }).reverse
result
end
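
The rewritten prepare_result walks from the current reporting period backwards until it reaches the newest cached period (now identified by its stored date_time), fills periods that have no fresh data with 0.0, and only then appends the previously cached rows. A minimal, self-contained sketch of that gap-filling walk for a daily grouping (fill_gaps and the Hash-based new_data are illustrative, not the library's API):

    # Walk backwards from today to the newest cached day, emitting 0.0 for days
    # with no fresh data, then emit the boundary day itself.
    require 'date'

    def fill_gaps(new_data, newest_cached_date, today = Date.today)
      result = []
      date = today
      while date != newest_cached_date
        result << [date, new_data[date] || 0.0]
        date -= 1                       # "previous" reporting period for a :day grouping
      end
      result << [newest_cached_date, new_data[newest_cached_date] || 0.0]
      result
    end

    fresh = { Date.today => 3.0 }
    fill_gaps(fresh, Date.today - 3)
    # => [[today, 3.0], [today - 1, 0.0], [today - 2, 0.0], [today - 3, 0.0]]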
diff --git a/spec/other/report_cache_spec.rb b/spec/other/report_cache_spec.rb
@@ -41,10 +41,10 @@ describe Kvlr::ReportsAsSparkline::ReportCache do
:aggregation => @report.aggregation.to_s
},
:limit => 10,
- :order => "reporting_period DESC"
+ :order => 'reporting_period ASC'
)
- Kvlr::ReportsAsSparkline::ReportCache.process(@report, 10) { [] }
+ puts Kvlr::ReportsAsSparkline::ReportCache.process(@report, 10) { [] }
end
it 'should prepare the results before it returns them' do
@@ -72,7 +72,7 @@ describe Kvlr::ReportsAsSparkline::ReportCache do
:grouping => @report.grouping.identifier.to_s,
:aggregation => @report.aggregation.to_s,
:value => 1,
- :reporting_period => reporting_period
+ :reporting_period => reporting_period.date_time
})
Kvlr::ReportsAsSparkline::ReportCache.stub!(:find).and_return([cached])
@@ -109,7 +109,7 @@ describe Kvlr::ReportsAsSparkline::ReportCache do
Kvlr::ReportsAsSparkline::ReportingPeriod.stub!(:from_db_string).and_return(Kvlr::ReportsAsSparkline::ReportingPeriod.new(@report.grouping))
@cached = Kvlr::ReportsAsSparkline::ReportCache.new
@cached.stub!(:save!)
- @cached.stub!(:reporting_period).and_return(Kvlr::ReportsAsSparkline::ReportingPeriod.new(@report.grouping))
+ @cached.stub!(:reporting_period).and_return(Kvlr::ReportsAsSparkline::ReportingPeriod.new(@report.grouping).date_time)
Kvlr::ReportsAsSparkline::ReportCache.stub!(:new).and_return(@cached)
end
@@ -160,12 +160,6 @@ describe Kvlr::ReportsAsSparkline::ReportCache do
result[0][1].should be_kind_of(Float)
end
- it 'should return an array with :limit elements' do
- result = Kvlr::ReportsAsSparkline::ReportCache.send(:prepare_result, @new_data, [], @last_reporting_period_to_read, @report, true)
-
- result.length.should == 10
- end
-
it 'should update the last cached record if new data has been read for the last reporting period to read' do
Kvlr::ReportsAsSparkline::ReportingPeriod.stub!(:from_db_string).and_return(@last_reporting_period_to_read)
@cached.should_receive(:update_attributes!).once.with(:value => 1.0)