* VRFS-1924 - score_history added
This commit is contained in:
parent
433148cd55
commit
6c4bcd98be
|
|
@ -191,4 +191,5 @@ fix_sms_query_cancel_flag.sql
|
|||
fix_sms_query_cancel_flag2.sql
|
||||
next_session_scheduled_default.sql
|
||||
migrate_old_sessions.sql
|
||||
max_mind_releases.sql
|
||||
max_mind_releases.sql
|
||||
score_histories.sql
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
-- VRFS-1924

-- generic, single row config table. if you need a new config, add a column
CREATE TABLE generic_state (
    id VARCHAR(64) PRIMARY KEY DEFAULT uuid_generate_v4(),
    score_history_last_imported_at TIMESTAMP
);

-- Seed the single well-known row. Use an ISO 8601 literal: the previous
-- '01-01-1900 00:00:00' form is interpreted differently depending on the
-- server's DateStyle setting (MDY vs DMY); 'YYYY-MM-DD' is unambiguous.
INSERT INTO generic_state (id, score_history_last_imported_at) VALUES ('default', TIMESTAMP '1900-01-01 00:00:00');

-- Denormalized, append-only archive of scores: geo/ISP details for both
-- endpoints plus the score itself. Populated by ScoreHistory.migrate_scores.
CREATE TABLE score_histories (
    from_client_id VARCHAR(64),
    from_addr BIGINT,
    from_isp VARCHAR(50),
    from_country VARCHAR(64),
    from_region VARCHAR(64),
    from_city VARCHAR(255),
    from_postal VARCHAR(25),
    from_latitude DOUBLE PRECISION,
    from_longitude DOUBLE PRECISION,
    to_client_id VARCHAR(64),
    to_addr BIGINT,
    to_isp VARCHAR(50),
    to_country VARCHAR(64),
    to_region VARCHAR(64),
    to_city VARCHAR(255),
    to_postal VARCHAR(25),
    to_latitude DOUBLE PRECISION,
    to_longitude DOUBLE PRECISION,
    score INTEGER NOT NULL,
    score_dt TIMESTAMP NOT NULL,
    scoring_data TEXT
);
|
||||
|
|
@ -49,6 +49,7 @@ require "jam_ruby/resque/scheduled/daily_session_emailer"
|
|||
require "jam_ruby/resque/scheduled/new_musician_emailer"
|
||||
require "jam_ruby/resque/scheduled/music_session_scheduler"
|
||||
require "jam_ruby/resque/scheduled/active_music_session_cleaner"
|
||||
require "jam_ruby/resque/scheduled/score_history_sweeper"
|
||||
require "jam_ruby/resque/google_analytics_event"
|
||||
require "jam_ruby/resque/batch_email_job"
|
||||
require "jam_ruby/mq_router"
|
||||
|
|
@ -168,6 +169,9 @@ require "jam_ruby/app/mailers/batch_mailer"
|
|||
require "jam_ruby/app/mailers/progress_mailer"
|
||||
require "jam_ruby/models/affiliate_partner"
|
||||
require "jam_ruby/models/chat_message"
|
||||
require "jam_ruby/models/generic_state"
|
||||
require "jam_ruby/models/score_history"
|
||||
require "jam_ruby/models/jam_company"
|
||||
|
||||
include Jampb
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,12 @@
|
|||
|
||||
module JamRuby
  # Single-row, key/value-style configuration record backed by the
  # generic_state table. To add a new config value, add a column there.
  class GenericState < ActiveRecord::Base

    self.table_name = 'generic_state'

    # Returns the one well-known configuration row (id 'default').
    def self.singleton
      find('default')
    end

  end
end
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
module JamRuby
  # ActiveRecord model over the legacy jamcompany table (note the
  # non-conventional table name). A company owns many ISP rows via coid.
  class JamCompany < ActiveRecord::Base

    self.table_name = 'jamcompany'

    has_many :jam_isps, class_name: 'JamRuby::JamIsp', foreign_key: 'coid'
  end
end
|
||||
|
||||
|
|
@ -24,12 +24,15 @@ module JamRuby
|
|||
JAMISP_COID_INDEX_NAME = 'jamisp_coid_ndx'
|
||||
COPIED_JAMISP_COID_INDEX_NAME = 'jamisp_copied_coid_ndx'
|
||||
|
||||
@@log = Logging.logger[JamIsp]
|
||||
|
||||
self.table_name = 'jamisp'
|
||||
COMPANY_TABLE = 'jamcompany'
|
||||
GEOIPISP_TABLE = 'geoipisp'
|
||||
|
||||
@@log = Logging.logger[JamIsp]
|
||||
|
||||
belongs_to :jam_company, class_name: 'JamRuby::JamCompany', foreign_key: 'coid'
|
||||
|
||||
self.table_name = 'jamisp'
|
||||
|
||||
def self.ip_to_num(ip_addr)
|
||||
begin
|
||||
i = IPAddr.new(ip_addr)
|
||||
|
|
|
|||
|
|
@ -63,6 +63,9 @@ module JamRuby
|
|||
Connection.after_maxmind_import
|
||||
Band.after_maxmind_import
|
||||
|
||||
# migrate any scores that need migrating, before we move all the new maxmind content over the old content
|
||||
ScoreHistory.migrate_scores
|
||||
|
||||
@@log.debug("rename temporary tables over existing tables")
|
||||
# replace existing tables with new tables
|
||||
GeoIpBlocks.after_maxmind_import
|
||||
|
|
|
|||
|
|
@ -11,12 +11,13 @@ module JamRuby
|
|||
|
||||
default_scope order('score_dt desc')
|
||||
|
||||
def self.createx(alocidispid, anodeid, aaddr, blocidispid, bnodeid, baddr, score, score_dt=nil, score_data=nil)
|
||||
def self.createx(alocidispid, anodeid, aaddr, blocidispid, bnodeid, baddr, score, score_dt = Time.now, score_data = nil)
|
||||
score_dt = Time.new.utc if score_dt.nil?
|
||||
score = score.ceil
|
||||
raise "score must be positive" if score <= 0
|
||||
Score.create(alocidispid: alocidispid, anodeid: anodeid, aaddr: aaddr, blocidispid: blocidispid, bnodeid: bnodeid, baddr: baddr, score: score, scorer: 0, score_dt: score_dt, scoring_data: score_data)
|
||||
Score.create(alocidispid: blocidispid, anodeid: bnodeid, aaddr: baddr, blocidispid: alocidispid, bnodeid: anodeid, baddr: aaddr, score: score, scorer: 1, score_dt: score_dt) if alocidispid != blocidispid
|
||||
ascore = Score.create(alocidispid: alocidispid, anodeid: anodeid, aaddr: aaddr, blocidispid: blocidispid, bnodeid: bnodeid, baddr: baddr, score: score, scorer: 0, score_dt: score_dt, scoring_data: score_data)
|
||||
bscore = Score.create(alocidispid: blocidispid, anodeid: bnodeid, aaddr: baddr, blocidispid: alocidispid, bnodeid: anodeid, baddr: aaddr, score: score, scorer: 1, score_dt: score_dt) if alocidispid != blocidispid
|
||||
return [ascore, bscore]
|
||||
end
|
||||
|
||||
def self.deletex(alocidispid, blocidispid)
|
||||
|
|
@ -34,5 +35,15 @@ module JamRuby
|
|||
self.createx(c1.locidispid, c1.client_id, c1.addr, c2.locidispid, c2.client_id, c2.addr, score)
|
||||
end
|
||||
|
||||
# Packs a location id and a company id into a single composite key.
# locid is a geoiplocation or geoipblock locid.
# coid is a jamisp coid
def self.compute_locidispid(locid, coid)
  (locid * 1_000_000) + coid
end
|
||||
|
||||
# if you have the right models in hand, this will compute a valid locidispid
def self.create_locidispid(geoiplocation_or_geoipblock, jamisp_or_jamcompany)
  locid = geoiplocation_or_geoipblock.locid
  coid = jamisp_or_jamcompany.coid
  compute_locidispid(locid, coid)
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,55 @@
|
|||
|
||||
module JamRuby
  # Denormalized history row for a score: geo/ISP details for both endpoints
  # plus the score value and timestamp. Populated in bulk from the live
  # scores table by migrate_scores (run on a schedule by ScoreHistorySweeper).
  class ScoreHistory < ActiveRecord::Base

    self.table_name = 'score_histories'

    # Copies rows from scores into score_histories that were created since the
    # last import, joining the geo and ISP tables to denormalize both
    # endpoints. Only scorer = 0 rows are copied (one row per score pair).
    # The upper bound of the import window is persisted back into GenericState
    # so the next run resumes where this one ended.
    #
    # Returns [rows_inserted, timestamp_swept_up_to].
    def self.migrate_scores

      generic_state = GenericState.singleton
      last_imported_at = generic_state.score_history_last_imported_at
      pretty_close_to_now = Time.now - 5.seconds # give the db some time to have entered all scores

      # The locidispid column encodes locid * 1000000 + coid (see
      # Score.compute_locidispid), hence the / 1000000 and % 1000000 below.
      # NOTE(review): last_imported_at / pretty_close_to_now are interpolated
      # into the SQL via Time#to_s — both values come from the DB and the
      # clock, not user input, but confirm the rendered format parses under
      # the server's DateStyle setting.
      result = connection.execute(
        "INSERT INTO score_histories
        (from_client_id, from_addr, from_isp, from_country, from_region, from_city, from_postal, from_latitude, from_longitude,
        to_client_id, to_addr, to_isp, to_country, to_region, to_city, to_postal, to_latitude, to_longitude,
        score, score_dt, scoring_data)
        SELECT
        s.anodeid AS from_client_id,
        s.aaddr AS from_addr,
        x.company AS from_isp,
        a.countrycode AS from_country,
        a.region AS from_region,
        a.city AS from_city,
        a.postalcode AS from_postal,
        a.latitude AS from_latitude,
        a.longitude AS from_longitude,
        s.bnodeid AS to_client_id,
        s.baddr AS to_addr,
        y.company AS to_isp,
        b.countrycode AS to_country,
        b.region AS to_region,
        b.city AS to_city,
        b.postalcode AS to_postal,
        b.latitude AS to_latitude,
        b.longitude AS to_longitude,
        s.score AS score,
        s.score_dt AS score_dt,
        s.scoring_data AS scoring_data
        FROM scores s
        JOIN geoiplocations a ON a.locid = (s.alocidispid / 1000000)
        JOIN geoiplocations b ON b.locid = (s.blocidispid / 1000000)
        JOIN jamcompany x ON x.coid = (s.alocidispid % 1000000)
        JOIN jamcompany y ON y.coid = (s.blocidispid % 1000000)
        WHERE s.scorer = 0
        AND s.created_at BETWEEN TIMESTAMP '#{last_imported_at}' AND TIMESTAMP '#{pretty_close_to_now}'")

      # raises if the insert failed (PG::Result#check)
      result.check

      # Persist the window's upper bound so the next sweep starts from here.
      generic_state.score_history_last_imported_at = pretty_close_to_now
      generic_state.save!
      return result.cmd_tuples, pretty_close_to_now
    end
  end
end
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
require 'json'
|
||||
require 'resque'
|
||||
require 'resque-retry'
|
||||
require 'net/http'
|
||||
require 'digest/md5'
|
||||
|
||||
module JamRuby

  # Periodically scheduled Resque job that copies newly created scores into
  # the score_histories archive (see ScoreHistory.migrate_scores). LonelyJob
  # guarantees at most one instance runs at a time.
  class ScoreHistorySweeper
    extend Resque::Plugins::LonelyJob

    @queue = :score_history_sweeper

    @@log = Logging.logger[ScoreHistorySweeper]

    # LonelyJob lock TTL, in seconds.
    def self.lock_timeout
      # this should be enough time to make sure the job has finished, but not so long that the system isn't recovering from a abandoned job
      120
    end

    # Runs one sweep inside a transaction so the insert and the
    # GenericState bookkeeping commit (or roll back) together.
    def self.perform
      ScoreHistory.transaction do
        # migrate_scores returns [row_count, swept_until]; only the count is
        # logged here, so the second element is deliberately ignored.
        num_rows, _sweeped_until = ScoreHistory.migrate_scores
        @@log.debug("migrated #{num_rows} scores")
      end
    end

  end

end
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe ScoreHistory do

  describe "migrate scores" do
    it "works against empty scores" do
      original = GenericState.singleton.score_history_last_imported_at
      Score.count.should == 0
      num_affected, scanned_until = ScoreHistory.migrate_scores
      original.should_not == scanned_until
      num_affected.should == 0
      ScoreHistory.count.should == 0
      GenericState.singleton.score_history_last_imported_at.should == scanned_until
    end

    it "migrates a single score" do
      austin = austin_geoip
      dallas = dallas_geoip

      Region.count.should == 11
      Country.count.should == 1
      JamCompany.count.should == 4

      score1, score2 = create_score(austin, dallas)
      Score.count.should == 2
      # back-date the scores so they fall inside the import window
      Score.connection.execute("UPDATE scores SET created_at = TIMESTAMP '#{1.hour.ago}'").check

      num_affected, scanned_until = ScoreHistory.migrate_scores
      ScoreHistory.count.should == 1
      num_affected.should == 1
      GenericState.singleton.score_history_last_imported_at.should == scanned_until

      score_history = ScoreHistory.first
      score_history.from_client_id.should == score1.anodeid
      score_history.from_addr.should == score1.aaddr
      score_history.from_isp.should == austin[:jamisp].jam_company.company
      score_history.from_country.should == austin[:geoiplocation].countrycode
      score_history.from_region.should == austin[:geoiplocation].region
      score_history.from_city.should == austin[:geoiplocation].city
      # fixed: these three used a bare == (a no-op comparison, not an
      # expectation) — they previously asserted nothing
      score_history.from_postal.should == austin[:geoiplocation].postalcode
      score_history.from_latitude.should == austin[:geoiplocation].latitude
      score_history.from_longitude.should == austin[:geoiplocation].longitude
    end

    it "ignores recent scores" do
      austin = austin_geoip
      dallas = dallas_geoip

      Region.count.should == 11
      Country.count.should == 1
      JamCompany.count.should == 4

      score1, score2 = create_score(austin, dallas) # creates scores with very recent created_at, so it should be skipped
      Score.count.should == 2

      num_affected, scanned_until = ScoreHistory.migrate_scores
      ScoreHistory.count.should == 0
      num_affected.should == 0
    end
  end
end
|
||||
|
|
@ -85,13 +85,13 @@ end
|
|||
|
||||
config.before(:suite) do
|
||||
DatabaseCleaner.strategy = :transaction
|
||||
DatabaseCleaner.clean_with(:deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] })
|
||||
DatabaseCleaner.clean_with(:deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries generic_state] })
|
||||
end
|
||||
|
||||
config.around(:each) do |example|
|
||||
# set no_transaction: true as metadata on your test to use deletion strategy instead
|
||||
if example.metadata[:no_transaction]
|
||||
DatabaseCleaner.strategy = :deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] }
|
||||
DatabaseCleaner.strategy = :deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries generic_state] }
|
||||
else
|
||||
DatabaseCleaner.strategy = :transaction
|
||||
end
|
||||
|
|
|
|||
|
|
@ -157,4 +157,29 @@ end
|
|||
|
||||
def create_phony_database
|
||||
GeoIpBlocks.connection.execute("select generate_scores_dataset()").check
|
||||
end
|
||||
|
||||
# gets related models for an IP in the 1st block from the scores_better_test_data.sql
# Returns a hash with :jamisp, :geoiplocation and :geoipblock, all resolved
# from the fixture row with locid 17192 (the "Austin" location).
def austin_geoip
  geoiplocation = GeoIpLocations.find_by_locid(17192)
  geoipblock = GeoIpBlocks.find_by_locid(17192)
  jamisp = JamIsp.find_by_beginip(geoipblock.beginip)
  {jamisp: jamisp, geoiplocation: geoiplocation, geoipblock: geoipblock }
end
|
||||
|
||||
# gets related models for an IP in the block with locid 667 from the
# scores_better_test_data.sql (the "Dallas" location). The previous comment
# said "1st block" — copy-pasted from austin_geoip; this is a different block.
# Returns a hash with :jamisp, :geoiplocation and :geoipblock.
def dallas_geoip
  geoiplocation = GeoIpLocations.find_by_locid(667)
  geoipblock = GeoIpBlocks.find_by_locid(667)
  jamisp = JamIsp.find_by_beginip(geoipblock.beginip)
  {jamisp: jamisp, geoiplocation: geoiplocation, geoipblock: geoipblock}
end
|
||||
|
||||
# attempts to make the creation of a score more straightforward.
# a_geoip and b_geoip are hashes with keys jamisp and geoiplocation (like those created by austin_geoip and dallas_geoip)
def create_score(a_geoip, b_geoip, a_addr = a_geoip[:jamisp].beginip, b_addr = b_geoip[:jamisp].beginip,
                 a_client_id = 'a_client_id', b_client_id = 'b_client_id', score = 10, score_dt = Time.now, score_data = nil)
  # name the composite ids before the call so the argument list stays readable
  a_locidispid = Score.create_locidispid(a_geoip[:geoiplocation], a_geoip[:jamisp])
  b_locidispid = Score.create_locidispid(b_geoip[:geoiplocation], b_geoip[:jamisp])
  Score.createx(a_locidispid, a_client_id, a_addr,
                b_locidispid, b_client_id, b_addr,
                score, score_dt, score_data)
end
|
||||
|
|
@ -47,4 +47,9 @@ MusicSessionScheduler:
|
|||
ActiveMusicSessionCleaner:
|
||||
cron: "0 */5 0 * *"
|
||||
class: "JamRuby::ActiveMusicSessionCleaner"
|
||||
description: "Removes any active music sessions that are stale."
|
||||
|
||||
ScoreHistorySweeper:
|
||||
cron: "0 * * * *"
|
||||
class: "JamRuby::ScoreHistorySweeper"
|
||||
description: "Creates 'ScoreHistory' tables from Scores"
|
||||
|
|
@ -164,12 +164,9 @@ describe "Bands", :js => true, :type => :feature, :capybara_feature => true do
|
|||
|
||||
it "displays any pending band invitations when viewed by current band member" do
|
||||
friend = user
|
||||
|
||||
sign_in_poltergeist band_musician
|
||||
friendship = FactoryGirl.create(:friendship, :user_id=>band_musician.id, :friend_id=>friend.id)
|
||||
|
||||
visit "/client#/band/setup/#{band_musician.bands.first.id}"
|
||||
#navigate_band_setup
|
||||
fast_signin(band_musician, "/client#/band/setup/#{band_musician.bands.first.id}")
|
||||
|
||||
band_name = "Just The Two Of Us"
|
||||
band_bio = "Good, good friends"
|
||||
|
|
|
|||
|
|
@ -194,7 +194,7 @@ bputs "before register capybara"
|
|||
end
|
||||
|
||||
config.before(:each, :js => true) do
|
||||
#https://gist.github.com/josevalim/470808
|
||||
#
|
||||
#Timeout.timeout(Capybara.default_wait_time) do
|
||||
# until (i = page.evaluate_script("$.active")).zero?
|
||||
# Rails.logger.info "example [#{example.description}] has #{i} outstanding XHR(s)"
|
||||
|
|
|
|||
Loading…
Reference in New Issue