* VRFS-1403 - maxmind single step import done

This commit is contained in:
Seth Call 2014-07-19 21:11:16 -05:00
parent ae8c3120aa
commit 5f131f9407
68 changed files with 1528 additions and 750 deletions

View File

@ -121,5 +121,9 @@ module JamAdmin
config.ffmpeg_path = ENV['FFMPEG_PATH'] || (File.exist?('/usr/local/bin/ffmpeg') ? '/usr/local/bin/ffmpeg' : '/usr/bin/ffmpeg')
config.max_audio_downloads = 100
# recording upload/download configs
config.max_track_upload_failures = 10
config.max_track_part_upload_failures = 3
end
end

View File

@ -2,7 +2,7 @@ ALTER TABLE users ADD COLUMN last_jam_addr BIGINT;
ALTER TABLE users ADD COLUMN last_jam_locidispid BIGINT;
-- (j)oin session as musician, (r)egister, (f)tue, (n)etwork test
-- (j)oin session as musician, (r)egister, (f)tue, (n)etwork test, maxmind (i)mport
ALTER TABLE users ADD COLUMN last_jam_updated_reason CHAR(1);
ALTER TABLE users ADD COLUMN last_jam_updated_at TIMESTAMP;

View File

@ -1,27 +1,45 @@
-- released_at is when maxmind released this data
CREATE TABLE max_mind_releases (
id VARCHAR(64) PRIMARY KEY DEFAULT uuid_generate_v4(),
released_at DATE,
released_at DATE UNIQUE NOT NULL,
imported BOOLEAN NOT NULL DEFAULT FALSE,
imported_at DATE,
geo_ip_124_url VARCHAR(2000),
geo_ip_124_md5 VARCHAR(255),
geo_ip_124_size INTEGER,
geo_ip_134_url VARCHAR(2000),
geo_ip_134_md5 VARCHAR(255),
geo_ip_134_size INTEGER,
geo_ip_139_url VARCHAR(2000),
geo_ip_139_md5 VARCHAR(255),
geo_ip_139_size INTEGER,
geo_ip_142_url VARCHAR(2000),
geo_ip_142_md5 VARCHAR(255),
geo_ip_142_size INTEGER,
region_codes_url VARCHAR(2000),
region_codes_md5 VARCHAR(255),
region_codes_size INTEGER,
iso3166_url VARCHAR(2000),
iso3166_md5 VARCHAR(255),
iso3166_size INTEGER,
table_dumps_url VARCHAR(2000),
table_dumps_md5 VARCHAR(255),
table_dumps_size INTEGER,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- 'maxmind/2014-07-01/GeoIP-139_20140701.zip', '8487b681cc14ea9f603b52db5763a77a', 62399148,
-- 'maxmind/2014-07-01/GeoIP-142_20140701.zip', '2fb4288fa3004ad68a06388f716e4ee5', 2265920,
-- the 1st available release
INSERT INTO max_mind_releases VALUES (DEFAULT, DATE '2014-07-01',
INSERT INTO max_mind_releases VALUES (DEFAULT, DATE '2014-07-01', FALSE, NULL,
'maxmind/2014-07-01/GeoIP-124_20140701.zip', '93430c4b34b366030054a97c1b595f6f', 1997587,
'maxmind/2014-07-01/GeoIP-134_20140701.zip', '893c8674656271dac4964d5a56325203', 48198205,
'maxmind/2014-07-01/GeoIP-139_20140701.zip', '8487b681cc14ea9f603b52db5763a77a', 62399148,
'maxmind/2014-07-01/GeoIP-142_20140701.zip', '2fb4288fa3004ad68a06388f716e4ee5', 2265920,
'maxmind/2014-07-01/region_codes.csv', '74c174dc9132a95e56adf4ce32d38909', 76500,
'maxmind/2014-07-01/iso3166.csv', 'f2c15e4a163468b0b08ebedab1507911', 4282,
'maxmind/2014-07-01/copies.zip', '3a7ddf36b3a8433c19e1b9afcbd2bb77', 178660266,
DEFAULT, DEFAULT);
-- this created_at column will be used by the score_histories import process to chunk work correctly
ALTER TABLE scores ADD COLUMN created_at TIMESTAMP;
UPDATE SCORES SET created_at = score_dt;
ALTER TABLE scores ALTER COLUMN created_at SET DEFAULT CURRENT_TIMESTAMP;
ALTER TABLE scores ALTER COLUMN created_at SET NOT NULL;
DROP TABLE max_mind_isp;
DROP TABLE max_mind_geo;

View File

@ -1,5 +1,9 @@
-- cooking up some better test data for use with findblah
-- my_audio_latency can have a special value of -1, which means 'unknown'.
CREATE OR REPLACE FUNCTION generate_scores_dataset () RETURNS VOID STRICT VOLATILE AS $$
BEGIN
delete from GeoIPLocations;
insert into GeoIPLocations (locId, countryCode, region, city, postalCode, latitude, longitude, metroCode, areaCode) values
(17192,'US','TX','Austin','78749',30.2076,-97.8587,635,'512'),
@ -69,3 +73,27 @@ INSERT INTO jamisp (beginip, endip, coid) SELECT x.beginip, x.endip, y.coid FROM
UPDATE geoiplocations SET geog = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326)::geography;
UPDATE geoipblocks SET geom = ST_MakeEnvelope(beginip, -1, endip, 1);
UPDATE jamisp SET geom = ST_MakeEnvelope(beginip, -1, endip, 1);
IF EXISTS(
SELECT *
FROM information_schema.tables
WHERE
table_schema = 'public' AND
table_name = 'cities') THEN
DELETE FROM cities;
INSERT INTO cities (city, region, countrycode) select distinct city, region, countrycode from geoiplocations where length(city) > 0 and length(countrycode) > 0;
DELETE FROM regions;
INSERT INTO regions (region, countrycode) select distinct region, countrycode from cities;
DELETE FROM countries;
INSERT INTO countries (countrycode) select distinct countrycode from regions;
END IF;
RETURN;
END;
$$ LANGUAGE plpgsql;
SELECT generate_scores_dataset();

View File

@ -1,2 +1,3 @@
--color
--format progress
--profile

View File

@ -54,7 +54,7 @@ group :test do
gem 'factory_girl', '4.1.0'
gem "rspec", "2.11"
gem 'spork', '0.9.0'
gem 'database_cleaner', '0.7.0'
gem 'database_cleaner', '1.3.0'
gem 'faker', '1.3.0'
gem 'resque_spec' #, :path => "/home/jam/src/resque_spec/"
gem 'timecop'

View File

@ -7,3 +7,4 @@ test:
password: postgres
timeout: 2000
encoding: unicode
min_messages: warning

View File

@ -19,6 +19,7 @@ require 'cgi'
require 'resque_mailer'
require 'rest-client'
require 'zip'
require 'csv'
require "jam_ruby/constants/limits"
require "jam_ruby/constants/notification_types"
@ -67,13 +68,14 @@ require "jam_ruby/app/uploaders/mix_uploader"
require "jam_ruby/app/uploaders/music_notation_uploader"
require "jam_ruby/app/uploaders/max_mind_release_uploader"
require "jam_ruby/lib/desk_multipass"
require "jam_ruby/lib/ip"
require "jam_ruby/amqp/amqp_connection_manager"
require "jam_ruby/database"
require "jam_ruby/message_factory"
require "jam_ruby/models/feedback"
require "jam_ruby/models/feedback_observer"
require "jam_ruby/models/max_mind_geo"
require "jam_ruby/models/max_mind_isp"
#require "jam_ruby/models/max_mind_geo"
#require "jam_ruby/models/max_mind_isp"
require "jam_ruby/models/max_mind_release"
require "jam_ruby/models/band_genre"
require "jam_ruby/models/genre"

View File

@ -12,7 +12,7 @@ class MaxMindReleaseUploader < CarrierWave::Uploader::Base
# Add a white list of extensions which are allowed to be uploaded.
def extension_white_list
  # zip for the GeoIP archives, csv for the region_codes/iso3166 files.
  %w(zip csv)
end
def store_dir
@ -22,19 +22,21 @@ class MaxMindReleaseUploader < CarrierWave::Uploader::Base
# important; this code assumes that the mounted_as ends in _url, and the corresponding _md5 field has the same prefix
# this is true for max_mind_release, but not necessarily other models; so careful copy/pasting
# Records the uploaded file's MD5 digest and byte size on the model, in
# the sibling <prefix>_md5 / <prefix>_size columns derived from the
# mounted <prefix>_url column name.
# Assumes mounted_as ends in '_url' (true for max_mind_release, but not
# necessarily other models -- careful when copy/pasting).
def update_extras(file)
  mounted = mounted_as.to_s
  prefix = mounted[0, mounted.rindex('_url')]
  model["#{prefix}_size".to_sym] = file.size
  model["#{prefix}_md5".to_sym] = ::Digest::MD5.file(file).hexdigest
end
# Storage filename for the mounted column: region_codes and iso3166
# uploads are plain CSVs; every other MaxMind artifact is a zip archive.
def filename
  mounted = mounted_as.to_s
  ext = (mounted == 'region_codes_url' || mounted == 'iso3166_url') ? '.csv' : '.zip'
  File.join(model.store_dir, mounted + ext)
end
end

View File

@ -71,7 +71,7 @@ module JamRuby
#puts("============= GeoIpBlocks.lookup returns #{block.inspect} for #{addr} =============")
if block.nil? then locid = 0 else locid = block.locid end
location = GeoIpLocations.lookup(locid)
location = GeoIpLocations.find_by_locid(locid)
if location.nil?
# todo what's a better default location?
locidispid = 0
@ -206,7 +206,7 @@ SQL
#puts("============= GeoIpBlocks.lookup returns #{block.inspect} for #{addr} =============")
if block.nil? then locid = 0 else locid = block.locid end
location = GeoIpLocations.lookup(locid)
location = GeoIpLocations.find_by_locid(locid)
if location.nil?
# todo what's a better default location?
locidispid = 0

View File

@ -1,7 +1,23 @@
module JamRuby
# Removes a single leading and/or trailing double-quote from str.
# Returns nil when given nil; all other characters are left untouched.
def strip_quotes str
  return nil if str.nil?
  str = str[1..-1] if str.start_with?('"')
  str = str.chop if str.end_with?('"')
  str
end
# creates messages (implementation: protocol buffer) objects cleanly
class Database
@@log = Logging.logger[Database]
#def self.db_timezone
# @@db_timezone ||= TZInfo::Timezone.get(fetch_db_timezone)
@ -17,5 +33,26 @@ module JamRuby
result.clear
tz
end
# Bulk-loads +file+ into +table_name+ using PostgreSQL COPY FROM STDIN
# over the raw pg connection, then logs the resulting row count.
# The file must already be in a COPY-compatible format for the table.
def self.copy(table_name, file)
  @@log.debug("issuing COPY to #{table_name} from #{file}")
  raw = GeoIpBlocks.connection.raw_connection
  raw.copy_data "COPY #{table_name} FROM STDIN" do
    # File.foreach streams line by line and closes the handle when done
    # (the previous File.open(...).each leaked the file descriptor).
    File.foreach(file) { |line| raw.put_copy_data line }
  end
  count = GeoIpBlocks.connection.select_value("select count(*) from #{table_name}").to_i
  # Guard the logger -- it can be nil outside Rails, as the rest of the
  # import code assumes.
  ActiveRecord::Base.logger.debug "loaded #{count} records into #{table_name}" if ActiveRecord::Base.logger
end
# Creates an empty "<table>_copied" staging clone of +table_name+,
# inheriting defaults, constraints, comments and storage settings.
# Returns the clone's name.
def self.copy_table(table_name)
  clone_name = "#{table_name}_copied"
  ddl = "CREATE TABLE #{clone_name} (LIKE #{table_name} INCLUDING DEFAULTS INCLUDING CONSTRAINTS INCLUDING COMMENTS INCLUDING STORAGE)"
  GeoIpBlocks.connection.execute ddl
  clone_name
end
end
end

View File

@ -0,0 +1,7 @@
module JamRuby
# Converts a dotted-quad IPv4 string (e.g. "1.2.3.4") to its 32-bit
# integer form. Non-numeric octets coerce to 0 via to_i.
def ip_address_to_int(ip)
  ip.split('.').map(&:to_i).reduce(0) { |acc, octet| (acc << 8) + octet }
end
end

View File

@ -236,12 +236,12 @@ module JamRuby
if self.city
query = { :city => self.city }
query[:region] = self.state unless self.state.blank?
query[:country] = self.country unless self.country.blank?
if geo = MaxMindGeo.where(query).limit(1).first
geo.lat = nil if geo.lat = 0
geo.lng = nil if geo.lng = 0
if geo.lat && geo.lng && (self.lat != geo.lat || self.lng != geo.lng)
self.lat, self.lng = geo.lat, geo.lng
query[:countrycode] = self.country unless self.country.blank?
if geo = GeoIpLocations.where(query).limit(1).first
geo.latitude = nil if geo.latitude = 0
geo.longitude = nil if geo.longitude = 0
if geo.latitude && geo.longitude && (self.lat != geo.latitude || self.lng != geo.longitude)
self.lat, self.lng = geo.latitude, geo.longitude
return true
end
end
@ -268,6 +268,11 @@ module JamRuby
(b_members - s_members).blank?
end
# Post-import hook: refreshes every band's lat/lng from the newly
# imported geoiplocations data, joining on city/state/country.
# use_copied selects the "_copied" staging table -- the import builds
# staging tables before swapping them live, so this defaults to true.
def self.after_maxmind_import(use_copied = true)
  table_suffix = use_copied ? '_copied' : ''
  Band.connection.execute("UPDATE bands SET lat = geo.latitude, lng = geo.longitude FROM geoiplocations#{table_suffix} as geo WHERE bands.city = geo.city AND bands.state = geo.region AND bands.country = geo.countrycode")
end
private
def require_at_least_one_genre

View File

@ -183,6 +183,20 @@ module JamRuby
end
end
# Rebuilds locidispid for ALL connections from each row's addr column.
# A connection's locidispid becomes 0 if no geoipblocks/geoiplocations
# row matches its IP address, or if no jamisp row matches it; otherwise
# it is set to the packed value locid * 1000000 + coid (one bigint).
# use_copied targets the "_copied" staging tables built by the import.
def self.update_locidispids(use_copied = true)
  table_suffix = use_copied ? '_copied' : ''
  # The && ST_MakePoint(addr, 0) predicate drives the GiST index on geom;
  # the BETWEEN beginip/endip predicate gives the exact range match.
  Connection.connection.execute("UPDATE connections SET locidispid = COALESCE((SELECT geolocs.locid as geolocid FROM geoipblocks#{table_suffix} as geoblocks INNER JOIN geoiplocations#{table_suffix} as geolocs ON geoblocks.locid = geolocs.locid WHERE geoblocks.geom && ST_MakePoint(addr, 0) AND addr BETWEEN geoblocks.beginip AND geoblocks.endip LIMIT 1) * 1000000::bigint +(SELECT coid FROM jamisp#{table_suffix} as jisp WHERE geom && ST_MakePoint(addr, 0) AND addr BETWEEN beginip AND endip LIMIT 1), 0) ").check
end
# Post-import hook: rebuilds every connection's locidispid against the
# freshly imported (staging) geo tables.
def self.after_maxmind_import
  update_locidispids
end
private
def require_at_least_one_track_when_in_session

View File

@ -1,28 +1,83 @@
module JamRuby
class Country < ActiveRecord::Base
@@log = Logging.logger[Country]
self.table_name = 'countries'
# All countries, ordered alphabetically by display name.
def self.get_all
  order('countryname asc').all
end
# Absolute path to the iso3166.csv bundled inside the installed
# jam_ruby gem.
def self.find_iso3166
  root = Gem::Specification.find_by_name("jam_ruby").gem_dir
  File.join(root, 'lib', 'jam_ruby', 'geodata', 'iso3166.csv')
end
def self.import_from_iso3166(options)
def self.import_from_iso3166(file = find_iso3166)
self.delete_all
file = options[:file]
use_copy = options[:use_copy] ||= false
start = Time.now
copied_table_name = Database.copy_table(self.table_name)
if use_copy
Database.copy(copied_table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
saved_level = ActiveRecord::Base.logger ? ActiveRecord::Base.logger.level : 0
count = 0
stmt = "INSERT INTO #{copied_table_name} (countrycode, countryname) VALUES"
vals = ''
sep = ''
i = 0
n = 20
csv = ::CSV.new(io, {encoding: 'ISO-8859-1', headers: false})
csv.each do |row|
cc = self.new
cc.countrycode = row[0]
cc.countryname = row[1]
cc.save
vals = vals+sep+"(#{ActiveRecord::Base.quote_value(row[0])}, #{ActiveRecord::Base.quote_value(row[1])})"
sep = ','
i += 1
if count == 0 or i >= n then
self.connection.execute stmt+vals
count += i
vals = ''
sep = ''
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
ActiveRecord::Base.logger.debug "... logging inserts into #{copied_table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{copied_table_name} ..."
ActiveRecord::Base.logger.level = 1
end
end
end
if i > 0
self.connection.execute stmt+vals
count += i
end
if ActiveRecord::Base.logger then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{copied_table_name}"
end
end # file
end
elapsed = Time.now - start
@@log.debug("#{copied_table_name} import took #{elapsed} seconds")
end
# Post-import hook: swaps the staging table live by dropping the old
# countries table and renaming countries_copied into its place.
def self.after_maxmind_import
  self.connection.execute("DROP TABLE #{self.table_name}").check
  self.connection.execute("ALTER TABLE #{self.table_name}_copied RENAME TO #{self.table_name}").check
end
end
end

View File

@ -1,8 +1,17 @@
module JamRuby
class GeoIpBlocks < ActiveRecord::Base
# index names created on the copied table used during import.
# they do not exist except during import
COPIED_GEOIPBLOCKS_INDEX_NAME = 'geoipblocks_copied_geom_gix'
GEOIPBLOCKS_INDEX_NAME = "geoipblocks_geom_gix"
@@log = Logging.logger[GeoIpBlocks]
self.table_name = 'geoipblocks'
belongs_to :location, class_name: 'JamRuby::GeoIpLocations', inverse_of: 'blocks', foreign_key: 'locid'
def self.lookup(ipnum)
self.where('geom && ST_MakePoint(?, 0) AND ? BETWEEN beginip AND endip', ipnum, ipnum)
.limit(1)
@ -14,14 +23,29 @@ module JamRuby
c.exec_params("insert into #{self.table_name} (beginip, endip, locid, geom) values($1::bigint, $2::bigint, $3, ST_MakeEnvelope($1::bigint, -1, $2::bigint, 1))", [beginip, endip, locid])
end
def self.import_from_max_mind(file)
# Finds the GeoIP block whose [beginip, endip] range contains the
# dotted-quad address ip_addy; returns nil when no block matches.
def self.ip_lookup(ip_addy)
  numeric = ip_address_to_int(ip_addy)
  where(["beginip <= ? AND ? <= endip", numeric, numeric]).limit(1).first
end
def self.import_from_max_mind(options)
file = options[:file]
use_copy = options[:use_copy]
# File Geo-134
# Format:
# startIpNum,endIpNum,locId
self.transaction do
self.delete_all
start = Time.now
copied_table_name = Database.copy_table(self.table_name)
if use_copy
Database.copy(copied_table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
s = io.gets.strip # eat the copyright line. gah, why do they have that in their file??
unless s.eql? 'Copyright (c) 2011 MaxMind Inc. All Rights Reserved.'
@ -40,7 +64,7 @@ module JamRuby
saved_level = ActiveRecord::Base.logger ? ActiveRecord::Base.logger.level : 0
count = 0
stmt = "insert into #{self.table_name} (beginip, endip, locid) values"
stmt = "INSERT INTO #{copied_table_name} (beginip, endip, locid) VALUES"
vals = ''
sep = ''
@ -51,8 +75,8 @@ module JamRuby
csv.each do |row|
raise "file does not have expected number of columns (3): #{row.length}" unless row.length == 3
beginip = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[0]))
endip = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[1]))
beginip = ip_address_to_int(strip_quotes(row[0]))
endip = ip_address_to_int(strip_quotes(row[1]))
locid = row[2]
vals = vals+sep+"(#{beginip}, #{endip}, #{locid})"
@ -67,13 +91,13 @@ module JamRuby
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
ActiveRecord::Base.logger.debug "... logging inserts into #{self.table_name} suspended ..."
ActiveRecord::Base.logger.debug "... logging inserts into #{copied_table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{self.table_name} ..."
ActiveRecord::Base.logger.debug "... inserted #{count} into #{copied_table_name} ..."
ActiveRecord::Base.logger.level = 1
end
end
@ -86,26 +110,37 @@ module JamRuby
if ActiveRecord::Base.logger then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{self.table_name}"
ActiveRecord::Base.logger.debug "loaded #{count} records into #{copied_table_name}"
end
end
sts = self.connection.execute "ALTER TABLE #{self.table_name} DROP COLUMN geom;"
end
sts = self.connection.execute "ALTER TABLE #{copied_table_name} DROP COLUMN geom;"
ActiveRecord::Base.logger.debug "DROP COLUMN geom returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
# sts.check [we don't care]
sts = self.connection.execute "ALTER TABLE #{self.table_name} ADD COLUMN geom geometry(polygon);"
sts = self.connection.execute "ALTER TABLE #{copied_table_name} ADD COLUMN geom geometry(polygon);"
ActiveRecord::Base.logger.debug "ADD COLUMN geom returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "UPDATE #{self.table_name} SET geom = ST_MakeEnvelope(beginip, -1, endip, 1);"
sts = self.connection.execute "UPDATE #{copied_table_name} SET geom = ST_MakeEnvelope(beginip, -1, endip, 1);"
ActiveRecord::Base.logger.debug "SET geom returned sts #{sts.cmd_tuples}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "CREATE INDEX #{self.table_name}_geom_gix ON #{self.table_name} USING GIST (geom);"
ActiveRecord::Base.logger.debug "CREATE INDEX #{self.table_name}_geom_gix returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts = self.connection.execute "CREATE INDEX #{COPIED_GEOIPBLOCKS_INDEX_NAME} ON #{copied_table_name} USING GIST (geom);"
ActiveRecord::Base.logger.debug "CREATE INDEX #{COPIED_GEOIPBLOCKS_INDEX_NAME} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
end
end
elapsed = Time.now - start
@@log.debug("#{copied_table_name} import took #{elapsed} seconds")
end
# Post-import hook: swaps the staging table live -- drops the old
# geoipblocks table, renames the GiST index built during import back to
# its canonical name, then renames geoipblocks_copied into place.
def self.after_maxmind_import
  self.connection.execute("DROP TABLE #{self.table_name}").check
  self.connection.execute("ALTER INDEX #{COPIED_GEOIPBLOCKS_INDEX_NAME} RENAME TO #{GEOIPBLOCKS_INDEX_NAME}").check
  self.connection.execute("ALTER TABLE #{self.table_name}_copied RENAME TO #{self.table_name}").check
end
end
end

View File

@ -1,15 +1,53 @@
module JamRuby
class GeoIpLocations < ActiveRecord::Base
self.table_name = 'geoiplocations'
CITIES_TABLE = 'cities'
REGIONS_TABLE = 'regions'
COUNTRIES_TABLE = 'countries'
# index names created on the copied table used during import.
# they do not exist except during import
GEOIPLOCATIONS_INDEX_NAME = 'geoiplocations_geog_gix'
COPIED_GEOIPLOCATIONS_INDEX_NAME = 'geoiplocations_copied_geog_gix'
def self.lookup(locid)
self.where(locid: locid)
.limit(1)
.first
PRIMARY_KEY_NAME = 'geoiplocations_pkey'
COPIED_PRIMARY_KEY_NAME = 'geoiplocations_copied_pkey'
@@log = Logging.logger[GeoIpLocations]
self.table_name = 'geoiplocations'
self.primary_key = 'locid'
has_many :blocks, class_name: 'JamRuby::GeoIpBlocks', inverse_of: 'location', foreign_key: 'locid'
# Returns a hash with location information. Fields are nil if they can't be figured.
# This is a class method because it doesn't need to be in a transaction.
# Keys: :city, :state, :country (strings or nil), :addr (integer IP or
# nil -- see note below), :locidispid (locid * 1000000 + coid, 0 when
# nothing matched).
def self.lookup(ip_address)
  city = state = country = nil
  locid = ispid = 0
  # Only attempt the geo lookup for a well-formed dotted-quad string.
  unless ip_address.nil? || ip_address !~ /^\d+\.\d+\.\d+\.\d+$/
    addr = ip_address_to_int(ip_address)
    block = GeoIpBlocks.lookup(addr)
    if block
      locid = block.locid
      location = GeoIpLocations.find_by_locid(locid)
      if location
        # todo translate countrycode to country, region(code) to region
        # MSC: it seems fine to store countrycode; the UI can translate countrycode to country display name. same for region
        country = location.countrycode
        state = location.region
        city = location.city
      end
    end
    isp = JamIsp.lookup(addr)
    if isp
      ispid = isp.coid
    end
  end
  # NOTE(review): when ip_address is nil/malformed the unless body never
  # runs, so addr here is nil -- callers should expect addr: nil then.
  {city: city, state: state, country: country, addr: addr, locidispid: locid*1000000+ispid}
end
def self.createx(locid, countrycode, region, city, postalcode, latitude, longitude, metrocode, areacode)
@ -23,14 +61,64 @@ module JamRuby
return s.to_i
end
def self.import_from_max_mind(file)
# Narrows +relation+ to rows within params[:distance] of an origin point
# (distance unit is whatever the within() geokit scope uses -- TODO
# confirm miles vs km). The origin lat/lng is resolved in priority order:
#   1. explicit params[:city]/[:state]/[:country] matched in geoiplocations
#   2. current_user's stored locidispid (locid = locidispid / 1000000)
#   3. GeoIP lookup of params[:remote_ip]
# Returns the relation unchanged when distance <= 0 or no usable origin
# is found. A (0, 0) lat/lng pair is treated as "unknown" at each step.
def self.where_latlng(relation, params, current_user=nil)
  # this is only valid to call when relation is about bands
  distance = params[:distance].to_i
  if distance > 0
    latlng = nil
    location_city = params[:city]
    location_state = params[:state]
    location_country = params[:country]
    remote_ip = params[:remote_ip]
    if location_city and location_state and location_country
      geo = self.where(city: location_city, region: location_state, countrycode: location_country).limit(1).first
      if geo and geo.latitude and geo.longitude and (geo.latitude != 0 or geo.longitude != 0)
        # it isn't reasonable for both to be 0...
        latlng = [geo.latitude, geo.longitude]
      end
    elsif current_user and current_user.locidispid and current_user.locidispid != 0
      location = GeoIpLocations.find_by_locid(current_user.locidispid/1000000)
      if location and location.latitude and location.longitude and (location.latitude != 0 or location.longitude != 0)
        # it isn't reasonable for both to be 0...
        latlng = [location.latitude, location.longitude]
      end
    elsif remote_ip
      geo = GeoIpBlocks.ip_lookup(remote_ip)
      geo = geo.location if geo
      if geo and geo.latitude and geo.longitude and (geo.latitude != 0 or geo.longitude != 0)
        # it isn't reasonable for both to be 0...
        latlng = [geo.latitude, geo.longitude]
      end
    end
    if latlng
      relation = relation.where(['latitude IS NOT NULL AND longitude IS NOT NULL']).within(distance, origin: latlng)
    end
  end
  relation
end
def self.import_from_max_mind(options)
file = options[:file]
use_copy = options[:use_copy]
# File Geo-134
# Format:
# locId,country,region,city,postalCode,latitude,longitude,metroCode,areaCode
self.transaction do
self.delete_all
start = Time.now
copied_table_name = Database.copy_table(self.table_name)
city_copied_table_name = Database.copy_table(City.table_name)
if use_copy
Database.copy(copied_table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
s = io.gets.strip # eat the copyright line. gah, why do they have that in their file??
unless s.eql? 'Copyright (c) 2012 MaxMind LLC. All Rights Reserved.'
@ -49,7 +137,7 @@ module JamRuby
saved_level = ActiveRecord::Base.logger ? ActiveRecord::Base.logger.level : 0
count = 0
stmt = "INSERT INTO #{self.table_name} (locid, countrycode, region, city, postalcode, latitude, longitude, metrocode, areacode) VALUES"
stmt = "INSERT INTO #{copied_table_name} (locid, countrycode, region, city, postalcode, latitude, longitude, metrocode, areacode) VALUES"
vals = ''
sep = ''
@ -70,7 +158,7 @@ module JamRuby
metrocode = row[7]
areacode = row[8]
vals = vals+sep+"(#{locid}, '#{countrycode}', '#{region}', #{MaxMindIsp.quote_value(city)}, '#{postalcode}', #{latitude}, #{longitude}, #{i(metrocode)}, '#{areacode}')"
vals = vals+sep+"(#{locid}, '#{countrycode}', '#{region}', #{quote_value(city)}, '#{postalcode}', #{latitude}, #{longitude}, #{i(metrocode)}, '#{areacode}')"
sep = ','
i += 1
@ -82,13 +170,13 @@ module JamRuby
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
ActiveRecord::Base.logger.debug "... logging inserts into #{self.table_name} suspended ..."
ActiveRecord::Base.logger.debug "... logging inserts into #{copied_table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{self.table_name} ..."
ActiveRecord::Base.logger.debug "... inserted #{count} into #{copied_table_name} ..."
ActiveRecord::Base.logger.level = 1
end
end
@ -101,50 +189,50 @@ module JamRuby
if ActiveRecord::Base.logger then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{self.table_name}"
ActiveRecord::Base.logger.debug "loaded #{count} records into #{copied_table_name}"
end
end
end
sts = self.connection.execute "ALTER TABLE #{self.table_name} DROP COLUMN geog;"
# create primary key index -- this will be renamed later in the import process
GeoIpLocations.connection.execute("CREATE UNIQUE INDEX #{COPIED_PRIMARY_KEY_NAME} ON #{copied_table_name} USING btree (locid)").check
GeoIpLocations.connection.execute("ALTER TABLE #{copied_table_name} ADD CONSTRAINT #{COPIED_PRIMARY_KEY_NAME} PRIMARY KEY USING INDEX #{COPIED_PRIMARY_KEY_NAME}").check
sts = self.connection.execute "ALTER TABLE #{copied_table_name} DROP COLUMN geog;"
ActiveRecord::Base.logger.debug "DROP COLUMN geog returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
# sts.check [we don't care]
sts = self.connection.execute "ALTER TABLE #{self.table_name} ADD COLUMN geog geography(point, 4326);"
sts = self.connection.execute "ALTER TABLE #{copied_table_name} ADD COLUMN geog geography(point, 4326);"
ActiveRecord::Base.logger.debug "ADD COLUMN geog returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "UPDATE #{self.table_name} SET geog = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326)::geography;"
sts = self.connection.execute "UPDATE #{copied_table_name} SET geog = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326)::geography;"
ActiveRecord::Base.logger.debug "SET geog returned sts #{sts.cmd_tuples}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "CREATE INDEX #{self.table_name}_geog_gix ON #{self.table_name} USING GIST (geog);"
ActiveRecord::Base.logger.debug "CREATE INDEX #{self.table_name}_geog_gix returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts = self.connection.execute "CREATE INDEX #{COPIED_GEOIPLOCATIONS_INDEX_NAME} ON #{copied_table_name} USING GIST (geog);"
ActiveRecord::Base.logger.debug "CREATE INDEX #{COPIED_GEOIPLOCATIONS_INDEX_NAME} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "DELETE FROM #{CITIES_TABLE};"
ActiveRecord::Base.logger.debug "DELETE FROM #{CITIES_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts = self.connection.execute "INSERT INTO #{city_copied_table_name} (city, region, countrycode) SELECT DISTINCT city, region, countrycode FROM #{copied_table_name} WHERE length(city) > 0 AND length(countrycode) > 0;"
ActiveRecord::Base.logger.debug "INSERT INTO #{city_copied_table_name} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "INSERT INTO #{CITIES_TABLE} (city, region, countrycode) SELECT DISTINCT city, region, countrycode FROM #{self.table_name} WHERE length(city) > 0 AND length(countrycode) > 0;"
ActiveRecord::Base.logger.debug "INSERT INTO #{CITIES_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
elapsed = Time.now - start
@@log.debug("#{copied_table_name} import took #{elapsed} seconds")
end
sts = self.connection.execute "DELETE FROM #{REGIONS_TABLE};"
ActiveRecord::Base.logger.debug "DELETE FROM #{REGIONS_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
def self.after_maxmind_import
# handle geoiplocations
self.connection.execute("DROP TABLE #{self.table_name}").check
self.connection.execute("ALTER INDEX #{COPIED_PRIMARY_KEY_NAME} RENAME TO #{PRIMARY_KEY_NAME}").check
self.connection.execute("ALTER INDEX #{COPIED_GEOIPLOCATIONS_INDEX_NAME} RENAME TO #{GEOIPLOCATIONS_INDEX_NAME}").check
self.connection.execute("ALTER TABLE #{self.table_name}_copied RENAME TO #{self.table_name}").check
sts = self.connection.execute "INSERT INTO #{REGIONS_TABLE} (region, regionname, countrycode) SELECT DISTINCT region, region, countrycode FROM #{CITIES_TABLE};"
ActiveRecord::Base.logger.debug "INSERT INTO #{REGIONS_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "DELETE FROM #{COUNTRIES_TABLE};"
ActiveRecord::Base.logger.debug "DELETE FROM #{COUNTRIES_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = self.connection.execute "INSERT INTO #{COUNTRIES_TABLE} (countrycode, countryname) SELECT DISTINCT countrycode, countrycode FROM #{REGIONS_TABLE};"
ActiveRecord::Base.logger.debug "INSERT INTO #{COUNTRIES_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
end
end
# handle cities
self.connection.execute("DROP TABLE #{City.table_name}").check
self.connection.execute("ALTER TABLE #{City.table_name}_copied RENAME TO #{City.table_name}").check
end
end
end

View File

@ -3,6 +3,29 @@ require 'ipaddr'
module JamRuby
class JamIsp < ActiveRecord::Base
# index names created on the copied table used during import.
# they do not exist except during import
GEOIPISP_INDEX_NAME = 'geoipisp_company_ndx'
COPIED_GEOIPISP_INDEX_NAME = 'geoipisp_copied_company_ndx'
JAMCOMPANY_UNIQUE_INDEX = 'jamcompany_company_ndx'
COPIED_JAMCOMPANY_UNIQUE_INDEX = 'jamcompany_copied_company_ndx'
JAMCOMPANY_PRIMARY_KEY_NAME = 'jamcompany_pkey'
COPIED_JAMCOMPANY_PRIMARY_KEY_NAME = 'jamcompany_copied_pkey'
COPIED_JAMCOMPANY_COID_SEQUENCE = 'jamcompany_copied_coid_seq'
JAMCOMPANY_COID_SEQUENCE = 'jamcompany_coid_seq'
JAMISP_GEOM_INDEX_NAME = 'jamisp_geom_gix'
COPIED_JAMISP_GEOM_INDEX_NAME = 'jamisp_copied_geom_gix'
JAMISP_COID_INDEX_NAME = 'jamisp_coid_ndx'
COPIED_JAMISP_COID_INDEX_NAME = 'jamisp_copied_coid_ndx'
@@log = Logging.logger[JamIsp]
self.table_name = 'jamisp'
COMPANY_TABLE = 'jamcompany'
GEOIPISP_TABLE = 'geoipisp'
@ -38,14 +61,24 @@ module JamRuby
raise "mother trucker"
end
def self.import_from_max_mind(file)
def self.import_from_max_mind(options)
file = options[:file]
use_copy = options[:use_copy]
# File Geo-124
# Format:
# startIpNum,endIpNum,isp
self.transaction do
self.connection.execute "delete from #{GEOIPISP_TABLE}"
start = Time.now
copied_table_name = Database.copy_table(GEOIPISP_TABLE)
copied_jamcompany_table_name = Database.copy_table(COMPANY_TABLE)
copied_jamisp_table_name = Database.copy_table(self.table_name)
if use_copy
Database.copy(copied_table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
#s = io.gets.strip # eat the copyright line. gah, why do they have that in their file??
#unless s.eql? 'Copyright (c) 2012 MaxMind LLC. All Rights Reserved.'
@ -64,7 +97,7 @@ module JamRuby
saved_level = ActiveRecord::Base.logger ? ActiveRecord::Base.logger.level : 0
count = 0
stmt = "insert into #{GEOIPISP_TABLE} (beginip, endip, company) values"
stmt = "INSERT INTO #{copied_table_name} (beginip, endip, company) VALUES"
vals = ''
sep = ''
@ -75,11 +108,11 @@ module JamRuby
csv.each do |row|
raise "file does not have expected number of columns (3): #{row.length}" unless row.length == 3
beginip = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[0]))
endip = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[1]))
beginip = ip_address_to_int(strip_quotes(row[0]))
endip = ip_address_to_int(strip_quotes(row[1]))
company = row[2]
vals = vals+sep+"(#{beginip}, #{endip}, #{MaxMindIsp.quote_value(company)})"
vals = vals+sep+"(#{beginip}, #{endip}, #{quote_value(company)})"
sep = ','
i += 1
@ -91,13 +124,13 @@ module JamRuby
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
ActiveRecord::Base.logger.debug "... logging inserts into #{GEOIPISP_TABLE} suspended ..."
ActiveRecord::Base.logger.debug "... logging inserts into #{copied_table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{GEOIPISP_TABLE} ..."
ActiveRecord::Base.logger.debug "... inserted #{count} into #{copied_table_name} ..."
ActiveRecord::Base.logger.level = 1
end
end
@ -110,46 +143,76 @@ module JamRuby
if ActiveRecord::Base.logger then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{GEOIPISP_TABLE}"
ActiveRecord::Base.logger.debug "loaded #{count} records into #{copied_table_name}"
end
end
end
sts = GeoIpLocations.connection.execute "DELETE FROM #{COMPANY_TABLE};"
ActiveRecord::Base.logger.debug "DELETE FROM #{COMPANY_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
# add index to copied geoipisp table
GeoIpLocations.connection.execute("CREATE INDEX #{COPIED_GEOIPISP_INDEX_NAME} ON #{copied_table_name} (company)").check
# add sequence to copied_jamcompany table
GeoIpLocations.connection.execute("ALTER TABLE #{copied_jamcompany_table_name} ALTER COLUMN coid DROP DEFAULT").check
GeoIpLocations.connection.execute("CREATE SEQUENCE #{COPIED_JAMCOMPANY_COID_SEQUENCE} START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1").check
GeoIpLocations.connection.execute("ALTER SEQUENCE #{COPIED_JAMCOMPANY_COID_SEQUENCE} OWNED BY #{copied_jamcompany_table_name}.coid").check
GeoIpLocations.connection.execute("ALTER TABLE ONLY #{copied_jamcompany_table_name} ALTER COLUMN coid SET DEFAULT nextval('#{COPIED_JAMCOMPANY_COID_SEQUENCE}'::regclass)").check
sts = GeoIpLocations.connection.execute("INSERT INTO #{copied_jamcompany_table_name} (company) SELECT DISTINCT company FROM #{copied_table_name} ORDER BY company").check
ActiveRecord::Base.logger.debug "INSERT INTO #{copied_table_name} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "ALTER SEQUENCE #{COMPANY_TABLE}_coid_seq RESTART WITH 1;"
ActiveRecord::Base.logger.debug "ALTER SEQUENCE #{COMPANY_TABLE}_coid_seq returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
# add unique index to copied jamcompany table
GeoIpLocations.connection.execute("CREATE UNIQUE INDEX #{COPIED_JAMCOMPANY_UNIQUE_INDEX} ON #{copied_jamcompany_table_name} (company)").check
# add primary index to copied jamcompany table
GeoIpLocations.connection.execute("CREATE UNIQUE INDEX #{COPIED_JAMCOMPANY_PRIMARY_KEY_NAME} ON #{copied_jamcompany_table_name} USING btree (coid)").check
GeoIpLocations.connection.execute("ALTER TABLE #{copied_jamcompany_table_name} ADD CONSTRAINT #{COPIED_JAMCOMPANY_PRIMARY_KEY_NAME} PRIMARY KEY USING INDEX #{COPIED_JAMCOMPANY_PRIMARY_KEY_NAME}").check
sts = GeoIpLocations.connection.execute "INSERT INTO #{copied_jamisp_table_name} (beginip, endip, coid) SELECT x.beginip, x.endip, y.coid FROM #{copied_table_name} x, #{copied_jamcompany_table_name} y WHERE x.company = y.company"
ActiveRecord::Base.logger.debug "INSERT INTO #{copied_jamisp_table_name} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "INSERT INTO #{COMPANY_TABLE} (company) SELECT DISTINCT company FROM #{GEOIPISP_TABLE} ORDER BY company;"
ActiveRecord::Base.logger.debug "INSERT INTO #{COMPANY_TABLE} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "DELETE FROM #{self.table_name};"
ActiveRecord::Base.logger.debug "DELETE FROM #{self.table_name} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "INSERT INTO #{self.table_name} (beginip, endip, coid) SELECT x.beginip, x.endip, y.coid FROM #{GEOIPISP_TABLE} x, #{COMPANY_TABLE} y WHERE x.company = y.company;"
ActiveRecord::Base.logger.debug "INSERT INTO #{self.table_name} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "ALTER TABLE #{self.table_name} DROP COLUMN geom;"
sts = GeoIpLocations.connection.execute "ALTER TABLE #{copied_jamisp_table_name} DROP COLUMN geom"
ActiveRecord::Base.logger.debug "DROP COLUMN geom returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
#sts.check [we don't care]
sts = GeoIpLocations.connection.execute "ALTER TABLE #{self.table_name} ADD COLUMN geom geometry(polygon);"
sts = GeoIpLocations.connection.execute "ALTER TABLE #{copied_jamisp_table_name} ADD COLUMN geom geometry(polygon)"
ActiveRecord::Base.logger.debug "ADD COLUMN geom returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "UPDATE #{self.table_name} SET geom = ST_MakeEnvelope(beginip, -1, endip, 1);"
sts = GeoIpLocations.connection.execute "UPDATE #{copied_jamisp_table_name} SET geom = ST_MakeEnvelope(beginip, -1, endip, 1)"
ActiveRecord::Base.logger.debug "SET geom returned sts #{sts.cmd_tuples}" if ActiveRecord::Base.logger
sts.check
sts = GeoIpLocations.connection.execute "CREATE INDEX #{self.table_name}_geom_gix ON #{self.table_name} USING GIST (geom);"
ActiveRecord::Base.logger.debug "CREATE INDEX #{self.table_name}_geom_gix returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
# recreate indexes on jamisp
sts = GeoIpLocations.connection.execute "CREATE INDEX #{COPIED_JAMISP_GEOM_INDEX_NAME} ON #{copied_jamisp_table_name} USING GIST (geom)"
ActiveRecord::Base.logger.debug "CREATE INDEX #{COPIED_JAMISP_GEOM_INDEX_NAME} returned sts #{sts.cmd_status}" if ActiveRecord::Base.logger
sts.check
end
end
GeoIpLocations.connection.execute("CREATE INDEX #{COPIED_JAMISP_COID_INDEX_NAME} ON #{copied_jamisp_table_name} (coid)").check
elapsed = Time.now - start
@@log.debug("#{copied_jamisp_table_name} import took #{elapsed} seconds")
end
def self.after_maxmind_import
# handle jamisp
self.connection.execute("DROP TABLE #{self.table_name}").check
self.connection.execute("ALTER INDEX #{COPIED_JAMISP_GEOM_INDEX_NAME} RENAME TO #{JAMISP_GEOM_INDEX_NAME}").check
self.connection.execute("ALTER INDEX #{COPIED_JAMISP_COID_INDEX_NAME} RENAME TO #{JAMISP_COID_INDEX_NAME}").check
self.connection.execute("ALTER TABLE #{self.table_name}_copied RENAME TO #{self.table_name}").check
# handle geoipisp
self.connection.execute("DROP TABLE #{GEOIPISP_TABLE}").check
self.connection.execute("ALTER INDEX #{COPIED_GEOIPISP_INDEX_NAME} RENAME TO #{GEOIPISP_INDEX_NAME}").check
self.connection.execute("ALTER TABLE #{GEOIPISP_TABLE}_copied RENAME TO #{GEOIPISP_TABLE}").check
# handle jamcompany
self.connection.execute("DROP TABLE #{COMPANY_TABLE}").check
self.connection.execute("ALTER INDEX #{COPIED_JAMCOMPANY_UNIQUE_INDEX} RENAME TO #{JAMCOMPANY_UNIQUE_INDEX}").check
self.connection.execute("ALTER INDEX #{COPIED_JAMCOMPANY_PRIMARY_KEY_NAME} RENAME TO #{JAMCOMPANY_PRIMARY_KEY_NAME}").check
self.connection.execute("ALTER SEQUENCE #{COPIED_JAMCOMPANY_COID_SEQUENCE} RENAME TO #{JAMCOMPANY_COID_SEQUENCE}").check
self.connection.execute("ALTER TABLE #{COMPANY_TABLE}_copied RENAME TO #{COMPANY_TABLE}").check
end
end
end

View File

@ -52,7 +52,7 @@ module JamRuby
if isp.nil? then ispid = 0 else ispid = isp.coid end
block = GeoIpBlocks.lookup(addr)
if block.nil? then locid = 0 else locid = block.locid end
location = GeoIpLocations.lookup(locid)
location = GeoIpLocations.find_by_locid(locid)
if location.nil?
# todo what's a better default location?
locidispid = 0

View File

@ -5,21 +5,24 @@ module JamRuby
self.table_name = 'max_mind_geo'
def self.ip_lookup(ip_addy)
addr = MaxMindIsp.ip_address_to_int(ip_addy)
self.where(["ip_start <= ? AND ? <= ip_end", addr, addr])
.limit(1)
.first
end
@@log = Logging.logger[MaxMindGeo]
def self.import_from_max_mind(file)
def self.import_from_max_mind(options)
file = options[:file]
use_copy = options[:use_copy]
# File Geo-139
# Format:
# startIpNum,endIpNum,country,region,city,postalCode,latitude,longitude,dmaCode,areaCode
MaxMindGeo.transaction do
start = Time.now
MaxMindGeo.delete_all
if use_copy
Database.copy(MaxMindGeo.table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
s = io.gets.strip # eat the headers line
unless s.eql? 'startIpNum,endIpNum,country,region,city,postalCode,latitude,longitude,dmaCode,areaCode'
@ -42,8 +45,8 @@ module JamRuby
csv.each do |row|
raise "file does not have expected number of columns (10): #{row.length}" unless row.length == 10
ip_start = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[0]))
ip_end = MaxMindIsp.ip_address_to_int(MaxMindIsp.strip_quotes(row[1]))
ip_start = ip_address_to_int(strip_quotes(row[0]))
ip_end = ip_address_to_int(strip_quotes(row[1]))
country = row[2]
region = row[3]
city = row[4]
@ -88,8 +91,14 @@ module JamRuby
end
end
end
# User.find_each { |usr| usr.update_lat_lng }
# THIS DOESNT ACTUALLY DO ANYTHING BECAUSE IT NEVER SAVES
Band.find_each { |bnd| bnd.update_lat_lng }
elapsed = Time.now - start
@@log.debug("#{MaxMindGeo.table_name} import took #{elapsed} seconds")
end
def self.where_latlng(relation, params, current_user=nil)
@ -103,13 +112,13 @@ module JamRuby
remote_ip = params[:remote_ip]
if location_city and location_state and location_country
geo = self.where(city: location_city, region: location_state, country: location_country).limit(1).first
geo = self.where(city: location_city, region: location_state, countrycode: location_country).limit(1).first
if geo and geo.lat and geo.lng and (geo.lat != 0 or geo.lng != 0)
# it isn't reasonable for both to be 0...
latlng = [geo.lat, geo.lng]
end
elsif current_user and current_user.locidispid and current_user.locidispid != 0
location = GeoIpLocations.lookup(current_user.locidispid/1000000)
location = GeoIpLocations.find_by_locid(current_user.locidispid/1000000)
if location and location.latitude and location.longitude and (location.latitude != 0 or location.longitude != 0)
# it isn't reasonable for both to be 0...
latlng = [location.latitude, location.longitude]

View File

@ -5,14 +5,24 @@ module JamRuby
self.table_name = 'max_mind_isp'
def self.import_from_max_mind(file)
@@log = Logging.logger[MaxMindIsp]
def self.import_from_max_mind(options)
file = options[:file]
use_copy = options[:use_copy]
# File Geo-142
# Format:
# "beginIp","endIp","countryCode","ISP"
MaxMindIsp.transaction do
# drop indexes on start, then add them back when done
start = Time.now
MaxMindIsp.delete_all
if use_copy
Database.copy(MaxMind.table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
s = io.gets.strip # eat the copyright line. gah, why do they have that in their file??
unless s.eql? 'Copyright (c) 2011 MaxMind Inc. All Rights Reserved.'
@ -36,7 +46,7 @@ module JamRuby
vals = ''
sep = ''
i = 0
n = 20 # going from 20 to 40 only changed things a little bit
n = 20 # going from 20 to 40 only changed things a little bit, and 512 was slower... and 1024 was even slower (weird)
csv = ::CSV.new(io, {encoding: 'ISO-8859-1', headers: false})
csv.each do |row|
@ -51,19 +61,19 @@ module JamRuby
sep = ','
i += 1
if count == 0 or i >= n then
if count == 0 or i >= n
MaxMindIsp.connection.execute stmt+vals
count += i
vals = ''
sep = ''
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1
ActiveRecord::Base.logger.debug "... logging inserts into #{MaxMindIsp.table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
if ActiveRecord::Base.logger and count%10000 < n
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{MaxMindIsp.table_name} ..."
ActiveRecord::Base.logger.level = 1
@ -71,17 +81,21 @@ module JamRuby
end
end
if i > 0 then
if i > 0
MaxMindIsp.connection.execute stmt+vals
count += i
end
if ActiveRecord::Base.logger then
if ActiveRecord::Base.logger
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{MaxMindIsp.table_name}"
end
end
end
elapsed = Time.now - start
@@log.debug("#{MaxMindIsp.table_name} import took #{elapsed} seconds")
end
# Make an IP address fit in a signed int. Just divide it by 2, as the least significant part

View File

@ -7,27 +7,84 @@ module JamRuby
mount_uploader :geo_ip_124_url, MaxMindReleaseUploader
mount_uploader :geo_ip_134_url, MaxMindReleaseUploader
mount_uploader :geo_ip_139_url, MaxMindReleaseUploader
mount_uploader :geo_ip_142_url, MaxMindReleaseUploader
#mount_uploader :geo_ip_139_url, MaxMindReleaseUploader
#mount_uploader :geo_ip_142_url, MaxMindReleaseUploader
mount_uploader :iso3166_url, MaxMindReleaseUploader
mount_uploader :region_codes_url, MaxMindReleaseUploader
mount_uploader :table_dumps_url, MaxMindReleaseUploader
def store_dir
"maxmind/#{released_at}"
end
def import
# you can only import a maxmind release that has released_at specified
unless released_at
raise "released_at not set in import"
# if a dump file is found, use it and specify that COPY should be used
def file_or_dump(file, dump)
if dump
{file: dump, use_copy:true}
else
{file: file, use_copy:false}
end
end
def import(force_from_source=false)
@@log.debug("-----------------------------------")
@@log.debug("--------- STARTING IMPORT ---------")
@@log.debug("-----------------------------------")
start = Time.now
geo_ip_124_files, geo_ip_134_files, iso3166, region_codes, table_dump_files = download_assets(force_from_source)
import_to_database(geo_ip_124_files, geo_ip_134_files, iso3166, region_codes, table_dump_files)
@@log.debug("IMPORT TOOK: #{Time.now - start} SECONDS")
@@log.debug("-----------------------------------")
@@log.debug("--------- FINISHED IMPORT ---------")
@@log.debug("-----------------------------------")
end
def import_to_database(geo_ip_124_files, geo_ip_134_files, iso3166, region_codes, table_dump_files = {})
MaxMindRelease.transaction do
#MaxMindIsp.import_from_max_mind(file_or_dump(geo_ip_142_files['GeoIPISP-142.csv'], table_dump_files['max_mind_isp.txt']))
#MaxMindGeo.import_from_max_mind(file_or_dump(geo_ip_139_files['GeoIPCity.csv'], table_dump_files['max_mind_geo.txt']))
GeoIpBlocks.import_from_max_mind(file_or_dump(geo_ip_134_files['GeoIPCity-134-Blocks.csv'], table_dump_files['geoipblocks.txt']))
GeoIpLocations.import_from_max_mind(file_or_dump(geo_ip_134_files['GeoIPCity-134-Location.csv'], table_dump_files['geoiplocations.txt']))
JamIsp.import_from_max_mind(file_or_dump(geo_ip_124_files['GeoIPISP.csv'], table_dump_files['geoipisp.txt']))
Country.import_from_iso3166(file_or_dump(iso3166, table_dump_files['countries.txt']))
Region.import_from_region_codes(file_or_dump(region_codes, table_dump_files['regions.txt']))
# updating all scores to an old data to jump-start scoring
@@log.debug("setting all scores 'score_dt' to one day older than initial time")
Score.connection.execute("UPDATE scores SET score_dt = score_dt - interval '1 day'")
# update all user, band, and connection info that is dependent on maxmind
User.after_maxmind_import
Connection.after_maxmind_import
Band.after_maxmind_import
@@log.debug("rename temporary tables over existing tables")
# replace existing tables with new tables
GeoIpBlocks.after_maxmind_import
GeoIpLocations.after_maxmind_import
JamIsp.after_maxmind_import
Country.after_maxmind_import
Region.after_maxmind_import
self.imported = true
self.imported_at = Time.now
self.save!
end
end
def download_assets(force_from_source)
working_dir = dated_working_dir
@@log.debug("downloading and unzipping geoip-142")
geo_ip_142_files = download_and_unzip(working_dir, :geo_ip_142_url, self[:geo_ip_142_md5])
#@@log.debug("downloading and unzipping geoip-142")
#geo_ip_142_files = download_and_unzip(working_dir, :geo_ip_142_url, self[:geo_ip_142_md5])
@@log.debug("downloading and unzipping geoip-139")
geo_ip_139_files = download_and_unzip(working_dir, :geo_ip_139_url, self[:geo_ip_139_md5])
#@@log.debug("downloading and unzipping geoip-139")
#geo_ip_139_files = download_and_unzip(working_dir, :geo_ip_139_url, self[:geo_ip_139_md5])
@@log.debug("downloading and unzipping geoip-134")
geo_ip_134_files = download_and_unzip(working_dir, :geo_ip_134_url, self[:geo_ip_134_md5])
@ -35,15 +92,18 @@ module JamRuby
@@log.debug("downloading and unzipping geoip-124")
geo_ip_124_files = download_and_unzip(working_dir, :geo_ip_124_url, self[:geo_ip_124_md5])
MaxMindIsp.import_from_max_mind(geo_ip_142_files['GeoIPISP-142.csv'])
MaxMindGeo.import_from_max_mind(geo_ip_139_files['GeoIPCity.csv'])
GeoIpBlocks.import_from_max_mind(geo_ip_134_files['GeoIPCity-134-Blocks.csv'])
GeoIpLocations.import_from_max_mind(geo_ip_134_files['GeoIPCity-134-Location.csv'])
JamIsp.import_from_max_mind(geo_ip_124_files['GeoIPISP.csv'])
Country.import_from_iso3166
Region.import_from_region_codes
@@log.debug("downloading region_codes")
region_codes = download(working_dir, :region_codes_url, self[:region_codes_md5])
@@log.debug("downloading iso3166")
iso3166 = download(working_dir, :iso3166_url, self[:iso3166_md5])
table_dump_files = {}
if self[:table_dumps_url] && !force_from_source
@@log.debug("downloading table dumps")
table_dump_files = download_and_unzip(working_dir, :table_dumps_url, self[:table_dumps_md5])
end
return geo_ip_124_files, geo_ip_134_files, iso3166, region_codes, table_dump_files
end
def download_and_unzip(working_dir, field, md5)

View File

@ -19,8 +19,6 @@ module JamRuby
attr_writer :current_user
SOUND = %w(mono stereo)
MAX_PART_FAILURES = 3
MAX_UPLOAD_FAILURES = 10
mount_uploader :url, RecordedTrackUploader
@ -71,7 +69,7 @@ module JamRuby
end
def validate_too_many_upload_failures
if upload_failures >= MAX_UPLOAD_FAILURES
if upload_failures >= APP_CONFIG.max_track_upload_failures
errors.add(:upload_failures, ValidationMessages::UPLOAD_FAILURES_EXCEEDED)
end
end

View File

@ -62,7 +62,7 @@ module JamRuby
if recorded_track.is_part_uploading_was
#recorded_track.reload # we don't want anything else that the user set to get applied
recorded_track.increment_part_failures(recorded_track.part_failures_was)
if recorded_track.part_failures >= RecordedTrack::MAX_PART_FAILURES
if recorded_track.part_failures >= APP_CONFIG.max_track_part_upload_failures
# save upload id before we abort this bad boy
upload_id = recorded_track.upload_id
begin
@ -71,7 +71,7 @@ module JamRuby
puts e.inspect
end
recorded_track.reset_upload
if recorded_track.upload_failures >= RecordedTrack::MAX_UPLOAD_FAILURES
if recorded_track.upload_failures >= APP_CONFIG.max_track_upload_failures
# do anything?
end
end

View File

@ -303,7 +303,7 @@ module JamRuby
.where(:user_id => user.id)
.where(:fully_uploaded => false)
.where('recorded_tracks.id > ?', since)
.where("upload_failures <= #{RecordedTrack::MAX_UPLOAD_FAILURES}")
.where("upload_failures <= #{APP_CONFIG.max_track_upload_failures}")
.where("duration IS NOT NULL")
.where('all_discarded = false')
.order('recorded_tracks.id')

View File

@ -1,29 +1,97 @@
module JamRuby
class Region < ActiveRecord::Base
# index names created on the copied table used during import.
# they do not exist except during import
COUNTRY_CODE_INDEX_NAME = 'regions_countrycode_ndx'
COPIED_COUNTRY_CODE_INDEX_NAME = 'regions_copied_countrycode_ndx'
UNIQUE_INDEX_NAME = 'regions_countrycode_region_ndx'
COPIED_UNIQUE_INDEX_NAME = 'regions_copied_countrycode_region_ndx'
@@log = Logging.logger[Region]
self.table_name = 'regions'
def self.get_all(country)
self.where(countrycode: country).order('regionname asc').all
end
def self.find_region_codes
gem_dir = Gem::Specification.find_by_name("jam_ruby").gem_dir
File.join(gem_dir, 'lib', 'jam_ruby', 'geodata', 'region_codes.csv')
end
def self.import_from_region_codes(options)
def self.import_from_region_codes(file = find_region_codes)
self.delete_all
file = options[:file]
use_copy = options[:use_copy]
start = Time.now
copied_table_name = Database.copy_table(self.table_name)
if use_copy
Database.copy(copied_table_name, file)
else
File.open(file, 'r:ISO-8859-1') do |io|
saved_level = ActiveRecord::Base.logger ? ActiveRecord::Base.logger.level : 0
count = 0
stmt = "INSERT INTO #{copied_table_name} (countrycode, region, regionname) VALUES"
vals = ''
sep = ''
i = 0
n = 20
csv = ::CSV.new(io, {encoding: 'ISO-8859-1', headers: false})
csv.each do |row|
rr = Region.new
rr.countrycode = row[0]
rr.region = row[1]
rr.regionname = row[2]
rr.save
vals = vals+sep+"(#{ActiveRecord::Base.quote_value(row[0])}, #{ActiveRecord::Base.quote_value(row[1])}, #{ActiveRecord::Base.quote_value(row[2])})"
sep = ','
i += 1
if count == 0 or i >= n then
self.connection.execute stmt+vals
count += i
vals = ''
sep = ''
i = 0
if ActiveRecord::Base.logger and ActiveRecord::Base.logger.level > 1 then
ActiveRecord::Base.logger.debug "... logging inserts into #{copied_table_name} suspended ..."
ActiveRecord::Base.logger.level = 1
end
if ActiveRecord::Base.logger and count%10000 < n then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "... inserted #{count} into #{copied_table_name} ..."
ActiveRecord::Base.logger.level = 1
end
end
end
if i > 0
self.connection.execute stmt+vals
count += i
end
if ActiveRecord::Base.logger then
ActiveRecord::Base.logger.level = saved_level
ActiveRecord::Base.logger.debug "loaded #{count} records into #{copied_table_name}"
end
end
end
# create indexes -- these will be renamed later in the import process
Region.connection.execute("CREATE INDEX #{COPIED_COUNTRY_CODE_INDEX_NAME} ON #{copied_table_name} (countrycode)").check
Region.connection.execute("CREATE UNIQUE INDEX #{COPIED_UNIQUE_INDEX_NAME} ON #{copied_table_name} (countrycode, region)").check
elapsed = Time.now - start
@@log.debug("#{copied_table_name} import took #{elapsed} seconds")
end
def self.after_maxmind_import
self.connection.execute("DROP TABLE #{self.table_name}").check
self.connection.execute("ALTER INDEX #{COPIED_COUNTRY_CODE_INDEX_NAME} RENAME TO #{COUNTRY_CODE_INDEX_NAME}").check
self.connection.execute("ALTER INDEX #{COPIED_UNIQUE_INDEX_NAME} RENAME TO #{UNIQUE_INDEX_NAME}").check
self.connection.execute("ALTER TABLE #{self.table_name}_copied RENAME TO #{self.table_name}").check
end
end
end

View File

@ -415,7 +415,7 @@ module JamRuby
.where(['bgenres.genre_id = ? AND bands.id IS NOT NULL', genre])
end
rel = MaxMindGeo.where_latlng(rel, params, current_user)
rel = GeoIpLocations.where_latlng(rel, params, current_user)
sel_str = 'bands.*'
case ordering = self.order_param(params)

View File

@ -6,6 +6,11 @@ module JamRuby
#devise: for later: :trackable
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
JAM_REASON_REGISTRATION = 'r'
JAM_REASON_NETWORK_TEST = 'n'
JAM_REASON_FTUE = 'f'
JAM_REASON_JOIN = 'j'
JAM_REASON_IMPORT = 'i'
devise :database_authenticatable, :recoverable, :rememberable
@ -130,6 +135,7 @@ module JamRuby
validates :show_whats_next, :inclusion => {:in => [nil, true, false]}
validates :mods, json: true
validates_numericality_of :last_jam_audio_latency, greater_than:0, :allow_nil => true
validates :last_jam_updated_reason, :inclusion => {:in => [nil, JAM_REASON_REGISTRATION, JAM_REASON_NETWORK_TEST, JAM_REASON_FTUE, JAM_REASON_JOIN, JAM_REASON_IMPORT] }
# custom validators
validate :validate_musician_instruments
@ -218,6 +224,7 @@ module JamRuby
loc = self.city.blank? ? '' : self.city
loc = loc.blank? ? self.state : "#{loc}, #{self.state}" unless self.state.blank?
#loc = loc.blank? ? self.country : "#{loc}, #{self.country}" unless self.country.blank?
# XXX WHY IS COUNTRY COMMENTED OUT?
loc
end
@ -808,7 +815,7 @@ module JamRuby
if musician
user.last_jam_addr = location[:addr]
user.last_jam_locidispid = location[:locidispid]
user.last_jam_updated_reason = 'r'
user.last_jam_updated_reason = JAM_REASON_REGISTRATION
user.last_jam_updated_at = Time.now
end
@ -971,6 +978,15 @@ module JamRuby
self.save
end
def update_last_jam(remote_ip, reason)
location = GeoIpLocations.lookup(remote_ip)
self.last_jam_addr = location[:addr]
self.last_jam_locidispid = location[:locidispid]
self.last_jam_updated_reason = reason
self.last_jam_updated_at = Time.now
save!
end
def escape_filename(path)
dir = File.dirname(path)
file = File.basename(path)
@ -1126,6 +1142,24 @@ module JamRuby
!self.city.blank? && (!self.state.blank? || !self.country.blank?)
end
def self.update_locidispids(use_copied=true)
# using last_jam_addr, we can rebuild
# * last_jam_locidispid
# * last_jam_updated_reason
# * last_jam_updated_at
# this will set a user's last_jam_locidispid = NULL if there are no geoiplocations/blocks that match their IP address, or if there are no JamIsps that match the IP address
# otherwise, last_jam_locidispid will be updated to the correct new value.
# updates all user's locidispids
table_suffix = use_copied ? '_copied' : ''
User.connection.execute("UPDATE users SET last_jam_locidispid = (SELECT geolocs.locid as geolocid FROM geoipblocks#{table_suffix} as geoblocks INNER JOIN geoiplocations#{table_suffix} AS geolocs ON geoblocks.locid = geolocs.locid WHERE geoblocks.geom && ST_MakePoint(users.last_jam_addr, 0) AND users.last_jam_addr BETWEEN geoblocks.beginip AND geoblocks.endip LIMIT 1) * 1000000::bigint +(SELECT coid FROM jamisp#{table_suffix} as jisp WHERE geom && ST_MakePoint(users.last_jam_addr, 0) AND users.last_jam_addr BETWEEN beginip AND endip LIMIT 1), last_jam_updated_at = NOW(), last_jam_updated_reason='i' ").check
end
def self.after_maxmind_import
update_locidispids
end
# def check_lat_lng
# if (city_changed? || state_changed? || country_changed?) && !lat_changed? && !lng_changed?
# update_lat_lng

View File

@ -245,15 +245,6 @@ FactoryGirl.define do
factory :crash_dump, :class => JamRuby::CrashDump do
end
factory :geocoder, :class => JamRuby::MaxMindGeo do
country 'US'
sequence(:region) { |n| ['NC', 'CA'][(n-1).modulo(2)] }
sequence(:city) { |n| ['Apex', 'San Francisco'][(n-1).modulo(2)] }
sequence(:ip_start) { |n| [MaxMindIsp.ip_address_to_int('1.1.0.0'), MaxMindIsp.ip_address_to_int('1.1.255.255')][(n-1).modulo(2)] }
sequence(:ip_end) { |n| [MaxMindIsp.ip_address_to_int('1.2.0.0'), MaxMindIsp.ip_address_to_int('1.2.255.255')][(n-1).modulo(2)] }
sequence(:lat) { |n| [35.73265, 37.7742075][(n-1).modulo(2)] }
sequence(:lng) { |n| [-78.85029, -122.4155311][(n-1).modulo(2)] }
end
factory :promo_buzz, :class => JamRuby::PromoBuzz do
text_short Faker::Lorem.characters(10)

View File

@ -3,8 +3,6 @@ require 'spec_helper'
describe 'Band search' do
before(:each) do
@geocode1 = FactoryGirl.create(:geocoder)
@geocode2 = FactoryGirl.create(:geocoder)
@bands = []
@bands << @band1 = FactoryGirl.create(:band)
@bands << @band2 = FactoryGirl.create(:band)

View File

@ -2,14 +2,8 @@ require 'spec_helper'
describe Band do
before(:all) do
MaxMindIsp.delete_all
MaxMindGeo.delete_all
end
before do
@geocode1 = FactoryGirl.create(:geocoder)
@geocode2 = FactoryGirl.create(:geocoder)
#@geocode2 = FactoryGirl.create(:geocoder)
@band = FactoryGirl.create(:band)
end

View File

@ -16,8 +16,6 @@ describe User do
}
before(:each) do
@geocode1 = FactoryGirl.create(:geocoder)
@geocode2 = FactoryGirl.create(:geocoder)
@user = FactoryGirl.create(:user)
band.touch

View File

@ -6,6 +6,7 @@ describe Band do
let(:user2) { FactoryGirl.create(:user) }
let(:fan) { FactoryGirl.create(:fan) }
let(:band) { FactoryGirl.create(:band) }
let(:band_in_austin) { FactoryGirl.create(:band, country: 'US', state: 'TX', city: 'Austin')}
let(:new_band) { FactoryGirl.build(:band) }
let(:band_params) {
{
@ -87,4 +88,48 @@ describe Band do
band.errors[:name].should == ["can't be blank"]
end
end
describe "after_maxmind_import" do
before(:each) do
create_phony_database
end
after(:all) do
create_phony_database
end
it "updates to non-null after import if matching location is available" do
band_in_austin.lat.should be_nil
band_in_austin.lng.should be_nil
Band.after_maxmind_import(false)
band_in_austin.reload
band_in_austin.lat.should == 30.2076
band_in_austin.lng.should == -97.8587
end
it "updates to non-null after import if matching location is available, with two matching geoip locations" do
band_in_austin.lat.should be_nil
band_in_austin.lng.should be_nil
# change the dallas entry to austin, to make two austin entries
GeoIpLocations.connection.execute("UPDATE geoiplocations SET city = 'Austin', region = 'TX', countrycode ='US' WHERE city = 'Dallas' AND region = 'TX' and countrycode = 'US'").check
Band.after_maxmind_import(false)
band_in_austin.reload
# you don't know which GeoIpLocation it'll be. So we need to check both
[30.2076, 32.7825].include?(band_in_austin.lat).should be_true
[-97.8587, -96.8207].include?(band_in_austin.lng).should be_true
end
it "updates to null if no matching location available" do
band_in_austin.city = 'Blibbity'
band_in_austin.save!
# change the dallas entry to austin, to make two austin entries
GeoIpLocations.connection.execute("UPDATE geoiplocations SET city = 'Austin', region = 'TX', countrycode ='US' WHERE city = 'Dallas' AND region = 'TX' and countrycode = 'US'").check
Band.after_maxmind_import(false)
band_in_austin.reload
band_in_austin.lat.should be_nil
band_in_austin.lng.should be_nil
end
end
end

View File

@ -71,4 +71,33 @@ describe JamRuby::Connection do
conn.errors[:last_jam_audio_latency].should == ['is not a number']
end
end
describe "update_locidispids" do
before(:each) do
create_phony_database
end
after(:all) do
create_phony_database
end
it "updates locidispid with valid maxmind data" do
conn.locidispid.should == 0 # default in factory girl
Connection.update_locidispids(false)
conn.reload
conn.locidispid.should == 17192 * 1000000 + JamIsp.lookup(conn.addr).coid
end
it "updates locidispid to 0 with no maxmind data" do
# delete the ATX location info, and update. should be 0
conn.locidispid = 5 # make it not zero to start
conn.save!
GeoIpLocations.connection.execute("DELETE from geoiplocations where city = 'Austin'").check
Connection.update_locidispids(false)
conn.reload
conn.locidispid.should == 0
end
end
end

View File

@ -0,0 +1,46 @@
require 'spec_helper'
describe Country do
include UsesTempFiles
ISO3166_CSV = 'iso3166.csv'
in_directory_with_file(ISO3166_CSV)
let(:iso3166_data) {tiny_maxmind_dataset[:iso3166]}
before(:each) do
content_for_file(to_csv(iso3166_data))
end
describe "import_from_iso3166" do
after(:all) do
# anything that calls after_maxmind_import seems to break transactions (DatabaseCleaner)
create_phony_database
end
it "succeeds" do
Country.import_from_iso3166(file: ISO3166_CSV)
result = Country.connection.execute("SELECT * FROM countries_copied")
result.ntuples.should == 1
row1 = iso3166_data[0]
result[0]['countrycode'].should == row1[ISO3166_COUNTRYCODE_INDEX]
result[0]['countryname'].should == row1[ISO3166_COUNTRYNAME_INDEX]
list_indexes('countries_copied').should == []
# verify we can swap out tables
Country.after_maxmind_import
table_exists?('countries_copied').should be_false
result = Country.connection.execute("SELECT * FROM countries")
result.ntuples.should == 1
list_indexes('countries').should =~ []
end
end
end

View File

@ -132,7 +132,7 @@ describe EmailBatch do
expect(ebatch.fetch_recipients(0,5).count).to eq(0)
dd = users[0].created_at + ebatch.days_past_for_trigger_index(0).days
Timecop.travel(dd)
expect(ebatch.fetch_recipients(0,5).count).to eq(20)
expect(ebatch.fetch_recipients(0,5).count).to eq(users.length)
users.each { |uu| ebatch.make_set(uu, 0) }
expect(ebatch.fetch_recipients(0,5).count).to eq(0)
users.map &:destroy
@ -172,7 +172,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user) }
3.times { |nn| users << FactoryGirl.create(:user) }
loops_bunch_of_users(batchp, users)
end
end
@ -207,7 +207,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user, :first_downloaded_client_at => Time.now) }
3.times { |nn| users << FactoryGirl.create(:user, :first_downloaded_client_at => Time.now) }
loops_bunch_of_users(batchp, users)
end
end
@ -242,7 +242,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user, :first_ran_client_at => Time.now) }
3.times { |nn| users << FactoryGirl.create(:user, :first_ran_client_at => Time.now) }
loops_bunch_of_users(batchp, users)
end
end
@ -277,7 +277,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user, :first_certified_gear_at => Time.now) }
3.times { |nn| users << FactoryGirl.create(:user, :first_certified_gear_at => Time.now) }
loops_bunch_of_users(batchp, users)
end
end
@ -312,7 +312,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user, :first_real_music_session_at => Time.now) }
3.times { |nn| users << FactoryGirl.create(:user, :first_real_music_session_at => Time.now) }
loops_bunch_of_users(batchp, users)
end
end
@ -347,7 +347,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user, :first_real_music_session_at => Time.now) }
3.times { |nn| users << FactoryGirl.create(:user, :first_real_music_session_at => Time.now) }
loops_bunch_of_users(batchp, users)
end
end
@ -381,7 +381,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user) }
3.times { |nn| users << FactoryGirl.create(:user) }
loops_bunch_of_users(batchp, users)
end
end
@ -415,7 +415,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user) }
3.times { |nn| users << FactoryGirl.create(:user) }
loops_bunch_of_users(batchp, users)
end
end
@ -449,7 +449,7 @@ describe EmailBatch do
end
it 'loops bunch of users' do
users = []
20.times { |nn| users << FactoryGirl.create(:user) }
3.times { |nn| users << FactoryGirl.create(:user) }
loops_bunch_of_users(batchp, users)
end
end

View File

@ -2,16 +2,9 @@ require 'spec_helper'
describe GeoIpBlocks do
#before do
#GeoIpBlocks.delete_all
#GeoIpBlocks.createx(0x01020300, 0x010203ff, 1)
#GeoIpBlocks.createx(0x02030400, 0x020304ff, 2)
#end
include UsesTempFiles
#after do
#GeoIpBlocks.delete_all
#GeoIpBlocks.createx(0x00000000, 0xffffffff, 17192)
#end
GEOIPCITY_BLOCKS = 'geo_ip_blocks.csv'
it "count" do GeoIpBlocks.count.should == 16 end
@ -30,4 +23,45 @@ describe GeoIpBlocks do
it "second.locid" do second.locid.should == 667 end
it "third" do third.should be_nil end
describe "import_from_max_mind" do
in_directory_with_file(GEOIPCITY_BLOCKS)
let(:geo_ip_city_blocks_data) {tiny_maxmind_dataset[:geo_ip_city_134_blocks]}
before(:each) do
content_for_file("Copyright (c) 2011 MaxMind Inc. All Rights Reserved.\n" +
"startIpNum,endIpNum,locId\n" +
to_csv(geo_ip_city_blocks_data))
end
after(:all) do
# anything that calls after_maxmind_import seems to break transactions (DatabaseCleaner)
create_phony_database
end
it "succeeds" do
GeoIpBlocks.import_from_max_mind(file: GEOIPCITY_BLOCKS)
result = GeoIpBlocks.connection.execute("SELECT * FROM geoipblocks_copied")
result.ntuples.should == 3
row1 = geo_ip_city_blocks_data[0]
result[0]['beginip'].to_i.should == row1[GEOIPBLOCKS_BEGINIP_INDEX]
result[0]['endip'].to_i.should == row1[GEOIPBLOCKS_ENDIP_INDEX]
result[0]['locid'].to_i.should == row1[GEOIPBLOCKS_LOCID_INDEX]
result[0]['geom'].should_not be_nil
list_indexes('geoipblocks_copied').should =~ [GeoIpBlocks::COPIED_GEOIPBLOCKS_INDEX_NAME]
# verify we can swap out tables
GeoIpBlocks.after_maxmind_import
table_exists?('geoipblocks_copied').should be_false
result = GeoIpBlocks.connection.execute("SELECT * FROM geoipblocks")
result.ntuples.should == 3
list_indexes('geoipblocks').should =~ [GeoIpBlocks::GEOIPBLOCKS_INDEX_NAME]
end
end
end

View File

@ -2,17 +2,20 @@ require 'spec_helper'
describe GeoIpLocations do
before do
#GeoIpLocations.delete_all
#GeoIpLocations.createx(17192, 'US', 'TX', 'Austin', '78749', 30.2076, -97.8587, 635, '512')
#GeoIpLocations.createx(48086, 'MX', '28', 'Matamoros', '', 25.8833, -97.5000, nil, '')
end
include UsesTempFiles
GEOIPCITY_LOCATIONS = 'geo_ip_locations.csv'
it "count" do GeoIpLocations.count.should == 16 end
let(:first) { GeoIpLocations.lookup(17192) }
let(:second) { GeoIpLocations.lookup(1539) }
let(:third) { GeoIpLocations.lookup(999999) } # bogus
before(:all) do
create_phony_database
end
let(:first) { GeoIpLocations.find_by_locid(17192) }
let(:second) { GeoIpLocations.find_by_locid(1539) }
let(:third) { GeoIpLocations.find_by_locid(999999) } # bogus
describe "first" do
it "first" do first.should_not be_nil end
@ -35,4 +38,69 @@ describe GeoIpLocations do
end
it "third" do third.should be_nil end
describe "import_from_max_mind" do
in_directory_with_file(GEOIPCITY_LOCATIONS)
let(:geo_ip_city_locations_data) {tiny_maxmind_dataset[:geo_ip_city_134_locations]}
before(:each) do
content_for_file("Copyright (c) 2012 MaxMind LLC. All Rights Reserved.\n" +
"locId,country,region,city,postalCode,latitude,longitude,metroCode,areaCode\n" +
to_csv(geo_ip_city_locations_data))
create_phony_database
end
after(:all) do
# anything that calls after_maxmind_import seems to break transactions (DatabaseCleaner)
create_phony_database
end
it "succeeds" do
GeoIpLocations.import_from_max_mind(file: GEOIPCITY_LOCATIONS)
result = GeoIpLocations.connection.execute("SELECT * FROM geoiplocations_copied")
result.ntuples.should == 3
row1 = geo_ip_city_locations_data[0]
result[0]['locid'].to_i.should == row1[GEOIPLOCATIONS_LOCID_INDEX]
result[0]['countrycode'].should == row1[GEOIPLOCATIONS_COUNTRY_INDEX]
result[0]['region'].should == row1[GEOIPLOCATIONS_REGION_INDEX]
result[0]['city'].should == row1[GEOIPLOCATIONS_CITY_INDEX]
result[0]['postalcode'].should == row1[GEOIPLOCATIONS_POSTALCODE_INDEX]
result[0]['latitude'].to_f.should == row1[GEOIPLOCATIONS_LATITUDE_INDEX]
result[0]['longitude'].to_f.should == row1[GEOIPLOCATIONS_LONGITUDE_INDEX]
result[0]['metrocode'].to_i.should == row1[GEOIPLOCATIONS_METROCODE_INDEX]
result[0]['areacode'].to_i.should == row1[GEOIPLOCATIONS_AREACODE_INDEX]
result[0]['geog'].should_not be_nil
list_indexes('geoiplocations_copied').should =~ [GeoIpLocations::COPIED_PRIMARY_KEY_NAME, GeoIpLocations::COPIED_GEOIPLOCATIONS_INDEX_NAME]
# confirm that the cities table also got copied
result = GeoIpLocations.connection.execute('SELECT * FROM cities_copied')
result.ntuples.should == 1
result[0]['city'].should == row1[GEOIPLOCATIONS_CITY_INDEX]
result[0]['region'].should == row1[GEOIPLOCATIONS_REGION_INDEX]
result[0]['countrycode'].should == row1[GEOIPLOCATIONS_COUNTRY_INDEX]
list_indexes('cities_copied').should =~ []
# verify we can swap out tables
GeoIpLocations.after_maxmind_import
table_exists?('geoiplocations_copied').should be_false
result = GeoIpLocations.connection.execute("SELECT * FROM geoiplocations")
result.ntuples.should == 3
list_indexes('geoiplocations').should =~ [GeoIpLocations::PRIMARY_KEY_NAME, GeoIpLocations::GEOIPLOCATIONS_INDEX_NAME]
table_exists?('cities_copied').should be_false
result = GeoIpLocations.connection.execute("SELECT * FROM cities")
result.ntuples.should == 1
list_indexes('cities').should =~ []
end
end
end

View File

@ -2,20 +2,9 @@ require 'spec_helper'
describe JamIsp do
#before do
# JamIsp.delete_all
# JamIsp.createx(0x01020300, 0x010203ff, 1)
# JamIsp.createx(0x02030400, 0x020304ff, 2)
# JamIsp.createx(0x03040500, 0x030405ff, 3)
# JamIsp.createx(0x04050600, 0x040506ff, 4)
# JamIsp.createx(0xc0A80100, 0xc0A801ff, 5)
# JamIsp.createx(0xfffefd00, 0xfffefdff, 6)
#end
include UsesTempFiles
#after do
# JamIsp.delete_all
# JamIsp.createx(0x00000000, 0xffffffff, 1)
#end
GEOIPISP = 'geoip_isp.csv'
it "count" do JamIsp.count.should == 16 end
@ -42,4 +31,73 @@ describe JamIsp do
it "second.coid" do second.coid.should == 3 end
it "third.coid" do third.coid.should == 4 end
it "seventh" do seventh.should be_nil end
describe "import_from_max_mind" do
in_directory_with_file(GEOIPISP)
let(:geo_ip_isp_data) {tiny_maxmind_dataset[:geo_ip_isp]}
before(:each) do
create_phony_database
content_for_file(to_csv(geo_ip_isp_data))
end
after(:all) do
# anything that calls after_maxmind_import seems to break transactions (DatabaseCleaner)
create_phony_database
end
it "succeeded" do
JamIsp.import_from_max_mind(file: GEOIPISP)
# verify geoipisp
result = JamIsp.connection.execute("SELECT * FROM geoipisp_copied")
result.ntuples.should == 1
row1 = geo_ip_isp_data[0]
result[0]['beginip'].to_i.should == row1[GEOIPISP_BEGINIP_INDEX]
result[0]['endip'].to_i.should == row1[GEOIPISP_ENDIP_INDEX]
result[0]['company'].should == row1[GEOIPISP_COMPANY_INDEX]
list_indexes('geoipisp_copied').should =~ [JamIsp::COPIED_GEOIPISP_INDEX_NAME]
# verify jamcompany
result = JamIsp.connection.execute("SELECT * FROM jamcompany_copied")
result.ntuples.should == 1
row1 = geo_ip_isp_data[0]
result[0]['coid'].to_i.should == 1
result[0]['company'].should == row1[GEOIPISP_COMPANY_INDEX]
list_indexes('jamcompany_copied').should =~ [JamIsp::COPIED_JAMCOMPANY_UNIQUE_INDEX, JamIsp::COPIED_JAMCOMPANY_PRIMARY_KEY_NAME]
# verify jamisp
result = JamIsp.connection.execute("SELECT * FROM jamisp_copied")
result.ntuples.should == 1
row1 = geo_ip_isp_data[0]
result[0]['beginip'].to_i.should == row1[GEOIPISP_BEGINIP_INDEX]
result[0]['endip'].to_i.should == row1[GEOIPISP_ENDIP_INDEX]
result[0]['coid'].to_i.should == 1
result[0]['geom'].should_not be_nil
list_indexes('jamisp_copied').should =~ [JamIsp::COPIED_JAMISP_GEOM_INDEX_NAME, JamIsp::COPIED_JAMISP_COID_INDEX_NAME]
# verify we can swap out tables
JamIsp.after_maxmind_import
table_exists?('jamisp_copied').should be_false
result = JamIsp.connection.execute("SELECT * FROM jamisp")
result.ntuples.should == 1
list_indexes('jamisp').should =~ [JamIsp::JAMISP_GEOM_INDEX_NAME, JamIsp::JAMISP_COID_INDEX_NAME]
table_exists?('jamcompany_copied').should be_false
result = JamIsp.connection.execute("SELECT * FROM jamcompany")
result.ntuples.should == 1
list_indexes('jamcompany').should =~ [JamIsp::JAMCOMPANY_UNIQUE_INDEX, JamIsp::JAMCOMPANY_PRIMARY_KEY_NAME]
table_exists?('geoipisp_copied').should be_false
result = JamIsp.connection.execute("SELECT * FROM geoipisp")
result.ntuples.should == 1
list_indexes('geoipisp').should =~ [JamIsp::GEOIPISP_INDEX_NAME]
end
end
end

View File

@ -1,37 +0,0 @@
require 'spec_helper'
describe MaxMindGeo do
include UsesTempFiles
GEO_CSV='small.csv'
in_directory_with_file(GEO_CSV)
before do
content_for_file('startIpNum,endIpNum,country,region,city,postalCode,latitude,longitude,dmaCode,areaCode
0.116.0.0,0.119.255.255,"AT","","","",47.3333,13.3333,123,123
1.0.0.0,1.0.0.255,"AU","","","",-27.0000,133.0000,,
1.0.1.0,1.0.1.255,"CN","07","Fuzhou","",26.0614,119.3061,,'.encode(Encoding::ISO_8859_1))
MaxMindGeo.import_from_max_mind(GEO_CSV)
end
it { MaxMindGeo.count.should == 3 }
let(:first) { MaxMindGeo.find_by_ip_start(MaxMindIsp.ip_address_to_int('0.116.0.0')) }
let(:second) { MaxMindGeo.find_by_ip_start(MaxMindIsp.ip_address_to_int('1.0.0.0')) }
let(:third) { MaxMindGeo.find_by_ip_start(MaxMindIsp.ip_address_to_int('1.0.1.0')) }
it { first.country.should == 'AT' }
it { first.ip_start.should == MaxMindIsp.ip_address_to_int('0.116.0.0') }
it { first.ip_end.should == MaxMindIsp.ip_address_to_int('0.119.255.255') }
it { second.country.should == 'AU' }
it { second.ip_start.should == MaxMindIsp.ip_address_to_int('1.0.0.0') }
it { second.ip_end.should == MaxMindIsp.ip_address_to_int('1.0.0.255') }
it { third.country.should == 'CN' }
it { third.ip_start.should == MaxMindIsp.ip_address_to_int('1.0.1.0') }
it { third.ip_end.should == MaxMindIsp.ip_address_to_int('1.0.1.255') }
end

View File

@ -1,43 +0,0 @@
require 'spec_helper'
describe MaxMindIsp do
include UsesTempFiles
ISP_CSV='small.csv'
in_directory_with_file(ISP_CSV)
before do
content_for_file('Copyright (c) 2011 MaxMind Inc. All Rights Reserved.
"beginIp","endIp","countryCode","ISP"
"1.0.0.0","1.0.0.255","AU","APNIC Debogon Project"
"1.0.1.0","1.0.1.255","CN","Chinanet Fujian Province Network"
"1.0.4.0","1.0.7.255","AU","Bigred,inc"'.encode(Encoding::ISO_8859_1))
MaxMindIsp.import_from_max_mind(ISP_CSV)
end
let(:first) { MaxMindIsp.find_by_ip_bottom(MaxMindIsp.ip_address_to_int('1.0.0.0')) }
let(:second) { MaxMindIsp.find_by_ip_bottom(MaxMindIsp.ip_address_to_int('1.0.1.0')) }
let(:third) { MaxMindIsp.find_by_ip_bottom(MaxMindIsp.ip_address_to_int('1.0.4.0')) }
it { MaxMindIsp.count.should == 3 }
it { first.country.should == 'AU' }
it { first.ip_bottom.should == MaxMindIsp.ip_address_to_int('1.0.0.0') }
it { first.ip_top.should == MaxMindIsp.ip_address_to_int('1.0.0.255') }
it { first.isp.should == 'APNIC Debogon Project' }
it { second.country.should == 'CN' }
it { second.ip_bottom.should == MaxMindIsp.ip_address_to_int('1.0.1.0') }
it { second.ip_top.should == MaxMindIsp.ip_address_to_int('1.0.1.255') }
it { second.isp.should == 'Chinanet Fujian Province Network' }
it { third.country.should == 'AU' }
it { third.ip_bottom.should == MaxMindIsp.ip_address_to_int('1.0.4.0') }
it { third.ip_top.should == MaxMindIsp.ip_address_to_int('1.0.7.255') }
it { third.isp.should == 'Bigred,inc' }
end

View File

@ -47,25 +47,14 @@ describe MaxMindRelease do
Digest::MD5.file(downloaded_filename ).hexdigest.should == Digest::MD5.file(zipfile).hexdigest
end
#it "uploads to s3 with correct name, and then downloads via signed URL" do
# pending "use"
# jam_track = FactoryGirl.create(:jam_track)
# uploader = JamTrackUploader.new(jam_track, :url)
# uploader.store!(File.open(JKA_NAME)) # uploads file
# jam_track.save!
#
# # verify that the uploader stores the correct path
# jam_track[:url].should == jam_track.store_dir + '/' + jam_track.filename
#
# # verify it's on S3
# s3 = S3Manager.new(APP_CONFIG.aws_bucket, APP_CONFIG.aws_access_key_id, APP_CONFIG.aws_secret_access_key)
# s3.exists?(jam_track[:url]).should be_true
# s3.length(jam_track[:url]).should == 'abc'.length
#
# # download it via signed URL, and check contents
# url = jam_track.sign_url
# downloaded_contents = open(url).read
# downloaded_contents.should == 'abc'
#end
describe "import" do
it "succeeds" do
release.touch
dataset = dataset_to_tmp_files
release.import_to_database(dataset[:geo_ip_124_files], dataset[:geo_ip_134_files], dataset[:iso3166], dataset[:region_codes])
release.imported.should be_true
release.imported_at.should_not be_nil
end
end
end

View File

@ -126,10 +126,10 @@ describe RecordedTrack do
@recorded_track.upload_start(File.size(upload_file), md5)
@recorded_track.upload_next_part(File.size(upload_file), md5)
@recorded_track.errors.any?.should be_false
RecordedTrack::MAX_PART_FAILURES.times do |i|
APP_CONFIG.max_track_part_upload_failures.times do |i|
@recorded_track.upload_part_complete(@recorded_track.next_part_to_upload, File.size(upload_file))
@recorded_track.errors[:next_part_to_upload] == [ValidationMessages::PART_NOT_FOUND_IN_AWS]
part_failure_rollover = i == RecordedTrack::MAX_PART_FAILURES - 1
part_failure_rollover = i == APP_CONFIG.max_track_part_upload_failures - 1
expected_is_part_uploading = !part_failure_rollover
expected_part_failures = part_failure_rollover ? 0 : i + 1
@recorded_track.reload
@ -147,15 +147,17 @@ describe RecordedTrack do
end
it "enough upload failures fails the upload forever" do
APP_CONFIG.stub(:max_track_upload_failures).and_return(1)
APP_CONFIG.stub(:max_track_part_upload_failures).and_return(2)
@recorded_track = RecordedTrack.create_from_track(@track, @recording)
RecordedTrack::MAX_UPLOAD_FAILURES.times do |j|
APP_CONFIG.max_track_upload_failures.times do |j|
@recorded_track.upload_start(File.size(upload_file), md5)
@recorded_track.upload_next_part(File.size(upload_file), md5)
@recorded_track.errors.any?.should be_false
RecordedTrack::MAX_PART_FAILURES.times do |i|
APP_CONFIG.max_track_part_upload_failures.times do |i|
@recorded_track.upload_part_complete(@recorded_track.next_part_to_upload, File.size(upload_file))
@recorded_track.errors[:next_part_to_upload] == [ValidationMessages::PART_NOT_FOUND_IN_AWS]
part_failure_rollover = i == RecordedTrack::MAX_PART_FAILURES - 1
part_failure_rollover = i == APP_CONFIG.max_track_part_upload_failures - 1
expected_is_part_uploading = part_failure_rollover ? false : true
expected_part_failures = part_failure_rollover ? 0 : i + 1
@recorded_track.reload
@ -166,7 +168,7 @@ describe RecordedTrack do
end
@recorded_track.reload
@recorded_track.upload_failures.should == RecordedTrack::MAX_UPLOAD_FAILURES
@recorded_track.upload_failures.should == APP_CONFIG.max_track_upload_failures
@recorded_track.file_offset.should == 0
@recorded_track.next_part_to_upload.should == 0
@recorded_track.upload_id.should be_nil

View File

@ -0,0 +1,46 @@
require 'spec_helper'
# Covers Region.import_from_region_codes end-to-end: writes a one-row
# region_codes CSV fixture, imports it into the staging table
# (regions_copied), then swaps it live via after_maxmind_import.
# Relies on the tiny_maxmind_dataset / to_csv / list_indexes /
# table_exists? helpers from spec/support/maxmind.
describe Region do
  include UsesTempFiles

  REGION_CODES_CSV = 'region_codes.csv'
  in_directory_with_file(REGION_CODES_CSV)

  let(:region_codes_data) {tiny_maxmind_dataset[:region_codes]}

  before(:each) do
    content_for_file(to_csv(region_codes_data))
  end

  describe "import_from_region_codes" do
    after(:all) do
      # anything that calls after_maxmind_import seems to break transactions (DatabaseCleaner)
      create_phony_database
    end

    it "succeeds" do
      Region.import_from_region_codes(file: REGION_CODES_CSV)
      # the import stages rows into regions_copied first, with its own indexes
      result = Region.connection.execute("SELECT * FROM regions_copied")
      result.ntuples.should == 1
      row1 = region_codes_data[0]
      result[0]['region'].should == row1[REGIONCODES_REGIONCODE_INDEX]
      result[0]['regionname'].should == row1[REGIONCODES_REGIONNAME_INDEX]
      result[0]['countrycode'].should == row1[REGIONCODES_COUNTRYCODE_INDEX]
      list_indexes('regions_copied').should =~ [Region::COPIED_COUNTRY_CODE_INDEX_NAME, Region::COPIED_UNIQUE_INDEX_NAME]
      # verify we can swap out tables
      Region.after_maxmind_import
      table_exists?('regions_copied').should be_false
      result = Region.connection.execute("SELECT * FROM regions")
      result.ntuples.should == 1
      list_indexes('regions').should =~ [Region::COUNTRY_CODE_INDEX_NAME, Region::UNIQUE_INDEX_NAME]
    end
  end
end

View File

@ -22,9 +22,9 @@ X If no profile location is provided, and the user creates/joins a music session
describe "with profile location data" do
it "should have lat/lng values" do
pending 'distance search changes'
geo = MaxMindGeo.find_by_city(@user.city)
@user.lat.should == geo.lat
@user.lng.should == geo.lng
geo = GeoIpLocations.find_by_city(@user.city).blocks.first
@user.lat.should == geo.latitude
@user.lng.should == geo.longitude
end
it "should have updated lat/lng values" do
@ -33,9 +33,9 @@ X If no profile location is provided, and the user creates/joins a music session
:state => @geocode2.region,
:country => @geocode2.country,
})
geo = MaxMindGeo.find_by_city(@user.city)
@user.lat.should == geo.lat
@user.lng.should == geo.lng
geo = GeoIpLocations.find_by_city(@user.city).blocks.first
@user.lat.should == geo.latitude
@user.lng.should == geo.longitude
end
end
@ -48,13 +48,13 @@ X If no profile location is provided, and the user creates/joins a music session
})
@user.lat.should == nil
@user.lng.should == nil
geo = JamRuby::MaxMindGeo.ip_lookup('1.1.0.0')
geo = GeoIpBlocks.ip_lookup('1.1.0.0')
geo.should_not be_nil
geo = JamRuby::MaxMindGeo.ip_lookup('1.1.0.255')
geo = GeoIpBlocks.ip_lookup('1.1.0.255')
geo.should_not be_nil
@user.update_lat_lng('1.1.0.255')
@user.lat.should == geo.lat
@user.lng.should == geo.lng
@user.lat.should == geo.latitude
@user.lng.should == geo.longitude
end
end

View File

@ -492,6 +492,64 @@ describe User do
@user.errors[:last_jam_audio_latency].should == ['is not a number']
end
end
describe "update_locidispids" do
before(:each) do
@user.save
create_phony_database
end
after(:all) do
create_phony_database
end
it "remains null if the user's last_jam_addr is null" do
@user.last_jam_addr.should be_nil # make sure the factory still makes a null addr to start
User.update_locidispids(false)
@user.reload
@user.last_jam_addr.should be_nil
end
it "locidispid remains non-null and the same as before, if no maxmind info has changed" do
@user.update_last_jam('1.1.1.1', User::JAM_REASON_REGISTRATION)
initial_locidispid = @user.last_jam_locidispid
initial_locidispid.should_not be_nil
User.update_locidispids(false)
@user.reload
@user.last_jam_locidispid.should == initial_locidispid
@user.last_jam_updated_reason.should == User::JAM_REASON_IMPORT
end
it "locidispid goes to null if geoip info is null" do
@user.update_last_jam('1.1.1.1', User::JAM_REASON_REGISTRATION)
initial_locidispid = @user.last_jam_locidispid
initial_locidispid.should_not be_nil
GeoIpBlocks.delete_all
User.update_locidispids(false)
@user.reload
@user.last_jam_locidispid.should be_nil
@user.last_jam_updated_reason.should == User::JAM_REASON_IMPORT
end
it "locidispid updates to a new value if geoip info changes" do
@user.update_last_jam('1.1.1.1', User::JAM_REASON_REGISTRATION)
initial_locidispid = @user.last_jam_locidispid
initial_locidispid.should_not be_nil
GeoIpBlocks.connection.execute("UPDATE geoipblocks SET locid = 17193::bigint where locid = 17192::bigint").check
GeoIpLocations.connection.execute("UPDATE geoiplocations SET locid = 17193::bigint where locid = 17192::bigint").check
GeoIpLocations.find_by_locid(17193).should_not be_nil
GeoIpBlocks.find_by_locid(17193).should_not be_nil
User.update_locidispids(false)
@user.reload
@user.last_jam_locidispid.should_not == initial_locidispid
@user.last_jam_locidispid.should == 17193 * 1000000 + JamIsp.lookup(@user.last_jam_addr).coid
@user.last_jam_updated_reason.should == User::JAM_REASON_IMPORT
end
end
=begin
describe "update avatar" do

View File

@ -21,12 +21,12 @@ describe "ActiveMusicSessionCleaner" do
c.delete
end
# hasn't been 1 minute yet
# hasn't been 1 second yet
@cleaner.run
ActiveMusicSession.all.count.should == 1
# wait 3 seconds so the updated_at expires
sleep 3
ams = ActiveMusicSession.first
ams.updated_at = 3.seconds.ago
ams.save!
@cleaner.run
ActiveMusicSession.all.count.should == 0

View File

@ -4,6 +4,7 @@ ENV["RAILS_ENV"] = "test"
require 'simplecov'
require 'support/utilities'
require 'support/profile'
require 'support/maxmind'
require 'active_record'
require 'jam_db'
require 'spec_db'
@ -56,7 +57,7 @@ end
#uncomment the following line to use spork with the debugger
#require 'spork/ext/ruby-debug'
Spork.prefork do
#Spork.prefork do
# Loading more in this block will cause your tests to run faster. However,
# if you change any configuration or code from libraries loaded here, you'll
# need to restart spork for it take effect.
@ -83,8 +84,9 @@ Spork.prefork do
config.filter_run_excluding aws: true unless run_tests? :aws
config.before(:suite) do
DatabaseCleaner.strategy = :truncation, {:except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] }
DatabaseCleaner.clean_with(:truncation, {:except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] })
DatabaseCleaner.strategy = :transaction
#DatabaseCleaner.strategy = :deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] }
DatabaseCleaner.clean_with(:deletion, {pre_count: true, reset_ids:false, :except => %w[instruments genres icecast_server_groups jamcompany jamisp geoipblocks geoipisp geoiplocations cities regions countries] })
end
config.before(:each) do
@ -137,9 +139,9 @@ Spork.prefork do
}
end
end
end
#end
Spork.each_run do
#Spork.each_run do
# This code will be run each time you run your specs.
end
#end

View File

@ -0,0 +1,160 @@
ISO3166_COUNTRYCODE_INDEX = 0
ISO3166_COUNTRYNAME_INDEX = 1
REGIONCODES_COUNTRYCODE_INDEX = 0
REGIONCODES_REGIONCODE_INDEX = 1
REGIONCODES_REGIONNAME_INDEX = 2
GEOIPBLOCKS_BEGINIP_INDEX = 0
GEOIPBLOCKS_ENDIP_INDEX = 1
GEOIPBLOCKS_LOCID_INDEX = 2
GEOIPLOCATIONS_LOCID_INDEX = 0
GEOIPLOCATIONS_COUNTRY_INDEX = 1
GEOIPLOCATIONS_REGION_INDEX = 2
GEOIPLOCATIONS_CITY_INDEX = 3
GEOIPLOCATIONS_POSTALCODE_INDEX = 4
GEOIPLOCATIONS_LATITUDE_INDEX = 5
GEOIPLOCATIONS_LONGITUDE_INDEX = 6
GEOIPLOCATIONS_METROCODE_INDEX = 7
GEOIPLOCATIONS_AREACODE_INDEX = 8
GEOIPISP_BEGINIP_INDEX = 0
GEOIPISP_ENDIP_INDEX = 1
GEOIPISP_COMPANY_INDEX = 2
JAMISP_BEGINIP_INDEX = 0
JAMISP_ENDIP_INDEX = 1
JAMISP_COMPANY_INDEX = 2
# The smallest MaxMind fixture set that still exercises a full score
# (a 'leaf' in maxmind data): one US/TX region, one country, three
# Austin city rows, and one ISP range. Returns a Hash of
# arrays-of-rows keyed by dataset name; row layouts correspond to the
# *_INDEX constants at the top of this file.
def tiny_maxmind_dataset
  {
    region_codes: [
      ["US", "TX", "Texas"]
    ],
    iso3166: [
      ["US", "United States"]
    ],
    # feeds table=max_mind_isp
    geo_ip_isp_142: [
      ["1.0.0.0", "1.0.0.255", "US", "Google"],
      ["1.0.1.0", "1.0.1.255", "US", "Time Warner"],
      ["1.0.2.0", "1.0.2.255", "US", "AT&T"]
    ],
    # feeds table=max_mind_geo (source rows were 4.x.x.x blocks)
    geo_ip_city_139: [
      ["1.0.0.0", "1.0.0.255", "US", "TX", "Austin", "78759", "30.4000", "-97.7528", "635", "512"], # locid=1504
      ["1.0.1.0", "1.0.1.255", "US", "TX", "Austin", "78701", "30.2678", "-97.7426", "635", "512"], # locid=1102
      ["1.0.2.0", "1.0.2.255", "US", "TX", "Austin", "78729", "30.4549", "-97.7565", "635", "512"]  # locid=14655
    ],
    # feeds table=geoipblocks, file=GeoIPCity-134-Blocks.csv
    geo_ip_city_134_blocks: [
      [68091904, 68092159, 1504],
      [68987136, 68987391, 1102],
      [69092608, 69092735, 14655]
    ],
    # feeds table=geoiplocations, file=GeoIpCity-134-Locations.csv
    geo_ip_city_134_locations: [
      [1504, "US", "TX", "Austin", "78759", 30.4000, -97.7528, 635, 512],
      [1102, "US", "TX", "Austin", "78701", 30.2678, -97.7426, 635, 512],
      [14655, "US", "TX", "Austin", "78729", 30.4549, -97.7565, 635, 512]
    ],
    # feeds table=geoipisp
    geo_ip_isp: [
      [401604608, 401866751, "Time Warner Cable"]
    ]
  }
end
# Dumps +content+ (an array of row arrays) as CSV into a fresh
# Tempfile named after +name+, optionally preceded by +prefix+
# (e.g. a copyright/header line). Returns the Tempfile handle;
# the caller owns its lifetime.
def write_content_to_tmp_file(name, content, prefix = '')
  tmp = Tempfile.new([name.to_s, '.csv'])
  File.write(tmp.path, to_csv(content, prefix))
  tmp
end
# Materializes a maxmind dataset (tiny_maxmind_dataset by default) as
# the CSV temp files the release import expects, keyed by product:
#   :geo_ip_124_files, :geo_ip_134_files (filename => Tempfile hashes),
#   :region_codes, :iso3166 (bare Tempfiles).
# The 134 files get copyright/header prefixes mimicking the real
# MaxMind downloads.
def dataset_to_tmp_files(dataset = tiny_maxmind_dataset)
  isp_file = write_content_to_tmp_file(:geo_ip_isp, dataset[:geo_ip_isp])
  city_blocks_file = write_content_to_tmp_file(
    :geo_ip_city_134_blocks, dataset[:geo_ip_city_134_blocks],
    "Copyright (c) 2011 MaxMind Inc. All Rights Reserved.\n" +
    "startIpNum,endIpNum,locId\n")
  city_locations_file = write_content_to_tmp_file(
    :geo_ip_city_134_locations, dataset[:geo_ip_city_134_locations],
    "Copyright (c) 2012 MaxMind LLC. All Rights Reserved.\n" +
    "locId,country,region,city,postalCode,latitude,longitude,metroCode,areaCode\n")
  {
    geo_ip_124_files: { 'GeoIPISP.csv' => isp_file },
    geo_ip_134_files: { 'GeoIPCity-134-Blocks.csv' => city_blocks_file,
                        'GeoIPCity-134-Location.csv' => city_locations_file },
    region_codes: write_content_to_tmp_file(:region_codes, dataset[:region_codes]),
    iso3166: write_content_to_tmp_file(:iso3166, dataset[:iso3166])
  }
end
# to be used with maxmind datasets (should be an array of arrays)
# Serializes +content+ (an Array of row Arrays) to CSV text, with
# +prefix+ (copyright/header lines) prepended verbatim. The trailing
# newline emitted by the last CSV row is stripped so the result can be
# handed to content_for_file unchanged.
#
# Fix: the previous version chopped the final character
# unconditionally (buffer[0..length-2]), which ate the last byte of
# +prefix+ whenever +content+ was empty; chomp("\n") only removes an
# actual trailing newline.
def to_csv(content, prefix = '')
  (prefix + content.map(&:to_csv).join).chomp("\n")
end
# from here: http://stackoverflow.com/questions/2204058/show-which-columns-an-index-is-on-in-postgresql
# Returns the names of every index defined on +table_name+ (ordinary
# tables only, relkind = 'r'), read straight from the PostgreSQL
# catalogs over the GeoIpBlocks connection. Used by the maxmind import
# specs to assert indexes survive the *_copied table swap.
# NOTE(review): +table_name+ is interpolated into the SQL unescaped --
# acceptable for these specs' fixed names, never for untrusted input.
def list_indexes(table_name)
  result = GeoIpBlocks.connection.execute("select
i.relname as index_name,
array_to_string(array_agg(a.attname), ', ') as column_names
from
pg_class t,
pg_class i,
pg_index ix,
pg_attribute a
where
t.oid = ix.indrelid
and i.oid = ix.indexrelid
and a.attrelid = t.oid
and a.attnum = ANY(ix.indkey)
and t.relkind = 'r'
and t.relname = '#{table_name}'
group by
t.relname,
i.relname
order by
t.relname,
i.relname;")
  # each row is [index_name, column_names]; keep just the names
  result.values.map { |row| row[0] }
end
# Truthy (the value 1) when a relation named +table_name+ exists in
# the public schema, nil otherwise. Note select_value yields 1/nil
# rather than true/false; the specs' be_false/should_not matchers
# accept that.
# NOTE(review): pg_class holds indexes/sequences/views too, so this
# matches any relation with that name, not strictly tables.
def table_exists?(table_name)
  GeoIpBlocks.connection.select_value("SELECT 1
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = 'public'
AND c.relname = '#{table_name}'");
end
# Rebuilds the geo/scoring fixture tables by invoking the
# generate_scores_dataset() SQL function; .check raises if the
# statement failed. Specs call this in after(:all) blocks because
# anything invoking after_maxmind_import swaps tables outside
# DatabaseCleaner's transaction and must be reset by hand.
def create_phony_database
  GeoIpBlocks.connection.execute("select generate_scores_dataset()").check
end

View File

@ -101,6 +101,14 @@ def app_config
def max_mind_working_dir
'tmp'
end
def max_track_upload_failures
10
end
def max_track_part_upload_failures
3
end
private
def audiomixer_workspace_path

View File

@ -294,7 +294,7 @@
// make the 3 slower requests, which only matter if the user wants to affect their ISP or location
api.getCountriesx()
api.getCountries()
.done(function(countriesx) { populateCountriesx(countriesx["countriesx"], userDetail.country); } )
.fail(app.ajaxError)
.always(function() { loadingCountriesData = false; })

View File

@ -273,7 +273,7 @@
nilOption.text(nilOptionText);
countrySelect.append(nilOption);
rest.getCountriesx().done(function (response) {
rest.getCountries().done(function (response) {
$.each(response["countriesx"], function (index, countryx) {
if (!countryx.countrycode) return;
var option = $(nilOptionStr);

View File

@ -469,21 +469,6 @@
});
}
function getCountriesx() {
return $.ajax('/api/countriesx', {
dataType : 'json'
});
}
function getIsps(options) {
var country = options["country"]
return $.ajax('/api/isps', {
data : { country: country},
dataType : 'json'
});
}
function getResolvedLocation() {
return $.ajax('/api/resolved_location', {
dataType: 'json'
@ -1207,8 +1192,6 @@
this.getCities = getCities;
this.getRegions = getRegions;
this.getCountries = getCountries;
this.getCountriesx = getCountriesx;
this.getIsps = getIsps;
this.getResolvedLocation = getResolvedLocation;
this.getInstruments = getInstruments;
this.getGenres = getGenres;

View File

@ -476,6 +476,7 @@
// Watchdog for the prime-pump routine: on timeout, cancel the guard
// timer, mark scoring as no longer in progress, log, and reject the
// pending prime deferred so waiting callers can react.
// NOTE(review): the 'data' argument is unused here.
function primePumpTimeout(data) {
  clearPrimeGuard();
  scoring = false;
  logger.debug("the prime pump routine timed out")
  primeDeferred.reject();
}

View File

@ -3,13 +3,7 @@ class ApiMaxmindRequestsController < ApiController
respond_to :json
# Legacy /api/countries endpoint. Deliberately raises so any remaining
# caller surfaces immediately; clients have moved to #countriesx.
# Old MaxMindManager-backed implementation kept commented for reference.
def countries
  raise "no longer supported, use countriesx"
  #countries = MaxMindManager.countries()
  #render :json => { :countries => countries }, :status => 200
end
def countriesx
countriesx = MaxMindManager.countriesx()
countriesx = MaxMindManager.countries
render :json => { :countriesx => countriesx }, :status => 200
end
@ -31,18 +25,9 @@ class ApiMaxmindRequestsController < ApiController
end
end
def isps
isps = MaxMindManager.isps(params[:country])
if isps && isps.length > 0
render :json => { :isps => isps }, :status => 200
else
render :json => { :message => "Unrecognized Country" }, :status => 422
end
end
# returns location hash (country, region, state) based on requesting IP
def resolved_location
location = MaxMindManager.lookup(request.remote_ip)
location = GeoIpLocations.lookup(request.remote_ip)
render :json => { :country => location[:country], :region => location[:state], :city => location[:city] }, :status => 200
end

View File

@ -466,13 +466,13 @@ JS
@location = location
if @location.nil?
@location = MaxMindManager.lookup(remote_ip)
@location = GeoIpLocations.lookup(remote_ip)
end
@location[:country] = "US" if @location[:country].nil?
# right now we only accept US signups for beta
@countriesx = MaxMindManager.countriesx()
@countriesx = MaxMindManager.countries
# populate regions based on current country
@regions = MaxMindManager.regions(@location[:country])
@cities = @location[:state].nil? ? [] : MaxMindManager.cities(@location[:country], @location[:state])

View File

@ -245,5 +245,9 @@ if defined?(Bundler)
config.ftue_maximum_gear_latency = 20
config.max_mind_working_dir = 'tmp'
# recording upload/download configs
config.max_track_upload_failures = 10
config.max_track_part_upload_failures = 3
end
end

View File

@ -23,6 +23,7 @@ test: &test
host: localhost
pool: 5
timeout: 5000
min_messages: warning
production:
adapter: postgresql

View File

@ -371,10 +371,8 @@ SampleApp::Application.routes.draw do
# Location lookups
match '/countries' => 'api_maxmind_requests#countries', :via => :get
match '/countriesx' => 'api_maxmind_requests#countriesx', :via => :get
match '/regions' => 'api_maxmind_requests#regions', :via => :get
match '/cities' => 'api_maxmind_requests#cities', :via => :get
match '/isps' => 'api_maxmind_requests#isps', :via => :get
match '/resolved_location' => 'api_maxmind_requests#resolved_location', :via => :get
# Recordings

View File

@ -4,180 +4,32 @@ class MaxMindManager < BaseManager
super(options)
end
# Returns a hash with location information for an IPv4 address:
# {city:, state:, country:, addr:, locidispid:}.
# Fields are nil (and ids 0) when the address is missing, malformed, or not
# found in the GeoIP tables.
# This is a class method because a read-only lookup needs no transaction.
def self.lookup(ip_address)
  city = state = country = addr = nil
  locid = ispid = 0
  # \A/\z anchor the WHOLE string. The previous ^/$ anchors match per line,
  # so untrusted input such as "junk\n1.2.3.4" would have passed validation.
  if ip_address && ip_address =~ /\A\d+\.\d+\.\d+\.\d+\z/
    addr = ip_address_to_int(ip_address)
    block = GeoIpBlocks.lookup(addr)
    if block
      locid = block.locid
      location = GeoIpLocations.lookup(locid)
      if location
        # TODO: translate countrycode -> country name, region code -> region name
        country = location.countrycode
        state = location.region
        city = location.city
      end
    end
    isp = JamIsp.lookup(addr)
    ispid = isp.coid if isp
  end
  # locidispid packs both ids into one number: locid * 10^6 + ispid.
  { city: city, state: state, country: country, addr: addr, locidispid: locid * 1000000 + ispid }
end
# Resolves an IPv4 address to an ISP name via the max_mind_isp range table,
# or nil when the address is missing, malformed, or unmatched.
# NOTE(review): the ^/$ regex anchors match per line, not the whole string —
# an address with an embedded newline would pass validation; confirm whether
# \A/\z was intended.
def self.lookup_isp(ip_address)
  isp = nil
  unless ip_address.nil? || ip_address !~ /^\d+\.\d+\.\d+\.\d+$/
    ActiveRecord::Base.connection_pool.with_connection do |connection|
      # Reach under ActiveRecord for the raw PG::Connection so we can run a
      # bound-parameter ($1/$2) range query directly.
      pg_conn = connection.instance_variable_get("@connection")
      ip_as_int = ip_address_to_int(ip_address)
      pg_conn.exec("SELECT isp FROM max_mind_isp WHERE ip_bottom <= $1 AND $2 <= ip_top limit 1", [ip_as_int, ip_as_int]) do |result|
        if !result.nil? && result.ntuples > 0
          isp = result.getvalue(0, 0)
        end
      end
    end
  end
  return isp
end
# Deprecated country lookup: always raises.
# Callers should use countriesx, which returns {countrycode:, countryname:}
# hashes built from the countries table.
def self.countries()
  raise "no longer supported, use countriesx"
end
# NOTE(review): diff-render residue — the commented-out body of the removed
# countriesx() and its replacement countries (below) were merged into one
# span; the two defs share a single `end`, so this region does not parse
# on its own. Confirm against the actual source file before editing.
def self.countriesx()
#ActiveRecord::Base.connection_pool.with_connection do |connection|
# pg_conn = connection.instance_variable_get("@connection")
# pg_conn.exec("SELECT DISTINCT country FROM max_mind_geo ORDER BY country ASC").map do |tuple|
# tuple["country"]
# end
#end
# returns ordered array of Country objects (countrycode, countryname)
def self.countries
Country.get_all.map { |c| {countrycode: c.countrycode, countryname: c.countryname} }
end
# Returns the regions of a country as an ordered array of
# {region:, name:} hashes, sourced from the regions lookup table.
def self.regions(country)
  Region.get_all(country).map do |row|
    { region: row.region, name: row.regionname }
  end
end
# Returns the city names for a country/region pair as an ordered array of
# strings, sourced from the cities lookup table.
def self.cities(country, region)
  City.get_all(country, region).map(&:city)
end
# Returns the distinct ISP names recorded for a country, sorted ascending.
def self.isps(country)
  ActiveRecord::Base.connection_pool.with_connection do |connection|
    # Reach under ActiveRecord for the raw PG::Connection to use a
    # bound-parameter ($1) query directly.
    pg_conn = connection.instance_variable_get("@connection")
    pg_conn.exec("SELECT DISTINCT isp FROM max_mind_isp WHERE country = $1 ORDER BY isp ASC", [country]).map do |tuple|
      tuple["isp"]
    end
  end
end
# NOTE(review): diff-render residue — the removed instance-method
# create_phony_database() and the added class-method replacement (bottom)
# were merged into one span; the two defs share a single `end`, so this
# region does not parse on its own.
# The legacy seeding below creates one country ("US"), four regions cycling
# AB/BC/CD/DE, and one city per /8 block (64 cities per region), plus one
# ISP per /8 block.
def create_phony_database()
  clear_location_table
  (0..255).each do |top_octet|
    # lat/lng are hard-coded to 0 in the VALUES list; $1-$5 bind the rest.
    @pg_conn.exec("INSERT INTO max_mind_geo (ip_start, ip_end, country, region, city, lat, lng) VALUES ($1, $2, $3, $4, $5, 0, 0)",
    [
      self.class.ip_address_to_int("#{top_octet}.0.0.0"),
      self.class.ip_address_to_int("#{top_octet}.255.255.255"),
      "US",
      ['AB', 'BC', 'CD', 'DE'][top_octet % 4],
      "City #{top_octet}"
    ]).clear
  end
  clear_isp_table
  (0..255).each do |top_octet|
    @pg_conn.exec("INSERT INTO max_mind_isp (ip_bottom, ip_top, isp, country) VALUES ($1, $2, $3, $4)",
    [
      self.class.ip_address_to_int("#{top_octet}.0.0.0"),
      self.class.ip_address_to_int("#{top_octet}.255.255.255"),
      "ISP #{top_octet}",
      "US"
    ]).clear
  end
  # Rebuild the normalized lookup tables from the seeded geo rows.
  @pg_conn.exec "DELETE FROM cities"
  @pg_conn.exec "INSERT INTO cities (city, region, countrycode) SELECT DISTINCT city, region, country FROM max_mind_geo"
  @pg_conn.exec "DELETE FROM regions"
  @pg_conn.exec "INSERT INTO regions (region, regionname, countrycode) select distinct region, region, countrycode from cities"
  @pg_conn.exec "DELETE FROM countries"
  @pg_conn.exec "INSERT INTO countries (countrycode, countryname) SELECT DISTINCT countrycode, countrycode FROM regions"
  # Replacement version: delegates phony-data generation to a DB function.
  def self.create_phony_database
    GeoIpBlocks.connection.execute("select generate_scores_dataset()").check
  end
private
# Packs a dotted-quad IPv4 string (e.g. "1.2.3.4") into its numeric
# (bigint) representation.
def self.ip_address_to_int(ip)
  octets = ip.split('.')
  octets.reduce(0) { |acc, octet| (acc << 8) + octet.to_i }
end
# Empties the location tables prior to re-seeding.
# NOTE(review): both DELETEs are present in this rendering — the
# max_mind_geo line appears superseded by geoiplocations in the diff;
# confirm which table(s) the current source actually clears.
def clear_location_table
  @pg_conn.exec("DELETE FROM max_mind_geo").clear
  @pg_conn.exec("DELETE FROM geoiplocations").clear
end
# Empties the ISP tables prior to re-seeding.
# NOTE(review): both DELETEs are present in this rendering — the
# max_mind_isp line appears superseded by geoispip in the diff;
# confirm which table(s) the current source actually clears.
def clear_isp_table
  @pg_conn.exec("DELETE FROM max_mind_isp").clear
  @pg_conn.exec("DELETE FROM geoispip").clear
end
end

View File

@ -9,17 +9,13 @@ namespace :db do
release = MaxMindRelease.order('released_at DESC').first
end
release.import
if release.imported && ENV['REIMPORT'] != '1'
puts "The MaxMindRelease for #{release.released_at} has already been imported."
puts "If you really want to import it again, specify REIMPORT=1"
return
end
desc "Import a maxmind geo (139) database; run like this: rake db:import_maxmind_geo file=<path_to_GeoIPCity.csv>"
task import_maxmind_geo: :environment do
MaxMindGeo.import_from_max_mind ENV['file']
end
desc "Import a maxmind isp (142) database; run like this: rake db:import_maxmind_isp file=<path_to_GeoIPISP-142.csv>"
task import_maxmind_isp: :environment do
MaxMindIsp.import_from_max_mind ENV['file']
release.import(ENV['FORCE_FROM_SOURCE'] == '1')
end
desc "Import a maxmind blocks (134) database; run like this: rake db:import_geoip_blocks file=<path_to_GeoIPCity-134-Blocks.csv>"
@ -49,6 +45,7 @@ namespace :db do
desc "Help"
task help: :environment do
puts "bundle exec rake db:import_maxmind"
puts "bundle exec rake db:import_maxmind_isp file=/path/to/GeoIPISP-142.csv # geo-142"
puts "bundle exec rake db:import_maxmind_geo file=/path/to/GeoIPCity.csv # geo-139"
puts "bundle exec rake db:import_geoip_blocks file=/path/to/GeoIPCity-134-Blocks.csv # geo-134"
@ -61,7 +58,7 @@ namespace :db do
desc "Create a fake set of maxmind data"
task phony_maxmind: :environment do
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
MaxMindManager.create_phony_database
end
end
end

View File

@ -35,7 +35,7 @@ class UserManager < BaseManager
raise PermissionError, "Signups are currently disabled"
end
loc = MaxMindManager.lookup(remote_ip)
loc = GeoIpLocations.lookup(remote_ip)
# there are three cases here: if location is missing, we'll auto set the city, etc. from
# the ip address; if location is present, empty or not empty, we'll set the city, etc. from
# what is present in location. we should NOT normally default city, etc. for the user, they
@ -77,7 +77,7 @@ class UserManager < BaseManager
def signup_confirm(signup_token, remote_ip=nil)
begin
user = User.signup_confirm(signup_token)
user.location = MaxMindManager.lookup(remote_ip) if remote_ip
user.location = GeoIpLocations.lookup(remote_ip) if remote_ip
rescue ActiveRecord::RecordNotFound
user = nil
end

View File

@ -251,16 +251,6 @@ FactoryGirl.define do
end
factory :geocoder, :class => JamRuby::MaxMindGeo do
country 'US'
sequence(:region) { |n| ['NC', 'CA'][(n-1).modulo(2)] }
sequence(:city) { |n| ['Apex', 'San Francisco'][(n-1).modulo(2)] }
sequence(:ip_start) { |n| ['1.1.0.0', '1.1.255.255'][(n-1).modulo(2)] }
sequence(:ip_end) { |n| ['1.2.0.0', '1.2.255.255'][(n-1).modulo(2)] }
sequence(:lat) { |n| [35.73265, 37.7742075][(n-1).modulo(2)] }
sequence(:lng) { |n| [-78.85029, -122.4155311][(n-1).modulo(2)] }
end
factory :icecast_limit, :class => JamRuby::IcecastLimit do
clients 5
sources 1
@ -518,6 +508,7 @@ FactoryGirl.define do
factory :rsvp_request, class: JamRuby::RsvpRequest do
canceled false
cancel_all false
association :user, :factory => :user
# creates *number* slots for a new rsvp_request
factory :rsvp_request_for_multiple_slots do

View File

@ -5,15 +5,7 @@ describe "Bands", :js => true, :type => :feature, :capybara_feature => true do
subject { page }
before(:all) do
Capybara.javascript_driver = :poltergeist
Capybara.current_driver = Capybara.javascript_driver
Capybara.default_wait_time = 15
# MaxMindIsp.delete_all # prove that city/state/country will remain nil if no maxmind data
# MaxMindGeo.delete_all
#MaxMindManager.active_record_transaction do |manager|
# manager.create_phony_database()
#end
end
let(:fan) { FactoryGirl.create(:fan) }

View File

@ -1,43 +1,29 @@
require 'spec_helper'
# these tests avoid the use of ActiveRecord and FactoryGirl to do blackbox, non test-instrumented tests
describe MaxMindManager do
before(:each) do
@maxmind_manager = MaxMindManager.new(:conn => @conn)
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
end
MaxMindManager.create_phony_database
end
it "looks up countries successfully" do
countries = MaxMindManager.countriesx()
countries = MaxMindManager.countries
countries.length.should == 1
countries[0] == {countrycode: "US", countryname: "United States"}
end
it "looks up regions successfully" do
regions = MaxMindManager.regions("US")
regions.length.should == 4
regions.first[:region].should == "AB"
regions.last[:region].should == "DE"
regions.length.should == 11
end
it "looks up cities successfully" do
cities = MaxMindManager.cities("US", "AB")
cities.length.should == 64
cities.first.should == "City 0"
cities.last.should == "City 96"
cities = MaxMindManager.cities("US", "TX")
cities.length.should == 4
cities.first.should == "Austin"
cities.last.should == "San Antonio"
end
it "looks up isp successfully" do
isp = MaxMindManager.lookup_isp("127.0.0.1")
isp.should == "ISP 127"
end
it "looks up isp-by-country successfully" do
isps = MaxMindManager.isps("US")
isps.length.should == 256 # because the phony_database method creates 256 isps, all in US
end
end

View File

@ -187,8 +187,7 @@ describe UserManager do
}
it "signup successfully" do
MaxMindIsp.delete_all # prove that city/state/country will remain nil if no maxmind data
MaxMindGeo.delete_all
GeoIpLocations.delete_all # prove that city/state/country will remain nil if no maxmind data
user = @user_manager.signup(remote_ip: "127.0.0.1",
first_name: "bob",
@ -207,9 +206,9 @@ describe UserManager do
user.last_name.should == "smith"
user.email.should == "userman1@jamkazam.com"
user.email_confirmed.should be_false
user.city.should == 'Boston'
user.state.should == 'MA'
user.country.should == 'US'
user.city.should be_nil
user.state.should be_nil
user.country.should be_nil
user.instruments.length.should == 1
user.subscribe_email.should be_true
user.signup_token.should_not be_nil
@ -257,9 +256,7 @@ describe UserManager do
end
it "sets the location properly from maxmind" do
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
end
MaxMindManager.create_phony_database
user = @user_manager.signup(remote_ip: "127.0.0.1",
first_name: "bob",
last_name: "smith",
@ -278,9 +275,7 @@ describe UserManager do
end
it "accepts location if specified" do
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
end
MaxMindManager.create_phony_database
user = @user_manager.signup(remote_ip: "127.0.0.1",
first_name: "bob",
last_name: "smith",
@ -300,9 +295,7 @@ describe UserManager do
end
it "accepts a nil location, if specified" do
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
end
MaxMindManager.create_phony_database
user = @user_manager.signup(remote_ip: "127.0.0.1",
first_name: "bob",
last_name: "smith",
@ -323,9 +316,7 @@ describe UserManager do
it "accepts birth_date if specified" do
MaxMindManager.active_record_transaction do |manager|
manager.create_phony_database()
end
MaxMindManager.create_phony_database
user = @user_manager.signup(remote_ip: "127.0.0.1",
first_name: "bob",
last_name: "smith",

View File

@ -46,6 +46,14 @@ def web_config
# Stubbed config value: cap on audio downloads.
def max_audio_downloads
  10
end
# Stubbed config value: give up on a track upload after this many failures
# (mirrors config.max_track_upload_failures = 10 in the app config).
def max_track_upload_failures
  10
end
# Stubbed config value: per-part upload failure limit
# (mirrors config.max_track_part_upload_failures = 3 in the app config).
def max_track_part_upload_failures
  3
end
end
klass.new
end