
Commit 093a173

Deleted dead code. Leaving the Event eval in place because it's entirely dependent on internal data, with no outside user input
1 parent 5305904 commit 093a173
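
The eval the commit message refers to survives in Event#user_activity, which writes activity hashes into a Redis sorted set and later rebuilds them with eval when the feed is read. Below is a minimal sketch of that round trip, assuming the redis-rb gem and an illustrative key name rather than the app's real Audience-derived keys; the point is that eval is only defensible here because the stored strings are generated by the app itself.

require 'redis'

redis = Redis.new   # stand-in for the app's REDIS constant

# Internally generated event data -- never raw user input, which is the
# commit message's justification for keeping eval.
data = { event_type: :new_protip, user_id: 42 }

# Store the hash's String form as the sorted-set member, scored by time
# (the app passes the hash directly and lets redis-rb stringify it).
redis.zadd('activity_feed:example', Time.now.to_f, data.to_s)

# Reading the feed back: eval turns the stored Ruby literal into a Hash again.
# Tolerable for trusted, app-generated strings; on external input this would
# amount to arbitrary code execution.
redis.zrangebyscore('activity_feed:example', '-inf', '+inf').each do |activity|
  event = eval(activity)
  puts event[:event_type]
end

Serializing these members as JSON (as publish_event already does for the Pubnub channel) would avoid eval entirely; this commit deliberately leaves that path unchanged.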

3 files changed: +1 −57 lines changed

app/models/event.rb

Lines changed: 0 additions & 51 deletions
@@ -18,15 +18,12 @@ def generate_event(event_type, audience, data={}, drip_rate=:immediately)
     activity_feed_keys = channels.map { |channel| Audience.channel_to_key(channel) }

     if drip_rate == :immediately
-      #Rails.logger.debug("data=#{data.class.name},#{data.inspect}, #{extra_information(data)}, #{data[:user].inspect}, #{drip_rate}")
       channels.each do |channel|
         publish_event(channel, data)
       end
     else
       activity_feed_keys.each_with_index do |activity_feed_key, index|
         data.merge!({ channel: channels[index] }.with_indifferent_access)
-        #Rails.logger.debug("data=#{data.class.name},#{data.inspect}, #{extra_information(data)}, #{data[:user].inspect}, #{drip_rate}")
-        #last_event = REDIS.LINDEX(user.activity_feed_key, 0).collect{|event| Event.new(event)}.first
         score_for_event = Time.now.to_f
         REDIS.zadd(activity_feed_key, score_for_event.to_f, data)
         count = REDIS.zcard(activity_feed_key)
@@ -35,13 +32,6 @@ def generate_event(event_type, audience, data={}, drip_rate=:immediately)
     end
   end

-  def score_based_on_drip_rate(activity_feed_key, event_type, drip_rate)
-    time_difference_requested = drip_rate_to_time(drip_rate)
-    last_similar_event_timestamp = last_similar_event_timestamp(activity_feed_key, event_type)
-    score_for_event = last_similar_event_timestamp.to_f + time_difference_requested.to_f
-    score_for_event
-  end
-
   def send_admin_notifications(event_type, data, queue)
     unless queue.nil?
       if event_type.to_sym == :new_protip
@@ -53,48 +43,10 @@ def send_admin_notifications(event_type, data, queue)
   end

   def publish_event(channel, data)
-    #puts data.inspect
     data.merge!(timestamp: Time.now.to_i)
-
-    #puts "publish event #{data[:event_type]}"
-
     publish(channel, data.to_json)
   end

-  def drip_rate_to_time(drip_rate)
-    case drip_rate
-    when :immediately
-      Time.now
-    when :hourly
-      1.hour
-    when :daily
-      1.day
-    when :weekly
-      1.week
-    when :monthly
-      1.month
-    else
-      drip_rate
-    end
-  end
-
-  def last_similar_event_timestamp(activity_feed_key, event_type)
-    # we basically look at every event in the future (if any) and find the last event of same event type so we can space it from that. Otherwise, we
-    # just space it from now. it is not perfect because we miss the corner case that similar event was in the past but limits the searches.
-
-    i = 1
-    Hash[*REDIS.zrangebyscore(activity_feed_key, Time.now.to_f, "inf", withscores: true)].sort_by { |k, v| v }.reverse.each do |activity, score|
-
-      Rails.logger.warn("[EVAL:#{i}] Event#last_similar_event_timestamp(activity_feed_key = #{activity_feed_key.inspect}, event_type = #{event_type.inspect}) set to eval activity = #{activity.inspect}")
-      i += 1
-
-      if eval(activity)[:event_type] == event_type
-        return score
-      end
-    end
-    REDIS.zrange(activity_feed_key, 0, 0, withscores: true)[1] || Time.now.to_i
-  end
-
   def user_activity(user, from, to, limit, publish=false)

     activity_feed_keys = user.nil? ? Audience.to_key(Audience.all).to_a : user.subscribed_channels.map { |channel| Audience.channel_to_key(channel) }
@@ -104,15 +56,13 @@ def user_activity(user, from, to, limit, publish=false)
     activities = []

     activity_feed_keys.each do |activity_feed_key|
-      #puts "activity_feed_key=#{activity_feed_key}, #{from}, #{to}, #{count}, #{limit}, #{publish}"
       i = 1
       REDIS.zrangebyscore(activity_feed_key, from, to).each do |activity|

         Rails.logger.warn("[EVAL:#{i}] Event#user_activity(user = #{user.inspect}, from = #{from.inspect}, limit = #{limit.inspect}, publish = #{publish.inspect}) set to eval activity = #{activity.inspect}")
         i += 1

         break if count == limit
-        #puts "PUBLISHING #{activity}"
         data = eval(activity).with_indifferent_access
         channel = data[:channel]
         data.delete(:channel)
@@ -122,7 +72,6 @@ def user_activity(user, from, to, limit, publish=false)
         else
           activities << data.merge({ timestamp: (data[:event_id] || Time.now.to_i) })
         end
-        #REDIS.zrem(activity_feed_key, activity)
         count += 1
       end
     end

app/models/user.rb

Lines changed: 0 additions & 4 deletions
@@ -853,10 +853,6 @@ def activity
   def refresh_github!
     unless github.blank?
       load_github_profile
-      # token = github_token || User.with_tokens.first.github_token
-      # client = Github.new(token)
-      # github_profile.refresh!(client)
-      # load_github_profile
     end
   end

lib/publisher.rb

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,4 @@
 module Publisher
-
   def agent
     @@pubnub ||= Pubnub.new(
       ENV['PUBNUB_PUBLISH_KEY'],
@@ -23,4 +22,4 @@ def agent_active?
     end
   end

-end
+end
