From 6d82f54f5bc935c3522e9f237bc719d9ff3875b4 Mon Sep 17 00:00:00 2001
From: Abdelkader Boudih
Date: Wed, 31 Dec 2014 15:31:00 +0000
Subject: [PATCH] Press `Merge` to pay respects to MongoDB

---
 .travis.yml | 3 +-
 Gemfile | 8 +-
 Gemfile.lock | 27 ---
 Vagrantfile | 1 -
 app/models/github_profile.rb | 97 --------
 app/models/github_repo.rb | 208 ------------------
 app/models/github_user.rb | 32 ---
 config/initializers/mongoid_monkeypatch.rb | 9 -
 config/initializers/omniauth.rb | 5 +-
 config/mongoid.yml | 55 -----
 public/humans.txt | 4 +-
 script/bson2json.rb | 55 -----
 script/convert.sh | 12 -
 script/import_team_data.rb | 132 -----------
 spec/fabricators/github_profile_fabricator.rb | 42 ----
 .../users_github_profile_fabricator.rb | 2 +-
 spec/models/github_profile_spec.rb | 115 ----------
 spec/models/github_repo_spec.rb | 160 --------------
 spec/models/users/github/repository_spec.rb | 9 -
 spec/spec_helper.rb | 1 -
 vagrant/coderwall-box/scripts/postinstall.sh | 10 -
 21 files changed, 7 insertions(+), 980 deletions(-)
 delete mode 100644 app/models/github_profile.rb
 delete mode 100644 app/models/github_repo.rb
 delete mode 100644 app/models/github_user.rb
 delete mode 100644 config/initializers/mongoid_monkeypatch.rb
 delete mode 100644 config/mongoid.yml
 delete mode 100755 script/bson2json.rb
 delete mode 100755 script/convert.sh
 delete mode 100644 script/import_team_data.rb
 delete mode 100644 spec/fabricators/github_profile_fabricator.rb
 delete mode 100644 spec/models/github_profile_spec.rb
 delete mode 100644 spec/models/github_repo_spec.rb

diff --git a/.travis.yml b/.travis.yml
index 47a96043..c5721de3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,9 +1,8 @@
 language: ruby
 rvm:
 - 2.1.5
-bundler_args: "--without development production autotest"
+bundler_args: "--without development production"
 services:
-- mongodb
 - redis-server
 before_install:
 - wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.13.deb
diff --git a/Gemfile b/Gemfile
index a9128224..2ed826e8 100644
--- a/Gemfile
+++ b/Gemfile
@@ -7,7 +7,6 @@ gem 'rails', '~> 3.2'
 gem 'sass', '~> 3.2.9'
 gem 'coffee-rails', '~> 3.2.1'
-gem 'compass-rails'
 gem 'sass-rails', '~> 3.2.6'
 gem 'uglifier', '>= 1.0.3'
 
 # Assets
@@ -29,7 +28,6 @@ gem 'dotenv-rails', groups: [:development, :test]
 # Attachements
 gem 'carrierwave'
 gem 'carrierwave_backgrounder' #background processing of images
-gem 'carrierwave-mongoid', require: 'carrierwave/mongoid'
 
 # HTML
 gem 'haml'
@@ -133,11 +131,7 @@ gem 'elasticsearch-model'
 gem 'elasticsearch-rails'
 
 # DROP BEFORE RAILS 4
-# Mongo
-gem 'mongoid'
-gem 'mongo'
-gem 'mongoid_taggable'
-gem 'bson_ext'
+gem 'compass-rails'
 gem 'strong_parameters'
 gem 'postgres_ext'
 # ElasticSearch client
diff --git a/Gemfile.lock b/Gemfile.lock
index 22e5e9bb..5108c75b 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -119,9 +119,6 @@ GEM
     binding_of_caller (0.7.2)
       debug_inspector (>= 0.0.1)
     blankslate (3.1.3)
-    bson (1.10.2)
-    bson_ext (1.10.2)
-      bson (~> 1.10.2)
     buftok (0.2.0)
     builder (3.0.4)
     byebug (2.7.0)
@@ -142,10 +139,6 @@ GEM
       activesupport (>= 3.2.0)
      json (>= 1.7)
      mime-types (>= 1.16)
-    carrierwave-mongoid (0.7.1)
-      carrierwave (>= 0.8.0, < 0.11.0)
-      mongoid (>= 3.0, < 5.0)
-      mongoid-grid_fs (>= 1.3, < 3.0)
     carrierwave_backgrounder (0.4.1)
       carrierwave (~> 0.5)
     celluloid (0.16.0)
@@ -406,20 +399,6 @@ GEM
       escape
       json
      rack
-    mongo (1.10.2)
-      bson (= 1.10.2)
-    mongoid (3.1.6)
-      activemodel (~> 3.2)
-      moped (~> 1.4)
-      origin (~> 1.0)
-      tzinfo (~> 0.3.29)
-    mongoid-grid_fs (2.1.0)
-      mime-types (>= 1.0, < 3.0)
-      mongoid (>= 3.0, < 5.0)
-    mongoid_taggable (1.1.1)
-      mongoid (>= 3)
-      rake
-    moped (1.5.2)
     multi_json (1.10.1)
     multi_xml (0.5.5)
     multipart-post (1.2.0)
@@ -466,7 +445,6 @@ GEM
     omniauth-twitter (0.0.18)
       multi_json (~> 1.3)
       omniauth-oauth (~> 1.0)
-    origin (1.1.0)
     parser (2.1.9)
       ast (>= 1.1, < 3.0)
       slop (~> 3.4, >= 3.4.5)
@@ -766,11 +744,9 @@ DEPENDENCIES
   backbone-on-rails
   better_errors
   binding_of_caller
-  bson_ext
   capybara
   capybara-screenshot
   carrierwave
-  carrierwave-mongoid
   carrierwave_backgrounder
   chronic
   clockwork
@@ -815,9 +791,6 @@ DEPENDENCIES
   metamagic
   mini_magick
   mixpanel
-  mongo
-  mongoid
-  mongoid_taggable
   multi_json
   never_wastes
   newrelic_rpm
diff --git a/Vagrantfile b/Vagrantfile
index 590e4352..85752e03 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -40,7 +40,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
   config.vm.network :private_network, ip: '192.168.237.95' # 192.168.cdr.wl
 
   set_port_mapping_for(config, 'elasticsearch', 9200, custom_settings)
-  set_port_mapping_for(config, 'mongodb', 27017, custom_settings)
   set_port_mapping_for(config, 'postgres', 5432, custom_settings)
   set_port_mapping_for(config, 'redis', 6379, custom_settings)
   set_port_mapping_for(config, 'rails', 3000, custom_settings, true)
diff --git a/app/models/github_profile.rb b/app/models/github_profile.rb
deleted file mode 100644
index 7f924d52..00000000
--- a/app/models/github_profile.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-class GithubProfile
-  include Mongoid::Document
-  include Mongoid::Timestamps
-
-  index({login: 1}, {unique: true, background: true})
-  index({github_id: 1}, {unique: true, background: true})
-
-  field :github_id
-  field :name, type: String
-  field :login, type: String
-  field :company, type: String
-  field :avatar_url, type: String
-  field :location, type: String
-  field :type, type: String
-
-  embeds_many :followers, class_name: GithubUser.name.to_s, as: :personable
-
-  has_and_belongs_to_many :orgs, class_name: GithubProfile.name.to_s
-
-  ORGANIZATION = "Organization"
-  USER = "User"
-  VALID_TYPES = [ORGANIZATION, USER]
-
-  class << self
-    def for_username(username, since=1.day.ago)
-      find_or_initialize_by(login: username).tap do |profile|
-        if profile.new_record?
-          logger.info "ALERT: No cached profile for user #{username}"
-          profile.refresh!(nil, since)
-        end
-      end
-    end
-  end
-
-  def facts
-    facts = []
-    GithubRepo.where('owner.github_id' => github_id).all.each do |repo|
-      if repo.has_contents?
-        facts << convert_repo_into_fact(repo)
-      end
-    end
-    GithubRepo.where('contributors.github_id' => github_id, "owner.github_id" => { '$in' => orgs.map(&:github_id) }).all.each do |repo|
-      if repo.original? && repo.significant_contributions?(github_id)
-        facts << convert_repo_into_fact(repo, orgrepo = true)
-      end
-    end
-    facts << Fact.append!("github:#{login}", "github:#{login}", "Joined GitHub", created_at, "https://github.com/#{login}", ['github', 'account-created'])
-    return facts
-  end
-
-  def convert_repo_into_fact(repo, orgrepo = false)
-    tags = repo.tags + ['repo', 'github', repo.dominant_language]
-    if orgrepo
-      tags << 'org'
-    else
-      tags << 'personal'
-    end
-    if repo.fork?
-      tags << 'fork'
-    else
-      tags << 'original'
-    end
-    metadata = {
-      languages: repo.languages_that_meet_threshold,
-      original: repo.original?,
-      times_forked: repo.forks ? repo.forks.size : 0,
-      watchers: repo.followers.collect(&:login)
-    }
-    Fact.append!("#{repo.html_url}:#{login}", "github:#{login}", repo.name, repo.created_at, repo.html_url, tags, metadata)
-  end
-
-  def refresh!(client=nil, since)
-    client ||= GithubOld.new
-    username = self.login
-
-    profile = client.profile(username, since)
-    github_id = profile.delete(:id)
-
-    repos = client.repos_for(username, since).map do |repo|
-      owner, name = repo.owner.login, repo.name
-      GithubRepo.for_owner_and_name(owner, name, client, repo)
-    end
-
-    update_attributes! profile.merge(
-      github_id: github_id,
-      followers: client.followers_for(username, since),
-      following: client.following_for(username, since),
-      watched: client.watched_repos_for(username, since),
-      orgs: orgs,
-      repos: repos.map { |r| { id: r.id, name: r.name } }
-    )
-  end
-
-  def stale?
-    updated_at < 24.hours.ago
-  end
-end
diff --git a/app/models/github_repo.rb b/app/models/github_repo.rb
deleted file mode 100644
index 91e3f969..00000000
--- a/app/models/github_repo.rb
+++ /dev/null
@@ -1,208 +0,0 @@
-class GithubRepo
-  include Mongoid::Document
-  include Mongoid::Timestamps
-
-  field :name, type: String
-  field :html_url, type: String
-  field :tags, type: Array, default: []
-  field :languages
-  field :fork, type: Boolean
-  field :forks
-  field :pushed_at
-  field :watchers
-
-  embeds_one :owner, class_name: GithubUser.name.to_s, as: :personable
-  embeds_many :followers, class_name: GithubUser.name.to_s, as: :personable
-  embeds_many :contributors, class_name: GithubUser.name.to_s, as: :personable
-
-  index('owner.login' => 1)
-  index('owner.github_id' => 1)
-  index({name: 1})
-
-  before_save :update_tags!
-
-  class << self
-    def for_owner_and_name(owner, name, client=nil, prefetched={})
-      (where('owner.login' => owner, 'name' => name).first || new('name' => name, 'owner' => { 'login' => owner })).tap do |repo|
-        if repo.new_record?
-          logger.info "ALERT: No cached repo for #{owner}/#{name}"
-          repo.refresh!(client, prefetched)
-        end
-      end
-    end
-  end
-
-  def refresh!(client=nil, repo={})
-    client ||= GithubOld.new
-    owner, name = self.owner.login, self.name
-
-    repo = client.repo(owner, name) if repo.empty?
-
-    if repo[:fork].blank?
-      repo.merge!(
-        forks: client.repo_forks(owner, name),
-        contributors: client.repo_contributors(owner, name),
-      )
-    end
-
-    repo.delete(:id)
-
-    update_attributes! repo.merge(
-      owner: GithubUser.new(repo[:owner]),
-      followers: client.repo_watchers(owner, name),
-      languages: client.repo_languages(owner, name) # needed so we can determine contents
-    )
-  end
-
-  def full_name
-    "#{self.owner.login}/#{self.name}"
-  end
-
-  def times_forked
-    if self[:forks].is_a? Array
-      self[:forks].size
-    else
-      self[:forks] || 0
-    end
-  end
-
-  def dominant_language_percentage
-    main_language = self.dominant_language
-    bytes_of_other_langs = languages.collect { |k, v| k != main_language ? v : 0 }.sum
-    bytes_of_main_lang = languages[main_language]
-    return 0 if bytes_of_main_lang == 0
-    return 100 if bytes_of_other_langs == 0
-    100 - (bytes_of_other_langs.quo(bytes_of_main_lang).to_f * 100).round
-  end
-
-  def total_commits
-    self.contributors.to_a.sum do |c|
-      c['contributions']
-    end
-  end
-
-  def total_contributions_for(github_id)
-    contributor = self.contributors.first { |c| c['github_id'] == github_id }
-    (contributor && contributor['contributions']) || 0
-  end
-
-  CONTRIBUTION_COUNT_THRESHOLD = 10
-  CONTRIBUTION_PERCENT_THRESHOLD = 0.10
-
-  def percent_contributions_for(github_id)
-    total_contributions_for(github_id) / self.total_commits.to_f
-  end
-
-  def significant_contributions?(github_id)
-    total_contributions_for(github_id) >= CONTRIBUTION_COUNT_THRESHOLD || percent_contributions_for(github_id) > CONTRIBUTION_PERCENT_THRESHOLD
-  end
-
-  def dominant_language
-    return '' if languages.blank?
-    primary_language = languages.sort_by { |k, v| v }.last
-    if primary_language
-      primary_language.first
-    else
-      ''
-    end
-  end
-
-  def languages_that_meet_threshold
-    languages.collect do |key, value|
-      key if value.to_i >= 200
-    end.compact
-  end
-
-  def original?
-    !fork?
-  end
-
-  def has_contents?
-    !languages_that_meet_threshold.blank?
-  end
-
-  def readme
-    @readme ||= raw_readme
-  end
-
-  def popularity
-    @popularity ||= begin
-      rank = times_forked + watchers #(times_forked + followers.size)
-      case
-      when rank > 600 then
-        5
-      when rank > 300 then
-        4
-      when rank > 100 then
-        3
-      when rank > 20 then
-        2
-      else
-        1
-      end
-    end
-  end
-
-  def raw_readme
-    %w{
-      README
-      README.markdown
-      README.md
-      README.txt
-    }.each do |file_type|
-      begin
-        return Servant.get("#{html_url}/raw/master/#{file_type}").result
-      rescue RestClient::ResourceNotFound
-        Rails.logger.debug("Looking for readme, did not find #{file_type}") if ENV['DEBUG']
-      end
-    end
-    return empty_string = ''
-  end
-
-  def update_tags!
-    tag_dominant_lanugage!
-    tag_project_types!
-    tags.uniq!
-  end
-
-  def tag_dominant_lanugage!
-    tags << dominant_language unless languages.blank?
-  end
-
-  def add_tag(tag)
-    self.tags << tag
-  end
-
-  def tagged?(tag)
-    tags.include?(tag)
-  end
-
-  NODE_MATCHER = /(node.js|no.de|nodejs|(\s|\A|^)node(\s|\A|-|_|^))/i
-  JQUERY_MATCHER = /jquery/i
-
-  def tag_project_types!
-    tag_when_project_matches('JQuery', JQUERY_MATCHER, disable_readme_inspection = nil, 'JavaScript') ||
-      tag_when_project_matches('Node', NODE_MATCHER, disable_readme_inspection = nil, 'JavaScript') ||
-      tag_when_project_matches('Prototype', /prototype/i, nil, 'JavaScript')
-  end
-
-  def tag_when_project_matches(tag_name, matcher, readme_matcher, language = nil)
-    if language && dominant_language.downcase == language.downcase
-      if field_matches?('name', matcher) ||
-         field_matches?('description', matcher) ||
-         (readme_matcher && dominant_language_percentage > 90 && readme_matches?(readme_matcher))
-        tags << tag_name
-        return true
-      end
-    end
-    return false
-  end
-
-  def field_matches?(field, regex)
-    self[field] && !self[field].match(regex).nil?
-  end
-
-  def readme_matches?(regex)
-    !readme.match(regex).nil?
-  end
-end
diff --git a/app/models/github_user.rb b/app/models/github_user.rb
deleted file mode 100644
index de9cd8c9..00000000
--- a/app/models/github_user.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-class GithubUser
-  include Mongoid::Document
-
-  field :github_id
-  field :avatar_url
-  field :login
-  field :gravatar
-
-  after_initialize :extract_gravatar_from_avatar_url
-  before_save :extract_gravatar_from_avatar_url
-
-  after_initialize :extract_github_id
-  before_save :extract_github_id
-
-  embedded_in :personable, polymorphic: true
-
-  def extract_github_id
-    temp_id = attributes['id'] || attributes['_id']
-    if github_id.nil? && temp_id.is_a?(Fixnum)
-      self.github_id = temp_id
-      attributes.delete '_id'
-      attributes.delete 'id'
-    end
-  end
-
-  def extract_gravatar_from_avatar_url
-    if attributes['avatar_url'] && attributes['avatar_url'] =~ /avatar\/([\w|\d]*)\?/i
-      self.gravatar = attributes['avatar_url'].match(/avatar\/([\w|\d]*)\?/i)[1]
-      attributes.delete 'avatar_url'
-    end
-  end
-end
\ No newline at end of file
diff --git a/config/initializers/mongoid_monkeypatch.rb b/config/initializers/mongoid_monkeypatch.rb
deleted file mode 100644
index 00423787..00000000
--- a/config/initializers/mongoid_monkeypatch.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-if defined?(Moped)
-  class Moped::BSON::ObjectId
-    def to_json(*args)
-      "\"#{to_s}\""
-    end
-  end
-else
-  Rails.logger.error('REMOVE Mongoid monkeypatch')
-end
\ No newline at end of file
diff --git a/config/initializers/omniauth.rb b/config/initializers/omniauth.rb
index 4e799e6b..4221750c 100644
--- a/config/initializers/omniauth.rb
+++ b/config/initializers/omniauth.rb
@@ -2,9 +2,9 @@
   # http://rubydoc.info/gems/omniauth/OmniAuth/Strategies/Developer
   provider :developer unless Rails.env.production?
-  provider :github, GithubOld::GITHUB_CLIENT_ID, GithubOld::GITHUB_SECRET
+  provider :github, ENV['GITHUB_CLIENT_ID'], ENV['GITHUB_SECRET']
   provider :twitter, ENV['TWITTER_CONSUMER_KEY'], ENV['TWITTER_CONSUMER_SECRET']
-  provider :linkedin, LinkedInStream::KEY, LinkedInStream::SECRET
+  provider :linkedin, ENV['LINKEDIN_KEY'], ENV['LINKEDIN_SECRET']
 end
 
 OmniAuth.config.on_failure do |env|
@@ -13,7 +13,6 @@
   strategy = env['omniauth.error.strategy']
 
   Rails.logger.error("OmniAuth #{strategy.class.name}::#{error_type}: #{exception.inspect}")
-  # Honeybadger::Rack.new(Rack::Request.new(env)).notify_honeybadger(exception, env) if Rails.env.production?
 
   new_path = "#{env['SCRIPT_NAME']}#{OmniAuth.config.path_prefix}/failure?message=#{error_type}"
   [302, {'Location' => new_path, 'Content-Type' => 'text/html'}, []]
diff --git a/config/mongoid.yml b/config/mongoid.yml
deleted file mode 100644
index dd0e9b3d..00000000
--- a/config/mongoid.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-development:
-  sessions:
-    default:
-      database: <%= ENV['MONGO_DATABASE'] || 'badgify_development' %>
-      hosts:
-        - <%= ENV['MONGO_URL'] || 'localhost:27017' %>
-
-test:
-  sessions:
-    default:
-      database: heroku_app2303161
-      hosts:
-        - localhost:27017
-
-staging:
-  sessions:
-    default:
-      uri: <%= ENV['MONGOLAB_URI'] %>
-
-# mongoid 3.x
-#
-# As discussed in: http://blog.mongolab.com/2014/02/mongodb-driver-tips-tricks-mongoid-3
-#
-production:
-  sessions:
-    default:
-      # The standard MongoDB connection URI allows for easy replica set connection setup.
-      # Use environment variables or a config file to keep your credentials safe.
-      uri: <%= ENV['MONGOLAB_URI'] %>
-
-    options:
-      # The default consistency is :eventual, which reads from secondaries when possible.
-      # Strong forces reads to primary. We recommend using strong consistency.
-      consistency: :strong
-
-      # max_retries specifies the number of times to attempt an operation before giving up.
-      max_retries: 30
-
-      # retry_interval specifies the number of seconds to wait before retrying a single operation.
-      retry_interval: 1
-
-      # The default timeout is 5, which is the time in seconds for an operation to time out.
-      # We recommend 15 because it allows for plenty of time in most operating environments.
-      # Mongoid doubles the configured value (known issue) so 15 results in a timeout of 30s.
-      # Note that if you have a long-running query (over 30 seconds), it will time out.
-      # See our example for long-running queries in the blog post referenced above.
-      timeout: 15
-
-      # Set this to ensure that your writes are a round-trip operation
-      # and are confirmed by the system.
-      safe: true
-
-      # refresh_interval specifies the number of seconds to cache server information.
-      # Lowering this number will help the driver recover more quickly from changes to replica set reconfiguration
-      refresh_interval: 10
diff --git a/public/humans.txt b/public/humans.txt
index 390e4b44..546eee14 100644
--- a/public/humans.txt
+++ b/public/humans.txt
@@ -127,9 +127,9 @@ Location: China
 
 /* SITE */
 
-Last update: 2014/10/06
+Last update: 2014/12/31
 Standards: HTML5, CSS3
-Components: Ruby on Rails, jQuery, Sass, Backbone.js, Ember.js, PostgreSQL, ElasticSearch, MongoDB, Redis, etc.
+Components: Ruby on Rails, jQuery, Sass, Backbone.js, Ember.js, PostgreSQL, ElasticSearch, Redis, etc.
 Software: Vim, Tmux, Vagrant, Git, etc.
 Language: English
 IDE: Vim
diff --git a/script/bson2json.rb b/script/bson2json.rb
deleted file mode 100755
index 34f44bce..00000000
--- a/script/bson2json.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/ruby
-
-# This script acts as a command-line filter to convert a BSON file (such as from mongodump) to an equivalent JSON file
-# The resulting JSON file will be an array of hashes
-# Any binary values from the BSON file are converted to base64 (such as Mongo's _id fields)
-# I originally wrote this script so that Mongo files can be easily used with jsawk for
-# offline data processing -- https://github.com/micha/jsawk
-#
-# To invoke, assuming mycollection.bson is a file from mongodump:
-#   ruby bson2json.rb < mycollection.bson > mycollection.json
-
-require 'rubygems'
-require 'bson'
-require 'json'
-require 'base64'
-
-def process(file)
-  puts '['
-
-  while not file.eof? do
-    bson = BSON.read_bson_document(file)
-    bson = bson_debinarize(bson)
-    puts bson.to_json + (file.eof? ? '' : ',')
-  end
-
-  puts ']'
-end
-
-# Accept BSON document object; return equivalent, but with any BSON::Binary values converted with Base64
-def bson_debinarize(bson_doc)
-  raise ArgumentError, "bson_doc must be a BSON::OrderedHash" unless bson_doc.is_a?(BSON::OrderedHash)
-
-  # each key and value is passed by reference and is modified in-place
-  bson_doc.each do |k,v|
-    if v.is_a?(BSON::Binary)
-      bson_doc[k] = Base64.encode64(v.to_s)
-    elsif v.is_a?(BSON::OrderedHash)
-      bson_doc[k] = bson_debinarize(v)
-    end
-  end
-
-  bson_doc
-end
-
-process(STDIN)
-
-__END__
-
-Copyright (c) 2012 SPARC, LLC
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/script/convert.sh b/script/convert.sh
deleted file mode 100755
index be0c299e..00000000
--- a/script/convert.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env zsh
-source ~/.rvm/scripts/rvm
-
-rvm use 2.1.2@coderwall
-
-echo Converting github_repos
-ruby bson2json.rb < github_repos.bson > github_repos.json
-echo Converting github_profiles
-ruby bson2json.rb < github_profiles.bson > github_profiles.json
-echo Converting teams
-ruby bson2json.rb < teams.bson > teams.json
-echo done
diff --git a/script/import_team_data.rb b/script/import_team_data.rb
deleted file mode 100644
index 85f8fc5e..00000000
--- a/script/import_team_data.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-class ImportTeamData
-  DATE_FIELDS = %i(updated_at upgraded_at created_at)
-
-  def initialize(data_file)
-
-    PgTeam.delete_all
-
-    $pg_team_attrs ||= PgTeam.new.attributes.symbolize_keys.keys
-    $pg_teams_member_attrs ||= Teams::Member.new.attributes.symbolize_keys.keys
-
-    print PgTeam.count
-
-    File.open(data_file) do |file|
-      PgTeam.transaction do
-
-        file.each_line.with_index(1) do |line, lineno|
-          line = line.chomp.chomp(',')
-          next if %w([ ]).include?(line)
-
-          data = process(MultiJson.load(line, symbolize_keys: true))
-
-          team = save_team!(data[:team])
-
-          # at this point `team` is a live ActiveRecord model
-
-          save_team_members!(team, data[:team_members])
-
-
-          # TODO: FIX that I create a new instance with fields. Remove them from the migration and
-          # just create a new join table instance. Although there might need to be `state` field
-          # for the state of the user being a member or pending member
-
-          save_team_pending_members!(team, data[:pending_team_members])
-
-          print '.'
-        end
-      end
-    end
-
-    puts PgTeam.count
-  end
-
-  private
-
-  def save_team!(data)
-    validate_fields!('PgTeam', data, $pg_team_attrs)
-
-    PgTeam.create!(data)
-  end
-
-  def save_team_members!(team, data)
-    return unless data
-
-    data.each do |team_members|
-      validate_fields!('Teams::Member', team_members, $pg_teams_member_attrs)
-      team.members.build(team_members)
-    end
-
-    team.save!
-  end
-
-  def validate_fields!(klass, data, required_keys)
-    undefined_keys = data.keys - required_keys
-    fail "Undefined keys for #{klass} found in import data: #{undefined_keys.inspect}" unless undefined_keys.empty?
-  end
-
-  def process(input)
-    data = {team: {}}
-
-    input.each_pair do |key, value|
-      next if can_skip?(key, value)
-
-      transform(data, key, prepare(key, value))
-    end
-
-    data
-  end
-
-  def transform(data, key, value)
-    if %i(
-      admins
-      editors
-      interview_steps
-      invited_emails
-      office_photos
-      pending_join_requests
-      pending_team_members
-      stack_list
-      team_locations
-      team_members
-      upcoming_events
-    ).include?(key)
-      data[key] = value
-    else
-      data[:team][key] = value
-    end
-  end
-
-  def can_skip?(key, value)
-    return true if key == :_id
-    return true unless value
-    return true if value.is_a?(Array) && value.empty?
-    return true if value.is_a?(Hash) && value['$oid'] && value.keys.count == 1
-
-    false
-  end
-
-  def prepare(key, value)
-    return value if [Fixnum, Float, TrueClass].any? {|type| value.is_a?(type) }
-    return value.map { |v| clean(key, v) } if value.is_a?(Array)
-
-    clean(key, value)
-  end
-
-  def clean(key, value)
-    if value.is_a?(Hash)
-      value.delete(:_id)
-      DATE_FIELDS.each do |k|
-        value[k] = DateTime.parse(value[k]) if value[k]
-      end
-    else
-      if DATE_FIELDS.include?(key)
-        value = DateTime.parse(value)
-      end
-    end
-
-    value
-  end
-end
-
-# be rake db:drop:all db:create:all db:schema:load db:migrate db:seed ; be rake db:test:prepare ; clear ; be rails runner script/import_team_data.rb
-ImportTeamData.new(File.join(Rails.root, 'dump', 'teams_short.json'))
diff --git a/spec/fabricators/github_profile_fabricator.rb b/spec/fabricators/github_profile_fabricator.rb
deleted file mode 100644
index 209fec2d..00000000
--- a/spec/fabricators/github_profile_fabricator.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-Fabricator(:github_profile) do
-  name { Faker::Name.name }
-  login { 'mdeiters' }
-  _id { 7330 }
-  type { GithubProfile::ORGANIZATION }
-end
-
-Fabricator(:owner, from: :github_user) do
-  _id { 7330 }
-  login { 'mdeiters' }
-  gravatar { 'aacb7c97f7452b3ff11f67151469e3b0' }
-end
-
-Fabricator(:follower, from: :github_user) do
-  github_id { sequence(:github_id) }
-  login { sequence(:login) { |i| "user#{i}" } }
-  gravatar { 'aacb7c97f7452b3ff11f67151469e3b0' }
-end
-
-Fabricator(:watcher, from: :github_user) do
-  github_id { 1 }
-  login { 'mojombo' }
-  gravatar { '25c7c18223fb42a4c6ae1c8db6f50f9b' }
-end
-
-Fabricator(:github_repo) do
-  after_build { |repo| repo.forks = 1 }
-  name { sequence(:repo) { |i| "repo#{i}" } }
-  owner { Fabricate.attributes_for(:owner) }
-  html_url { 'https://github.com/mdeiters/semr' }
-  languages do {
-    'Ruby' => 111_435,
-    'JavaScript' => 50_164
-  } end
-end
-
-Fabricator(:github_org, class_name: 'GithubProfile') do
-  name { Faker::Company.name }
-  login { 'coderwall' }
-  _id { 1234 }
-  type { GithubProfile::ORGANIZATION }
-end
diff --git a/spec/fabricators/users_github_profile_fabricator.rb b/spec/fabricators/users_github_profile_fabricator.rb
index 4fb9c7e9..52f49699 100644
--- a/spec/fabricators/users_github_profile_fabricator.rb
+++ b/spec/fabricators/users_github_profile_fabricator.rb
@@ -1,2 +1,2 @@
-Fabricator(:pg_github_profile, from: 'users/github/profile') do
+Fabricator(:github_profile, from: 'users/github/profile') do
 end
diff --git a/spec/models/github_profile_spec.rb b/spec/models/github_profile_spec.rb
deleted file mode 100644
index 1e7b72e1..00000000
--- a/spec/models/github_profile_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-require 'vcr_helper'
-
-# TODO: Deprecate GithubOld, and related testing
-RSpec.describe GithubProfile, type: :model, skip: ENV['TRAVIS'] do
-  let(:languages) do
-    {
-      'C' => 194_738,
-      'C++' => 105_902,
-      'Perl' => 2_519_686
-    }
-  end
-  ## test we don't create a fact for an empty repo
-  let(:access_token) { '9432ed76b16796ec034670524d8176b3f5fee9aa' }
-  let(:client_id) { '974695942065a0e00033' }
-  let(:client_secret) { '7d49c0deb57b5f6c75e6264ca12d20d6a8ffcc68' }
-
-  it 'should have a timesamp' do
-    profile = Fabricate(:github_profile)
-    expect(profile.created_at).not_to be_nil
-    expect(profile.updated_at).not_to be_nil
-  end
-
-  def response_body(file)
-    File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', file))
-  end
-
-  describe 'facts' do
-    let (:profile) do
-      VCR.use_cassette('GithubProfile') do
-        GithubProfile.for_username('mdeiters')
-      end
-    end
-
-    it 'creates facts for original repos' do
-      expect(profile.facts).not_to be_empty
-      fact = profile.facts.select { |fact| fact.identity =~ /mdeiters\/semr:mdeiters$/i }.first
-
-      expect(fact.identity).to eq('https://github.com/mdeiters/semr:mdeiters')
-      expect(fact.owner).to eq('github:mdeiters')
-      expect(fact.name).to eq('semr')
-      expect(fact.relevant_on.to_date).to eq(Date.parse('2008-05-08'))
-      expect(fact.url).to eq('https://github.com/mdeiters/semr')
-      expect(fact.tags).to include('repo')
-      expect(fact.metadata[:languages]).to include('Ruby', 'JavaScript')
-    end
-
-    it 'creates facts for when user signed up' do
-      expect(profile.facts).not_to be_empty
-      fact = profile.facts.last
-      expect(fact.identity).to eq('github:mdeiters')
-      expect(fact.owner).to eq('github:mdeiters')
-      expect(fact.name).to eq('Joined GitHub')
-      expect(fact.relevant_on.to_date).to eq(Date.parse('2008-04-14'))
-      expect(fact.url).to eq('https://github.com/mdeiters')
-      expect(fact.tags).to include('account-created')
-    end
-  end
-
-  describe 'profile not on file' do
-    let (:profile) do
-      VCR.use_cassette('github_profile_for_mdeiters') do
-        GithubProfile.for_username('mdeiters')
-      end
-    end
-
-    it 'will indicate stale if older then an 24 hours', skip: 'timezone is incorrect' do
-      expect(profile.updated_at).to be > 1.minute.ago
-      expect(profile).not_to be_stale
-      expect(profile).to receive(:updated_at).and_return(25.hours.ago)
-      expect(profile).to be_stale
-    end
-
-    it 'builds a profile if there is none on file' do
-      expect(profile.name).to eq('Matthew Deiters')
-    end
-
-    it 'populates followers' do
-      expect(profile.followers.map { |f| f[:login] }).to include('amanelis')
-    end
-
-    it 'populates following' do
-      expect(profile.following.map { |f| f[:login] }).to include('atmos')
-    end
-
-    it 'populates watched repos' do
-      expect(profile.watched.map { |w| w[:name] }).to include('rails')
-    end
-
-    describe 'populates owned repos' do
-      before do
-        @repo = GithubRepo.find(profile.repos.first[:id])
-      end
-
-      it 'gets a list of repos' do
-        expect(profile.repos.map { |r| r[:name] }).to include ('semr')
-      end
-
-      it 'adds languages' do
-        expect(@repo.language).to eq('Ruby')
-      end
-
-      it 'adds watchers' do
-        expect(@repo.followers.first.login).to eq('mdeiters')
-      end
-
-      it 'adds contributors', skip: 'fragile integration' do
-        expect(@repo.contributors.first['login']).to eq('mdeiters')
-      end
-
-      it 'adds forks', skip: 'fragile integration' do
-        expect(@repo.forks.size).to eq(1)
-      end
-    end
-  end
-end
diff --git a/spec/models/github_repo_spec.rb b/spec/models/github_repo_spec.rb
deleted file mode 100644
index 8f8e7830..00000000
--- a/spec/models/github_repo_spec.rb
+++ /dev/null
@@ -1,160 +0,0 @@
-require 'vcr_helper'
-
-RSpec.describe GithubRepo, type: :model, skip: ENV['TRAVIS'] do
-  before :each do
-    register_fake_paths
-
-    u = Fabricate(:user)
-    u.admin = true
-    u.github_token = access_token
-    u.save
-  end
-
-  def register_fake_paths
-    access_token = '9432ed76b16796ec034670524d8176b3f5fee9aa'
-    client_id = '974695942065a0e00033'
-    client_secret = '7d49c0deb57b5f6c75e6264ca12d20d6a8ffcc68'
-
-    stub_request(:get, "https://api.github.com/repos/mdeiters/semr/languages?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_languages.js')), content_type: 'application/json; charset=utf-8')
-    stub_request(:get, "https://api.github.com/repos/mdeiters/semr/forks?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_forks.js')), content_type: 'application/json; charset=utf-8')
-    stub_request(:get, "https://api.github.com/repos/mdeiters/semr/contributors?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100&anon=false").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_contributors.js')), content_type: 'application/json; charset=utf-8')
-    stub_request(:get, "https://api.github.com/repos/mdeiters/semr/stargazers?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_watchers.js')), content_type: 'application/json; charset=utf-8')
-  end
-
-  let(:data) { JSON.parse(File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'user_repo.js'))).with_indifferent_access }
-  let(:repo) do
-    repo = nil
-    # TODO: Refactor api calls to Sidekiq job
-    VCR.use_cassette('GithubRepo') do
-      repo = GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data)
-    end
-    repo
-  end
-  let(:access_token) { '9432ed76b16796ec034670524d8176b3f5fee9aa' }
-  let(:client_id) { '974695942065a0e00033' }
-  let(:client_secret) { '7d49c0deb57b5f6c75e6264ca12d20d6a8ffcc68' }
-
-  describe 'contributions' do
-    it 'should filter the repos the user has contributed to' do
-      user = Fabricate(:user)
-      org = Fabricate(:github_org)
-      profile = Fabricate(:github_profile, github_id: user.github_id, orgs: [org])
-
-      contributed_by_count_repo = Fabricate(:github_repo, owner: { github_id: org.github_id }, contributors: [
-        { 'github_id' => user.github_id, 'contributions' => 10 },
-        { 'github_id' => nil, 'contributions' => 1000 }
-      ])
-
-      non_contributed_repo = Fabricate(:github_repo, owner: { github_id: org.github_id }, contributors: [
-        { 'github_id' => user.github_id, 'contributions' => 5 },
-        { 'github_id' => nil, 'contributions' => 18_000 }
-      ])
-
-      expect(contributed_by_count_repo.significant_contributions?(user.github_id)).to eq(true)
-      expect(non_contributed_repo.significant_contributions?(user.github_id)).to eq(false)
-    end
-  end
-
-  it 'should have an owner' do
-    expect(repo.owner.github_id).to eq(7330)
-    expect(repo.owner.login).to eq('mdeiters')
-    expect(repo.owner.gravatar).to eq('aacb7c97f7452b3ff11f67151469e3b0')
-  end
-
-  it 'should update repo on second call' do
-    # TODO: Refactor api calls to Sidekiq job
-    VCR.use_cassette('GithubRepo') do
-
-      data = JSON.parse(File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'user_repo.js'))).with_indifferent_access
-      2.times do
-        GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data)
-      end
-      expect(GithubRepo.count).to eq(1)
-
-    end
-  end
-
-  it 'should indicate dominant language' do
-    expect(repo.dominant_language).to eq('Ruby')
-  end
-
-  it 'should indicate dominant language percantage' do
-    expect(repo.dominant_language_percentage).to eq(55)
-  end
-
-  it 'should indicate if contents' do
-    expect(repo.has_contents?).to eq(true)
-  end
-
-  it 'should indicate no contents if there are no languages', skip: 'incorrect data' do
-    stub_request(:get, "https://api.github.com/repos/mdeiters/semr/languages?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_languages_empty.js')), content_type: 'application/json; charset=utf-8')
-    expect(repo.has_contents?).to eq(false)
-  end
-
-  it 'should not modify users on refresh' do
-    original_follower = repo.followers.first
-
-    refreshed_repo = GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data)
-    refreshed_follower = refreshed_repo.followers.first
-
-    expect(refreshed_follower.login).to eq(original_follower.login)
-    expect(refreshed_follower.gravatar).to eq(original_follower.gravatar)
-  end
-
-  describe 'tagging' do
-
-    it 'contains tags between refreshes' do
-      modified_repo = GithubRepo.find(repo._id)
-      modified_repo.add_tag 'a'
-      modified_repo.add_tag 'b'
-      modified_repo.save!
-
-      refreshed_repo = GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data)
-      expect(refreshed_repo.tags).to include('a', 'b')
-    end
-
-    it 'should tag dominant language' do
-      expect(repo.tags).to include('Ruby')
-    end
-
-    it 'does not duplicate tags on refresh' do
-      expect(repo.tags).to eq(GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data).tags)
-    end
-
-    describe 'tags javascript projects' do
-      it 'tags jquery if dominant lanugage is js and description to include jquery' do
-        stub_request(:get, 'https://github.com/mdeiters/semr/raw/master/README').to_return(body: 'empty')
-        stub_request(:get, "https://api.github.com/repos/mdeiters/semr/languages?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100").to_return(body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_languages_js.js')), content_type: 'application/json; charset=utf-8')
-
-        data[:description] = 'something for jquery'
-        expect(repo.tags).to include('Ruby')
-      end
-
-      it 'tags node if dominant lanugage is js and description has nodejs in it' do
-        skip 'Disabled inspecting README because of false positives'
-        # FakeWeb.register_uri(:get, 'https://github.com/mdeiters/semr/raw/master/README', body: 'empty')
-        # FakeWeb.register_uri(:get, "https://api.github.com/repos/mdeiters/semr/languages?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100", body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_languages_js.js')), content_type: 'application/json; charset=utf-8')
-
-        data[:description] = 'Node Routing'
-        expect(repo.tags).to include('Node')
-      end
-
-      it 'tags node if dominant lanugage is js and readme has node in it' do
-        skip 'Disabled inspecting README because of false positives'
-        # FakeWeb.register_uri(:get, "https://api.github.com/repos/mdeiters/semr/languages?client_id=#{client_id}&client_secret=#{client_secret}&per_page=100", body: File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'repo_languages_js.js')), content_type: 'application/json; charset=utf-8')
-        # FakeWeb.register_uri(:get, 'https://github.com/mdeiters/semr/raw/master/README', body: 'trying out node')
-        expect(repo.tags).to include('Node')
-      end
-    end
-  end
-
-  describe 'viewing readme' do
-    it 'finds the readme for .txt files', functional: true do
-      expect(repo.readme).to match(/semr gem uses the oniguruma library/)
-    end
-
-    it 'should cache readme for repeat calls' do
-      expect(repo.readme).to eq(repo.readme)
-    end
-  end
-end
diff --git a/spec/models/users/github/repository_spec.rb b/spec/models/users/github/repository_spec.rb
index 57d85436..38527f5d 100644
--- a/spec/models/users/github/repository_spec.rb
+++ b/spec/models/users/github/repository_spec.rb
@@ -5,13 +5,4 @@
   it { is_expected.to have_many :contributors }
   it { is_expected.to belong_to :organization }
   it { is_expected.to belong_to :owner }
-
-  let(:data) { JSON.parse(File.read(File.join(Rails.root, 'spec', 'fixtures', 'githubv3', 'user_repo.js'))).with_indifferent_access }
-  let(:repo) do
-    GithubRepo.for_owner_and_name('mdeiters', 'semr', nil, data)
-  end
-  let(:access_token) { '9432ed76b16796ec034670524d8176b3f5fee9aa' }
-  let(:client_id) { '974695942065a0e00033' }
-  let(:client_secret) { '7d49c0deb57b5f6c75e6264ca12d20d6a8ffcc68' }
-
 end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index f62094ca..561bebd8 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -35,7 +35,6 @@
     DatabaseCleaner.strategy = :transaction
     DatabaseCleaner.start
-    Mongoid::Sessions.default.collections.reject { |c| c.name =~ /^system/ }.each(&:drop)
     ActionMailer::Base.deliveries.clear
   end
diff --git a/vagrant/coderwall-box/scripts/postinstall.sh b/vagrant/coderwall-box/scripts/postinstall.sh
index e2898d14..35935b22 100644
--- a/vagrant/coderwall-box/scripts/postinstall.sh
+++ b/vagrant/coderwall-box/scripts/postinstall.sh
@@ -27,8 +27,6 @@ export MAILGUN_SIGNATURE=NEEDS_TO_COPY_FROM_DOTENV
 export MAILGUN_TOKEN=NEEDS_TO_COPY_FROM_DOTENV
 export MIXPANEL_API_SECRET=NEEDS_TO_COPY_FROM_DOTENV
 export MIXPANEL_TOKEN=NEEDS_TO_COPY_FROM_DOTENV
-export MONGODB_DATABASE_NAME=NEEDS_TO_COPY_FROM_DOTENV
-export MONGO_DATABASE=NEEDS_TO_COPY_FROM_DOTENV
 export NEW_RELIC_PROMOTION=NEEDS_TO_COPY_FROM_DOTENV
 export NOTIFIER_ADMIN_EMAILS=NEEDS_TO_COPY_FROM_DOTENV
 export PARTY_FOUL_OAUTH_TOKEN=NEEDS_TO_COPY_FROM_DOTENV
@@ -225,12 +223,6 @@ groupadd puppet
 
 # Install Foreman
 /opt/ruby/bin/gem install foreman --no-ri --no-rdoc
-# MongoDB
-apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | tee /etc/apt/sources.list.d/mongodb.list
-apt-get -y update
-apt-get -y install mongodb-10gen
-
 REDIS_VERSION="2.8.4"
 wget http://download.redis.io/releases/redis-$REDIS_VERSION.tar.gz
 tar xzf redis-$REDIS_VERSION.tar.gz
@@ -380,8 +372,6 @@ su - vagrant <<-'EOF'
   export MAILGUN_TOKEN=NEEDS_TO_COPY_FROM_DOTENV
   export MIXPANEL_API_SECRET=NEEDS_TO_COPY_FROM_DOTENV
   export MIXPANEL_TOKEN=NEEDS_TO_COPY_FROM_DOTENV
-  export MONGODB_DATABASE_NAME=NEEDS_TO_COPY_FROM_DOTENV
-  export MONGO_DATABASE=NEEDS_TO_COPY_FROM_DOTENV
   export NEW_RELIC_PROMOTION=NEEDS_TO_COPY_FROM_DOTENV
   export NOTIFIER_ADMIN_EMAILS=NEEDS_TO_COPY_FROM_DOTENV
   export PARTY_FOUL_OAUTH_TOKEN=NEEDS_TO_COPY_FROM_DOTENV