diff --git a/app/models/list.rb b/app/models/list.rb index b639dd29..1ee21717 100644 --- a/app/models/list.rb +++ b/app/models/list.rb @@ -1,7 +1,8 @@ class List < ApplicationRecord validates :url, presence: true - has_many :projects + has_many :list_projects + has_many :projects, through: :list_projects def to_s name @@ -118,10 +119,11 @@ def parse_readme def load_projects readme_links.each do |link| - - # find or create project - # create join between project and list - # join holds name, description, category, sub_category + + project = Project.find_or_create_by(url: link[:url]) + project.sync_async + list_project = list_projects.find_or_create_by(project_id: project.id) + list_project.update(name: link[:name], description: link[:description], category: link[:category], sub_category: link[:sub_category]) end end diff --git a/app/models/list_project.rb b/app/models/list_project.rb new file mode 100644 index 00000000..08a27a29 --- /dev/null +++ b/app/models/list_project.rb @@ -0,0 +1,4 @@ +class ListProject < ApplicationRecord + belongs_to :list + belongs_to :project +end diff --git a/app/models/project.rb b/app/models/project.rb index 7b5b7c55..f5bad267 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -2,6 +2,9 @@ class Project < ApplicationRecord + has_many :list_projects + has_many :lists, through: :list_projects + validates :url, presence: true, uniqueness: { case_sensitive: false } scope :active, -> { where("(repository ->> 'archived') = ?", 'false') } @@ -10,153 +13,9 @@ class Project < ApplicationRecord scope :language, ->(language) { where("(repository ->> 'language') = ?", language) } scope :owner, ->(owner) { where("(repository ->> 'owner') = ?", owner) } scope :keyword, ->(keyword) { where("keywords @> ARRAY[?]::varchar[]", keyword) } - scope :reviewed, -> { where(reviewed: true) } - scope :unreviewed, -> { where(reviewed: nil) } - scope :matching_criteria, -> { where(matching_criteria: true) } scope :with_readme, -> { where.not(readme: 
nil) } - scope :with_works, -> { where('length(works::text) > 2') } scope :with_repository, -> { where.not(repository: nil) } - def self.import_from_csv - - # url = 'https://raw.githubusercontent.com/protontypes/open-source-in-environmental-sustainability/main/open-source-in-environmental-sustainability/csv/projects.csv' - url = 'https://gist.githubusercontent.com/andrew/44442a6a84395df81bc1b0a153c5abaf/raw/fb4d3ff68eb65ebca9c9387fadb30349e3563e1b/Projects-Gridview.csv' - - conn = Faraday.new(url: url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - response = conn.get - return unless response.success? - csv = response.body - csv_data = CSV.new(csv, headers: true) - - csv_data.each do |row| - next if row['git_url'].blank? - project = Project.find_or_create_by(url: row['git_url'].downcase) - project.name = row['project_name'] - project.description = row['oneliner'] - project.rubric = row['rubric'] - project.reviewed = true - project.save - project.sync_async unless project.last_synced_at.present? - end - end - - def self.import_from_readme - url = 'https://raw.githubusercontent.com/protontypes/open-sustainable-technology/main/README.md' - readme = ReadmeParser.load(url) - - urls = [] - - readme.parse_links.each do |category, sub_categories| - sub_categories.each do |sub_category, links| - links.each do |link| - conn = Faraday.new(url: link[:url].downcase) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - begin - response = conn.get - if response.success? 
- url = response.env.url.to_s.downcase - else - url = link[:url].downcase - end - rescue - url = link[:url].downcase - end - - url.chomp!('/') - - urls << url - - project = Project.find_or_create_by(url: url) - project.name = link[:name] - project.description = link[:description] - project.reviewed = true - project.category = category - project.sub_category = sub_category - project.save - project.sync_async unless project.last_synced_at.present? - end - end - end - - # mark projects that are no longer in the readme as unreviewed - removed = Project.where.not(url: urls).reviewed - removed.each do |p| - puts "Marking #{p.url} as unreviewed" - end - - puts "Removed #{removed.length} projects" - removed.update_all(reviewed: false) - end - - def self.discover_via_topics(limit=100) - relevant_keywords.shuffle.first(limit).each do |topic| - import_topic(topic) - end - end - - def self.discover_via_keywords(limit=100) - relevant_keywords.shuffle.first(limit).each do |topic| - import_keyword(topic) - end - end - - def self.keywords - @keywords ||= Project.reviewed.pluck(:keywords).flatten.group_by(&:itself).transform_values(&:count).sort_by{|k,v| v}.reverse - end - - def self.ignore_words - ['hacktoberfest', 'python', 'java', "open-data", "open-source", 'network', 'r', 'database', 'ruby', 'iot', 'julia', 'numpy', 'pandas', 'rstats', 'react','python3','env', 'map', 'api', 'cran', - 'awesome','data','dataset','awesome-list', 'javascript','r-package','raspberry-pi','matlab', 'typescript', 'svelte', 'nodejs', 'c', 'fortran', 'modbus','matplotlib', - 'sqlite','arduino','golang','influxdb','esp8266','laravel','gui','smart-meter','docker','dashboard','platform','building','geospatial','pytorch','deep-learning','documentation', 'gis','remote-sensing', - 'machine-learning', 'satellite','xarray','google-earth-engine','earth-engine','linear-programming','management','netcdf','sql','3d','rstudio','firebase','webgl','flask','blockchain','addon','osm','go', - 
'rust','vue','vuejs','rest-api','ios','linux','python-3','postgresql','postgis','jupyter-notebook','game','ethereum','d3','d3js','code','android','ai','library','client','django','package','energy-monitor', - 'finance', 'risk','time-series','raster','hpc','scipy','workflow','numba','nasa','cpp','cmake','c-plus-plus','analysis','data-science','plotting','iot-platform','transport','artificial-intelligence', 'aws', - 'neural-networks', 'time-series-forecasting', 'timeseries', 'torch','models','datasets','high-performance-computing', 'peer-reviewed', 'reproducible-research','websocket','fleet-management','citation', - 'credit', 'metadata', 'standard', 'nasa-data', 'satellite-data', 'space','geographic-information-systems', 'satellite-imagery', 'satellite-images', 'energy', 'statistics','openfoodfacts','tensorflow', - 'nutrition','azure','modeling', 'tuning','iobroker','benchmark','kubernetes','k8s', 'helm', 'github-action', 'github-actions', 'svg','cnc','spark', 'scala', 'pyspark','microsoft', 'http','apache-spark', - 'hacktoberfest2020','neural-network','farm','python-library','uk','openstreetmap','robotics','mechanical-engineering','lidar','sdk','cli','gpu','ml','landsat','food','automation','gtfs','ggplot2', 'github', - 'kotlin', 'sentinel','visualization','maps','mapping','dask','pipeline','api-client','transit','education','api-wrapper','course','mapbox','engineering','atmosphere','scenario','optimization', - 'data-analysis','data-visualization','backend','model','modelling','nextjs','pyam','australia','object-detection','monte-carlo-simulation','time-series-analysis','cnn','forecasting','forecast','openai-gym', - 'rails','ruby-on-rails','science',"computer-vision","image-processing","image-classification","segmentation","spatial","classification","electricity","image-segmentation","simulation",'php','leaflet', - 
'regression','vector','mobile','leaflet-plugins','sentinel-1','cpu','fastapi','zigbee','metrics','big-data','cross-platform','self-driving-car','json','computing','framework','frontend', - 'pwa','web','web-framework','react-native','analytics','electron','homeassistant','home-assistant','smarthome','home-automation','pi0'] - end - - def self.stop_words - [] - end - - def self.update_matching_criteria - unreviewed.find_each(&:update_matching_criteria) - end - - def update_matching_criteria - update(matching_criteria: matching_criteria?) - end - - def self.potential_good_topics - Project.unreviewed.pluck(:keywords).flatten.group_by(&:itself).transform_values(&:count).sort_by{|k,v| v}.reverse.select{|k,v| v > 1}.map(&:first) - ignore_words - end - - def self.potential_ignore_words - Project.unreviewed.pluck(:keywords).flatten.group_by(&:itself).transform_values(&:count).sort_by{|k,v| v}.reverse.select{|k,v| v > 1}.map(&:first) - ignore_words - end - - def self.relevant_keywords - keywords.select{|k,v| v > 1}.map(&:first) - ignore_words - end - - def self.rubric_keywords(rubric) - Project.where(rubric: rubric).pluck(:keywords).flatten.group_by(&:itself).transform_values(&:count).sort_by{|k,v| v}.reverse - end - def self.sync_least_recently_synced Project.where(last_synced_at: nil).or(Project.where("last_synced_at < ?", 1.day.ago)).order('last_synced_at asc nulls first').limit(500).each do |project| project.sync_async @@ -203,17 +62,9 @@ def first_created def sync check_url fetch_repository - fetch_owner - fetch_dependencies - fetch_packages combine_keywords - fetch_commits - fetch_events - fetch_issue_stats - fetch_citation_file if reviewed? - fetch_readme if reviewed? - update(last_synced_at: Time.now, matching_criteria: matching_criteria?) - update_score + fetch_readme + update(last_synced_at: Time.now) ping end @@ -241,8 +92,7 @@ def check_url def combine_keywords keywords = [] - keywords += repository["topics"] if repository.present? 
- keywords += packages.map{|p| p["keywords"]}.flatten if packages.present? + keywords += repository["topics"] if repository.present? && repository["topics"].present? self.keywords = keywords.uniq.reject(&:blank?) self.save end @@ -254,33 +104,15 @@ def ping end def ping_urls - repos_ping_url + [repos_ping_url].compact end def repos_ping_url return unless repository.present? "https://repos.ecosyste.ms/api/v1/hosts/#{repository['host']['name']}/repositories/#{repository['full_name']}/ping" end - - def commits_ping_url - return unless repository.present? - "https://commits.ecosyste.ms/api/v1/hosts/#{repository['host']['name']}/repositories/#{repository['full_name']}/ping" - end - - def packages_ping_urls - return [] unless packages.present? - packages.map do |package| - "https://packages.ecosyste.ms/api/v1/registries/#{package['registry']['name']}/packages/#{package['name']}/ping" - end - end - - def owner_ping_url - return unless repository.present? - "https://repos.ecosyste.ms/api/v1/hosts/#{repository['host']['name']}/owner/#{repository['owner']}/ping" - end - + def description - return read_attribute(:description) if read_attribute(:description).present? return unless repository.present? repository["description"] end @@ -308,37 +140,7 @@ def fetch_repository puts "Error fetching repository for #{repository_url}" end - def owner_api_url - return unless repository.present? - return unless repository["owner"].present? - return unless repository["host"].present? - return unless repository["host"]["name"].present? - "https://repos.ecosyste.ms/api/v1/hosts/#{repository['host']['name']}/owners/#{repository['owner']}" - end - - def owner_url - return unless repository.present? - return unless repository["owner"].present? - return unless repository["host"].present? - return unless repository["host"]["name"].present? 
- "https://repos.ecosyste.ms/hosts/#{repository['host']['name']}/owners/#{repository['owner']}" - end - - def fetch_owner - return unless owner_api_url.present? - conn = Faraday.new(url: owner_api_url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - response = conn.get - return unless response.success? - self.owner = JSON.parse(response.body) - self.save - rescue - puts "Error fetching owner for #{repository_url}" - end - + def timeline_url return unless repository.present? return unless repository["host"]["name"] == "GitHub" @@ -346,139 +148,6 @@ def timeline_url "https://timeline.ecosyste.ms/api/v1/events/#{repository['full_name']}/summary" end - def fetch_events - return unless timeline_url.present? - conn = Faraday.new(url: timeline_url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - response = conn.get - return unless response.success? - summary = JSON.parse(response.body) - - conn = Faraday.new(url: timeline_url+'?after='+1.year.ago.to_fs(:iso8601)) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - response = conn.get - return unless response.success? - last_year = JSON.parse(response.body) - - self.events = { - "total" => summary, - "last_year" => last_year - } - self.save - rescue - puts "Error fetching events for #{repository_url}" - end - - # TODO fetch repo dependencies - # TODO fetch repo tags - - def packages_url - "https://packages.ecosyste.ms/api/v1/packages/lookup?repository_url=#{repository_url}" - end - - def fetch_packages - conn = Faraday.new(url: packages_url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - - response = conn.get - return unless response.success? 
- self.packages = JSON.parse(response.body) - self.save - rescue - puts "Error fetching packages for #{repository_url}" - end - - def commits_api_url - "https://commits.ecosyste.ms/api/v1/repositories/lookup?url=#{repository_url}" - end - - def commits_url - "https://commits.ecosyste.ms/repositories/lookup?url=#{repository_url}" - end - - def fetch_commits - conn = Faraday.new(url: commits_api_url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - response = conn.get - return unless response.success? - self.commits = JSON.parse(response.body) - self.save - rescue - puts "Error fetching commits for #{repository_url}" - end - - def committers_names - return [] unless commits.present? - return [] unless commits["committers"].present? - commits["committers"].map{|c| c["name"].downcase }.uniq - end - - def committers - return [] unless commits.present? - return [] unless commits["committers"].present? - commits["committers"].map{|c| [c["name"].downcase, c["count"]]}.each_with_object(Hash.new {|h,k| h[k] = 0}) { |(x,d),h| h[x] += d } - end - - def raw_committers - return [] unless commits.present? - return [] unless commits["committers"].present? - commits["committers"] - end - - def fetch_dependencies - return unless repository.present? - conn = Faraday.new(url: repository['manifests_url']) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - response = conn.get - return unless response.success? - self.dependencies = JSON.parse(response.body) - self.save - rescue - puts "Error fetching dependencies for #{repository_url}" - end - - def ignored_ecosystems - ['actions', 'docker', 'homebrew'] - end - - def dependency_packages - return [] unless dependencies.present? 
- dependencies.map{|d| d["dependencies"]}.flatten.select{|d| d['direct'] }.reject{|d| ignored_ecosystems.include?(d['ecosystem']) }.map{|d| [d['ecosystem'],d["package_name"].downcase]}.uniq - end - - def dependency_ecosystems - return [] unless dependencies.present? - dependencies.map{|d| d["dependencies"]}.flatten.select{|d| d['direct'] }.reject{|d| ignored_ecosystems.include?(d['ecosystem']) }.map{|d| d['ecosystem']}.uniq - end - - def fetch_dependent_repos - return unless packages.present? - dependent_repos = [] - packages.each do |package| - # TODO paginate - # TODO group dependencies by repo - dependent_repos_url = "https://repos.ecosyste.ms/api/v1/usage/#{package["ecosystem"]}/#{package["name"]}/dependencies" - conn = Faraday.new(url: dependent_repos_url) - response = conn.get - return unless response.success? - dependent_repos += JSON.parse(response.body) - end - self.dependent_repos = dependent_repos.uniq - self.save - end - def language return unless repository.present? repository['language'] @@ -488,62 +157,6 @@ def language_with_default language.presence || 'Unknown' end - def update_score - update_attribute :score, score_parts.sum - end - - def score_parts - [ - repository_score, - packages_score, - commits_score, - dependencies_score, - events_score - ] - end - - def repository_score - return 0 unless repository.present? - Math.log [ - (repository['stargazers_count'] || 0), - (repository['open_issues_count'] || 0) - ].sum - end - - def packages_score - return 0 unless packages.present? - Math.log [ - packages.map{|p| p["downloads"] || 0 }.sum, - packages.map{|p| p["dependent_packages_count"] || 0 }.sum, - packages.map{|p| p["dependent_repos_count"] || 0 }.sum, - packages.map{|p| p["docker_downloads_count"] || 0 }.sum, - packages.map{|p| p["docker_dependents_count"] || 0 }.sum, - packages.map{|p| p['maintainers'].map{|m| m['uuid'] } }.flatten.uniq.length - ].sum - end - - def commits_score - return 0 unless commits.present? 
- Math.log [ - (commits['total_committers'] || 0), - ].sum - end - - def dependencies_score - return 0 unless dependencies.present? - 0 - end - - def events_score - return 0 unless events.present? - 0 - end - - def language - return unless repository.present? - repository['language'] - end - def owner_name return unless repository.present? repository['owner'] @@ -554,48 +167,11 @@ def avatar_url repository['icon_url'] end - def matching_criteria? - good_topics? && external_users? && open_source_license? && active? - end - - def matching_topics - (keywords & Project.relevant_keywords) - end - - def no_bad_topics? - (keywords & Project.stop_words).blank? - end - - def good_topics? - matching_topics.length > 0 - end - - def packages_count - return 0 unless packages.present? - packages.length - end - - def monthly_downloads - return 0 unless packages.present? - packages.select{|p| p['downloads_period'] == 'last-month' }.map{|p| p["downloads"] || 0 }.sum - end - - def downloads - return 0 unless packages.present? - packages.map{|p| p["downloads"] || 0 }.sum - end - - def repository_license return nil unless repository.present? repository['license'] || repository.dig('metadata', 'files', 'license') end - def packages_licenses - return [] unless packages.present? - packages.map{|p| p['licenses'] }.compact - end - def readme_license return nil unless readme.present? readme_image_urls.select{|u| u.downcase.include?('license') }.any? @@ -605,31 +181,6 @@ def open_source_license? (packages_licenses + [repository_license] + [readme_license]).compact.uniq.any? end - def past_year_total_commits - return 0 unless commits.present? - commits['past_year_total_commits'] || 0 - end - - def past_year_total_commits_exclude_bots - return 0 unless commits.present? - past_year_total_commits - past_year_total_bot_commits - end - - def past_year_total_bot_commits - return 0 unless commits.present? - commits['past_year_total_bot_commits'].presence || 0 - end - - def commits_this_year? 
- return false unless repository.present? - if commits.present? - past_year_total_commits_exclude_bots > 0 - else - return false unless repository['pushed_at'].present? - repository['pushed_at'] > 1.year.ago - end - end - def archived? return false unless repository.present? repository['archived'] @@ -644,64 +195,6 @@ def fork? repository['fork'] end - def self.import_topic(topic) - resp = Faraday.get("https://repos.ecosyste.ms/api/v1/topics/#{ERB::Util.url_encode(topic)}?per_page=100&sort=created_at&order=desc") - if resp.status == 200 - data = JSON.parse(resp.body) - urls = data['repositories'].map{|p| p['html_url'] }.uniq.reject(&:blank?) - urls.each do |url| - existing_project = Project.find_by(url: url.downcase) - if existing_project.present? - #puts 'already exists' - else - project = Project.create(url: url.downcase) - project.sync_async - end - end - end - end - - def self.import_keyword(keyword) - resp = Faraday.get("https://packages.ecosyste.ms/api/v1/keywords/#{ERB::Util.url_encode(keyword)}?per_page=100&sort=created_at&order=desc") - if resp.status == 200 - data = JSON.parse(resp.body) - urls = data['packages'].reject{|p| p['status'].present? }.map{|p| p['repository_url'] }.uniq.reject(&:blank?) - urls.each do |url| - existing_project = Project.find_by(url: url.downcase) - if existing_project.present? - # puts 'already exists' - else - project = Project.create(url: url.downcase) - project.sync_async - end - end - end - end - - def self.import_org(host, org) - resp = Faraday.get("https://repos.ecosyste.ms/api/v1/hosts/#{host}/owners/#{org}/repositories?per_page=100") - if resp.status == 200 - data = JSON.parse(resp.body) - urls = data.map{|p| p['html_url'] }.uniq.reject(&:blank?) - urls.each do |url| - existing_project = Project.find_by(url: url) - if existing_project.present? - # puts 'already exists' - else - project = Project.create(url: url) - project.sync_async - end - end - end - end - - def citation_file_name - return unless repository.present? 
- return unless repository['metadata'].present? - return unless repository['metadata']['files'].present? - repository['metadata']['files']['citation'] - end - def download_url return unless repository.present? repository['download_url'] @@ -712,23 +205,6 @@ def archive_url(path) "https://archives.ecosyste.ms/api/v1/archives/contents?url=#{download_url}&path=#{path}" end - def fetch_citation_file - return unless citation_file_name.present? - return unless download_url.present? - conn = Faraday.new(url: archive_url(citation_file_name)) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - response = conn.get - return unless response.success? - json = JSON.parse(response.body) - - self.citation_file = json['contents'] - self.save - rescue - puts "Error fetching citation file for #{repository_url}" - end - def readme_file_name return unless repository.present? return unless repository['metadata'].present? @@ -758,47 +234,6 @@ def readme_url "#{repository['html_url']}/blob/#{repository['default_branch']}/#{readme_file_name}" end - def preprocessed_readme - return unless readme.present? - text = readme - # lowercase - text = text.downcase - # remove code blocks - text = text.gsub(/```.*?```/m, '') - # remove links - text = text.gsub(/\[.*?\]\(.*?\)/m, '') - # remove images - text = text.gsub(/!\[.*?\]\(.*?\)/m, '') - # remove headings - text = text.gsub(/#+.*?\n/m, '') - # remove lists - text = text.gsub(/-.*?\n/m, '') - # remove tables - text = text.gsub(/\|.*?\n/m, '') - # remove special characters - text = text.gsub(/[^a-z0-9\s]/i, '') - # newlines to spaces - text = text.gsub(/\n/, ' ') - # remove multiple spaces - text = text.gsub(/\s+/, ' ') - # remove leading and trailing spaces - text = text.strip - end - - def tokenized_readme - return unless preprocessed_readme.present? 
- - tokenizer = Tokenizers.from_pretrained("DWDMaiMai/tiktoken_cl100k_base") - tokenizer.encode(preprocessed_readme) - end - - def parse_citation_file - return unless citation_file.present? - CFF::Index.read(citation_file).as_json - rescue - puts "Error parsing citation file for #{repository_url}" - end - def blob_url(path) return unless repository.present? "#{repository['html_url']}/blob/#{repository['default_branch']}/#{path}" @@ -809,29 +244,8 @@ def raw_url(path) "#{repository['html_url']}/raw/#{repository['default_branch']}/#{path}" end - def commiter_domains - return unless commits.present? - return unless commits['committers'].present? - commits['committers'].map{|c| c['email'].split('@')[1].try(:downcase) }.reject{|e| e.nil? || ignored_domains.include?(e) || e.ends_with?('.local') || e.split('.').length ==1 }.group_by(&:itself).transform_values(&:count).sort_by{|k,v| v}.reverse - end - - def ignored_domains - ['users.noreply.github.com', "googlemail.com", "gmail.com", "hotmail.com", "outlook.com","yahoo.com","protonmail.com","web.de","example.com","live.com","icloud.com","hotmail.fr","yahoo.se","yahoo.fr"] - end - def funding_links - (package_funding_links + repo_funding_links + owner_funding_links + readme_funding_links).uniq - end - - def package_funding_links - return [] unless packages.present? - packages.map{|pkg| pkg['metadata']['funding'] }.compact.map{|f| f.is_a?(Hash) ? f['url'] : f }.flatten.compact - end - - def owner_funding_links - return [] if repository.blank? || repository['owner_record'].blank? || repository['owner_record']["metadata"].blank? 
- return [] unless repository['owner_record']["metadata"]['has_sponsors_listing'] - ["https://github.com/sponsors/#{repository['owner_record']['login']}"] + (repo_funding_links + readme_funding_links).uniq end def repo_funding_links @@ -900,37 +314,6 @@ def dois readme_doi_urls.map{|u| URI.parse(u).path.gsub(/^\//, '') }.uniq end - def fetch_works - works = {} - readme_doi_urls.each do |url| - openalex_url = "https://api.openalex.org/works/#{url}" - conn = Faraday.new(url: openalex_url) do |faraday| - faraday.response :follow_redirects - faraday.adapter Faraday.default_adapter - end - response = conn.get - if response.success? - works[url] = JSON.parse(response.body) - else - works[url] = nil - end - end - self.works = works - self.save - end - - def citation_counts - works.select{|k,v| v.present? }.map{|k,v| [k, v['counts_by_year'].map{|h| h["cited_by_count"]}.sum] }.to_h - end - - def total_citations - citation_counts.values.sum - end - - def first_work_citations - citation_counts.values.first - end - def readme_image_urls return [] unless readme.present? 
urls = readme.scan(/!\[.*?\]\((.*?)\)/).flatten.compact.uniq diff --git a/config/initializers/rswag-api.rb b/config/initializers/rswag-api.rb index 6ac48ecc..80c5b1f3 100644 --- a/config/initializers/rswag-api.rb +++ b/config/initializers/rswag-api.rb @@ -4,7 +4,7 @@ # This is used by the Swagger middleware to serve requests for API descriptions # NOTE: If you're using rswag-specs to generate Swagger, you'll need to ensure # that it's configured to generate files in the same folder - c.swagger_root = Rails.root.to_s + '/openapi' + c.openapi_root = Rails.root.to_s + '/openapi' # Inject a lamda function to alter the returned Swagger prior to serialization # The function will have access to the rack env for the current request diff --git a/config/initializers/rswag-ui.rb b/config/initializers/rswag-ui.rb index 21345f3e..5a97a00a 100644 --- a/config/initializers/rswag-ui.rb +++ b/config/initializers/rswag-ui.rb @@ -6,7 +6,7 @@ # NOTE: If you're using rspec-api to expose Swagger files (under swagger_root) as JSON or YAML endpoints, # then the list below should correspond to the relative paths for those endpoints - c.swagger_endpoint '/docs/api/v1/openapi.yaml', 'API V1 Docs' + c.openapi_endpoint '/docs/api/v1/openapi.yaml', 'API V1 Docs' # Add Basic Auth in case your API is private # c.basic_auth_enabled = true diff --git a/config/routes.rb b/config/routes.rb index 597ad373..f3f0337b 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -41,5 +41,5 @@ get '/422', to: 'errors#unprocessable' get '/500', to: 'errors#internal' - root "projects#index" + root "lists#index" end diff --git a/db/migrate/20240102201256_create_list_projects.rb b/db/migrate/20240102201256_create_list_projects.rb new file mode 100644 index 00000000..ceafbf07 --- /dev/null +++ b/db/migrate/20240102201256_create_list_projects.rb @@ -0,0 +1,14 @@ +class CreateListProjects < ActiveRecord::Migration[7.1] + def change + create_table :list_projects do |t| + t.integer :list_id + t.integer :project_id + 
t.string :name + t.string :description + t.string :category + t.string :sub_category + + t.timestamps + end + end +end diff --git a/db/migrate/20240102201648_create_projects.rb b/db/migrate/20240102201648_create_projects.rb new file mode 100644 index 00000000..59d57b53 --- /dev/null +++ b/db/migrate/20240102201648_create_projects.rb @@ -0,0 +1,13 @@ +class CreateProjects < ActiveRecord::Migration[7.1] + def change + create_table :projects do |t| + t.string :url + t.json :repository + t.text :readme + t.string :keywords, array: true, default: [] + t.datetime :last_synced_at + + t.timestamps + end + end +end diff --git a/db/schema.rb b/db/schema.rb index c09cad28..90d7f176 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,10 +10,21 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema[7.1].define(version: 2024_01_02_134157) do +ActiveRecord::Schema[7.1].define(version: 2024_01_02_201648) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" + create_table "list_projects", force: :cascade do |t| + t.integer "list_id" + t.integer "project_id" + t.string "name" + t.string "description" + t.string "category" + t.string "sub_category" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + end + create_table "lists", force: :cascade do |t| t.string "url" t.string "name" @@ -26,4 +37,14 @@ t.datetime "updated_at", null: false end + create_table "projects", force: :cascade do |t| + t.string "url" + t.json "repository" + t.text "readme" + t.string "keywords", default: [], array: true + t.datetime "last_synced_at" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + end + end diff --git a/test/fixtures/list_projects.yml b/test/fixtures/list_projects.yml new file mode 100644 index 00000000..7a222e0a --- /dev/null +++ b/test/fixtures/list_projects.yml @@ -0,0 +1,17 @@ +# Read about fixtures at 
https://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html + +one: + list_id: 1 + project_id: 1 + name: MyString + description: MyString + category: MyString + sub_category: MyString + +two: + list_id: 1 + project_id: 1 + name: MyString + description: MyString + category: MyString + sub_category: MyString diff --git a/test/models/list_project_test.rb b/test/models/list_project_test.rb new file mode 100644 index 00000000..741f2f9c --- /dev/null +++ b/test/models/list_project_test.rb @@ -0,0 +1,7 @@ +require "test_helper" + +class ListProjectTest < ActiveSupport::TestCase + # test "the truth" do + # assert true + # end +end