# Ruby Examples
Code examples for integrating WikiRest with Ruby applications.
## Installation
Using the standard library (no gems required):
```ruby
require 'net/http'
require 'json'
require 'uri'
```

Or with the HTTParty gem for cleaner code:

```bash
gem install httparty
```
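If you manage dependencies with Bundler, add the gem to your Gemfile instead:

```ruby
# Gemfile
gem 'httparty'
```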
## API Client

### Basic client class
```ruby
require 'net/http'
require 'json'
require 'uri'

class WikiRestClient
  BASE_URL = 'https://api.wikirest.com/v1'

  def initialize(api_key)
    @api_key = api_key
  end

  def search(query, limit: 10)
    params = { q: query, limit: limit }
    request(:get, '/search', params)
  end

  def get_chunk(id)
    request(:get, "/chunk/#{id}")
  end

  def get_page(page_id, format: 'chunks')
    params = { format: format }
    request(:get, "/page/#{page_id}", params)
  end

  def get_changes(since: nil, limit: 100)
    params = { limit: limit }
    params[:since] = since if since
    request(:get, '/changes', params)
  end

  private

  # Build and send the HTTP request, raising on any non-2xx response
  # and returning the parsed JSON body on success.
  def request(method, path, params = {})
    uri = URI("#{BASE_URL}#{path}")
    uri.query = URI.encode_www_form(params) unless params.empty?

    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = true

    request = case method
              when :get then Net::HTTP::Get.new(uri)
              when :post then Net::HTTP::Post.new(uri)
              end
    request['X-API-Key'] = @api_key
    request['Accept'] = 'application/json'

    response = http.request(request)
    raise "API Error: #{response.body}" unless response.is_a?(Net::HTTPSuccess)

    JSON.parse(response.body)
  end
end
```
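The client also wraps the `/changes` endpoint. A minimal usage sketch — the `changes`, `page_id`, and `title` response fields here are assumptions, as is the ISO 8601 timestamp format, so check them against the API reference:

```ruby
client = WikiRestClient.new(ENV['WIKIREST_API_KEY'])

# Poll for recently changed pages (response shape assumed, not confirmed)
changes = client.get_changes(since: '2024-01-01T00:00:00Z', limit: 50)
changes['changes'].each do |change|
  puts "#{change['page_id']}: #{change['title']}"
end
```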
## Search

### Search Wikipedia
```ruby
client = WikiRestClient.new(ENV['WIKIREST_API_KEY'])

# Basic search
results = client.search('quantum computing', limit: 5)
puts "Found #{results['estimatedTotalHits']} results"

results['hits'].each do |hit|
  puts "- #{hit['title']}: #{hit['text'][0..100]}..."
end
```

### Search with error handling
```ruby
def safe_search(client, query)
  results = client.search(query)
  results['hits']
rescue StandardError => e
  puts "Search failed: #{e.message}"
  []
end

hits = safe_search(client, 'artificial intelligence')
hits.each { |hit| puts hit['title'] }
```
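For transient failures such as timeouts, a retry with backoff is a common complement to the rescue above. A minimal sketch — note that `WikiRestClient#request` raises the same error for every non-2xx status, so this retries all failures indiscriminately:

```ruby
# Retry up to `attempts` times with exponential backoff (2s, 4s, ...)
def search_with_retry(client, query, attempts: 3)
  tries = 0
  begin
    client.search(query)
  rescue StandardError
    tries += 1
    raise if tries >= attempts
    sleep(2**tries)
    retry
  end
end
```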
## Working with Chunks

### Get a specific chunk
```ruby
chunk = client.get_chunk('12345_3')
puts "Title: #{chunk['title']}"
puts "Section: #{chunk['section']}"
puts "Text: #{chunk['text']}"
puts "URL: #{chunk['url']}"
```

### Process multiple chunks
```ruby
# Get chunks from search results
results = client.search('machine learning', limit: 10)
chunks = results['hits'].map do |hit|
  {
    id: hit['id'],
    title: hit['title'],
    section: hit['section'],
    text: hit['text'],
    url: hit['url']
  }
end

# Process chunks
chunks.each do |chunk|
  puts "Processing: #{chunk[:title]} - #{chunk[:section]}"
end
```
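Search often returns several chunks from the same article. A small sketch using plain `Enumerable#group_by` to keep related sections together:

```ruby
# Group chunks by their parent article title
chunks.group_by { |chunk| chunk[:title] }.each do |title, page_chunks|
  puts "#{title} (#{page_chunks.size} chunks)"
  page_chunks.each { |c| puts "  - #{c[:section]}" }
end
```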
## Working with Pages

### Get full page content
```ruby
page = client.get_page(12345)
puts "Page: #{page['title']}"
puts "URL: #{page['url']}"
puts "Total chunks: #{page['total_chunks']}"

page['chunks'].each_with_index do |chunk, i|
  puts "\n--- Chunk #{i + 1} ---"
  puts "Section: #{chunk['section']}"
  puts chunk['text'][0..200]
end
```

### Get concatenated text
```ruby
page = client.get_page(12345, format: 'concat')
puts "Full article text:"
puts page['text']
puts "\nWord count: #{page['word_count']}"
```

## Using HTTParty
### Cleaner implementation with HTTParty
```ruby
require 'httparty'

class WikiRest
  include HTTParty
  base_uri 'https://api.wikirest.com/v1'

  def initialize(api_key)
    @api_key = api_key
    @options = {
      headers: {
        'X-API-Key' => api_key,
        'Accept' => 'application/json'
      }
    }
  end

  def search(query, limit: 10)
    self.class.get('/search', @options.merge(
      query: { q: query, limit: limit }
    ))
  end

  def chunk(id)
    self.class.get("/chunk/#{id}", @options)
  end

  def page(page_id, format: 'chunks')
    self.class.get("/page/#{page_id}", @options.merge(
      query: { format: format }
    ))
  end
end

# Usage
client = WikiRest.new(ENV['WIKIREST_API_KEY'])
results = client.search('ruby programming')
results['hits'].each { |hit| puts hit['title'] }
```
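Unlike the `Net::HTTP` client above, this class returns whatever HTTParty parses without checking the status code. A sketch of a guarded variant of `search` (the error message format is illustrative):

```ruby
def search(query, limit: 10)
  response = self.class.get('/search', @options.merge(
    query: { q: query, limit: limit }
  ))
  # HTTParty::Response#success? is true for any 2xx status
  raise "API Error: #{response.code} #{response.body}" unless response.success?
  response.parsed_response
end
```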
## Rails Integration

### Service object pattern
```ruby
# app/services/wiki_search_service.rb
class WikiSearchService
  def initialize
    @client = WikiRestClient.new(Rails.application.credentials.wikirest_api_key)
  end

  def search(query, limit: 10)
    Rails.cache.fetch("wiki_search/#{query}/#{limit}", expires_in: 1.hour) do
      @client.search(query, limit: limit)
    end
  end

  def get_article(page_id)
    Rails.cache.fetch("wiki_page/#{page_id}", expires_in: 1.day) do
      @client.get_page(page_id, format: 'concat')
    end
  end
end

# In a controller
class SearchController < ApplicationController
  def index
    @results = WikiSearchService.new.search(params[:q])
  end
end
```
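`get_article` plugs into a controller the same way; the `ArticlesController` name and `params[:id]` routing here are illustrative:

```ruby
class ArticlesController < ApplicationController
  def show
    @article = WikiSearchService.new.get_article(params[:id])
  end
end
```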
### Background job for batch processing

```ruby
# app/jobs/wiki_sync_job.rb
class WikiSyncJob < ApplicationJob
  queue_as :default

  def perform(page_ids)
    client = WikiRestClient.new(Rails.application.credentials.wikirest_api_key)
    page_ids.each do |page_id|
      page = client.get_page(page_id, format: 'concat')
      Article.find_or_initialize_by(wiki_page_id: page_id).update!(
        title: page['title'],
        content: page['text'],
        wiki_url: page['url'],
        last_synced_at: Time.current
      )
    rescue StandardError => e
      Rails.logger.error "Failed to sync page #{page_id}: #{e.message}"
    end
  end
end

# Enqueue the job
WikiSyncJob.perform_later([12345, 67890, 11111])
```
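The `/changes` endpoint pairs naturally with this job for incremental syncs. A sketch, assuming (as in the polling example earlier) that the response exposes a `changes` array whose entries carry a `page_id` — verify against the actual schema:

```ruby
# Enqueue a sync for every page changed in the last day
client = WikiRestClient.new(Rails.application.credentials.wikirest_api_key)
changes = client.get_changes(since: 1.day.ago.iso8601) # response shape assumed
page_ids = changes['changes'].map { |c| c['page_id'] }
WikiSyncJob.perform_later(page_ids) if page_ids.any?
```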