Unverified commit 4e6919b7 authored by Mike McQuaid, committed by GitHub

Merge pull request #12135 from Homebrew/dependabot/bundler/Library/Homebrew/nokogiri-1.12.5

build(deps): bump nokogiri from 1.11.7 to 1.12.5 in /Library/Homebrew
parents de476e83 35e0b1bf
@@ -56,15 +56,15 @@ GEM
mime-types (3.3.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2021.0901)
- mini_portile2 (2.5.3)
+ mini_portile2 (2.6.1)
minitest (5.14.4)
msgpack (1.4.2)
mustache (1.1.1)
net-http-digest_auth (1.4.1)
net-http-persistent (4.0.1)
connection_pool (~> 2.2)
- nokogiri (1.11.7)
- mini_portile2 (~> 2.5.0)
+ nokogiri (1.12.5)
+ mini_portile2 (~> 2.6.1)
racc (~> 1.4)
parallel (1.21.0)
parallel_tests (3.7.3)
......
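The Gemfile.lock hunk above is the core of the bump: nokogiri moves from 1.11.7 to 1.12.5, which in turn requires mini_portile2 ~> 2.6.1 instead of ~> 2.5.0. As a hedged sketch of how such a lockfile update is typically produced (the Gemfile below is illustrative, not Homebrew's actual Gemfile):

# Illustrative Gemfile: with a loosely pinned dependency like this,
# running `bundle update nokogiri` resolves to 1.12.5 and rewrites
# Gemfile.lock, pulling in mini_portile2 2.6.1 as the new transitive
# dependency shown above.
source "https://rubygems.org"

gem "nokogiri"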
@@ -50,6 +50,7 @@ module Homebrew
if args.update?
excluded_gems = [
"did_you_mean", # RBI file is already provided by Sorbet
"webrobots", # RBI file is bugged
]
ohai "Updating Tapioca RBI files..."
......
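The Ruby hunk above adds webrobots to the gems skipped during Tapioca updates because its generated RBI is broken. Purely as an illustration of how such an exclusion list is applied (this is not Homebrew's implementation; only the `tapioca gem NAME` invocation is taken from the file header further down):

# Illustrative sketch only -- not the code from this PR.
excluded_gems = [
  "did_you_mean", # RBI file is already provided by Sorbet
  "webrobots",    # RBI file is bugged
]

# Regenerate Tapioca RBIs for every bundled gem except the excluded ones.
all_gems = `bundle list --name-only`.split
(all_gems - excluded_gems).each do |gem_name|
  system "bundle", "exec", "tapioca", "gem", gem_name
end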
@@ -32,6 +32,9 @@ class MiniPortile
def patch_files; end
def patch_files=(_arg0); end
def path; end
+ def prepare_build_directory; end
+ def source_directory; end
+ def source_directory=(path); end
def target; end
def target=(_arg0); end
def version; end
......
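The three new MiniPortile signatures in the hunk above (prepare_build_directory, source_directory, source_directory=) arrive with mini_portile2 2.6, which can build a library from a local source tree instead of a downloaded tarball. A rough usage sketch based only on those signatures and the conventional MiniPortile recipe API; the library name, version, and path are placeholders:

require "mini_portile2"

# Hypothetical recipe: name, version, and path are made up for illustration.
recipe = MiniPortile.new("libexample", "1.2.3")
recipe.source_directory = "/path/to/unpacked/libexample"  # new in 2.6: build from a local tree
recipe.cook      # configure, compile, and install the recipe
recipe.activate  # prepend its paths for the current process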
# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `webrobots` gem.
# Please instead update this file by running `bin/tapioca gem webrobots`.
# typed: true
class Nokogiri::HTML::Document < ::Nokogiri::XML::Document
def fragment(tags = T.unsafe(nil)); end
def meta_encoding; end
def meta_encoding=(encoding); end
def meta_robots(custom_name = T.unsafe(nil)); end
def nofollow?(custom_name = T.unsafe(nil)); end
def noindex?(custom_name = T.unsafe(nil)); end
def serialize(options = T.unsafe(nil)); end
def title; end
def title=(text); end
def type; end
private
def meta_content_type; end
def parse_meta_robots(custom_name); end
def set_metadata_element(element); end
class << self
def new(*_arg0); end
def parse(string_or_io, url = T.unsafe(nil), encoding = T.unsafe(nil), options = T.unsafe(nil)); end
def read_io(_arg0, _arg1, _arg2, _arg3); end
def read_memory(_arg0, _arg1, _arg2, _arg3); end
end
end
class WebRobots
def initialize(user_agent, options = T.unsafe(nil)); end
def allowed?(url); end
def crawl_delay(url); end
def create_cache; end
def disallowed?(url); end
def error(url); end
def error!(url); end
def flush_cache; end
def option(url, token); end
def options(url); end
def reset(url); end
def sitemaps(url); end
def user_agent; end
private
def crawl_delay_handler(delay, last_checked_at); end
def fetch_robots_txt(site); end
def get_robots_txt(site); end
def http_get(uri); end
def robots_txt_for(url); end
def split_uri(url); end
end
class WebRobots::Error < ::StandardError; end
class WebRobots::ParseError < ::WebRobots::Error
def initialize(message, site); end
def site; end
def to_s; end
end
class WebRobots::RobotsTxt
def initialize(site, records, options = T.unsafe(nil)); end
def allow?(request_uri, user_agent = T.unsafe(nil)); end
def crawl_delay(user_agent = T.unsafe(nil)); end
def error; end
def error!; end
def error=(_arg0); end
def options(user_agent = T.unsafe(nil)); end
def site; end
def sitemaps; end
def timestamp; end
private
def find_record(user_agent = T.unsafe(nil)); end
def target(user_agent = T.unsafe(nil)); end
class << self
def unfetchable(site, reason, target = T.unsafe(nil)); end
end
end
class WebRobots::RobotsTxt::AccessControlLine < ::WebRobots::RobotsTxt::Line
def compile; end
def match?(request_uri); end
end
class WebRobots::RobotsTxt::AgentLine < ::WebRobots::RobotsTxt::Line
def compile; end
def pattern; end
end
class WebRobots::RobotsTxt::AllowLine < ::WebRobots::RobotsTxt::AccessControlLine
def allow?; end
end
class WebRobots::RobotsTxt::CrawlDelayLine < ::WebRobots::RobotsTxt::Line
def compile; end
def delay; end
end
WebRobots::RobotsTxt::DISALLOW_ALL = T.let(T.unsafe(nil), String)
class WebRobots::RobotsTxt::DisallowLine < ::WebRobots::RobotsTxt::AccessControlLine
def allow?; end
end
class WebRobots::RobotsTxt::ExtentionLine < ::WebRobots::RobotsTxt::Line; end
class WebRobots::RobotsTxt::Line
def initialize(token, value); end
def compile; end
def token; end
def value; end
end
class WebRobots::RobotsTxt::Parser < ::Racc::Parser
def initialize(target, crawl_delay_handler = T.unsafe(nil)); end
def _reduce_1(val, _values, result); end
def _reduce_17(val, _values, result); end
def _reduce_18(val, _values, result); end
def _reduce_19(val, _values, result); end
def _reduce_2(val, _values, result); end
def _reduce_20(val, _values, result); end
def _reduce_21(val, _values, result); end
def _reduce_24(val, _values, result); end
def _reduce_25(val, _values, result); end
def _reduce_26(val, _values, result); end
def _reduce_28(val, _values, result); end
def _reduce_31(val, _values, result); end
def _reduce_32(val, _values, result); end
def _reduce_38(val, _values, result); end
def _reduce_39(val, _values, result); end
def _reduce_40(val, _values, result); end
def _reduce_41(val, _values, result); end
def _reduce_none(val, _values, result); end
def next_token; end
def on_error(token_id, value, stack); end
def parse(input, site); end
def parse!(input, site); end
def parse_error(message); end
end
WebRobots::RobotsTxt::Parser::KNOWN_TOKENS = T.let(T.unsafe(nil), Array)
WebRobots::RobotsTxt::Parser::RE_KNOWN_TOKENS = T.let(T.unsafe(nil), Regexp)
WebRobots::RobotsTxt::Parser::Racc_arg = T.let(T.unsafe(nil), Array)
WebRobots::RobotsTxt::Parser::Racc_token_to_s_table = T.let(T.unsafe(nil), Array)
class WebRobots::RobotsTxt::Record
def initialize(agentlines, rulelines); end
def allow?(request_uri); end
def default?; end
def delay; end
def match?(user_agent); end
def options; end
end
module Webrobots; end
Webrobots::VERSION = T.let(T.unsafe(nil), String)
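The file above is the new Tapioca-generated RBI for the webrobots gem, a robots.txt parser. A short usage sketch built only from the signatures shown; the user agent and URL are illustrative:

require "webrobots"

robots = WebRobots.new("ExampleBot/1.0")
url = "https://example.com/some/page"

robots.allowed?(url)     # true if the site's robots.txt permits this agent to fetch the URL
robots.disallowed?(url)  # the negation of allowed?
robots.crawl_delay(url)  # Crawl-delay value for this agent, if the site declares one
robots.sitemaps(url)     # Sitemap URLs listed in the site's robots.txt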
@@ -9829,8 +9829,211 @@ class WeakRef
def initialize(orig); end
end
class WebRobots
def allowed?(url); end
def crawl_delay(url); end
def create_cache(); end
def disallowed?(url); end
def error(url); end
def error!(url); end
def flush_cache(); end
def initialize(user_agent, options=T.unsafe(nil)); end
def option(url, token); end
def options(url); end
def reset(url); end
def sitemaps(url); end
def user_agent(); end
end
class WebRobots::Error
end
class WebRobots::Error
end
class WebRobots::ParseError
def initialize(message, site); end
def site(); end
end
class WebRobots::ParseError
end
class WebRobots::RobotsTxt
def allow?(request_uri, user_agent=T.unsafe(nil)); end
def crawl_delay(user_agent=T.unsafe(nil)); end
def error(); end
def error!(); end
def error=(error); end
def initialize(site, records, options=T.unsafe(nil)); end
def options(user_agent=T.unsafe(nil)); end
def site(); end
def sitemaps(); end
def timestamp(); end
DISALLOW_ALL = ::T.let(nil, ::T.untyped)
end
class WebRobots::RobotsTxt::AccessControlLine
def match?(request_uri); end
end
class WebRobots::RobotsTxt::AccessControlLine
end
class WebRobots::RobotsTxt::AgentLine
def pattern(); end
end
class WebRobots::RobotsTxt::AgentLine
end
class WebRobots::RobotsTxt::AllowLine
def allow?(); end
end
class WebRobots::RobotsTxt::AllowLine
end
class WebRobots::RobotsTxt::CrawlDelayLine
def delay(); end
end
class WebRobots::RobotsTxt::CrawlDelayLine
end
class WebRobots::RobotsTxt::DisallowLine
def allow?(); end
end
class WebRobots::RobotsTxt::DisallowLine
end
class WebRobots::RobotsTxt::ExtentionLine
end
class WebRobots::RobotsTxt::ExtentionLine
end
class WebRobots::RobotsTxt::Line
def compile(); end
def initialize(token, value); end
def token(); end
def value(); end
end
class WebRobots::RobotsTxt::Line
end
class WebRobots::RobotsTxt::Parser
def _reduce_1(val, _values, result); end
def _reduce_17(val, _values, result); end
def _reduce_18(val, _values, result); end
def _reduce_19(val, _values, result); end
def _reduce_2(val, _values, result); end
def _reduce_20(val, _values, result); end
def _reduce_21(val, _values, result); end
def _reduce_24(val, _values, result); end
def _reduce_25(val, _values, result); end
def _reduce_26(val, _values, result); end
def _reduce_28(val, _values, result); end
def _reduce_31(val, _values, result); end
def _reduce_32(val, _values, result); end
def _reduce_38(val, _values, result); end
def _reduce_39(val, _values, result); end
def _reduce_40(val, _values, result); end
def _reduce_41(val, _values, result); end
def _reduce_none(val, _values, result); end
def initialize(target, crawl_delay_handler=T.unsafe(nil)); end
def on_error(token_id, value, stack); end
def parse(input, site); end
def parse!(input, site); end
def parse_error(message); end
KNOWN_TOKENS = ::T.let(nil, ::T.untyped)
RE_KNOWN_TOKENS = ::T.let(nil, ::T.untyped)
Racc_arg = ::T.let(nil, ::T.untyped)
Racc_debug_parser = ::T.let(nil, ::T.untyped)
Racc_token_to_s_table = ::T.let(nil, ::T.untyped)
end
class WebRobots::RobotsTxt::Parser
end
class WebRobots::RobotsTxt::Record
def allow?(request_uri); end
def default?(); end
def delay(); end
def initialize(agentlines, rulelines); end
def match?(user_agent); end
def options(); end
end
class WebRobots::RobotsTxt::Record
end
class WebRobots::RobotsTxt
def self.unfetchable(site, reason, target=T.unsafe(nil)); end
end
class WebRobots
end
module Webrobots
VERSION = ::T.let(nil, ::T.untyped)
end
module Webrobots
end
class Zlib::Deflate
......
@@ -45,10 +45,10 @@ $:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mime-types-data-3.202
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mime-types-3.3.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/net-http-digest_auth-1.4.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/net-http-persistent-4.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mini_portile2-2.5.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mini_portile2-2.6.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/x86_64-darwin-14/2.6.0-static/racc-1.5.2"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/racc-1.5.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/nokogiri-1.11.7-x86_64-darwin/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/nokogiri-1.12.5-x86_64-darwin/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubyntlm-0.6.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/webrick-1.7.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/webrobots-0.1.2/lib"
......
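The $:.unshift lines above are the vendored-gem load paths in a Bundler standalone-style setup file; bumping them is what actually makes the updated nokogiri load at runtime. A sketch of how such a setup file is consumed (the relative path is an assumption, not taken from this diff):

# Assumed layout: requiring the generated setup file prepends the vendored
# gem directories (including nokogiri-1.12.5-x86_64-darwin/lib) to $LOAD_PATH,
# so a plain require now resolves to the bumped version.
require_relative "vendor/bundle/bundler/setup"

require "nokogiri"
puts Nokogiri::VERSION  # expected to print 1.12.5 once this change is vendored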