| input (string, 109–5.2k chars) | output (string, 7–509 chars) |
|---|---|
Summarize the following code: def dispatch_create_item!(msg)
if msg.has_attachments?
draft = msg.draft
msg.draft = true
resp = validate_created_item(ews.create_item(msg.to_ews))
msg.file_attachments.each do |f|
next unless f.kind_of?(File)
resp.add_file_attachment(f)
end
if draft
resp.submit_attachments!
resp
else
resp.submit!
end
else
resp = ews.create_item(msg.to_ews)
validate_created_item resp
end
end
|
Handles the CreateItem call for Forward, ReplyTo, and ReplyAllTo. It will handle the necessary actions for adding attachments.
|
Summarize the following code: def validate_created_item(response)
msg = response.response_messages[0]
if(msg.status == 'Success')
msg.items.empty? ? true : parse_created_item(msg.items.first)
else
raise EwsCreateItemError, "#{msg.code}: #{msg.message_text}"
end
end
|
Validate the CreateItem response.
|
Summarize the following code: def items_since(date_time, opts = {})
opts = opts.clone
unless date_time.kind_of?(Date)
raise EwsBadArgumentError, "First argument must be a Date or DateTime"
end
restr = {:restriction =>
{:is_greater_than_or_equal_to =>
[{:field_uRI => {:field_uRI=>'item:DateTimeReceived'}},
{:field_uRI_or_constant =>{:constant => {:value=>date_time.to_datetime}}}]
}}
items(opts.merge(restr))
end
|
Fetch items since a given DateTime.
|
Summarize the following code: def items_between(start_date, end_date, opts={})
items do |obj|
obj.restriction = { :and =>
[
{:is_greater_than_or_equal_to =>
[
{:field_uRI => {:field_uRI=>'item:DateTimeReceived'}},
{:field_uRI_or_constant=>{:constant => {:value =>start_date}}}
]
},
{:is_less_than_or_equal_to =>
[
{:field_uRI => {:field_uRI=>'item:DateTimeReceived'}},
{:field_uRI_or_constant=>{:constant => {:value =>end_date}}}
]
}
]
}
end
end
|
Fetch items between a given start and end date.
|
Summarize the following code: def search_by_subject(match_str, exclude_str = nil)
items do |obj|
match = {:contains => {
:containment_mode => 'Substring',
:containment_comparison => 'IgnoreCase',
:field_uRI => {:field_uRI=>'item:Subject'},
:constant => {:value =>match_str}
}}
unless exclude_str.nil?
excl = {:not =>
{:contains => {
:containment_mode => 'Substring',
:containment_comparison => 'IgnoreCase',
:field_uRI => {:field_uRI=>'item:Subject'},
:constant => {:value =>exclude_str}
}}
}
match[:and] = [{:contains => match.delete(:contains)}, excl]
end
obj.restriction = match
end
end
|
Search on the item subject
|
Summarize the following code: def sync_items!(sync_state = nil, sync_amount = 256, sync_all = false, opts = {})
item_shape = opts.has_key?(:item_shape) ? opts.delete(:item_shape) : {:base_shape => :default}
sync_state ||= @sync_state
resp = ews.sync_folder_items item_shape: item_shape,
sync_folder_id: self.folder_id, max_changes_returned: sync_amount, sync_state: sync_state
rmsg = resp.response_messages[0]
if rmsg.success?
@synced = rmsg.includes_last_item_in_range?
@sync_state = rmsg.sync_state
rhash = {}
rmsg.changes.each do |c|
ctype = c.keys.first
rhash[ctype] = [] unless rhash.has_key?(ctype)
if ctype == :delete || ctype == :read_flag_change
rhash[ctype] << c[ctype][:elems][0][:item_id][:attribs]
else
type = c[ctype][:elems][0].keys.first
item = class_by_name(type).new(ews, c[ctype][:elems][0][type])
rhash[ctype] << item
end
end
rhash
else
raise EwsError, "Could not synchronize: #{rmsg.code}: #{rmsg.message_text}"
end
end
|
Synchronize items in this folder. If this method is issued multiple times it will continue where the last sync completed.
|
Summarize the following code: def subscribe(evtypes = [:all], watermark = nil, timeout = 240)
# Refresh the subscription if already subscribed
unsubscribe if subscribed?
event_types = normalize_event_names(evtypes)
folder = {id: self.id, change_key: self.change_key}
resp = ews.pull_subscribe_folder(folder, event_types, timeout, watermark)
rmsg = resp.response_messages.first
if rmsg.success?
@subscription_id = rmsg.subscription_id
@watermark = rmsg.watermark
true
else
raise EwsSubscriptionError, "Could not subscribe: #{rmsg.code}: #{rmsg.message_text}"
end
end
|
Subscribe this folder to events. This method initiates an Exchange pull-type subscription.
|
Summarize the following code: def unsubscribe
return true if @subscription_id.nil?
resp = ews.unsubscribe(@subscription_id)
rmsg = resp.response_messages.first
if rmsg.success?
@subscription_id, @watermark = nil, nil
true
else
raise EwsSubscriptionError, "Could not unsubscribe: #{rmsg.code}: #{rmsg.message_text}"
end
end
|
Unsubscribe this folder from further Exchange events .
|
Summarize the following code: def get_events
begin
if subscribed?
resp = ews.get_events(@subscription_id, @watermark)
rmsg = resp.response_messages[0]
@watermark = rmsg.new_watermark
# @todo if parms[:more_events] # get more events
rmsg.events.collect{|ev|
type = ev.keys.first
class_by_name(type).new(ews, ev[type])
}
else
raise EwsSubscriptionError, "Folder <#{self.display_name}> not subscribed to. Issue a Folder#subscribe before checking events."
end
rescue EwsSubscriptionTimeout => e
@subscription_id, @watermark = nil, nil
raise e
end
end
|
Checks a subscribed folder for events
|
Summarize the following code: def get_folder(opts = {})
args = get_folder_args(opts)
resp = ews.get_folder(args)
get_folder_parser(resp)
end
|
Get a specific folder by its ID .
|
Summarize the following code: def get_time_zones(full = false, ids = nil)
req = build_soap! do |type, builder|
unless type == :header
builder.get_server_time_zones!(full: full, ids: ids)
end
end
result = do_soap_request req, response_class: EwsSoapResponse
if result.success?
zones = []
result.response_messages.each do |message|
elements = message[:get_server_time_zones_response_message][:elems][:time_zone_definitions][:elems]
elements.each do |definition|
data = {
id: definition[:time_zone_definition][:attribs][:id],
name: definition[:time_zone_definition][:attribs][:name]
}
zones << OpenStruct.new(data)
end
end
zones
else
raise EwsError, "Could not get time zones"
end
end
|
Request the list of time zones known to the server.
|
Summarize the following code: def copy_folder(to_folder_id, *sources)
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.CopyFolder {
builder.nbuild.parent.default_namespace = @default_ns
builder.to_folder_id!(to_folder_id)
builder.folder_ids!(sources.flatten)
}
end
end
do_soap_request(req)
end
|
Defines a request to copy folders in the Exchange store
|
Summarize the following code: def move_folder(to_folder_id, *sources)
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.MoveFolder {
builder.nbuild.parent.default_namespace = @default_ns
builder.to_folder_id!(to_folder_id)
builder.folder_ids!(sources.flatten)
}
end
end
do_soap_request(req)
end
|
Defines a request to move folders in the Exchange store
|
Summarize the following code: def update_folder(folder_changes)
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.UpdateFolder {
builder.nbuild.parent.default_namespace = @default_ns
builder.nbuild.FolderChanges {
folder_changes.each do |fc|
builder[NS_EWS_TYPES].FolderChange {
builder.dispatch_folder_id!(fc)
builder[NS_EWS_TYPES].Updates {
# @todo finish implementation
}
}
end
}
}
end
end
do_soap_request(req)
end
|
Update properties for a specified folder. There is a lot more building in this method because most of the builders are only used for this operation, so there was no need to externalize them for re-use.
|
Summarize the following code: def empty_folder(opts)
validate_version(VERSION_2010_SP1)
ef_opts = {}
[:delete_type, :delete_sub_folders].each do |k|
ef_opts[camel_case(k)] = validate_param(opts, k, true)
end
fids = validate_param opts, :folder_ids, true
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.EmptyFolder(ef_opts) {|x|
builder.nbuild.parent.default_namespace = @default_ns
builder.folder_ids!(fids)
}
end
end
do_soap_request(req)
end
|
Empties folders in a mailbox .
|
Summarize the following code: def create_attachment(opts)
opts = opts.clone
[:parent_id].each do |k|
validate_param(opts, k, true)
end
validate_param(opts, :files, false, [])
validate_param(opts, :items, false, [])
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.CreateAttachment {|x|
builder.nbuild.parent.default_namespace = @default_ns
builder.parent_item_id!(opts[:parent_id])
x.Attachments {
opts[:files].each do |fa|
builder.file_attachment!(fa)
end
opts[:items].each do |ia|
builder.item_attachment!(ia)
end
opts[:inline_files].each do |fi|
builder.inline_attachment!(fi)
end
}
}
end
end
do_soap_request(req, response_class: EwsResponse)
end
|
Creates either an item or file attachment and attaches it to the specified item .
|
Summarize the following code: def resolve_names(opts)
opts = opts.clone
fcd = opts.has_key?(:full_contact_data) ? opts[:full_contact_data] : true
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.ResolveNames {|x|
x.parent['ReturnFullContactData'] = fcd.to_s
x.parent['SearchScope'] = opts[:search_scope] if opts[:search_scope]
x.parent.default_namespace = @default_ns
# @todo builder.nbuild.ParentFolderIds
x.UnresolvedEntry(opts[:name])
}
end
end
do_soap_request(req)
end
|
Resolve ambiguous e-mail addresses and display names.
|
Summarize the following code: def convert_id(opts)
opts = opts.clone
[:id, :format, :destination_format, :mailbox ].each do |k|
validate_param(opts, k, true)
end
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.ConvertId {|x|
builder.nbuild.parent.default_namespace = @default_ns
x.parent['DestinationFormat'] = opts[:destination_format].to_s.camel_case
x.SourceIds { |x|
x[NS_EWS_TYPES].AlternateId { |x|
x.parent['Format'] = opts[:format].to_s.camel_case
x.parent['Id'] = opts[:id]
x.parent['Mailbox'] = opts[:mailbox]
}
}
}
end
end
do_soap_request(req, response_class: EwsResponse)
end
|
Converts item and folder identifiers between formats .
|
Summarize the following code: def create_item(attributes, to_ews_create_opts = {})
template = Viewpoint::EWS::Template::CalendarItem.new attributes
template.saved_item_folder_id = {id: self.id, change_key: self.change_key}
rm = ews.create_item(template.to_ews_create(to_ews_create_opts)).response_messages.first
if rm && rm.success?
CalendarItem.new ews, rm.items.first[:calendar_item][:elems].first
else
raise EwsCreateItemError, "Could not create item in folder. #{rm.code}: #{rm.message_text}" unless rm
end
end
|
Creates a new appointment
|
Summarize the following code: def get_user_availability(opts)
opts = opts.clone
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.GetUserAvailabilityRequest {|x|
x.parent.default_namespace = @default_ns
builder.time_zone!(opts[:time_zone])
builder.nbuild.MailboxDataArray {
opts[:mailbox_data].each do |mbd|
builder.mailbox_data!(mbd)
end
}
builder.free_busy_view_options!(opts[:free_busy_view_options])
builder.suggestions_view_options!(opts[:suggestions_view_options])
}
end
end
do_soap_request(req, response_class: EwsSoapFreeBusyResponse)
end
|
Provides detailed information about the availability of a set of users, rooms, and resources within a specified time window.
|
Summarize the following code: def get_rooms(roomDistributionList)
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.GetRooms {|x|
x.parent.default_namespace = @default_ns
builder.room_list!(roomDistributionList)
}
end
end
do_soap_request(req, response_class: EwsSoapRoomResponse)
end
|
Gets the rooms that are in the specified room distribution list
|
Summarize the following code: def get_room_lists
req = build_soap! do |type, builder|
if(type == :header)
else
builder.room_lists!
end
end
do_soap_request(req, response_class: EwsSoapRoomlistResponse)
end
|
Gets the room lists that are available within the Exchange organization .
|
Summarize the following code: def validate_version(exchange_version)
if server_version < exchange_version
msg = 'The operation you are attempting to use is not compatible with'
msg << " your configured Exchange Server version(#{server_version})."
msg << " You must be running at least version (#{exchange_version})."
raise EwsServerVersionError, msg
end
end
|
Some operations only exist for certain versions of Exchange Server. This method should be called with the required version; an exception is raised if the currently configured server version is older.
|
Summarize the following code: def build_soap!(&block)
opts = { :server_version => server_version, :impersonation_type => impersonation_type, :impersonation_mail => impersonation_address }
opts[:time_zone_context] = @time_zone_context if @time_zone_context
EwsBuilder.new.build!(opts, &block)
end
|
Build the common elements in the SOAP message and yield to any custom elements .
|
Summarize the following code: def camel_case(input)
input.to_s.split(/_/).map { |i|
i.sub(/^./) { |s| s.upcase }
}.join
end
|
Change a ruby_cased string to CamelCased
|
Summarize the following code: def iso8601_duration_to_seconds(input)
return nil if input.nil? || input.empty?
match_data = DURATION_RE.match(input)
raise(StringFormatException, "Invalid duration given") if match_data.nil?
duration = 0
duration += match_data[:weeks].to_i * 604800
duration += match_data[:days].to_i * 86400
duration += match_data[:hours].to_i * 3600
duration += match_data[:minutes].to_i * 60
duration += match_data[:seconds].to_i
end
|
Convert an ISO8601 Duration format to seconds
|
Summarize the following code: def subscribe(subscriptions)
req = build_soap! do |type, builder|
if(type == :header)
else
builder.nbuild.Subscribe {
builder.nbuild.parent.default_namespace = @default_ns
subscriptions.each do |sub|
subtype = sub.keys.first
if(builder.respond_to?(subtype))
builder.send subtype, sub[subtype]
else
raise EwsBadArgumentError, "Bad subscription type. #{subtype}"
end
end
}
end
end
do_soap_request(req, response_class: EwsResponse)
end
|
Used to subscribe client applications to either push, pull, or stream notifications.
|
Summarize the following code: def push_subscribe_folder(folder, evtypes, url, status_frequency = nil, watermark = nil)
status_frequency ||= 30
psr = {
:subscribe_to_all_folders => false,
:folder_ids => [ {:id => folder[:id], :change_key => folder[:change_key]} ],
:event_types=> evtypes,
:status_frequency => status_frequency,
:uRL => url.to_s
}
psr[:watermark] = watermark if watermark
subscribe([{push_subscription_request: psr}])
end
|
Create a push subscription to a single folder
|
Summarize the following code: def coerce(*)
return nil if value.nil?
return value if value.is_a?(Date)
DateTime.parse(value)
end
|
Coerces value to DateTime
|
Summarize the following code: def incoming_references(client = nil, query = {})
return false unless client
query = is_a?(Contentful::Entry) ? query.merge(links_to_entry: id) : query.merge(links_to_asset: id)
client.entries(query)
end
|
Gets a collection of entries which link to the current entry.
|
Summarize the following code: def entry(id, query = {})
normalize_select!(query)
query['sys.id'] = id
entries = Request.new(self, environment_url('/entries'), query).get
return entries if configuration[:raw_mode]
entries.first
end
|
Gets a specific entry
|
Summarize the following code: def os_info
os_name = case ::RbConfig::CONFIG['host_os']
when /(cygwin|mingw|mswin|windows)/i then 'Windows'
when /(darwin|macruby|mac os)/i then 'macOS'
when /(linux|bsd|aix|solarix)/i then 'Linux'
end
{ name: os_name, version: Gem::Platform.local.version }
end
|
Returns the X-Contentful-User-Agent OS data.
|
Summarize the following code: def contentful_user_agent
header = {
'sdk' => sdk_info,
'app' => app_info,
'integration' => integration_info,
'platform' => platform_info,
'os' => os_info
}
result = []
header.each do |key, values|
next unless values[:name]
result << format_user_agent_header(key, values)
end
result.join(' ')
end
|
Returns the X-Contentful-User-Agent header.
|
Summarize the following code: def request_headers
headers = { 'X-Contentful-User-Agent' => contentful_user_agent }
headers['Authorization'] = "Bearer #{configuration[:access_token]}" if configuration[:authentication_mechanism] == :header
headers['Content-Type'] = "application/vnd.contentful.delivery.v#{configuration[:api_version].to_i}+json" if configuration[:api_version]
headers['Accept-Encoding'] = 'gzip' if configuration[:gzip_encoded]
headers
end
|
Returns the headers used for the HTTP requests
|
Summarize the following code: def run_request(request)
url = request.absolute? ? request.url : base_url + request.url
logger.info(request: { url: url, query: request.query, header: request_headers }) if logger
Response.new(
self.class.get_http(
url,
request_query(request.query),
request_headers,
proxy_params,
timeout_params
), request
)
end
|
Runs request and parses Response
|
Summarize the following code: def do_build_resource(response)
logger.debug(response: response) if logger
configuration[:resource_builder].new(
response.object,
configuration.merge(endpoint: response.request.endpoint),
(response.request.query || {}).fetch(:locale, nil) == '*',
0
).run
end
|
Runs Resource Builder
|
Summarize the following code: def resolve(client, query = {})
id_and_query = [(id unless link_type == 'Space')].compact + [query]
client.public_send(
Contentful::Support.snakify(link_type).to_sym,
*id_and_query
)
end
|
Queries Contentful for the resource the Link is referring to. Takes an optional query hash.
|
Summarize the following code: def fields_with_locales
remapped_fields = {}
locales.each do |locale|
fields(locale).each do |name, value|
remapped_fields[name] ||= {}
remapped_fields[name][locale.to_sym] = value
end
end
remapped_fields
end
|
Returns all fields of the asset with locales nested by field
|
Summarize the following code: def reload(client = nil)
return client.send(Support.snakify(self.class.name.split('::').last), id) unless client.nil?
false
end
|
Issues the request that was made to fetch this response again. Only works for Entry, Asset, ContentType, and Space.
|
Summarize the following code: def field_for(field_id)
fields.detect { |f| Support.snakify(f.id) == Support.snakify(field_id) }
end
|
Field definition for the given field.
|
Summarize the following code: def each_page
page = first_page
yield page if block_given?
until completed?
page = page.next_page
yield page if block_given?
end
end
|
Iterates over all pages of the current sync
|
Summarize the following code: def coerce(value, configuration)
return value if type.nil?
return value if value.nil?
options = {}
options[:coercion_class] = KNOWN_TYPES[items.type] unless items.nil?
KNOWN_TYPES[type].new(value, options).coerce(configuration)
end
|
Coerces value to proper type
|
Summarize the following code: def set_ext_params(ext_key, ext_params)
raise ArgumentError unless ext_params.is_a?(Hash)
@extensions[ext_key] = ext_params
end
|
Set the extension parameters for a specific vendor
|
Summarize the following code: def process_xml(xml)
doc = REXML::Document.new xml
if root = REXML::XPath.first(doc, 'xmlns:cartridge_basiclti_link')
@title = get_node_text(root, 'blti:title')
@description = get_node_text(root, 'blti:description')
@launch_url = get_node_text(root, 'blti:launch_url')
@secure_launch_url = get_node_text(root, 'blti:secure_launch_url')
@icon = get_node_text(root, 'blti:icon')
@secure_icon = get_node_text(root, 'blti:secure_icon')
@cartridge_bundle = get_node_att(root, 'xmlns:cartridge_bundle', 'identifierref')
@cartridge_icon = get_node_att(root, 'xmlns:cartridge_icon', 'identifierref')
if vendor = REXML::XPath.first(root, 'blti:vendor')
@vendor_code = get_node_text(vendor, 'lticp:code')
@vendor_description = get_node_text(vendor, 'lticp:description')
@vendor_name = get_node_text(vendor, 'lticp:name')
@vendor_url = get_node_text(vendor, 'lticp:url')
@vendor_contact_email = get_node_text(vendor, '//lticp:contact/lticp:email')
@vendor_contact_name = get_node_text(vendor, '//lticp:contact/lticp:name')
end
if custom = REXML::XPath.first(root, 'blti:custom', LTI_NAMESPACES)
set_properties(@custom_params, custom)
end
REXML::XPath.each(root, 'blti:extensions', LTI_NAMESPACES) do |vendor_ext_node|
platform = vendor_ext_node.attributes['platform']
properties = {}
set_properties(properties, vendor_ext_node)
REXML::XPath.each(vendor_ext_node, 'lticm:options', LTI_NAMESPACES) do |options_node|
opt_name = options_node.attributes['name']
options = {}
set_properties(options, options_node)
properties[opt_name] = options
end
self.set_ext_params(platform, properties)
end
end
end
|
Parse tool configuration data out of the Common Cartridge LTI link XML
|
Summarize the following code: def to_xml(opts = {})
builder = Builder::XmlMarkup.new(:indent => opts[:indent] || 0)
builder.instruct!
builder.cartridge_basiclti_link("xmlns" => "http://www.imsglobal.org/xsd/imslticc_v1p0",
"xmlns:blti" => 'http://www.imsglobal.org/xsd/imsbasiclti_v1p0',
"xmlns:lticm" => 'http://www.imsglobal.org/xsd/imslticm_v1p0',
"xmlns:lticp" => 'http://www.imsglobal.org/xsd/imslticp_v1p0',
"xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance",
"xsi:schemaLocation" => "http://www.imsglobal.org/xsd/imslticc_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticc_v1p0.xsd http://www.imsglobal.org/xsd/imsbasiclti_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imsbasiclti_v1p0p1.xsd http://www.imsglobal.org/xsd/imslticm_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticm_v1p0.xsd http://www.imsglobal.org/xsd/imslticp_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticp_v1p0.xsd"
) do |blti_node|
%w{title description launch_url secure_launch_url icon secure_icon}.each do |key|
blti_node.blti key.to_sym, self.send(key) if self.send(key)
end
vendor_keys = %w{name code description url}
if vendor_keys.any? { |k| self.send("vendor_#{k}") } || vendor_contact_email
blti_node.blti :vendor do |v_node|
vendor_keys.each do |key|
v_node.lticp key.to_sym, self.send("vendor_#{key}") if self.send("vendor_#{key}")
end
if vendor_contact_email
v_node.lticp :contact do |c_node|
c_node.lticp :name, vendor_contact_name
c_node.lticp :email, vendor_contact_email
end
end
end
end
if !@custom_params.empty?
blti_node.tag!("blti:custom") do |custom_node|
@custom_params.keys.sort.each do |key|
val = @custom_params[key]
custom_node.lticm :property, val, 'name' => key
end
end
end
if [email protected]?
@extensions.keys.sort.each do |ext_platform|
ext_params = @extensions[ext_platform]
blti_node.blti(:extensions, :platform => ext_platform) do |ext_node|
ext_params.keys.sort.each do |key|
val = ext_params[key]
if val.is_a?(Hash)
ext_node.lticm(:options, :name => key) do |type_node|
val.keys.sort.each do |p_key|
p_val = val[p_key]
type_node.lticm :property, p_val, 'name' => p_key
end
end
else
ext_node.lticm :property, val, 'name' => key
end
end
end
end
end
blti_node.cartridge_bundle(:identifierref => @cartridge_bundle) if @cartridge_bundle
blti_node.cartridge_icon(:identifierref => @cartridge_icon) if @cartridge_icon
end
end
|
Generate XML from the current settings
|
Summarize the following code: def seed(*args, &block)
SeedFu::Seeder.new(self, *parse_seed_fu_args(args, block)).seed
end
|
Load some seed data . There are two ways to do this .
|
Summarize the following code: def call(save: true)
if save == false
call_processor
elsif destination
handle_destination do
call_processor(destination: destination)
end
else
create_tempfile do |tempfile|
call_processor(destination: tempfile.path)
end
end
end
|
Initializes the pipeline with all the processing options . Determines the destination and calls the processor .
|
Summarize the following code: def destination_format
format = File.extname(destination)[1..-1] if destination
format ||= self.format
format ||= File.extname(source_path)[1..-1] if source_path
format || DEFAULT_FORMAT
end
|
Determines the appropriate destination image format .
|
Summarize the following code: def create_tempfile
tempfile = Tempfile.new(["image_processing", ".#{destination_format}"], binmode: true)
yield tempfile
tempfile.open
tempfile
rescue
tempfile.close! if tempfile
raise
end
|
Creates a new tempfile for the destination file, yields it, and refreshes the file descriptor to get the updated file.
|
Summarize the following code: def handle_destination
destination_existed = File.exist?(destination)
yield
rescue
File.delete(destination) if File.exist?(destination) && !destination_existed
raise
end
|
In case of processing errors, both libvips and ImageMagick will leave behind the empty destination file they created, so this method makes sure it is deleted if an exception is raised while saving the image.
|
Summarize the following code: def apply(operations)
operations.inject(self) do |builder, (name, argument)|
if argument == true || argument == nil
builder.send(name)
elsif argument.is_a?(Array)
builder.send(name, *argument)
else
builder.send(name, argument)
end
end
end
|
Add multiple operations as a hash or an array .
|
Summarize the following code: def call(file = nil, destination: nil, **call_options)
options = {}
options = options.merge(source: file) if file
options = options.merge(destination: destination) if destination
branch(options).call!(**call_options)
end
|
Call the defined processing and get the result . Allows specifying the source file and destination .
|
Summarize the following code: def branch(loader: nil, saver: nil, operations: nil, **other_options)
options = respond_to?(:options) ? self.options : DEFAULT_OPTIONS
options = options.merge(loader: options[:loader].merge(loader)) if loader
options = options.merge(saver: options[:saver].merge(saver)) if saver
options = options.merge(operations: options[:operations] + operations) if operations
options = options.merge(processor: self::Processor) unless self.is_a?(Builder)
options = options.merge(other_options)
options.freeze
Builder.new(options)
end
|
Creates a new builder object merging current options with new options .
|
Summarize the following code: def cookie_params
params = {}
cookies.each do |value|
value.split(';').each do |param|
param.strip!
name, value = param.split('=',2)
unless name =~ RESERVED_COOKIE_NAMES
params[name] = (value || '')
end
end
end
return params
end
|
The Cookie key->value pairs returned with the response.
|
Summarize the following code: def active?(url)
# normalize the url
url = URI(url)
# session key
key = key_for(url)
return @sessions.has_key?(key)
end
|
Determines whether there is an active HTTP session for the given URL.
|
Summarize the following code: def [](url)
# normalize the url
url = URI(url)
# session key
key = key_for(url)
unless @sessions[key]
session = Net::HTTP::Proxy(
@proxy.host,
@proxy.port,
@proxy.user,
@proxy.password
).new(url.host,url.port)
session.open_timeout = @open_timeout if @open_timeout
session.read_timeout = @read_timeout if @read_timeout
session.continue_timeout = @continue_timeout if @continue_timeout
session.keep_alive_timeout = @keep_alive_timeout if @keep_alive_timeout
if url.scheme == 'https'
session.use_ssl = true
session.verify_mode = OpenSSL::SSL::VERIFY_NONE
session.ssl_timeout = @ssl_timeout
session.start
end
@sessions[key] = session
end
return @sessions[key]
end
|
Provides an active HTTP session for a given URL .
|
Summarize the following code: def kill!(url)
# normalize the url
url = URI(url)
# session key
key = key_for(url)
if (sess = @sessions[key])
begin
sess.finish
rescue IOError
end
@sessions.delete(key)
end
end
|
Destroys an HTTP session for the given scheme, host, and port.
|
Summarize the following code: def sanitize_url(url)
url = URI(url)
url.fragment = nil if @strip_fragments
url.query = nil if @strip_query
return url
end
|
Sanitizes a URL based on filtering options .
|
Summarize the following code: def doc
unless body.empty?
doc_class = if html?
Nokogiri::HTML::Document
elsif rss? || atom? || xml? || xsl?
Nokogiri::XML::Document
end
if doc_class
begin
@doc ||= doc_class.parse(body, @url.to_s, content_charset)
rescue
end
end
end
end
|
Returns a parsed document object for HTML, XML, RSS, and Atom pages.
|
Summarize the following code: def content_charset
content_types.each do |value|
if value.include?(';')
value.split(';').each do |param|
param.strip!
if param.start_with?('charset=')
return param.split('=',2).last
end
end
end
end
return nil
end
|
The charset included in the Content-Type.
|
Summarize the following code: def is_content_type?(type)
if type.include?('/')
# otherwise only match the first param
content_types.any? do |value|
value = value.split(';',2).first
value == type
end
else
# otherwise only match the sub-type
content_types.any? do |value|
value = value.split(';',2).first
value = value.split('/',2).last
value == type
end
end
end
|
Determines if any of the content-types of the page include a given type.
|
Summarize the following code: def []=(host,cookies)
collected = self[host]
cookies.each do |key,value|
if collected[key] != value
collected.merge!(cookies)
@dirty << host
break
end
end
return cookies
end
|
Add a cookie to the jar for a particular domain .
|
Summarize the following code: def from_page(page)
cookies = page.cookie_params
unless cookies.empty?
self[page.url.host] = cookies
return true
end
return false
end
|
Retrieve cookies for a domain from a page response header .
|
Summarize the following code: def for_host(host)
if @dirty.include?(host)
values = []
cookies_for_host(host).each do |name,value|
values << "#{name}=#{value}"
end
@cookies[host] = values.join('; ')
@dirty.delete(host)
end
return @cookies[host]
end
|
Returns the pre-encoded Cookie for a given host.
|
Summarize the following code: def [](url)
# normalize the url
url = URI(url)
key = [url.scheme, url.host, url.port]
paths = @credentials[key]
return nil unless paths
# longest path first
ordered_paths = paths.keys.sort_by { |path_key| -path_key.length }
# directories of the path
path_dirs = URI.expand_path(url.path).split('/')
ordered_paths.each do |path|
return paths[path] if path_dirs[0,path.length] == path
end
return nil
end
|
Looks up the stored auth credentials for a given URL, matching the longest path first.
|
Summarize the following code: def []=(url,auth)
# normalize the url
url = URI(url)
# normalize the URL path
path = URI.expand_path(url.path)
key = [url.scheme, url.host, url.port]
@credentials[key] ||= {}
@credentials[key][path.split('/')] = auth
return auth
end
|
Add an auth credential to the store for the supplied base URL.
|
Summarize the following code: def run(&block)
@running = true
until (@queue.empty? || paused? || limit_reached?)
begin
visit_page(dequeue,&block)
rescue Actions::Paused
return self
rescue Actions::Action
end
end
@running = false
@sessions.clear
return self
end
|
Start spidering until the queue becomes empty or the agent is paused .
|
Summarize the following code: def enqueue(url,level=0)
url = sanitize_url(url)
if (!(queued?(url)) && visit?(url))
link = url.to_s
begin
@every_url_blocks.each { |url_block| url_block.call(url) }
@every_url_like_blocks.each do |pattern,url_blocks|
match = case pattern
when Regexp
link =~ pattern
else
(pattern == link) || (pattern == url)
end
if match
url_blocks.each { |url_block| url_block.call(url) }
end
end
rescue Actions::Paused => action
raise(action)
rescue Actions::SkipLink
return false
rescue Actions::Action
end
@queue << url
@levels[url] = level
return true
end
return false
end
|
Enqueues a given URL for visiting, only if it passes all of the agent's rules for visiting a given URL.
|
Summarize the following code: def get_page(url)
url = URI(url)
prepare_request(url) do |session,path,headers|
new_page = Page.new(url,session.get(path,headers))
# save any new cookies
@cookies.from_page(new_page)
yield new_page if block_given?
return new_page
end
end
|
Requests and creates a new Page object from a given URL .
|
Summarize the following code: def post_page(url,post_data='')
url = URI(url)
prepare_request(url) do |session,path,headers|
new_page = Page.new(url,session.post(path,post_data,headers))
# save any new cookies
@cookies.from_page(new_page)
yield new_page if block_given?
return new_page
end
end
|
Posts supplied form data and creates a new Page object from a given URL .
|
Summarize the following code: def visit_page(url)
url = sanitize_url(url)
get_page(url) do |page|
@history << page.url
begin
@every_page_blocks.each { |page_block| page_block.call(page) }
yield page if block_given?
rescue Actions::Paused => action
raise(action)
rescue Actions::SkipPage
return nil
rescue Actions::Action
end
page.each_url do |next_url|
begin
@every_link_blocks.each do |link_block|
link_block.call(page.url,next_url)
end
rescue Actions::Paused => action
raise(action)
rescue Actions::SkipLink
next
rescue Actions::Action
end
if (@max_depth.nil? || @max_depth > @levels[url])
enqueue(next_url,@levels[url] + 1)
end
end
end
end
|
Visits a given URL and enqueues the links recovered from the URL to be visited later.
|
Summarize the following code: def prepare_request_headers(url)
# set any additional HTTP headers
headers = @default_headers.dup
unless @host_headers.empty?
@host_headers.each do |name,header|
if url.host.match(name)
headers['Host'] = header
break
end
end
end
headers['Host'] ||= @host_header if @host_header
headers['User-Agent'] = @user_agent if @user_agent
headers['Referer'] = @referer if @referer
if (authorization = @authorized.for_url(url))
headers['Authorization'] = "Basic #{authorization}"
end
if (header_cookies = @cookies.for_host(url.host))
headers['Cookie'] = header_cookies
end
return headers
end
|
Prepares request headers for the given URL .
|
Summarize the following code: def prepare_request(url,&block)
path = unless url.path.empty?
url.path
else
'/'
end
# append the URL query to the path
path += "?#{url.query}" if url.query
headers = prepare_request_headers(url)
begin
sleep(@delay) if @delay > 0
yield @sessions[url], path, headers
rescue SystemCallError,
Timeout::Error,
SocketError,
IOError,
OpenSSL::SSL::SSLError,
Net::HTTPBadResponse,
Zlib::Error
@sessions.kill!(url)
failed(url)
return nil
end
end
|
Normalizes the request path and grabs a session to handle page get and post requests .
|
Summarize the following code: def visit?(url)
!visited?(url) &&
visit_scheme?(url.scheme) &&
visit_host?(url.host) &&
visit_port?(url.port) &&
visit_link?(url.to_s) &&
visit_url?(url) &&
visit_ext?(url.path) &&
robot_allowed?(url.to_s)
end
|
Determines if a given URL should be visited .
|
Summarize the following code: def accept?(data)
unless @accept.empty?
@accept.any? { |rule| test_data(data,rule) }
else
[email protected]? { |rule| test_data(data,rule) }
end
end
|
Determines whether the given data is accepted by the rules.
|
Summarize the following code: def every_html_doc
every_page do |page|
if (block_given? && page.html?)
if (doc = page.doc)
yield doc
end
end
end
end
|
Pass every HTML document that the agent parses to a given block .
|
Summarize the following code: def every_xml_doc
every_page do |page|
if (block_given? && page.xml?)
if (doc = page.doc)
yield doc
end
end
end
end
|
Pass every XML document that the agent parses to a given block .
|
Summarize the following code: def every_rss_doc
every_page do |page|
if (block_given? && page.rss?)
if (doc = page.doc)
yield doc
end
end
end
end
|
Pass every RSS document that the agent parses to a given block .
|
Summarize the following code: def every_atom_doc
every_page do |page|
if (block_given? && page.atom?)
if (doc = page.doc)
yield doc
end
end
end
end
|
Pass every Atom document that the agent parses to a given block .
|
Summarize the following code: def initialize_filters(options={})
@schemes = []
if options[:schemes]
self.schemes = options[:schemes]
else
@schemes << 'http'
begin
require 'net/https'
@schemes << 'https'
rescue Gem::LoadError => e
raise(e)
rescue ::LoadError
warn "Warning: cannot load 'net/https', https support disabled"
end
end
@host_rules = Rules.new(
accept: options[:hosts],
reject: options[:ignore_hosts]
)
@port_rules = Rules.new(
accept: options[:ports],
reject: options[:ignore_ports]
)
@link_rules = Rules.new(
accept: options[:links],
reject: options[:ignore_links]
)
@url_rules = Rules.new(
accept: options[:urls],
reject: options[:ignore_urls]
)
@ext_rules = Rules.new(
accept: options[:exts],
reject: options[:ignore_exts]
)
if options[:host]
visit_hosts_like(options[:host])
end
end
|
Initializes filtering rules .
|
Summarize the following code: def each_meta_redirect
return enum_for(__method__) unless block_given?
if (html? && doc)
search('//meta[@http-equiv and @content]').each do |node|
if node.get_attribute('http-equiv') =~ /refresh/i
content = node.get_attribute('content')
if (redirect = content.match(/url=(\S+)$/))
yield redirect[1]
end
end
end
end
end
|
Enumerates over the meta-redirect links in the page.
|
Summarize the following code: def each_redirect(&block)
return enum_for(__method__) unless block
if (locations = @response.get_fields('Location'))
# Location headers override any meta-refresh redirects in the HTML
locations.each(&block)
else
# check page-level meta redirects if there isn't a location header
each_meta_redirect(&block)
end
end
|
Enumerates over every HTTP or meta-redirect link in the page.
|
Summarize the following code: def each_link
return enum_for(__method__) unless block_given?
filter = lambda { |url|
yield url unless (url.nil? || url.empty?)
}
each_redirect(&filter) if is_redirect?
if (html? && doc)
doc.search('//a[@href]').each do |a|
filter.call(a.get_attribute('href'))
end
doc.search('//frame[@src]').each do |iframe|
filter.call(iframe.get_attribute('src'))
end
doc.search('//iframe[@src]').each do |iframe|
filter.call(iframe.get_attribute('src'))
end
doc.search('//link[@href]').each do |link|
filter.call(link.get_attribute('href'))
end
doc.search('//script[@src]').each do |script|
filter.call(script.get_attribute('src'))
end
end
end
|
Enumerates over every link in the page .
|
Summarize the following code: def each_url
return enum_for(__method__) unless block_given?
each_link do |link|
if (url = to_absolute(link))
yield url
end
end
end
|
Enumerates over every absolute URL in the page .
|
Summarize the following code: def to_absolute(link)
link = link.to_s
new_url = begin
url.merge(link)
rescue Exception
return
end
if (!new_url.opaque) && (path = new_url.path)
# ensure that paths begin with a leading '/' for URI::FTP
if (new_url.scheme == 'ftp' && !path.start_with?('/'))
path.insert(0,'/')
end
# make sure the path does not contain any .. or . directories,
# since URI::Generic#merge cannot normalize paths such as
# "/stuff/../"
new_url.path = URI.expand_path(path)
end
return new_url
end
|
Normalizes and expands a given link into a proper URI .
|
Summarize the following code: def post(options={}, &block)
response = http.post_uri(options.merge(:body => serialize), &block)
handle_response(response)
end
|
Serializes the object, POSTs it to +url+ with +format+, deserializes the returned document, and updates properties accordingly.
|
Summarize the following code: def get(options={}, &block)
response = http.get_uri(options, &block)
handle_response(response)
end
|
GETs +url+ with +format+, deserializes the returned document, and updates properties accordingly.
|
Summarize the following code: def put(options={}, &block)
response = http.put_uri(options.merge(:body => serialize), &block)
handle_response(response)
self
end
|
Serializes the object, PUTs it to +url+ with +format+, deserializes the returned document, and updates properties accordingly.
|
Summarize the following code: def each
page = fetch(@first_query)
loop do
if page['items'].empty? # we consume this array as we iterate
break if page['last_page']
page = fetch(page['next'])
# The second predicate (empty?) *should* be redundant, but we check it
# anyway as a defensive measure.
break if page['items'].empty?
end
item = page['items'].shift
yield translate(item)
end
end
|
Iterate through objects in the response, fetching the next page of results from the API as needed.
|
Summarize the following code: def sign(tx_template)
return tx_template if @xpubs_by_signer.empty?
@xpubs_by_signer.each do |signer_conn, xpubs|
tx_template = signer_conn.singleton_batch_request(
'/sign-transaction',
transactions: [tx_template],
xpubs: xpubs,
) { |item| Transaction::Template.new(item) }
end
tx_template
end
|
Sign a single transaction
|
Summarize the following code: def sign_batch(tx_templates)
if @xpubs_by_signer.empty?
# Treat all templates as if signed successfully.
successes = tx_templates.each_with_index.reduce({}) do |memo, (t, i)|
memo[i] = t
memo
end
BatchResponse.new(successes: successes)
end
# We need to work towards a single, final BatchResponse that uses the
# original indexes. For the next cycle, we should retain only those
# templates for which the most recent sign response was successful, and
# maintain a mapping of each template's index in the upcoming request
# to its original index.
orig_index = (0...tx_templates.size).to_a
errors = {}
@xpubs_by_signer.each do |signer_conn, xpubs|
next_tx_templates = []
next_orig_index = []
batch = signer_conn.batch_request(
'/sign-transaction',
transactions: tx_templates,
xpubs: xpubs,
) { |item| Transaction::Template.new(item) }
batch.successes.each do |i, template|
next_tx_templates << template
next_orig_index << orig_index[i]
end
batch.errors.each do |i, err|
errors[orig_index[i]] = err
end
tx_templates = next_tx_templates
orig_index = next_orig_index
# Early-exit if all templates have encountered an error.
break if tx_templates.empty?
end
successes = tx_templates.each_with_index.reduce({}) do |memo, (t, i)|
memo[orig_index[i]] = t
memo
end
BatchResponse.new(
successes: successes,
errors: errors,
)
end
|
Sign a batch of transactions
|
Summarize the following code: def set_no_auth
FHIR.logger.info 'Configuring the client to use no authentication.'
@use_oauth2_auth = false
@use_basic_auth = false
@security_headers = {}
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end
|
Set the client to use no authentication mechanisms
|
Summarize the following code: def set_basic_auth(client, secret)
FHIR.logger.info 'Configuring the client to use HTTP Basic authentication.'
token = Base64.encode64("#{client}:#{secret}")
value = "Basic #{token}"
@security_headers = { 'Authorization' => value }
@use_oauth2_auth = false
@use_basic_auth = true
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end
|
Set the client to use HTTP Basic Authentication
|
Summarize the following code: def set_bearer_token(token)
FHIR.logger.info 'Configuring the client to use Bearer Token authentication.'
value = "Bearer #{token}"
@security_headers = { 'Authorization' => value }
@use_oauth2_auth = false
@use_basic_auth = true
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end
|
Set the client to use Bearer Token Authentication
|
Summarize the following code: def request_payload(resource, headers)
if headers
format_specified = headers['Content-Type']
if format_specified.nil?
resource.to_xml
elsif format_specified.downcase.include?('xml')
resource.to_xml
elsif format_specified.downcase.include?('json')
resource.to_json
else
resource.to_xml
end
else
resource.to_xml
end
end
|
Extract the request payload in the specified format; defaults to XML.
|
Summarize the following code: def split(subnets=2)
unless (1..(2**@prefix.host_prefix)).include? subnets
raise ArgumentError, "Value #{subnets} out of range"
end
networks = subnet(newprefix(subnets))
until networks.size == subnets
networks = sum_first_found(networks)
end
return networks
end
|
Splits a network into different subnets
|
Summarize the following code: def supernet(new_prefix)
raise ArgumentError, "New prefix must be smaller than existing prefix" if new_prefix >= @prefix.to_i
return self.class.new("0.0.0.0/0") if new_prefix < 1
return self.class.new(@address+"/#{new_prefix}").network
end
|
Returns a new IPv4 object from the supernetting of the instance network .
|
Summarize the following code: def subnet(subprefix)
unless ((@prefix.to_i)..32).include? subprefix
raise ArgumentError, "New prefix must be between #@prefix and 32"
end
Array.new(2**([email protected]_i)) do |i|
self.class.parse_u32(network_u32+(i*(2**(32-subprefix))), subprefix)
end
end
|
This method implements the subnetting function similar to the one described in RFC3531 .
|
Summarize the following code: def -(oth)
if oth.is_a? Integer
self.prefix - oth
else
(self.prefix - oth.prefix).abs
end
end
|
Returns the difference between two prefixes, or between a prefix and a number, as an Integer.
|
Summarize the following code: def bot
return @bot unless @bot.nil?
@bot_command = nil
#
# parse any command-line options and use them to initialize the bot
#
params = {}
#:nocov:
opts = OptionParser.new
opts.banner = "Usage: #{File.basename($0)} [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on('-c', '--config [ARG]', "Specify a config file to use") { |c| ENV["chatterbot_config"] = c }
opts.on('-t', '--test', "Run the bot without actually sending any tweets") { params[:debug_mode] = true }
opts.on('-v', '--verbose', "verbose output to stdout") { params[:verbose] = true }
opts.on('--dry-run', "Run the bot in test mode, and also don't update the database") { params[:debug_mode] = true ; params[:no_update] = true }
opts.on('-r', '--reset', "Reset your bot to ignore old tweets") {
@bot_command = :reset_since_id_counters
}
opts.on('--profile [ARG]', "get/set your bot's profile text") { |p|
@bot_command = :profile_text
@bot_command_args = [ p ]
}
opts.on('--website [ARG]', "get/set your bot's profile URL") { |u|
@bot_command = :profile_website
@bot_command_args = [ u ]
}
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
opts.parse!(ARGV)
#:nocov:
@bot = Chatterbot::Bot.new(params)
if @bot_command != nil
@bot.skip_run = true
result = @bot.send(@bot_command, *@bot_command_args)
puts result
end
@bot
end
|
Generate a Bot object. If the DSL is being called from a Bot object, just return it; otherwise create a bot and return that.
|