Dataset columns: _id (string, 2-6 chars), title (string, 9-130 chars), partition (string, 3 classes), text (string, 66-10.5k chars), language (string, 1 class), meta_information (dict). Each record below lists these fields in that order.
q100
StompParser.Frame.content_encoding
train
def content_encoding
  if content_type = headers["content-type"]
    mime_type, charset = content_type.split(SEMICOLON, 2)
    charset = charset[CHARSET_OFFSET] if charset
    charset ||= EMPTY

    if charset.empty? and mime_type.start_with?("text/")
      Encoding::UTF_8
    elsif charset.empty?
      Encoding::BINARY
    else
      ENCODINGS[charset] or raise StompParser::InvalidEncodingError, "invalid encoding #{charset.inspect}"
    end
  else
    Encoding::BINARY
  end
end
ruby
{ "resource": "" }
q101
StompParser.Frame.write_header
train
def write_header(key, value)
  # @see http://stomp.github.io/stomp-specification-1.2.html#Repeated_Header_Entries
  key = decode_header(key)
  @headers[key] = decode_header(value) unless @headers.has_key?(key)
end
ruby
{ "resource": "" }
q102
Citibike.Client.stations
train
def stations
  resp = self.connection.request(
    :get, Citibike::Station.path
  )

  return resp if @options[:unwrapped]

  Citibike::Responses::Station.new(resp)
end
ruby
{ "resource": "" }
q103
MessageDriver.Broker.find_destination
train
def find_destination(destination_name)
  destination = @destinations[destination_name]
  if destination.nil?
    raise MessageDriver::NoSuchDestinationError, "no destination #{destination_name} has been configured"
  end
  destination
end
ruby
{ "resource": "" }
q104
Slodown.Formatter.extract_metadata
train
def extract_metadata
  @metadata = {}

  convert do |current|
    current.each_line.drop_while do |line|
      next false if line !~ /^#\+([a-z_]+): (.*)/

      key, value = $1, $2
      @metadata[key.to_sym] = value
    end.join('')
  end
end
ruby
{ "resource": "" }
q105
Slodown.Formatter.embed_transformer
train
def embed_transformer lambda do |env| node = env[:node] node_name = env[:node_name] # We're fine with a bunch of stuff -- but not <iframe> and <embed> tags. return if env[:is_whitelisted] || !env[:node].element? return unless %w[iframe embed].include? env[:node_name] # We're dealing with an <iframe> or <embed> tag! Let's check its src attribute. # If its host name matches our regular expression, we can whitelist it. uri = URI(env[:node]['src']) return unless uri.host =~ allowed_iframe_hosts Sanitize.clean_node!(node, { elements: %w[iframe embed], attributes: { all: %w[allowfullscreen frameborder height src width] } }) { node_whitelist: [node] } end end
ruby
{ "resource": "" }
q106
Tenanfy.Helpers.append_tenant_theme_to_assets
train
def append_tenant_theme_to_assets(*assets)
  assets.map! do |asset|
    if should_add_tenant_theme_to_asset?(asset) && current_tenant
      "#{current_tenant.themes.first}/#{asset}"
    else
      asset
    end
  end
  assets
end
ruby
{ "resource": "" }
q107
YourMembership.Session.authenticate
train
def authenticate(user_name, password)
  options = {}
  options[:Username] = user_name
  options[:Password] = password
  response = self.class.post('/', :body => self.class.build_XML_request('Auth.Authenticate', self, options))

  self.class.response_valid? response

  if response['YourMembership_Response']['Auth.Authenticate']
    get_authenticated_user
  else
    false
  end
end
ruby
{ "resource": "" }
q108
Xcflushd.Authorizer.sorted_metrics
train
def sorted_metrics(metrics, hierarchy)
  # 'hierarchy' is a hash where the keys are metric names and the values
  # are arrays with the names of the children metrics. Only metrics with
  # children and with at least one usage limit appear as keys.
  parent_metrics = hierarchy.keys
  child_metrics = metrics - parent_metrics
  parent_metrics + child_metrics
end
ruby
{ "resource": "" }
q109
Citibike.Response.all_near
train
def all_near(obj, dist)
  @data.select do |d|
    if d.id == obj.id
      false
    else
      d.distance_from(obj.latitude, obj.longitude) < dist
    end
  end
end
ruby
{ "resource": "" }
q110
Citibike.Response.method_missing
train
def method_missing(sym, *args, &block)
  if self.data.respond_to?(sym)
    return self.data.send(sym, *args, &block)
  end

  super
end
ruby
{ "resource": "" }
q111
Chimp.QueueWorker.run
train
def run while @never_exit work_item = ChimpQueue.instance.shift() begin if work_item != nil job_uuid = work_item.job_uuid group = work_item.group.group_id work_item.retry_count = @retry_count work_item.owner = Thread.current.object_id ChimpDaemon.instance.semaphore.synchronize do # only do this if we are running with chimpd if ChimpDaemon.instance.queue.processing[group].nil? # no op else # remove from the processing queue if ChimpDaemon.instance.queue.processing[group][job_uuid.to_sym] == 0 Log.debug 'Completed processing task ' + job_uuid.to_s Log.debug 'Deleting ' + job_uuid.to_s ChimpDaemon.instance.queue.processing[group].delete(job_uuid.to_sym) Log.debug ChimpDaemon.instance.queue.processing.inspect ChimpDaemon.instance.proc_counter -= 1 else if ChimpDaemon.instance.queue.processing[group][job_uuid.to_sym].nil? Log.debug 'Job group was already deleted, no counter to decrease.' else Log.debug 'Decreasing processing counter (' + ChimpDaemon.instance.proc_counter.to_s + ') for [' + job_uuid.to_s + '] group: ' + group.to_s ChimpDaemon.instance.queue.processing[group][job_uuid.to_sym] -= 1 Log.debug 'Processing counter now (' + ChimpDaemon.instance.proc_counter.to_s + ') for [' + job_uuid.to_s + '] group: ' + group.to_s Log.debug ChimpDaemon.instance.queue.processing[group].inspect Log.debug 'Still counting down for ' + job_uuid.to_s ChimpDaemon.instance.proc_counter -= 1 end end end end work_item.run else sleep 1 end rescue Exception => ex Log.error "Exception in QueueWorker.run: #{ex}" Log.debug ex.inspect Log.debug ex.backtrace work_item.status = Executor::STATUS_ERROR work_item.error = ex end end end
ruby
{ "resource": "" }
q112
WinGui.DefApi.enforce_count
train
def enforce_count(args, params, diff = 0)
  num_args = args.size
  num_params = params == 'V' ? 0 : params.size + diff
  if num_args != num_params
    raise ArgumentError, "wrong number of parameters: expected #{num_params}, got #{num_args}"
  end
end
ruby
{ "resource": "" }
q113
WinGui.DefApi.return_enum
train
def return_enum lambda do |api, *args, &block| WinGui.enforce_count( args, api.prototype, -1) handles = [] cb = if block callback('LP', 'I', &block) else callback('LP', 'I') do |handle, message| handles << handle true end end args[api.prototype.find_index('K'), 0] = cb # Insert callback into appropriate place of args Array api.call *args handles end end
ruby
{ "resource": "" }
q114
Oulipo.StringExtensions.alliterativity
train
def alliterativity
  words = self.downcase.gsub(/[^a-z\s]/, '').split
  leading_letters = words.map(&:chr)

  # { 'a' => 3, 'b' => 1, ... }
  leading_letter_counts = leading_letters.inject({}) do |result, letter|
    result[letter] ||= 0
    result[letter] += 1
    result
  end

  most_used_count = leading_letter_counts.max_by { |kv| kv.last }.pop
  most_used_count.to_f / words.length
end
ruby
{ "resource": "" }
q115
Oulipo.StringExtensions.n_plus
train
def n_plus(places, word_list)
  analysis = Analysis.new(self, :nouns => word_list)
  substitutor = Substitutor.new(analysis)
  substitutor.replace(:nouns).increment(places)
end
ruby
{ "resource": "" }
q116
Oulipo.StringExtensions.snowball?
train
def snowball?
  words = self.split
  self.chaterism? && words.first.length < words.last.length
end
ruby
{ "resource": "" }
q117
Oulipo.StringExtensions.chaterism?
train
def chaterism?
  words = self.gsub(/[^a-z\s]/i, '').split

  # Find the direction we're traveling
  flen, llen = words.first.length, words.last.length
  direction = flen > llen ? :downto : :upto

  # Compare the pattern of word lengths against a range-turned-array of expected word lengths
  words.map(&:length) == flen.send(direction, llen).to_a
end
ruby
{ "resource": "" }
q118
MiniReadline.EditWindow.sync_window
train
def sync_window(edit_buffer, edit_posn)
  unless check_margins(edit_buffer.length, edit_posn)
    window_buffer.clear
    @show_prompt = true
  end

  image = build_screen_image(edit_buffer)
  update_screen(image)
  @window_buffer = image
end
ruby
{ "resource": "" }
q119
MiniReadline.EditWindow.build_screen_image
train
def build_screen_image(edit_buffer)
  working_region = edit_buffer[left_margin..right_margin]

  if (mask = @options[:secret_mask])
    mask[0] * working_region.length
  else
    working_region
  end.ljust(active_width)
end
ruby
{ "resource": "" }
q120
MiniReadline.EditWindow.update_screen
train
def update_screen(image)
  if @show_prompt
    MiniTerm.print("\r#{prompt.text}\r")
    @show_prompt = false
  end

  (0...active_width).each do |index|
    if (image_char = image[index]) != window_buffer[index]
      MiniTerm.set_posn(column: prompt.length + index)
      MiniTerm.print(image_char)
    end
  end
end
ruby
{ "resource": "" }
q121
Mandate.Memoize.__mandate_memoize
train
def __mandate_memoize(method_name)
  memoizer = Module.new do
    define_method method_name do
      @__mandate_memoized_results ||= {}

      if @__mandate_memoized_results.include?(method_name)
        @__mandate_memoized_results[method_name]
      else
        @__mandate_memoized_results[method_name] = super()
      end
    end
  end
  prepend memoizer
end
ruby
{ "resource": "" }
q122
Aw.Fork.call
train
def call(*, **, &block)
  pid = fork_and_return_pid(&block)
  write.close
  result = read.read
  Process.wait(pid)
  # rubocop:disable MarshalLoad
  Marshal.load(result)
  # rubocop:enable MarshalLoad
end
ruby
{ "resource": "" }
q123
Boson::Commands::WebCore.Get.get_body
train
def get_body
  uri = URI.parse(@url)
  @response = get_response(uri)
  (@options[:any_response] || @response.code == '200') ? @response.body : nil
rescue
  @options[:raise_error] ? raise : puts("Error: GET '#{@url}' -> #{$!.class}: #{$!.message}")
end
ruby
{ "resource": "" }
q124
Boson::Commands::WebCore.Get.parse_body
train
def parse_body(body) format = determine_format(@options[:parse]) case format when :json unless ::Boson::Util.safe_require 'json' return puts("Install the json gem to parse json: sudo gem install json") end JSON.parse body when :yaml YAML::load body else puts "Can't parse this format." end rescue @options[:raise_error] ? raise : puts("Error while parsing #{format} response of '#{@url}': #{$!.class}") end
ruby
{ "resource": "" }
q125
RailsRedshiftReplicator.FileManager.write_csv
train
def write_csv(file_name, records)
  line_number = exporter.connection_adapter.write(local_file(file_name), records)
end
ruby
{ "resource": "" }
q126
RailsRedshiftReplicator.FileManager.file_key_in_format
train
def file_key_in_format(file_name, format)
  if format == "gzip"
    self.class.s3_file_key exporter.source_table, gzipped(file_name)
  else
    self.class.s3_file_key exporter.source_table, file_name
  end
end
ruby
{ "resource": "" }
q127
RailsRedshiftReplicator.FileManager.upload_csv
train
def upload_csv(files) files.each do |file| basename = File.basename(file) next if basename == File.basename(exporter.replication.key) RailsRedshiftReplicator.logger.info I18n.t(:uploading_notice, file: file, key: self.class.s3_file_key(exporter.source_table, basename), scope: :rails_redshift_replicator) s3_client.put_object( key: self.class.s3_file_key(exporter.source_table, basename), body: File.open(file), bucket: bucket ) end files.each { |f| FileUtils.rm f } end
ruby
{ "resource": "" }
q128
Chimp.Executor.run_with_retry
train
def run_with_retry(&block) Log.debug "Running job '#{@job_id}' with status '#{@status}'" # If we are not the first job in this group, wait @delay ChimpDaemon.instance.semaphore.synchronize do if @group.started >= @concurrency && @delay.nonzero? Log.info "[#{@job_uuid}] Sleeping #{@delay} seconds between tasks" sleep @delay end @group.started += 1 end @status = STATUS_RUNNING @time_start = Time.now Log.info self.describe_work_start unless @quiet # # The inner level of exception handling here tries to catch anything # that can be easily retired or failed-- normal exceptions. # # The outer level of exception handling handles weird stuff; for example, # sometimes rest_connection raises RuntimeError exceptions... # # This fixes acu75562. # begin begin yield if not @dry_run if @owner != nil @status = STATUS_DONE @group.job_completed else Log.warn "[#{@job_uuid}][#{@job_id}] Ownership of job_id #{job_id} lost. User cancelled operation?" end rescue SystemExit, Interrupt => ex $stderr.puts 'Exiting!' raise ex rescue Interrupt => ex name = @array['name'] if @array name = @server['name'] || @server['nickname'] if @server Log.error self.describe_work_error if @retry_count > 0 @status = STATUS_RETRYING Log.error "[#{@job_uuid}][#{@job_id}] Error executing on \"#{name}\". Retrying in #{@retry_sleep} seconds..." @retry_count -= 1 sleep @retry_sleep retry end @status = STATUS_ERROR @error = ex Log.error "[#{@job_uuid}][#{@job_id}] Error executing on \"#{name}\": #{ex}" ensure @time_end = Time.now Log.info self.describe_work_done unless @quiet end rescue RuntimeError => ex err = ex.message + "IP: #{@server.params["ip_address"]}\n" if @server.params['ip_address'] err += " Group: #{@group.group_id}\n" if @group.group_id err += " Notes: #{@job_notes}\n" if @job_notes err += " Notes: #{@job_notes}\n" if @job_notes Log.error "[#{@job_uuid}][#{@job_id}] Caught RuntimeError: #{err} Job failed.\n" @status = STATUS_ERROR @error = ex end end
ruby
{ "resource": "" }
q129
CatarsePagarme.PaymentDelegator.transfer_funds
train
def transfer_funds
  raise "payment must be paid" if !payment.paid?

  bank_account = PagarMe::BankAccount.new(bank_account_attributes.delete(:bank_account))
  bank_account.create
  raise "unable to create a bank account" unless bank_account.id.present?

  transfer = PagarMe::Transfer.new({
    bank_account_id: bank_account.id,
    amount: value_for_transaction
  })
  transfer.create
  raise "unable to create a transfer" unless transfer.id.present?

  # avoid sending notification
  payment.update_attributes(state: 'pending_refund')

  payment.payment_transfers.create!({
    user: payment.user,
    transfer_id: transfer.id,
    transfer_data: transfer.to_json
  })
end
ruby
{ "resource": "" }
q130
Chimp.Chimp.run
train
def run queue = ChimpQueue.instance arguments = [] ARGV.each { |arg| arguments << arg.clone } self.cli_args=arguments.collect {|param| param.gsub(/(?<==).*/) do |match| match='"'+match+'"' end }.join(" ") parse_command_line if @interactive check_option_validity if @interactive disable_logging unless @@verbose puts "chimp #{VERSION} executing..." if (@interactive and not @use_chimpd) and not @@quiet # # Wait for chimpd to complete tasks # if @chimpd_wait_until_done chimpd_wait_until_done exit end # # Send the command to chimpd for execution # if @use_chimpd timestamp = Time.now.to_i length = 6 self.job_uuid = (36**(length - 1) + rand(36**length - 36**(length - 1))).to_s(36) ChimpDaemonClient.submit(@chimpd_host, @chimpd_port, self, job_uuid) exit else # Connect to the Api Connection.instance if @interactive Connection.connect else Connection.connect_and_cache end end # If we're processing the command ourselves, then go # ahead and start making API calls to select the objects # to operate upon # # Get elements if --array has been passed get_array_info # Get elements if we are searching by tags get_server_info # At this stage @servers should be populated with our findings # Get ST info for all elements if not @servers.empty? get_template_info unless @servers.empty? puts "Looking for the rightscripts (This might take some time)" if (@interactive and not @use_chimpd) and not @@quiet get_executable_info end if Chimp.failure #This is the failure point when executing standalone Log.error "##################################################" Log.error "[#{Chimp.get_job_uuid}] API CALL FAILED FOR:" Log.error "[#{Chimp.get_job_uuid}] chimp #{@cli_args} " Log.error "[#{Chimp.get_job_uuid}] Run manually!" Log.error "##################################################" exit 1 end # # Optionally display the list of objects to operate on # and prompt the user # if @prompt and @interactive list_of_objects = make_human_readable_list_of_objects confirm = (list_of_objects.size > 0 and @action != :action_none) or @action == :action_none if @script_to_run.nil? verify("Your command will be executed on the following:", list_of_objects, confirm) else verify("Your command \""+@script_to_run.params['right_script']['name']+"\" will be executed on the following:", list_of_objects, confirm) end end # # Load the queue with work # if not @servers.first.nil? and ( not @executable.nil? or @action == :action_ssh or @action == :action_report) jobs = generate_jobs(@servers, @server_template, @executable) add_to_queue(jobs) end # # Exit early if there is nothing to do # if @action == :action_none or ( queue.group[@group].nil? || queue.group[@group].size == 0) puts "No actions to perform." unless self.quiet else do_work end end
ruby
{ "resource": "" }
q131
Chimp.Chimp.check_option_validity
train
def check_option_validity if @hold && !@array_names.empty? puts "ERROR: Holding of array objects is not yet supported" exit 1 end if @tags.empty? and @array_names.empty? and @deployment_names.empty? and not @chimpd_wait_until_done puts "ERROR: Please select the objects to operate upon." help exit 1 end if not @array_names.empty? and ( not @tags.empty? or not @deployment_names.empty? ) puts "ERROR: You cannot mix ServerArray queries with other types of queries." help exit 1 end end
ruby
{ "resource": "" }
q132
Chimp.Chimp.get_servers_by_tag
train
def get_servers_by_tag(tags) # Take tags and collapse it, # Default case, tag is AND if @match_all t = tags.join("&tag=") filter = "tag=#{t}" servers = Connection.instances(filter) else t = tags.join(",") filter = "tag=#{t}" servers = Connection.instances(filter) end if servers.nil? if @ignore_errors Log.warn "[#{Chimp.get_job_uuid}] Tag query returned no results: #{tags.join(" ")}" else raise "[#{Chimp.get_job_uuid}] Tag query returned no results: #{tags.join(" ")}\n" end elsif servers.empty? if @ignore_errors Log.warn "[#{Chimp.get_job_uuid}] Tag query returned no results: #{tags.join(" ")}" else raise "[#{Chimp.get_job_uuid}] Tag query returned no results: #{tags.join(" ")}\n" end end servers = verify_tagged_instances(servers,tags) return(servers) end
ruby
{ "resource": "" }
q133
Chimp.Chimp.verify_tagged_instances
train
def verify_tagged_instances(servers,tags) array_list = servers # servers is an array of hashes # verify that each object contains the tags. if @match_all # has to contain BOTH matching_servers = array_list.select { |instance| (tags - instance['tags']).empty? } else # has to contain ANY matching_servers = array_list.select { |instance| tags.any? {|tag| instance['tags'].include?(tag) }} end # Shall there be a discrepancy, we need to raise an error and end the run. if matching_servers.size != servers.size if @ignore_errors Log.error "[#{Chimp.get_job_uuid}] #{servers.size - matching_servers.size} instances didnt match tag selection." Log.error "[#{Chimp.get_job_uuid}] #{tags.join(" ")}" Chimp.set_failure(true) Log.error "[#{Chimp.get_job_uuid}] Set failure to true because of discrepancy" servers = [] else raise "[#{Chimp.get_job_uuid}] #{servers.size - matching_servers.size} instances didnt match tag selection" end end return servers end
ruby
{ "resource": "" }
q134
Chimp.Chimp.get_hrefs_for_arrays
train
def get_hrefs_for_arrays(names) result = [] arrays_hrefs = [] if names.size > 0 names.each do |array_name| # Find if arrays exist, if not raise warning. # One API call per array begin Log.debug "[#{Chimp.get_job_uuid}] Making API 1.5 call: client.server_arrays.index(:filter => [#{array_name}])" tries ||= 3 result = Connection.client.server_arrays.index(:filter => ["name==#{array_name}"]) rescue Log.error "[#{Chimp.get_job_uuid}] Making API 1.5 call: client.server_arrays.index failed (retrying)." sleep 30 retry unless (tries -= 1).zero? Log.error "[#{Chimp.get_job_uuid}] Making API 1.5 call: client.server_arrays.index failed (giving up)." end # Result is an array with all the server arrays if result.size != 0 if @exact #remove results that do not exactly match result.each{ |r| if array_names.include?(r.raw['name']) arrays_hrefs += [ r.href ] end } else arrays_hrefs += result.collect(&:href) end else if @ignore_errors Log.debug "[#{Chimp.get_job_uuid}] Could not find array \"#{array_name}\"" else Log.error "[#{Chimp.get_job_uuid}] Could not find array \"#{array_name}\"" end end end if ( arrays_hrefs.empty? ) Log.debug "[#{Chimp.get_job_uuid}] Did not find any arrays that matched!" unless names.size == 1 end return(arrays_hrefs) end end
ruby
{ "resource": "" }
q135
Chimp.Chimp.detect_server_template
train
def detect_server_template(servers) Log.debug "[#{Chimp.get_job_uuid}] Looking for server template" st = [] if servers[0].nil? return (st) end st += servers.collect { |s| [s['href'],s['server_template']] }.uniq {|a| a[0]} # # We return an array of server_template resources # of the type [ st_href, st object ] # Log.debug "[#{Chimp.get_job_uuid}] Found server templates" return(st) end
ruby
{ "resource": "" }
q136
Chimp.Chimp.detect_right_script
train
def detect_right_script(st, script) Log.debug "[#{Chimp.get_job_uuid}] Looking for rightscript" executable = nil # In the event that chimpd find @op_scripts as nil, set it as an array. if @op_scripts.nil? @op_scripts = [] end if st.nil? return executable end # Take the sts and extract all operational scripts @op_scripts = extract_operational_scripts(st) # if script is empty, we will list all common scripts # if not empty, we will list the first matching one if @script == "" and @script != nil # list all operational scripts reduce_to_common_scripts(st.size) script_id = list_and_select_op_script # Provide the name + href s = Executable.new s.params['right_script']['href'] = @op_scripts[script_id][1].right_script.href s.params['right_script']['name'] = @op_scripts[script_id][0] @script_to_run = s else # Try to find the rightscript in our list of common operational scripts @op_scripts.each do |rb| script_name = rb[0] if script_name.downcase.include?(script.downcase) # We only need the name and the href s = Executable.new s.params['right_script']['href'] = rb[1].right_script.href s.params['right_script']['name'] = script_name @script_to_run = s Log.debug "[#{Chimp.get_job_uuid}] Found rightscript" return @script_to_run end end # # If we reach here it means we didnt find the script in the operationals # if @script_to_run == nil # Search outside common op scripts search_for_script_in_sts(script, st) if @script_to_run.nil? if @interactive puts "ERROR: Sorry, didnt find that ( "+script+" ), provide an URI instead" puts "I searched in:" st.each { |s| puts " * "+s[1]['name']+" [Rev"+s[1]['version'].to_s+"]" } if not @ignore_errors exit 1 end else Log.error "["+self.job_uuid+"] Sorry, didnt find the script: ( "+script+" )!" return nil end else if self.job_uuid.nil? self.job_uuid = "" end Log.warn "["+self.job_uuid+"] \"#{@script_to_run.params['right_script']['name']}\" is not a common operational script!" return @script_to_run end end end end
ruby
{ "resource": "" }
q137
Chimp.Chimp.reduce_to_common_scripts
train
def reduce_to_common_scripts(number_of_st) counts = Hash.new 0 @op_scripts.each { |s| counts[s[0]] +=1 } b = @op_scripts.inject({}) do |res, row| res[row[0]] ||= [] res[row[0]] << row[1] res end b.inject([]) do |res, (key, values)| res << [key, values.first] if values.size >= number_of_st @op_scripts = res end end
ruby
{ "resource": "" }
q138
Chimp.Chimp.extract_operational_scripts
train
def extract_operational_scripts(st) op_scripts = [] size = st.size st.each do |s| # Example of s structure # ["/api/server_templates/351930003", # {"id"=>351930003, # "name"=>"RightScale Right_Site - 2015q1", # "kind"=>"cm#server_template", # "version"=>5, # "href"=>"/api/server_templates/351930003"} ] Log.debug "[#{Chimp.get_job_uuid}] Making API 1.5 call: client.resource (ST)" begin tries ||= 3 temp = Connection.client.resource(s[1]['href']) Log.debug "[#{Chimp.get_job_uuid}] API 1.5 call client.resource (ST) complete" temp.runnable_bindings.index.each do |x| # only add the operational ones if x.sequence == 'operational' name = x.raw['right_script']['name'] op_scripts.push([name, x]) end end rescue Exception => e Log.error "[#{Chimp.get_job_uuid}] API 1.5 call client.resource (ST) failed (retrying)" Log.error "[#{Chimp.get_job_uuid}] #{e.message}" sleep 30 retry unless (tries -= 1).zero? Log.error "[#{Chimp.get_job_uuid}] API 1.5 call client.resource (ST) failed (giving up)" end end #We now only have operational runnable_bindings under the script_objects array if op_scripts.length < 1 raise "ERROR: No operational scripts found on the server(s). " st.each {|s| puts " (Search performed on server template '#{s[1]['name']}')" } end return op_scripts end
ruby
{ "resource": "" }
q139
Chimp.Chimp.queue_runner
train
def queue_runner(concurrency, delay, retry_count, progress) queue = ChimpQueue.instance queue.max_threads = concurrency queue.delay = delay queue.retry_count = retry_count total_queue_size = queue.size puts "Executing..." unless progress or not quiet pbar = ProgressBar.new("Executing", 100) if progress queue.start queue.wait_until_done(@group) do pbar.set(((total_queue_size.to_f - queue.size.to_f)/total_queue_size.to_f*100).to_i) if progress end pbar.finish if progress end
ruby
{ "resource": "" }
q140
Chimp.Chimp.verify_results
train
def verify_results(group = :default) failed_workers, results_display = get_results(group) # # If no workers failed, then we're done. # if failed_workers.empty? @paused = false return "continue" end # # Some workers failed; offer the user a chance to retry them # verify("The following objects failed:", results_display, false) unless @paused if !@prompt || @paused @paused = true sleep 15 return "pause" end while true puts "(R)etry failed jobs" puts "(A)bort chimp run" puts "(I)gnore errors and continue" command = gets() if command.nil? # # if command is nil, stdin is closed or its source ended # probably because we are in an automated environment, # then we pause like in '--no-prompt' scenario # puts 'Warning! stdin empty, using pause behaviour, use --no-prompt to avoid this message' @paused = true return 'pause' end if command =~ /^a/i puts "Aborting!" exit 1 elsif command =~ /^i/i puts "Ignoring errors and continuing" exit 0 elsif command =~ /^r/i puts "Retrying..." ChimpQueue.instance.group[group].requeue_failed_jobs! return 'retry' end end end
ruby
{ "resource": "" }
q141
Chimp.Chimp.get_results
train
def get_results(group_name) queue = ChimpQueue.instance Log.debug("getting results for group #{group_name}") results = queue.group[@group].results() failed_workers = [] results_display = [] results.each do |result| next if result == nil if result[:status] == :error name = result[:host] || "unknown" message = result[:error].to_s || "unknown" message.sub!("\n", "") failed_workers << result[:worker] results_display << "#{name[0..40]} >> #{message}" end end return [failed_workers, results_display] end
ruby
{ "resource": "" }
q142
Chimp.Chimp.process
train
def process Chimp.set_failure(false) Chimp.set_job_uuid(job_uuid) Log.debug "[#{job_uuid}] Processing task" # Add to our "processing" counter Log.debug "[#{job_uuid}] Trying to get array_info" unless Chimp.failure get_array_info unless Chimp.failure Log.debug "[#{job_uuid}] Trying to get server_info" unless Chimp.failure get_server_info unless Chimp.failure Log.debug "[#{job_uuid}] Trying to get template_info" unless Chimp.failure get_template_info unless Chimp.failure Log.debug "[#{job_uuid}] Trying to get executable_info" unless Chimp.failure get_executable_info unless Chimp.failure # All elements of task have been processed if Chimp.failure Log.error '##################################################' Log.error '[' + job_uuid + '] API CALL FAILED FOR:' Log.error '[' + job_uuid + "] chimp #{@cli_args} " Log.error '[' + job_uuid + '] Run manually!' Log.error '##################################################' return [] elsif @servers.first.nil? || @executable.nil? Log.warn "[#{Chimp.get_job_uuid}] Nothing to do for \"chimp #{@cli_args}\"." # decrease our counter ChimpDaemon.instance.queue.processing[@group].delete(job_uuid.to_sym) ChimpDaemon.instance.proc_counter -= 1 return [] else Log.debug "[#{Chimp.get_job_uuid}] Generating job(s)..." # @servers might be > 1, but we might be using limit_start number_of_servers = if @limit_start.to_i > 0 || @limit_end.to_i > 0 @limit_end.to_i - @limit_start.to_i else # reminder, we already are accounting for at least 1 @servers.size - 1 end Log.debug 'Increasing processing counter (' + ChimpDaemon.instance.proc_counter.to_s + ') + ' + number_of_servers.to_s + ' for group ' + @group.to_s ChimpDaemon.instance.queue.processing[@group][job_uuid.to_sym] += number_of_servers ChimpDaemon.instance.proc_counter += number_of_servers Log.debug 'Processing counter now (' + ChimpDaemon.instance.proc_counter.to_s + ') for group ' + @group.to_s return generate_jobs(@servers, @server_template, @executable) end end
ruby
{ "resource": "" }
q143
Chimp.Chimp.chimpd_wait_until_done
train
def chimpd_wait_until_done local_queue = ChimpQueue.instance $stdout.print "Waiting for chimpd jobs to complete for group #{@group}..." begin while !@dry_run local_queue = ChimpQueue.instance # # load up remote chimpd jobs into the local queue # this makes all the standard queue control methods available to us # sleeping_counter = 0 while true local_queue.reset! begin all = ChimpDaemonClient.retrieve_group_info(@chimpd_host, @chimpd_port, @group, :all) rescue RestClient::ResourceNotFound sleep 5 $stdout.print "\nINFO: Waiting on group #{@group} to populate" retry end ChimpQueue.instance.create_group(@group) ChimpQueue[@group].set_jobs(all) if ChimpQueue[@group].done? Log.debug 'Group ' + @group.to_s + ' is completed' jobs = ChimpQueue[@group].size $stdout.print "\nINFO: Group #{@group} has completed (#{jobs} jobs)" break else Log.debug 'Group ' + @group.to_s + ' is not done.' end if sleeping_counter % 240 == 0 $stdout.print "\n(Still) Waiting for group #{@group}" unless sleeping_counter == 0 end $stdout.print "." $stdout.flush sleeping_counter += 5 sleep 5 end # # If verify_results returns false, then ask chimpd to requeue all failed jobs. # case verify_results(@group) when 'continue' break when 'retry' ChimpDaemonClient.retry_group(@chimpd_host, @chimpd_port, @group) when 'pause' @paused = true #stuck in this loop until action is taken end end ensure #$stdout.print " done\n" end end
ruby
{ "resource": "" }
q144
BackgroundBunnies.Bunny.start
train
def start(connection_or_group) @connection = connection_or_group @channel = AMQP::Channel.new(@connection) queue_options = {} name = queue_name if queue_type == :broadcast queue_options[:exclusive] = true queue_options[:auto_delete] = true name = "#{Socket.gethostname}-#{Process.pid}-#{self.object_id}" @queue = @channel.queue(name, queue_options) @exchange = @channel.fanout(BackgroundBunnies.broadcast_exchange_name(queue_name)) @queue.bind(@exchange) else queue_options[:durable] = true @queue = @channel.queue(queue_name, queue_options) end @consumer = @queue.subscribe(:ack=>true) do |metadata, payload| info = metadata properties = nil begin job = Job.new(JSON.parse!(payload), info, properties) err = nil self.process(job) metadata.ack rescue =>err # processing went wrong, requeing message job = Job.new(nil, info, properties) unless job unless on_error(job, err) metadata.reject(:requeue=>true) else metadata.ack end end end end
ruby
{ "resource": "" }
q145
WinGui.Window.click
train
def click(id)
  h = child(id).handle
  rectangle = [0, 0, 0, 0].pack 'LLLL'
  get_window_rect h, rectangle
  left, top, right, bottom = rectangle.unpack 'LLLL'
  center = [(left + right) / 2, (top + bottom) / 2]
  set_cursor_pos *center
  mouse_event MOUSEEVENTF_LEFTDOWN, 0, 0, 0, 0
  mouse_event MOUSEEVENTF_LEFTUP, 0, 0, 0, 0
end
ruby
{ "resource": "" }
q146
Pandora.Song.load_explorer_data
train
def load_explorer_data
  document = Nokogiri::XML(Faraday.get(@song_explorer_url).body)
  @id = document.search('songExplorer').first['musicId']
end
ruby
{ "resource": "" }
q147
Chimp.Server.run_executable
train
def run_executable(exec, options) script_href = "right_script_href="+exec.href # Construct the parameters to pass for the inputs params=options.collect { |k, v| "&inputs[][name]=#{k}&inputs[][value]=#{v}" unless k == :ignore_lock }.join('&') if options[:ignore_lock] params+="&ignore_lock=true" end # self is the actual Server object Log.debug "[#{Chimp.get_job_uuid}] Running executable" task = self.object.run_executable(script_href + params) return task end
ruby
{ "resource": "" }
q148
Phaserunner.Cli.main
train
def main program_desc 'Read values from the Grin PhaseRunner Controller primarily for logging' version Phaserunner::VERSION subcommand_option_handling :normal arguments :strict sort_help :manually desc 'Serial (USB) device' default_value '/dev/ttyUSB0' arg 'tty' flag [:t, :tty] desc 'Serial port baudrate' default_value 115200 arg 'baudrate' flag [:b, :baudrate] desc 'Modbus slave ID' default_value 1 arg 'slave_id' flag [:s, :slave_id] desc 'Path to json file that contains Grin Modbus Dictionary' default_value Modbus.default_file_path arg 'dictionary_file' flag [:d, :dictionary_file] desc 'Loop the command n times' default_value :forever arg 'loop_count', :optional flag [:l, :loop_count] desc 'Do not output to stdout' switch [:q, :quiet] desc 'Read a single or multiple adjacent registers from and address' arg_name 'register_address' command :read_register do |read_register| read_register.desc 'Number of registers to read starting at the Arg Address' read_register.default_value 1 read_register.flag [:c, :count] read_register.arg 'address' read_register.action do |global_options, options, args| address = args[0].to_i count = args[1].to_i node = dict[address] puts modbus.range_address_header(address, count).join(",") unless quiet (0..loop_count).each do |i| puts modbus.read_raw_range(address, count).join(",") unless quiet end end end desc 'Logs interesting Phaserunner registers to stdout and file' long_desc %q(Logs interesting Phaserunner registers to stdout and a CSV file. File name in the form: phaserunner.#{Time.now.strftime('%Y-%m-%d_%H-%M-%S')}.csv) command :log do |log| log.action do |global_options, options, args| filename = "phaserunner.#{Time.now.strftime('%Y-%m-%d_%H-%M-%S')}.csv" output_fd = File.open(filename, 'w') header = modbus.bulk_log_header # Generate and output header line hdr = %Q(Timestamp,#{header.join(",")}) puts hdr unless quiet output_fd.puts hdr (0..loop_count).each do |i| data = modbus.bulk_log_data str = %Q(#{Time.now.utc.round(10).iso8601(6)},#{data.join(",")}) puts str unless quiet output_fd.puts str sleep 0.2 end end end pre do |global, command, options, args| # Pre logic here # Return true to proceed; false to abort and not call the # chosen command # Use skips_pre before a command to skip this block # on that command only @quiet = global[:quiet] # Handle that loop_count can be :forever or an Integer @loop_count = if global[:loop_count] == :forever Float::INFINITY else global[:loop_count].to_i end @modbus = Modbus.new(global) @dict = @modbus.dict end post do |global,command,options,args| # Post logic here # Use skips_post before a command to skip this # block on that command only end on_error do |exception| # Error logic here # return false to skip default error handling true end exit run(ARGV) end
ruby
{ "resource": "" }
q149
RailsRedshiftReplicator.Deleter.handle_delete_propagation
train
def handle_delete_propagation
  if replicable.tracking_deleted && has_deleted_ids?
    RailsRedshiftReplicator.logger.info propagation_message(:propagating_deletes)
    delete_on_target ? purge_deleted : RailsRedshiftReplicator.logger.error(propagation_message(:delete_propagation_error))
  end
end
ruby
{ "resource": "" }
q150
RayyanScrapers.NihFulltextScraper.process_list_page
train
def process_list_page(page) @logger.info("Processing list page with URL: #{page.uri}") #page.save_as "html/result-list.html" new_items_found = nil items = page.links #[0..50] # TODO REMOVE [], getting sample only items_len = items.length - 1 @total = @total + items_len # pline "Found #{items_len} items in page", true items.each do |anchor| next if anchor.text == '../' new_items_found = false if new_items_found.nil? pid = anchor.text.split('.').first link = "#{page.uri}#{anchor.href}" @logger.info "Got result with id #{pid}" # pline " Item #{@curr_property} of #{@total}..." # get detailed info begin article = Article.find_by_url link if article.nil? new_items_found = true article = process_fulltext_detail_page(@agent.get(link), pid) yield article, true else yield article, false end rescue => exception @logger.error "Error processing #{link}:" @logger.error exception @logger.error exception.backtrace.join("\n") end @curr_property = @curr_property + 1 end new_items_found end
ruby
{ "resource": "" }
q151
SmsCarrier.LogSubscriber.deliver
train
def deliver(event)
  info do
    recipients = Array(event.payload[:to]).join(', ')
    "\nSent SMS to #{recipients} (#{event.duration.round(1)}ms)"
  end

  debug { event.payload[:sms] }
end
ruby
{ "resource": "" }
q152
ACTV.Base.to_hash
train
def to_hash hash = {} hash["attrs"] = @attrs self.instance_variables.keep_if { |key| key != :@attrs }.each do |var| val = self.instance_variable_get(var) hash["attrs"][var.to_s.delete("@").to_sym] = val.to_hash if val.is_a? ACTV::Base end hash["attrs"] end
ruby
{ "resource": "" }
q153
Attributor.AttributeResolver.check
train
def check(path_prefix, key_path, predicate = nil) value = query(key_path, path_prefix) # we have a value, any value, which is good enough given no predicate return true if !value.nil? && predicate.nil? case predicate when ::String, ::Regexp, ::Integer, ::Float, ::DateTime, true, false return predicate === value when ::Proc # Cannot use === here as above due to different behavior in Ruby 1.8 return predicate.call(value) when nil return !value.nil? else raise AttributorException, "predicate not supported: #{predicate.inspect}" end end
ruby
{ "resource": "" }
q154
Attributor.Attribute.describe
train
def describe(shallow = true, example: nil) description = {} # Clone the common options TOP_LEVEL_OPTIONS.each do |option_name| description[option_name] = options[option_name] if options.key? option_name end # Make sure this option definition is not mistaken for the real generated example if (ex_def = description.delete(:example)) description[:example_definition] = ex_def end special_options = options.keys - TOP_LEVEL_OPTIONS - INTERNAL_OPTIONS description[:options] = {} unless special_options.empty? special_options.each do |opt_name| description[:options][opt_name] = options[opt_name] end # Change the reference option to the actual class name. if (reference = options[:reference]) description[:options][:reference] = reference.name end description[:type] = type.describe(shallow, example: example) # Move over any example from the type, into the attribute itself if (ex = description[:type].delete(:example)) description[:example] = dump(ex) end description end
ruby
{ "resource": "" }
q155
Attributor.Attribute.validate
train
def validate(object, context = Attributor::DEFAULT_ROOT_CONTEXT) raise "INVALID CONTEXT!! #{context}" unless context # Validate any requirements, absolute or conditional, and return. if object.nil? # == Attributor::UNSET # With no value, we can only validate whether that is acceptable or not and return. # Beyond that, no further validation should be done. return validate_missing_value(context) end # TODO: support validation for other types of conditional dependencies based on values of other attributes errors = validate_type(object, context) # End validation if we don't even have the proper type to begin with return errors if errors.any? if options[:values] && !options[:values].include?(object) errors << "Attribute #{Attributor.humanize_context(context)}: #{Attributor.errorize_value(object)} is not within the allowed values=#{options[:values].inspect} " end errors + type.validate(object, context, self) end
ruby
{ "resource": "" }
q156
Boson.Pipes.sort_pipe
train
def sort_pipe(object, sort) sort_lambda = lambda {} if object[0].is_a?(Hash) if sort.to_s[/^\d+$/] sort = sort.to_i elsif object[0].keys.all? {|e| e.is_a?(Symbol) } sort = sort.to_sym end sort_lambda = untouched_sort?(object.map {|e| e[sort] }) ? lambda {|e| e[sort] } : lambda {|e| e[sort].to_s } else sort_lambda = untouched_sort?(object.map {|e| e.send(sort) }) ? lambda {|e| e.send(sort) || ''} : lambda {|e| e.send(sort).to_s } end object.sort_by &sort_lambda rescue NoMethodError, ArgumentError $stderr.puts "Sort failed with nonexistant method '#{sort}'" end
ruby
{ "resource": "" }
q157
Boson.Pipes.pipes_pipe
train
def pipes_pipe(obj, arr)
  arr.inject(obj) {|acc,e| Boson.full_invoke(e, [acc]) }
end
ruby
{ "resource": "" }
q158
MiniReadline.Readline.readline
train
def readline(options = {})
  suppress_warnings
  initialize_parms(options)
  MiniTerm.raw { @edit.edit_process }
ensure
  restore_warnings
  puts
end
ruby
{ "resource": "" }
q159
MiniReadline.Readline.set_options
train
def set_options(options)
  @options = MiniReadline::BASE_OPTIONS
             .merge(instance_options)
             .merge(options)

  @options[:window_width] = MiniTerm.width - 1
  set_prompt(@options[:prompt])
  verify_mask(@options[:secret_mask])
end
ruby
{ "resource": "" }
q160
MiniReadline.Readline.set_prompt
train
def set_prompt(prompt)
  @options[:base_prompt]   = Prompt.new(prompt)
  @options[:scroll_prompt] = Prompt.new(@options[:alt_prompt] || prompt)

  verify_prompt(@options[:base_prompt])
  verify_prompt(@options[:scroll_prompt])
end
ruby
{ "resource": "" }
q161
FormHelper.ActionView::Helpers::FormTagHelper.hd_picker_tag
train
def hd_picker_tag(field_name, value=nil, cls="datepicker", opts={}, locale_format=nil)
  draw_ext_input_tag(field_name, value, cls, locale_format, opts)
end
ruby
{ "resource": "" }
q162
Citibike.Api.distance_from
train
def distance_from(lat, long)
  dLat = self.degrees_to_radians(lat - self.latitude)
  dLon = self.degrees_to_radians(long - self.longitude)

  lat1 = self.degrees_to_radians(lat)
  lat2 = self.degrees_to_radians(self.latitude)

  a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
      Math.sin(dLon / 2) * Math.sin(dLon / 2) *
      Math.cos(lat1) * Math.cos(lat2)

  c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
  EARTH_RADIUS * c
end
ruby
{ "resource": "" }
q163
Citibike.Api.method_missing
train
def method_missing(sym, *args, &block)
  if self.internal_object.key?(sym.to_s)
    return self.internal_object[sym.to_s]
  end

  super
end
ruby
{ "resource": "" }
q164
Xcflushd.Storage.reports_to_flush
train
def reports_to_flush # The Redis rename command overwrites the key with the new name if it # exists. This means that if the rename operation fails in a flush cycle, # and succeeds in a next one, the data that the key had in the first # flush cycle will be lost. # For that reason, every time we need to rename a key, we will use a # unique suffix. This way, when the rename operation fails, the key # will not be overwritten later, and we will be able to recover its # content. suffix = suffix_for_unique_naming report_keys = report_keys_to_flush(suffix) if report_keys.empty? logger.warn "No reports available to flush" report_keys else reports(report_keys, suffix) end end
ruby
{ "resource": "" }
q165
ObjectInspector.Scope.join_name
train
def join_name(parts, separator: ObjectInspector.configuration.name_separator)
  the_parts = Array(parts).tap(&:compact!)
  the_parts.join(separator) if the_parts.any?
end
ruby
{ "resource": "" }
q166
ObjectInspector.Scope.join_flags
train
def join_flags(flags, separator: ObjectInspector.configuration.flags_separator)
  the_flags = Array(flags).tap(&:compact!)
  the_flags.join(separator) if the_flags.any?
end
ruby
{ "resource": "" }
q167
ObjectInspector.Scope.join_issues
train
def join_issues(issues, separator: ObjectInspector.configuration.issues_separator)
  the_issues = Array(issues).tap(&:compact!)
  the_issues.join(separator) if the_issues.any?
end
ruby
{ "resource": "" }
q168
ObjectInspector.Scope.join_info
train
def join_info(items, separator: ObjectInspector.configuration.info_separator)
  the_items = Array(items).tap(&:compact!)
  the_items.join(separator) if the_items.any?
end
ruby
{ "resource": "" }
q169
FriendlyId.JsonTranslate.execute_with_locale
train
def execute_with_locale(locale = ::I18n.locale, &block)
  actual_locale = ::I18n.locale
  ::I18n.locale = locale

  block.call

  ::I18n.locale = actual_locale
end
ruby
{ "resource": "" }
q170
Brightbox.DatabaseServer.maintenance_window
train
def maintenance_window
  return nil if maintenance_weekday.nil?
  weekday = Date::DAYNAMES[maintenance_weekday]
  sprintf("%s %02d:00 UTC", weekday, maintenance_hour)
end
ruby
{ "resource": "" }
q171
MtGox.Client.ticker
train
def ticker ticker = get('/api/1/BTCUSD/ticker') Ticker.instance.buy = value_currency ticker['buy'] Ticker.instance.high = value_currency ticker['high'] Ticker.instance.price = value_currency ticker['last_all'] Ticker.instance.low = value_currency ticker['low'] Ticker.instance.sell = value_currency ticker['sell'] Ticker.instance.volume = value_bitcoin ticker['vol'] Ticker.instance.vwap = value_currency ticker['vwap'] Ticker.instance.avg = value_currency ticker['avg'] Ticker.instance.last_local = value_currency ticker['last_local'] Ticker.instance end
ruby
{ "resource": "" }
q172
MtGox.Client.offers
train
def offers
  offers = get('/api/1/BTCUSD/depth/fetch')
  asks = offers['asks'].sort_by { |ask| ask['price_int'].to_i }.collect { |ask| Ask.new(self, ask) }
  bids = offers['bids'].sort_by { |bid| -bid['price_int'].to_i }.collect { |bid| Bid.new(self, bid) }
  {:asks => asks, :bids => bids}
end
ruby
{ "resource": "" }
q173
MtGox.Client.trades
train
def trades(opts = {})
  get('/api/1/BTCUSD/trades/fetch', opts).
    sort_by { |trade| trade['date'] }.collect do |trade|
      Trade.new(trade)
    end
end
ruby
{ "resource": "" }
q174
MtGox.Client.order!
train
def order!(type, amount, price)
  order = {:type => order_type(type), :amount_int => intify(amount, :btc)}
  order[:price_int] = intify(price, :usd) if price != :market
  post('/api/1/BTCUSD/order/add', order)
end
ruby
{ "resource": "" }
q175
MtGox.Client.withdraw!
train
def withdraw!(amount, address)
  if amount >= 1000
    fail(FilthyRichError.new("#withdraw! take bitcoin amount as parameter (you are trying to withdraw #{amount} BTC"))
  else
    post('/api/1/generic/bitcoin/send_simple', :amount_int => intify(amount, :btc), :address => address)['trx']
  end
end
ruby
{ "resource": "" }
q176
Boson.Pipe.scientist_process
train
def scientist_process(object, global_opt, env={})
  @env = env
  [:query, :sort, :reverse_sort].each {|e| global_opt.delete(e) } unless object.is_a?(Array)
  process_pipes(object, global_opt)
end
ruby
{ "resource": "" }
q177
Boson.Pipe.process_pipes
train
def process_pipes(obj, options)
  internal_pipes(options).each {|pipe|
    obj = Pipes.send("#{pipe}_pipe", obj, options[pipe]) if options[pipe]
  }
  process_user_pipes(obj, options)
end
ruby
{ "resource": "" }
q178
Boson.Pipe.process_user_pipes
train
def process_user_pipes(result, global_opt) pipes_to_process(global_opt).each {|e| args = [pipe(e)[:pipe], result] args << global_opt[e] unless pipe(e)[:type] == :boolean args << get_env(e, global_opt) if pipe(e)[:env] pipe_result = Boson.invoke(*args) result = pipe_result if pipe(e)[:filter] } result end
ruby
{ "resource": "" }
q179
AIXM.Document.errors
train
def errors
  xsd = Nokogiri::XML::Schema(File.open(AIXM.schema(:xsd)))
  xsd.validate(Nokogiri::XML(to_xml)).reject do |error|
    AIXM.config.ignored_errors && error.message.match?(AIXM.config.ignored_errors)
  end
end
ruby
{ "resource": "" }
q180
Boson.Namespacer.full_invoke
train
def full_invoke(cmd, args) #:nodoc:
  command, subcommand = cmd.include?(NAMESPACE) ? cmd.split(NAMESPACE, 2) : [cmd, nil]
  dispatcher = subcommand ? Boson.invoke(command) : Boson.main_object
  dispatcher.send(subcommand || command, *args)
end
ruby
{ "resource": "" }
q181
Unmarkdown.Parser.parse_nodes
train
def parse_nodes(nodes) output = '' # Short-circuit if it's empty return output if !nodes || nodes.empty? # Loop through nodes nodes.each do |node| case node.name when 'h1', 'h2', 'h3', 'h4', 'h5', 'h6' level = node.name.match(/\Ah(\d)\Z/)[1].to_i if @options[:underline_headers] && level < 3 content = parse_content(node) output << content + "\n" character = level == 1 ? '=' : '-' content.length.times { output << character} else hashes = '' level.times { hashes << '#' } output << "#{hashes} #{parse_content(node)}" end when 'blockquote' parse_content(node).split("\n").each do |line| output << "> #{line}\n" end when 'ul', 'ol' output << "\n\n" if @list.count > 0 if unordered = node.name == 'ul' @list << :unordered else @list << :ordered @list_position << 0 end output << parse_nodes(node.children) @list.pop @list_position.pop unless unordered when 'li' (@list.count - 1).times { output << ' ' } if @list.last == :unordered output << "* #{parse_content(node)}" else num = (@list_position[@list_position.count - 1] += 1) output << "#{num}. #{parse_content(node)}" end when 'pre' content = parse_content(node) if @options[:fenced_code_blocks] output << "```\n#{content}\n```" else content.split("\n").each do |line| output << " #{line}\n" end end when 'hr' output << "---\n\n" when 'a' output << "[#{parse_content(node)}](#{node['href']}#{build_title(node)})" when 'i', 'em' output << "*#{parse_content(node)}*" when 'b', 'strong' output << "**#{parse_content(node)}**" when 'u' output << "_#{parse_content(node)}_" when 'mark' output << "==#{parse_content(node)}==" when 'code' output << "`#{parse_content(node)}`" when 'img' output << "![#{node['alt']}](#{node['src']}#{build_title(node)})" when 'text' content = parse_content(node) # Optionally look for links content.gsub!(AUTOLINK_URL_REGEX, '<\1>') if @options[:autolink] content.gsub!(AUTOLINK_EMAIL_REGEX, '<\1>') if @options[:autolink] output << content when 'script' next unless @options[:allow_scripts] output << node.to_html else # If it's an supported node or a node that just contains text, just get # its content output << parse_content(node) end output << "\n\n" if BLOCK_ELEMENT_NAMES.include?(node.name) end output end
ruby
{ "resource": "" }
q182
Unmarkdown.Parser.parse_content
train
def parse_content(node)
  content = if node.children.empty?
    node.content
  else
    parse_nodes(node.children)
  end
end
ruby
{ "resource": "" }
q183
AIXM.A.invert
train
def invert
  build(precision: precision, deg: (deg + 180) % 360, suffix: SUFFIX_INVERSIONS.fetch(suffix, suffix))
end
ruby
{ "resource": "" }
q184
Xcflushd.Flusher.async_authorization_tasks
train
def async_authorization_tasks(reports) # Each call to authorizer.authorizations might need to contact 3scale # several times. The number of calls equals 1 + number of reported # metrics without limits. # This is probably good enough for now, but in the future we might want # to make sure that we perform concurrent calls to 3scale instead of # authorizer.authorizations. reports.map do |report| task = Concurrent::Future.new(executor: thread_pool) do authorizer.authorizations(report[:service_id], report[:credentials], report[:usage].keys) end [report, task] end.to_h end
ruby
{ "resource": "" }
q185
Gametel.Navigation.on
train
def on(cls, &block)
  @current_screen = @current_page = cls.new

  waiting_for = "#{cls} to be active"
  wait_until(10, waiting_for) { @current_screen.active? } if @current_screen.respond_to?(:active?)

  block.call @current_screen if block
  @current_screen
end
ruby
{ "resource": "" }
q186
Prawndown.Interface.markdown
train
def markdown(string, options = {})
  text Prawndown::Parser.new(string).to_prawn, options.merge(inline_format: true)
end
ruby
{ "resource": "" }
q187
MiniReadline.Edit.edit_loop
train
def edit_loop
  while @working
    @edit_window.sync_window(edit_buffer, edit_posn)
    @edit_window.sync_cursor(edit_posn)
    process_keystroke(MiniTerm.get_mapped_char)
  end

  edit_buffer
end
ruby
{ "resource": "" }
q188
MiniReadline.Edit.word_right
train
def word_right(_keyboard_args)
  if @edit_posn < length
    right = @edit_buffer[(@edit_posn+1)..-1]
    @edit_posn = (posn = right.index(/\s\S/)) ? @edit_posn+posn+2 : length
  else
    MiniTerm.beep
  end
end
ruby
{ "resource": "" }
q189
UnionStationHooks.RequestReporter.log_view_rendering
train
def log_view_rendering(options)
  return do_nothing_on_null(:log_view_rendering) if null?
  Utils.require_key(options, :name)
  Utils.require_key(options, :begin_time)
  Utils.require_key(options, :end_time)

  @transaction.log_activity(next_view_rendering_name,
    options[:begin_time], options[:end_time], options[:name],
    options[:has_error])
end
ruby
{ "resource": "" }
q190
VersionCompare.Conversions.ComparableVersion
train
def ComparableVersion(value)
  case value
  when String, Integer, Float, ->(val) { val.respond_to?(:to_ary) }
    ComparableVersion.new(value)
  when ->(val) { val.respond_to?(:to_comparable_version) }
    value.to_comparable_version
  else
    raise TypeError, "Cannot convert #{value.inspect} to ComparableVersion"
  end
end
ruby
{ "resource": "" }
q191
Epuber.Compiler.archive
train
def archive(path = nil, configuration_suffix: nil) path ||= epub_name(configuration_suffix) epub_path = File.expand_path(path) Dir.chdir(@file_resolver.destination_path) do new_paths = @file_resolver.package_files.map(&:pkg_destination_path) if ::File.exists?(epub_path) Zip::File.open(epub_path, true) do |zip_file| old_paths = zip_file.instance_eval { @entry_set.entries.map(&:name) } diff = old_paths - new_paths diff.each do |file_to_remove| puts "DEBUG: removing file from result EPUB: #{file_to_remove}" if compilation_context.verbose? zip_file.remove(file_to_remove) end end end run_command(%(zip -q0X "#{epub_path}" mimetype)) run_command(%(zip -qXr9D "#{epub_path}" "#{new_paths.join('" "')}" --exclude \\*.DS_Store)) end path end
ruby
{ "resource": "" }
q192
Epuber.Compiler.epub_name
train
def epub_name(configuration_suffix = nil) epub_name = if !@book.output_base_name.nil? @book.output_base_name elsif @book.from_file? ::File.basename(@book.file_path, ::File.extname(@book.file_path)) else @book.title end epub_name += @book.build_version.to_s unless @book.build_version.nil? epub_name += "-#{@target.name}" if @target != @book.default_target epub_name += "-#{configuration_suffix}" unless configuration_suffix.nil? epub_name + '.epub' end
ruby
{ "resource": "" }
q193
TinyCI.Scheduler.run_all_commits
train
def run_all_commits
  commits = get_commits

  until commits.empty? do
    commits.each {|c| run_commit(c)}

    commits = get_commits
  end
end
ruby
{ "resource": "" }
q194
SimpleRecord.ResultsArray.as_json
train
def as_json(options = nil) #:nodoc:
  # use encoder as a proxy to call as_json on all elements, to protect from circular references
  encoder = options && options[:encoder] || ActiveSupport::JSON::Encoding::Encoder.new(options)
  map { |v| encoder.as_json(v) }
end
ruby
{ "resource": "" }
q195
BlueprintClient.AssetsApi.add_asset_to_node
train
def add_asset_to_node(namespace, type, id, asset_type, asset_id, opts = {})
  data, _status_code, _headers = add_asset_to_node_with_http_info(namespace, type, id, asset_type, asset_id, opts)
  return data
end
ruby
{ "resource": "" }
q196
BlueprintClient.AssetsApi.delete_asset
train
def delete_asset(namespace, asset_id, asset_type, opts = {})
  delete_asset_with_http_info(namespace, asset_id, asset_type, opts)
  return nil
end
ruby
{ "resource": "" }
q197
BlueprintClient.AssetsApi.get_asset
train
def get_asset(namespace, asset_type, asset_id, opts = {})
  data, _status_code, _headers = get_asset_with_http_info(namespace, asset_type, asset_id, opts)
  return data
end
ruby
{ "resource": "" }
q198
BlueprintClient.AssetsApi.get_assets_in_node
train
def get_assets_in_node(namespace, type, id, opts = {})
  data, _status_code, _headers = get_assets_in_node_with_http_info(namespace, type, id, opts)
  return data
end
ruby
{ "resource": "" }
q199
BlueprintClient.AssetsApi.remove_asset_from_node
train
def remove_asset_from_node(namespace, type, id, asset_type, asset_id, opts = {})
  remove_asset_from_node_with_http_info(namespace, type, id, asset_type, asset_id, opts)
  return nil
end
ruby
{ "resource": "" }