query      string   (lengths 7 to 9.5k)
document   string   (lengths 10 to 1.07M)
negatives  sequence (length 19)
metadata   dict
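Each row pairs a natural-language query with the Ruby snippet it documents, 19 hard-negative snippets, and a metadata dict describing the training objective. Below is a minimal sketch of loading and inspecting such a dump with the Hugging Face `datasets` library; the repository id is a placeholder assumption, not taken from this preview.

from datasets import load_dataset

# "org/ruby-code-search-triplets" is a placeholder id, not the real repository name.
ds = load_dataset("org/ruby-code-search-triplets", split="train")

row = ds[0]
print(row["query"])           # natural-language docstring (7 to ~9.5k chars)
print(row["document"][:120])  # matching Ruby snippet (10 to ~1.07M chars)
print(len(row["negatives"]))  # always 19 hard-negative snippets
print(row["metadata"])        # declares the (query, document, negatives) triplet objective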
Sets the gracePeriodInMinutes property value. The number of minutes to wait before restarting the device after an app installation.
def grace_period_in_minutes=(value)
  @grace_period_in_minutes = value
end
[ "def grace_period_in_minutes\n return @grace_period_in_minutes\n end", "def grace_period\n @grace_period\n end", "def grace_period_hours=(value)\n @grace_period_hours = value\n end", "def settings_sleep_timeout_in_minutes=(value)\n @settings_sleep_timeout_in_minutes = value\n end", "def escalation_time_in_minutes=(value)\n @escalation_time_in_minutes = value\n end", "def deadline_grace_period_in_days=(value)\n @deadline_grace_period_in_days = value\n end", "def grace_period_hours\n return @grace_period_hours\n end", "def grace_period_before_access_removal=(value)\n @grace_period_before_access_removal = value\n end", "def grace_days_used\n @grace_days_used ||= grace_days_used!\n end", "def default_lifetime_in_minutes=(value)\n @default_lifetime_in_minutes = value\n end", "def expire_after_grace!\n self.state_dsc = 'expiring soon'\n self.expires_on = [Date.today, paid_through].max + Freemium.days_grace\n Freemium.log_subscription_msg(self, \"now set to expire on #{self.expires_on}\")\n Freemium.mailer.deliver_expiration_warning(subscriber, self)\n save_without_revision!\n end", "def time_limit_per_day=(minutes)\n minutes = (minutes.to_i < 0) ? 0 : minutes.to_i\n seconds = minutes * 60\n write_attribute(:time_limit_per_day, seconds)\n end", "def restart_notification_snooze_duration_in_minutes=(value)\n @restart_notification_snooze_duration_in_minutes = value\n end", "def schedule_imminent_restart_warning_in_minutes=(value)\n @schedule_imminent_restart_warning_in_minutes = value\n end", "def grace_period_before_access_removal\n return @grace_period_before_access_removal\n end", "def date_inside_grace_window(expires_on)\n (expires_on + Rails.configuration.registration_grace_window) - 1.day\n end", "def compliance_grace_period_expiration_date_time=(value)\n @compliance_grace_period_expiration_date_time = value\n end", "def health_check_grace_period\n @group.health_check_grace_period\n end", "def health_check_grace_period\n data[:health_check_grace_period]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the restartNotificationSnoozeDurationInMinutes property value. The number of minutes to snooze the restart notification dialog when the snooze button is selected.
def restart_notification_snooze_duration_in_minutes
  return @restart_notification_snooze_duration_in_minutes
end
[ "def restart_notification_snooze_duration_in_minutes=(value)\n @restart_notification_snooze_duration_in_minutes = value\n end", "def schedule_imminent_restart_warning_in_minutes\n return @schedule_imminent_restart_warning_in_minutes\n end", "def snooze_duration_in_days\n return @snooze_duration_in_days\n end", "def schedule_restart_warning_in_hours\n return @schedule_restart_warning_in_hours\n end", "def reminder_minutes_before_start\n return @reminder_minutes_before_start\n end", "def schedule_imminent_restart_warning_in_minutes=(value)\n @schedule_imminent_restart_warning_in_minutes = value\n end", "def minutes_in_hour\r\n return 60\r\n end", "def minutes\n _nudge[1]\n end", "def countdown_display_before_restart_in_minutes\n return @countdown_display_before_restart_in_minutes\n end", "def getDurationMinutes\r\n\t\t\t\t\treturn @durationMinutes\r\n\t\t\t\tend", "def duration_in_minutes\n @duration_in_minutes\n end", "def settings_sleep_timeout_in_minutes\n return @settings_sleep_timeout_in_minutes\n end", "def restart_count\n return @restart_count\n end", "def settings_screen_timeout_in_minutes\n return @settings_screen_timeout_in_minutes\n end", "def getMinutes\r\n\t\t\t\t\treturn @minutes\r\n\t\t\t\tend", "def minutes\n (seconds % 3600) / 60\n end", "def minutes_in_day\r\n return 1440\r\n end", "def escalation_time_in_minutes\n return @escalation_time_in_minutes\n end", "def seconds_before_retry\n @configuration[:seconds_before_retry]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the restartNotificationSnoozeDurationInMinutes property value. The number of minutes to snooze the restart notification dialog when the snooze button is selected.
def restart_notification_snooze_duration_in_minutes=(value)
  @restart_notification_snooze_duration_in_minutes = value
end
[ "def restart_notification_snooze_duration_in_minutes\n return @restart_notification_snooze_duration_in_minutes\n end", "def schedule_imminent_restart_warning_in_minutes=(value)\n @schedule_imminent_restart_warning_in_minutes = value\n end", "def restart_settings=(value)\n @restart_settings = value\n end", "def schedule_imminent_restart_warning_in_minutes\n return @schedule_imminent_restart_warning_in_minutes\n end", "def schedule_restart_warning_in_hours=(value)\n @schedule_restart_warning_in_hours = value\n end", "def auto_restart_notification_dismissal=(value)\n @auto_restart_notification_dismissal = value\n end", "def settings_sleep_timeout_in_minutes=(value)\n @settings_sleep_timeout_in_minutes = value\n end", "def countdown_display_before_restart_in_minutes=(value)\n @countdown_display_before_restart_in_minutes = value\n end", "def reminder_minutes_before_start=(value)\n @reminder_minutes_before_start = value\n end", "def snooze_duration_in_days=(value)\n @snooze_duration_in_days = value\n end", "def settings_screen_timeout_in_minutes=(value)\n @settings_screen_timeout_in_minutes = value\n end", "def schedule_restart_warning_in_hours\n return @schedule_restart_warning_in_hours\n end", "def restart_count=(value)\n @restart_count = value\n end", "def escalation_time_in_minutes=(value)\n @escalation_time_in_minutes = value\n end", "def restart_delay\n self.load[:restart_delay]\n end", "def engaged_restart_snooze_schedule_in_days=(value)\n @engaged_restart_snooze_schedule_in_days = value\n end", "def setMinutes(minutes)\r\n\t\t\t\t\t@minutes = minutes\r\n\t\t\t\tend", "def respawn_in seconds\n info.respawn_time = (Time.now + seconds).to_i\n end", "def minimum_lifetime_in_minutes=(value)\n @minimum_lifetime_in_minutes = value\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Optionally produces a unique key to ensure uniqueness of this job. See the Generators module methods for example job_id generators you can use.
def create_job_id
  application_job_overrides_method!(__method__)
  # default implementation for framework jobs
  Generators.generate_uuid(self)
end
[ "def unique_key\n @unique_key ||= section.identifier\n end", "def job_key(job)\n raise NotImplementedError.new(\"neither Collector.job_key() nor Job.key() are implemented\") unless job.respond_to?(:key)\n job.key()\n end", "def job_key\n Digest::MD5.hexdigest(path)\n end", "def assign_unique_key\n # generate zero padded random 6 digits\n self.unique_key = SecureRandom.random_number(10 ** 6).to_s.rjust(6,'0')\n self.unique_key_generated_at = Time.now.utc\n end", "def job_id\n raise NotImplementedError\n end", "def generate_key!(*) end", "def generate_key\n if self.key.blank? && !(self.node_id.blank? || self.timestamp.blank?)\n self.key = \"#{node_id}-#{timestamp.strftime('%Y%m%d%H%M%S')}\"\n end\n end", "def buildJobName()\n processID = $$\n @jobName = @jobPrefix + \"_\" + processID.to_s + \"_\" + rand(5000).to_s\n end", "def generate_key\n self.key = excessively_random_string if key.blank?\n end", "def allocate_job_id\n self.job_id ||= Job.allocate_job_id\n end", "def generate_new_storage_key\n object_store.generate_unique_key\n end", "def gen_key key\r\n if storage_type == 'memcache'\r\n key = [worker_name,worker_key,key].compact.join('_')\r\n key\r\n else\r\n key\r\n end\r\n end", "def generate_unique_slug_key\n self.slug = loop do\n random_generate_id = SecureRandom.hex\n break random_generate_id unless self.class.exists?(slug: random_generate_id)\n end\n end", "def generate_key\n self.key = self.class.generate_key( self.timestamp )\n end", "def get_key(job)\n job_url = get_job_url(job)\n key = \"dice_\" + Base64.urlsafe_encode64(CGI.unescape(job_url)).slice(0..249)\n \n return key\n end", "def create_job_id(path)\n folder = path.split(\"\\\\\").last\n job_id = \"#{folder}-#{Time.now.to_i}\"\n end", "def job_id\n igetset(:job_id) { env[\"ID\"] }\n end", "def job_id=(value)\n @job_id = value\n end", "def new_worker(opts={})\n @mutex.synchronize {\n job_key = opts[:job_key] || gen_key\n unless self[job_key]\n self[job_key] = instantiate_worker(opts[:class]).new(opts[:args])\n return job_key\n else\n raise ::BackgrounDRbDuplicateKeyError\n end \n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a new GetDecksResponse
def initialize(response)
  handle_errors(response)

  JSON.parse(response.body).each do |key, value|
    if key == 'decks'
      # Keep the API response count, it will differ when invalid decks are stripped
      @decks_count = value.size
      instance_variable_set(
        "@#{key}".to_sym,
        value.map { |deck| ZombieBattleground::Api::Models::Deck.new(deck) }
      )
    else
      instance_variable_set("@#{key}".to_sym, value)
    end
  end
end
[ "def decks(**args)\n decks_request(**args).decks\n end", "def get_new_deck\n HTTParty.get(\"https://deckofcardsapi.com/api/deck/#{@@deck_id}/shuffle/\")\n response = HTTParty.get(\"https://deckofcardsapi.com/api/deck/#{@@deck_id}/draw/?count=52\") # returns a hash of data\n cards = response[\"cards\"] # we only want the \"cards\" key and its corresponding value from that hash\n Blackjack.new(cards) # instantiate a new Blackjack instance\n end", "def initialize(response)\n handle_errors(response)\n\n @deck = ZombieBattleground::Api::Models::Deck.new(JSON.parse(response.body))\n end", "def create_response(*args)\n Response.new(*args)\n end", "def parse_create_response response\n return ::Episodic::Platform::EpisodesResponse.new(response)\n end", "def create_player_decks(player)\n if player\n player.create_supply(game_id: self.id, player_id: player.id , status: 'Supply')\n player.create_hand(game_id: self.id, player_id: player.id , status: 'Hand')\n player.create_discard(game_id: self.id, player_id: player.id , status: 'Discard')\n player.create_played(game_id: self.id, player_id: player.id , status: 'Played')\n end\n end", "def get_supplies\n with_monitoring_and_error_handling do\n raw_response = perform(:get, @supplies, nil, headers)\n\n MDOT::Response.new(\n response: raw_response,\n schema: :supplies,\n uuid: @user.uuid\n )\n end\n end", "def get(url, query = {})\n query[:format] ||= @format\n OEmbed::Response.create_for(raw(url, query), self, url, query[:format].to_s)\n end", "def create(options)\n API::request(:post, 'close_offering_requests', options)\n end", "def create\n @deck = Deck.new_from_api\n\n respond_to do |format|\n if @deck.save\n format.html { redirect_to @deck, notice: 'Deck was successfully created.' }\n format.json { render :show, status: :created, location: @deck }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @deck.errors, status: :unprocessable_entity }\n end\n end\n end", "def get_data\n # make the API call\n response = RestClient.get(BASE_URL)\n # parse the data\n data = JSON.parse(response)\n # the value of results points to an array\n poke_dex_data = data[\"results\"]\n # work with a short one first\n # should be matching out limit\n @@count = poke_dex_data.length\n # create a new PokeDexObj\n poke_dex_obj = PokeDex.new(poke_dex=poke_dex_data)\n # some of this work seems like it could be done with fewer classes\n # really the poke_dex_object is a way to separate concerns\n # ie this does the work of making new objects.\n # while the poke class does the wokd of breaking down each of those objects\n # more service class (hashit) contributes to the work of the poke class\n return poke_dex_obj\n end", "def create\n @deck = Deck.new(deck_params)\n\n if @deck.save\n render json: @deck, status: :created\n else\n render json: @deck.errors, status: :unprocessable_entity\n end\n end", "def new\n @card = @deck.cards.new\n @decks = Deck.all(:order => 'title')\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @card }\n end\n end", "def create_response(type, data)\n data[:response_type] = data.keys.first unless data.has_key?(:response_type)\n return Response.new(type, data)\n end", "def new\n @deck = Deck.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @deck }\n end\n end", "def new_deposition\n # mg = MetadataGenerator.new(resource: @resource)\n resp = ZC.standard_request(:post, \"#{ZC.base_url}/api/deposit/depositions\", json: {})\n\n 
@deposition_id = resp[:id]\n @links = resp[:links]\n\n resp\n end", "def make_eds_response(data, params = nil)\n docs = eds_documents(data)\n params ||= {}\n options = { documents: docs, blacklight_config: blacklight_config }\n blacklight_config.response_model.new(data, params, options)\n end", "def index\n @decks = current_user.decks\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @decks }\n end\n end", "def make_base_response\n {\n status: 200,\n message: '',\n data: []\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Validator for decks in decks
def decks_contains_valid_decks
  @decks.each do |deck|
    next if deck.is_a?(ZombieBattleground::Api::Models::Deck) &&
            deck.valid? &&
            deck.errors.size.zero?

    errors.add(:decks, 'decks must be an array of Deck')
  end
end
[ "def remove_invalid_decks\n @decks.select! do |deck|\n deck.is_a?(ZombieBattleground::Api::Models::Deck) &&\n deck.valid? &&\n deck.errors.size.zero?\n end\n end", "def validate\n # valid if list of headers identical to list of drills and data items combined\n # except that there may be dummy units, algorithm columns\n headers.each do |header|\n if !(definition.drill.keys+definition.data.keys+['units','algorithm','default']).include?(header)\n raise \"Header missing from itemdef: #{header}, itemdef has: #{(definition.drill.keys+definition.data.keys+['units','algorithm','default']).join(',')}\"\n end\n end\n (definition.drill.keys+definition.data.keys+['units']).each do |key|\n raise \"Header missing from data table: #{key}\" if !headers.include?(key)\n end\n end", "def validate_empty\n if self.units.count > 0\n errors.add(:units, \"must not exist in order for a unit to be deleted\")\n throw(:abort)\n elsif self.collections.count > 0\n errors.add(:collections, \"must not exist in order for a unit to be deleted\")\n throw(:abort)\n end\n end", "def check_departments\n\t if self.departments.size > MAX_DEPARTMENTS_FOR_COMAPNY\n\t errors.add(:department_ids, \"Selezionare al massimo #{MAX_DEPARTMENTS_FOR_COMAPNY} provincie\")\n end\t \n\tend", "def cards_contains_valid_cards\n @cards.each do |card|\n next if card.is_a?(ZombieBattleground::Api::Models::Card) &&\n card.valid? &&\n card.errors.size.zero?\n\n errors.add(:cards, 'cards must be an array of Card')\n end\n end", "def deck_is_a_deck\n return if @deck.is_a?(ZombieBattleground::Api::Models::Deck) &&\n @deck.valid? &&\n @deck.errors.size.zero?\n\n errors.add(:deck, 'deck must be a Deck')\n end", "def valid_sets; end", "def validate_empty\n if self.collections.count > 0\n errors.add(:collections, \"must not exist in order for a collection to be deleted\")\n throw(:abort)\n elsif self.items.count > 0\n errors.add(:items, \"must not exist in order for a collection to be deleted\")\n throw(:abort)\n end\n end", "def validate_suppliers!\n data.each do |supplier_name, config|\n REQUIRED_FIELDS.each do |field|\n result = config[field]\n raise MissingFieldError.new(supplier_name, field) if (result.nil? || result.to_s.empty?)\n end\n \n validate_workers!(supplier_name, config[\"workers\"])\n end\n end", "def validate_stock!\n return {} if empty?\n stock_levels.each_with_object({}) do |stock_level, obj|\n line_items.detect { |li| li.item.sku == stock_level.id }.tap do |li|\n next if li.nil?\n if stock_level.stock_available <= 0\n obj[li.item.sku] = {\n stock_level: 0,\n line_item_quantity: li.unit_quantity,\n message: \"Out of stock\"\n }\n elsif stock_level.stock_available < li.unit_quantity\n obj[li.item.sku] = {\n stock_level: stock_level.stock_available,\n line_item_quantity: li.unit_quantity,\n message: \"Only #{stock_level.stock_available} in stock\"\n }\n end\n li.errors.add(:unit_quantity, obj.dig(li.item.sku, :message)) unless obj.dig(li.item.sku, :message).nil?\n end\n end\n end", "def validate!\n # Require that at least one field exist.\n if fields.length == 0\n raise SkyDB::Query::ValidationError.new(\"At least one selection field is required for #{self.inspect}.\")\n end\n\n fields.each do |field|\n field.validate!\n end\n\n groups.each do |group|\n group.validate!\n end\n end", "def has_invalid_items?\n unless @offer[:items].empty? 
|| @offer[:items].select { |item_name|\n !Inventory.items_and_values.include?(item_name)}.empty?\n add_error(:offer, :items, \"There is an invalid item in the list\")\n error = true\n end\n unless @for[:items].empty? || @for[:items].select { |item_name|\n !Inventory.items_and_values.include?(item_name)}.empty?\n add_error(:for, :items, \"There is an invalid item in the list\")\n error = true\n end\n\n error ||= false\n end", "def validate_groupings(groupings)\n groupings.each(&:validate_grouping)\n end", "def seasons_valid?\n\n seasons_errors.empty?\n\n end", "def must_have_three_cards_that_belong_to_same_deck\n errors.add_to_base(\"Set ##{self.id} does not have three cards!\") unless cards.length == 3\n unique_decks = cards.map {|card| card.deck }.compact.uniq\n if unique_decks.length > 1\n errors.add_to_base(\"Set ##{self.id} has cards from different decks!\")\n elsif unique_decks.length == 0\n errors.add_to_base(\"Set ##{self.id} has cards that don't belong to a deck!\")\n end\n end", "def funders_are_valid\n return unless funders.present?\n funders.each do |funder|\n next if valid_funders.include?(funder)\n errors.add(:funders, 'Invalid funder detected')\n end\n end", "def validate_adapters\n adapters.each_value { |adapter| validate(:adapter, adapter, children_for(adapter)) }\n end", "def ValidateConduitList(key, event)\n # [Quantifier][Speed][Order]\n # Quantifier is optional; for speed there are only 4 options; order starts with 1\n reg = \"^[-+?]*(10m|100m|1g|10g)[1-9]+[0-9]*$\"\n\n invalid_if = Builtins.find(\n (UI.QueryWidget(Id(key), :Value) || \"\").split(\" \")\n ) do |iface|\n if !Builtins.regexpmatch(iface, reg)\n Builtins.y2warning(\"iface %1 has incorrect format\", iface)\n next true\n end\n false\n end\n\n\n if invalid_if != nil\n # error popup\n Popup.Error(\n Builtins.sformat(\n _(\"The interface format '%1' is not valid\"),\n invalid_if\n )\n )\n return false\n end\n true\n end", "def validate_volume\n return unless requires_volume?\n\n this_service[:context][:volume].each do |question|\n if self[question.to_sym].nil?\n errors.add(question.to_sym, :blank)\n break\n end\n\n validates_numericality_of(question.to_sym, greater_than: 0, less_than_or_equal_to: 999999999, only_integer: true, message: :invalid)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Removes invalid decks from decks
def remove_invalid_decks
  @decks.select! do |deck|
    deck.is_a?(ZombieBattleground::Api::Models::Deck) &&
      deck.valid? &&
      deck.errors.size.zero?
  end
end
[ "def chosen_set_validity!(playing_cards)\n @set_found = is_a_set? @chosen_cards\n @chosen_cards.each {|card| playing_cards.delete card} if @set_found\n clean_slate # Clears player picks\n end", "def remove_invalid_cards\n @cards.select! do |card|\n card.is_a?(ZombieBattleground::Api::Models::Card) &&\n card.valid? &&\n card.errors.size.zero?\n end\n end", "def remove_invalid_codes(guess)\n @valid_codes.filter! { |code| code.check_code(guess) == [@black_pegs, @white_pegs] }\n puts \"#{@valid_codes.count} possible codes left\"\n end", "def ensure_cards_valid\n unknown_name_index = 0\n missing_rarity_cards = []\n\n @cards.each do |c|\n if c['name'].blank?\n unknown_name_index += 1\n c['name'] = \"Unnamed Card #{unknown_name_index}\"\n end\n\n if c['rarity'].blank?\n c['rarity'] ||= 'common'\n missing_rarity_cards << c['name']\n end\n\n # Keep only those fields that interest us.\n c.keep_if { |key,_| CardTemplate.fields_whitelist.include? key }\n end\n\n if unknown_name_index > 0\n @warnings << I18n.t('activerecord.card_set.warnings.cards_without_names', card_set_name: @set_name, count: unknown_name_index)\n end\n add_warning_on_cards('cards_without_rarity', missing_rarity_cards)\n end", "def clean_slate\n @chosen_cards.clear\n @chosen_cards_indexes.clear\n end", "def discard(hand=[],drop=[])\n return_discard = []\n sort_drop = drop.sort {|a,b| b <=> a}\n sort_drop.each do |card|\n return_discard << hand[card]\n hand.delete_at(card)\n end\n return return_discard\nend", "def discard_hand\n @bet = nil\n @hand = []\n end", "def unprune_vars(assigned)\n vars.reject{|id, key| assigned.include?(id)}.each_value do |var|\n var.unprune\n end\n end", "def remove_invariable(aln, undefined)\n invs = 0\n lengths = aln.values.first.map(&:length)\n undef_chars = undefined.chars\n\n lengths.each_with_index do |len, i|\n (0 .. len - 1).each do |pos|\n chr = nil\n inv = true\n aln.each_key do |key|\n next if aln[key][i].nil?\n chr = aln[key][i][pos] if chr.nil? || undefined.chars.include?(chr)\n if chr != aln[key][i][pos] && !undef_chars.include?(aln[key][i][pos])\n inv = false\n break\n end\n end\n if inv\n aln.each_key { |key| aln[key][i][pos] = '!' unless aln[key][i].nil? }\n lengths[i] -= 1\n invs += 1\n end\n end\n aln.each_key { |key| aln[key][i].gsub!('!', '') unless aln[key][i].nil? }\n end\n invs\nend", "def remove_all argument\n if argument.kind_of? Suit\n @cards = @cards.find_all{|card| !card.suit.eql?(argument) }\n end\n if argument.kind_of? Rank\n @cards = @cards.find_all{|card| !card.rank.eql?(argument) }\n end\n self\n end", "def cull_invalid(what)\n remove = [\"author_combined_karma\", \"author_link_karma\", \"author_comment_karma\", \"author_karma_ratio\", \"author_account_age\"]\n what = what.reject {|x| x unless (x & remove).empty? }\n what\n end", "def reject_values(*values)\n resultant_pos = size.times.to_a - values.flat_map { |v| @cat_hash[v] }\n dv = at(*resultant_pos)\n unless dv.is_a? 
Daru::Vector\n pos = resultant_pos.first\n dv = at(pos..pos)\n end\n dv.remove_unused_categories\n end", "def discard_hands\n discard = @hands\n @hands = []\n discard.inject([]){ |cards, h| cards + h.cards }\n end", "def discard_necklace_if_visible\n \n @visible_treasures.each do |t| \n \n if t.type == TreasureKind::NECKLACE\n\n #Le pasamos el tesoro al CardDealer\n dealer = CardDealer.instance\n dealer.give_treasure_back(t)\n\n #Lo eliminamos de nuestros tesoros visibles (equipados)\n @visible_treasures.delete(t)\n\n #Salimos del bucle\n break\n end\n\n end\n\n \n end", "def forbidden_values(values)\n raise \"invalid input\" if !values.is_a? Array\n @possibilities -= values\n end", "def discardHiddenCards?()\n\t\t#Checks if we have at least 4 cards in our hand.\n\t\tif checkSize()\n\t\t\ti = 0 #Index to get to the last 4 cards of your hand.\n\t\t\tj = 0 #Index to discard all 4 cards.\n\t\t\ttemp = Card.new(0, \"\")\n\t\t\t#Checking to see if the 1st and 4th cards match numbers.\n\t\t\tif checkMatchNum()\n\t\t\t\t#Iterating to the 4 cards we want to discard.\n\t\t\t\twhile i < @hand.length - 4\n\t\t\t\t\ttemp = @hand.shift()\n\t\t\t\t\t@hand.push(temp) #Putting cards we don't want back in our deck.\n\t\t\t\t\ti += 1\n\t\t\t\tend\n\t\t\t\t#Discards 4 cards.\n\t\t\t\twhile j < 4\n\t\t\t\t\t@hand.pop()#Discarding cards.\n\t\t\t\t\tj += 1 \n\t\t\t\tend\n\t\t\t\tdiscardHiddenCards?() #Now that you discarded cards, have to check again if there's new stuff to discard.\n\t\t\tend\n\t\tend\n\t\tif checkSize()\n\t\t\ti = 0 #Index to get to the last 4 cards of your hand.\n\t\t\tj = 0 #Index to skip two 'middle' cards of the last four.\n\t\t\t#Checking to see if the 1st and 4th card match suits.\n\t\t\tif checkMatchSuit()\n\t\t\t\t#Iterating to the middle two cards\n\t\t\t\twhile i < @hand.length - 3\n\t\t\t\t\ttemp = @hand.shift()\n\t\t\t\t\t@hand.push(temp)\n\t\t\t\t\ti += 1\n\t\t\t\tend\n\t\t\t\t@hand.pop() #Discards the middle two cards.\n\t\t\t\t@hand.pop()\n\t\t\t\t@hand.push(@hand.pop())\n\t\t\t\tdiscardHiddenCards?() #Now that you discarded cards, have to check again if there's new stuff to discard.\n\t\t\tend\n\t\tend\n\tend", "def reset_discard_pile(deck)\n @cards = []\n @cards << deck.take_card\n while @cards[0].color == 'Wild' || @cards[0].number.is_a?(String)\n deck.cards.unshift(@cards.pop)\n @cards << deck.take_card\n end\n @cards\n end", "def make_discards(player, list_of_allies, list_of_axis)\n complete_squads = player.player_hand.completes\n\n # We need to make sure that we don't make attacks with squadrons that have already been used\n complete_squads = player.remove_used_squads(complete_squads, list_of_allies, list_of_axis)\n end", "def non_trump_cards\n @cards.select {|c| c.suit != @trump_suit}\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Mock a crappy remote API call that could toss an exception on some upstream error. It also mocks a return value of ':pending' when it's not complete and returns ':happy' when it's complete.
def crappy_api_call
  $count += 1

  # First two calls fail
  raise "OH SNAP" if $count < 3

  # Next few calls say pending
  return :pending if $count < 5

  # Then finally done
  return :happy
end
[ "def crappy_api_call\n $count += 1\n\n # First two calls fail\n raise \"OH SNAP\" if $count < 3\n\n # Next few calls say pending\n return :pending if $count < 5\n\n # Then finally done\n return :happy if $count > 5\nend", "def test_api_bad_request\n mock_clients bad_req_code, bad_req_code\n\n assert_equal expected_response(bad_req_code), ambiguous_request('')\n end", "def test_retries_then_raises_unexpected_exception\n client_stub = make_client_stub\n\n call_count = 0\n make_request_proc = lambda do |args|\n call_count += 1\n raise RuntimeError if call_count == 3\n raise FakeFaradayError.new(GRPC::Core::StatusCodes::UNAVAILABLE)\n end\n\n sleep_proc = ->(count) { }\n\n options = Gapic::CallOptions.new(\n timeout: 1,\n retry_policy: {\n retry_codes: [GRPC::Core::StatusCodes::UNAVAILABLE], \n }\n )\n\n Kernel.stub :sleep, sleep_proc do\n client_stub.stub :base_make_http_request, make_request_proc do\n ex = assert_raises RuntimeError do\n client_stub.make_get_request uri: \"/foo\", options: options\n end\n end\n end\n\n assert_equal 3, call_count\n end", "def stub_http_start_with_good_01_response()\n Net::HTTP.stub(:start, method(:http_start_stub_good_01)) do\n yield()\n end # Net::HTTP.stub(:start)\n end", "def test_should_handle_retry_by_status_code\n error = proc { raise Google::Cloud::Error, 'something bad happened' }\n @fire_cloud_client.stub :get_workspace_bucket, error do\n # should only retry once\n forbidden_mock = Minitest::Mock.new\n status = 403\n forbidden_mock.expect :status_code, status\n forbidden_mock.expect :nil?, false\n 3.times do\n forbidden_mock.expect :==, false, [Integer] # will check against 502..504\n end\n @fire_cloud_client.stub :extract_status_code, forbidden_mock do\n assert_raise RuntimeError do\n @fire_cloud_client.execute_gcloud_method(:get_workspace_file, 0, 'foo', 'bar.txt')\n forbidden_mock.verify\n end\n end\n # test with 502 should cause retry cascade\n status = 502\n bad_gateway_mock = Minitest::Mock.new\n 6.times do # 6 is for 5 total requests and then 6th iteration that terminates retry loop\n bad_gateway_mock.expect :status_code, status\n bad_gateway_mock.expect :nil?, false\n bad_gateway_mock.expect :==, true, [status]\n end\n @fire_cloud_client.stub :extract_status_code, bad_gateway_mock do\n assert_raise RuntimeError do\n @fire_cloud_client.execute_gcloud_method(:get_workspace_file, 0, 'foo', 'bar.txt')\n bad_gateway_mock.verify\n end\n end\n end\n end", "def mock_faraday_error(status_code)\n mocked_error_class = if (500..599).include?(status_code) && Faraday::VERSION.to_f >= 16.0\n Faraday::ServerError\n else\n Faraday::ClientError\n end\n\n ::Faraday::Connection.any_instance.stubs(:get).\n raises(mocked_error_class.new(nil, { status: status_code}))\n end", "def test_status_green_on_502\n Excon.stub({method: :get}, {body: '<doctype', status: 502})\n\n authorize 'user', 'password'\n assert_output( /502 override/,'') { get '/status' }\n last_response.body.must_equal 'green'\n end", "def mock_undetermined_vehicle_compliance\n allow(ComplianceCheckerApi).to receive(:vehicle_compliance)\n .and_raise(BaseApi::Error422Exception.new(422, '', {}))\n end", "def test_api_handles_nil_input\n mock_clients bad_req_code, bad_req_code\n\n assert_equal expected_response(bad_req_code), ambiguous_request(nil)\n end", "def fake_a_broken_uri(uri)\n stub_request(:get, uri).to_return(body: \"Oopsies\")\nend", "def test_api_handles_unsupported_input\n mock_clients bad_req_code, bad_req_code\n\n assert_equal expected_response(bad_req_code), ambiguous_request('text')\n 
assert_raises NoMethodError do\n ambiguous_request(123)\n end\n assert_raises NoMethodError do\n ambiguous_request([])\n end\n end", "def test_api_handles_empty_input\n mock_clients bad_req_code, bad_req_code\n\n assert_equal expected_response(bad_req_code), ambiguous_request({})\n assert_equal expected_response(bad_req_code), ambiguous_request('')\n end", "def mock_check_barcode_request(barcode, status_code)\n if status_code == '404'\n stub_request(:get, \"#{ENV.fetch('PATRON_MICROSERVICE_URL_V01', nil)}?barcode=\" + barcode)\n .to_return(status: 404, body: {\n \"message\" => \"Failed to retrieve patron record by barcode\",\n \"statusCode\" => 404\n }\n .to_json, headers: { 'Content-Type' => 'application/json' })\n return\n end\n\n if status_code == '409'\n stub_request(:get, \"#{ENV.fetch('PATRON_MICROSERVICE_URL_V01', nil)}?barcode=\" + barcode)\n .to_return(status: 409, body: {\n \"message\" => \"Multiple patron records found\",\n \"statusCode\" => 409\n }\n .to_json, headers: { 'Content-Type' => 'application/json' })\n return\n end\n\n if status_code == '500'\n stub_request(:get, \"#{ENV.fetch('PATRON_MICROSERVICE_URL_V01', nil)}?barcode=\" + barcode)\n .to_return(status: 500, body: {\n \"message\" => \"Server error\",\n \"statusCode\" => 500\n }\n .to_json, headers: { 'Content-Type' => 'application/json' })\n return\n end\n\n # return a successful 200 \"single unique user found\" response\n stub_request(:get, \"#{ENV.fetch('PATRON_MICROSERVICE_URL_V01', nil)}?barcode=\" + barcode)\n .to_return(status: 200,\n body: SIERRA_USER.to_json,\n headers: { 'Content-Type' => 'application/json' })\n end", "def call_api_and_handle_error(api_name)\n retry_counter = 0\n begin\n send(\"call_#{api_name}_api\")\n rescue OpenURI::HTTPError => e\n retry_counter += 1\n retry_message(e, retry_counter)\n return false if retry_counter > RETRY_CALL_API_COUNT\n\n (sleep RETRY_CALL_API_WAIT_TIME) && retry\n end\n end", "def try_api\n yield\n rescue SystemCallError\n # log API connection failure\n :network_error\n end", "def try_api\n yield\n rescue SystemCallError => e\n # log API connection failure\n :network_error\n end", "def test_status_green_on_empty\n Excon.stub({method: :get}, {body: '', status: 200})\n\n authorize 'user', 'password'\n assert_output( /status=200/,'') { get '/status' }\n last_response.body.must_equal 'green'\n end", "def test_faraday_error_applies_by_default\n mangled_json = @body_json.gsub(%r{\"code\":.*$}, \"\")\n mangled_json = mangled_json.gsub(%r{\"message\":.*$}, \"\")\n\n faraday_err_msg = \"foo\"\n faraday_err_code = 200\n\n faraday_err = OpenStruct.new(\n :message => faraday_err_msg,\n :response_body => mangled_json,\n :response_headers => @headers,\n :response_status => faraday_err_code\n )\n\n gapic_err = ::Gapic::Rest::Error.wrap_faraday_error faraday_err\n\n assert_equal faraday_err_code, gapic_err.status_code\n assert gapic_err.message.include? faraday_err_msg\n end", "def test_email_client_secondary_success_only\n MailgunClient.stubs(:send_email).returns(bad_req_code)\n SendgridClient.stubs(:send_email).returns(ok_code)\n\n input_val = EmailObject.new\n response = ClientResponse.new input_val\n response.set_ok\n\n assert_equal response, EmailClient.send_email(input_val)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Open the bookmarks and return an REXML::Document. If the cache option is provided, the document is loaded from the localpath specified.
def open
  require 'rexml/document'
  xml = if cache && File.exist?(cache)
    File.read(cache)
  else
    request :all
  end
  File.open(cache, 'wb') { |io| io.write(xml) } if cache
  REXML::Document.new(xml)
end
[ "def open_with_cache(path)\n if @cache[path]\n if @verbose\n puts \"Opening cached contents of http://pollex.org.nz#{path} ...\"\n end\n @cache[path]\n else\n if @verbose\n puts \"Connecting to http://pollex.org.nz#{path} ...\"\n end\n page = Nokogiri::HTML(open(\"http://pollex.org.nz#{path}\"))\n @cache[path] = page\n page\n end\n end", "def open(path)\n document = parse(File.read(path))\n document.path = path\n document\n end", "def fetch_doc\n request_docs if @cache.empty?\n doc = @cache.shift\n doc unless error?(doc)\n end", "def load_document\n require 'open-uri'\n self.doc = Nokogiri::HTML(open(self.link))\n end", "def bookmark\n filename = File.join(@path, '.hg', 'bookmarks.current')\n if File.exist?(filename)\n file = File.open(filename, 'r')\n begin\n bookmark = file.read.chomp\n ensure\n file.close\n end\n bookmark\n else\n nil\n end\n end", "def load_bookmarks(scope, create=true)\n path = scope == PROJECT_SCOPE ? @project_bookmarks_path : @global_bookmarks_path\n @log.info \"scope=#{scope} path=#{path}\\n\"\n # if file missing then create a new one if requested\n if !FileTest.exist?(path)\n if create\n create_empty_bookmarks path\n else\n return false\n end\n end\n\n @bookmarks=[]\n IO.foreach(path) {|line|\n @log.info \"loading bm=#{line.chomp}\\n\"\n bm = BookmarkInfo.from_tag_line(line.chomp, scope)\n # error in bm? must be the viewpoint line\n if bm.nil?\n # take the view point from the last line in the array removing it\n @view_point = line.chomp.to_i\n break\n else\n @bookmarks.push bm\n @log.info \"Loaded bm #{bm.to_s}\\n\"\n end\n }\n\n return true\n end", "def get_content_as_dom()\n if @state == RedXmlResource::STATE_LAZY\n @document = @doc_service.find_document(@doc_name)\n @state = RedXmlResource::STATE_LOADED\n end\n return @document\n end", "def fetch_rdf\n @doc = Nokogiri::XML(open(@url))\n end", "def load_xml_document file_name\n xml_file = File.new file_name, \"r\"\n return Document.new xml_file\n end", "def get_doc\n begin\n @doc ||= Nokogiri(open( @url ))\n rescue Exception => e\n raise \"Problem with URL #{@url}: #{e}\"\n end\n end", "def from_cache file_path\n \n return @resolved[file_path] if @resolved.key?(file_path)\n \n begin\n \n reloaded = true\n \n cache_file = cache_file_name(file_path)\n \n url = @base_url + file_path\n \n entry = FileInfo.new(url)\n\n open(url) do |remote|\n \n entry.last_modified = remote.last_modified\n \n if @cache_data.key?(file_path) and @cache_data[file_path].last_modified == entry.last_modified\n reloaded = false\n else\n \n File.makedirs File.dirname(cache_file)\n\n File.open(cache_file, \"w\") do |file|\n file << remote.read\n end\n\n end\n \n end\n\n @cache_data[file_path] = entry if reloaded\n @resolved[file_path] = cache_file\n \n rescue OpenURI::HTTPError => ex\n \n log ex if @debug_mode\n \n return nil if ex.io.status == \"404\"\n \n resolve_offline file_path\n \n rescue Exception => ex\n\n if ex.class.to_s == 'Test::Unit::AssertionFailedError'\n raise ex\n end\n \n log ex if @debug_mode\n \n resolve_offline file_path\n \n end\n \n end", "def load_docs\n @cached_docs ||= super\n end", "def goto_document_bookmark\n # if there is an active project we use project bookmarks\n scope = @project_bookmarks_path.nil? ? 
GLOBAL_SCOPE : PROJECT_SCOPE\n goto_bookmark scope, true\n end", "def cache_docs (docs_dir)\n rtn = Hash.new()\n\n Dir.foreach(docs_dir) do |item|\n next if item[0] === \".\" or File.directory?(item)\n rtn[item] = File.read(\"#{docs_dir}/#{item}\")\n end\n\n return rtn\nend", "def read_cache\n @html = File.read(@cache_file) if cache_exist?\n parse_html unless @html.nil?\n end", "def get_data(force_refresh = false)\n begin\n if(File.exist?('fx.cache') && !force_refresh)\n data = File.read('fx.cache')\n @xml_doc = Nokogiri::XML(data)\n puts \"Using cached data\"\n elsif\n data = read_api\n @xml_doc = Nokogiri::XML(data)\n File.open('fx.cache', 'w') do |f|\n f.puts data\n end\n puts \"Using fresh data\"\n end\n rescue => e\n print_exception e\n end\n @xml_doc\n end", "def sitemap_doc\n return doc if doc && !gzip?\n\n begin\n @sitemap_doc ||= Nokogiri::XML::Document.parse(unzipped_body, @url.to_s, content_charset)\n rescue\n end\n end", "def docs(repo_name)\n return nil if repo(repo_name).private_repo?\n\n CACHE.fetch(\"alphagov/#{repo_name} docs\", expires_in: LOCAL_CACHE_DURATION) do\n recursively_fetch_files(repo_name, \"docs\")\n rescue Octokit::NotFound\n nil\n end\n end", "def open(id, rev = nil)\n begin\n unless Colonel.config.rugged_backend.nil?\n repo = Rugged::Repository.bare(File.join(Colonel.config.storage_path, id), backend: Colonel.config.rugged_backend)\n else\n repo = Rugged::Repository.bare(File.join(Colonel.config.storage_path, id))\n end\n rescue Rugged::OSError\n return nil\n end\n\n Document.new(nil, id: id, repo: repo, type: self)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Determine if any bookmarks have been updated since the time specified.
def updated_since?(time)
  time < last_updated_at
end
[ "def fresh_at?(time)\n time >= updated_at\n end", "def content_changed_since?(last_updated)\n stale?(:last_modified => last_updated)\n end", "def updated_since?(timestamp)\n self.updated_at > timestamp\n end", "def up_to_date?\n updated_at > ( Time.now - outdated_after )\n end", "def changed_since_publication?\n return true unless on_flickr?\n\n last_updated > published_at + 1.second\n end", "def stale?(data, now = Time.now)\n (now - data.updated_at) > refresh_period\n end", "def has_changed?\n updated_at > 8.hours.ago\n end", "def movies_stale?\n Time.new - self.movies.last.updated_at >= 900\n end", "def contact_info_updated_since(t)\n return false if sdb_update_at.nil?\n t < sdb_update_at\n end", "def modified_since?( time )\n mtime > time\n end", "def has_played_since_last_update?\n @front_page = get_page(bungie_net_front_page_url)\n last_played = (@front_page/\"div.spotlight div \").inner_html.split(\"&nbsp; | &nbsp;\")[1].gsub(\"Last Played \", \"\").to_date\n return last_played > self.updated_at.to_date\n end", "def stale?\n updated_at < (Time.now - 1.minute)\n end", "def dirty?\n entries.exists? && latest_shorthand_or_extractor_update > earliest_entry_processing\n end", "def pending_refresh?\n last_check = checked_at || DateTime.new\n time_offset = Time.now - refresh_interval\n\n last_check.to_time < time_offset\n end", "def updated_since_published?\n updated_at.strftime('%m%d%y').to_i != published_at.strftime('%m%d%y').to_i\n end", "def fresh_by_time?\n return false unless env.key?(IF_MODIFIED_SINCE) && !last_modified.nil?\n Time.parse(last_modified) <= Time.parse(env.fetch(IF_MODIFIED_SINCE))\n end", "def is_stale?\n frequency = dispatchable.frequency.name\n created_at < time_for_interval(frequency)\n end", "def has_changed?\n\t\t\t@timestamp != File.mtime( path )\n\t\tend", "def needs_update?(from, to) #:doc:\n File.exists?(to) ? File.mtime(from) > File.mtime(to) : true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Yield each bookmark to the block that requires synchronization. The :since option may be specified to indicate the time of the most recently updated bookmark. Only bookmarks whose time is more recent than the time specified are yielded.
def synchronize(options={})
  if since = options[:since]
    since.utc
    return false unless updated_since?(since)
  else
    since = Time.at(0)
    since.utc
  end
  open.elements.each('posts/post') do |el|
    attributes = el.attributes
    time = Time.iso8601(attributes['time'])
    next if time <= since
    yield :href => attributes['href'],
          :hash => attributes['hash'],
          :description => attributes['description'],
          :extended => attributes['extended'],
          :time => time,
          :shared => (attributes['shared'] != 'no'),
          :tags => attributes['tag'].split(' ')
  end
end
[ "def each\n bookmark = @first\n while bookmark\n yield bookmark\n bookmark = bookmark.next_sibling\n end\n end", "def each_transaction_since(block)\n\t\t\tunless block.is_a?(Block)\n\t\t\t\tblock = get_block(block)\n\t\t\tend\n\n\t\t\tinfo = @jr.listsinceblock(block.block_id)\n\n\t\t\ttxes = info.fetch('transactions')\n\t\t\ttxes.map!{ |tx| tx.fetch('txid') }\n\t\t\ttxes.uniq!\n\n\t\t\ttxes.each do |txid|\n\t\t\t\ttransaction = get_transaction(txid)\n\t\t\t\tyield(transaction)\n\t\t\tend\n\n\t\t\tget_block(info.fetch('lastblock'))\n\t\tend", "def skip_updating_of_linked_timestamps\n self.skip_update_timestamps = true\n yield\n self.skip_update_timestamps = false\n end", "def each_entry(options = {}, &block)\n if options[:paginate]\n since_reached = false\n feed = self\n loop do\n feed.entries.each do |entry|\n if options[:since] && entry.updated && options[:since] > entry.updated\n since_reached = true\n break\n else\n block.call(entry)\n end\n end\n\n if since_reached || feed.next_page.nil?\n break\n else feed.next_page\n feed = feed.next_page.fetch(options)\n end\n end\n else\n self.entries.each(&block)\n end\n end", "def timeline\n\t\t\t\tbookmarks_loader(Time.now, doorkeeper_token.resource_owner_id) \n\t\t\t\tbookmarks_formatter\t\t\t\t\n\t\t\tend", "def each_entry(options = {}, &block)\n\t if options[:paginate]\n\t since_reached = false\n\t feed = self\n\t loop do \n\t feed.entries.each do |entry|\n\t if options[:since] && entry.updated && options[:since] > entry.updated\n\t since_reached = true\n\t break\n\t else\n\t block.call(entry)\n\t end\n\t end\n\t \n\t if since_reached || feed.next_page.nil?\n\t break\n\t else feed.next_page\n\t feed = feed.next_page.fetch(options)\n\t end\n\t end\n\t else\n\t self.entries.each(&block)\n\t end\n\t end", "def watch\n before = nil\n loop do sleep(1)\n next if before == (after = mtime)\n yield ; before = after\n end\n end", "def modified_notes(since_usn: 0, &block)\n return if saved_usns.none?\n fetched_note_ids = Set.new\n newer_chunks = saved_usns.select { |chunk_usn| chunk_usn > since_usn }\n enum = Enumerator.new do |yielder|\n newer_chunks.each do |chunk_number|\n notes_in_local_chunk = local_resource(chunk_number).notes || []\n notes_in_local_chunk.each do |sparse_note|\n short_id = sparse_note.guid.first 8\n next if fetched_note_ids.include?(short_id)\n if local_note_is_stale?(sparse_note.guid, sparse_note.updateSequenceNum)\n fetched_note = note sparse_note.guid\n fetched_note_ids << short_id\n yielder << fetched_note\n next\n else\n @log.debug \"Local note is newer for #{short_id}\"\n end\n end\n end\n end\n enum.each(&block) if block_given?\n enum\n end", "def block(from_id, to_id, scope = Amico.default_scope_key)\n return if from_id == to_id\n\n Amico.redis.multi do\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.following_key}:#{scope}:#{from_id}\", to_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.following_key}:#{scope}:#{to_id}\", from_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.followers_key}:#{scope}:#{to_id}\", from_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.followers_key}:#{scope}:#{from_id}\", to_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.reciprocated_key}:#{scope}:#{from_id}\", to_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.reciprocated_key}:#{scope}:#{to_id}\", from_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.pending_key}:#{scope}:#{from_id}\", to_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.pending_with_key}:#{scope}:#{to_id}\", from_id)\n 
Amico.redis.zadd(\"#{Amico.namespace}:#{Amico.blocked_key}:#{scope}:#{from_id}\", Time.now.to_i, to_id)\n Amico.redis.zadd(\"#{Amico.namespace}:#{Amico.blocked_by_key}:#{scope}:#{to_id}\", Time.now.to_i, from_id)\n end\n end", "def for_each_commit(checkpoint: nil, &block)\n raise \"Not implemented\"\n end", "def sync_items_since!(datetime, opts={})\n clear_sync_state!\n\n begin\n items = sync_items!\n end until items.empty?\n\n items_since(datetime, opts)\n end", "def loadmore\n bookmarks_loader(session[:last_link_time], current_user.id)\n bookmark = @bookmarks.last\n if bookmark\n session[:last_link_time] = bookmark.updated_at \n end \n end", "def show_bookmark \n @listings = []\n @sellers = Seller.order(:cached_votes_up => :desc)\n @sellers.each do |seller|\n if current_user.liked? seller\n @listings_tmp = seller.listings.where(:sold_date => 3.hours.from_now.to_date).order(:cached_votes_up => :desc)\n @listings_tmp.each do |listing|\n @listings.append(listing)\n end\n end\n end\n @listings = Kaminari.paginate_array(@listings).page(params[:page]).per(8)\n end", "def each(&block)\n @lock.synchronize do\n @links.each(&block)\n end\n end", "def bookmarks(user)\n user = Access::Validate.user(user, false)\n Bookmark\n .where('(bookmarks.creator_id = ? OR bookmarks.updater_id = ?)', user.id, user.id)\n .order('bookmarks.updated_at DESC')\n end", "def sync_items_since!(datetime, opts={})\n clear_sync_state!\n\n begin\n items = sync_items!\n end until items.empty?\n\n items_since(datetime, opts)\n end", "def process!\n self.update(status: :processing)\n Markio::parse(File.open(bookmark_file.current_path)).each do |bookmark|\n LinkCreator.perform_async(bookmark_to_link_attributes(bookmark))\n end\n self.update(status: :done)\n end", "def watch\n loop { (yield @journal.current_entry while @journal.move_next) if @journal.wait(100_000) }\n end", "def follow &block\n @last_event_id = nil\n \n loop do\n new_events = fetch_new_events\n block.call new_events unless new_events.empty?\n sleep 1 if new_events.empty?\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes all keys to `UNSET_VALUE`
def initialize
  super
  keys.each do |key|
    set(key, self.class::UNSET_VALUE)
  end
end
[ "def unsets\n self[\"$unset\"] ||= {}\n end", "def reset_initial_values\n @initial_values.clear if @initial_values\n @missing_initial_values.clear if @missing_initial_values\n end", "def reset_values\n @field_names.each { |key|\n @fields[key].value = nil\n }\n end", "def reset!\n self.user_values = {}\n\n # @inputs have to be nil, not an empty hash. otherwise\n # the memoized inputs will not pick up the changes.\n @inputs = nil\n end", "def reset\n @values = @defaults\n end", "def clear\n @value = 0\n end", "def clear\n dict.clear\n end", "def clear\n @mapping = DEFAULT_MAPPING.dup\n @values = []\n\n self\n end", "def init\n\t\t\tself.value = nil\n\t\tend", "def clear!\n @key_files = []\n @known_identities = nil\n self\n end", "def clear\n @_value = nil\n end", "def reset\n @value = nil\n end", "def unset(*keys)\n eval(UNSET, binding, __FILE__, UNSET_LINE)\n nil\nend", "def unset\n ::GObject.g_value_unset self\n end", "def clear_viewstate\n self.each { |k,v|\n self[k] = \"\" if k =~ /^__/\n }\n end", "def unset \n ::GObject::g_value_unset self\n end", "def clear!\n key_files.clear\n key_data.clear\n known_identities.clear\n self\n end", "def reset\n @value = 0\n end", "def re_init()\n\t\t@values = {}\n\t\t@section = nil\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a configuration line/stanza for the specified key and value. The returned line should include a linefeed `\\n` if not empty. The default implementation returns "=\\n".
def config_for(key, value)
  "#{key}=#{value && value}\n"
end
[ "def line_contents\n if value.kind_of?(Array)\n value.map { |v, i| \"#{key}#{@option_sep}#{v}\" }\n else\n \"#{key}#{@option_sep}#{value}\"\n end\n end", "def to_cookiestxt_line(linefeed = \"\\n\")\n [\n @domain,\n @for_domain ? True : False,\n @path,\n @secure ? True : False,\n @expires.to_i.to_s,\n @name,\n @value\n ].join(\"\\t\") << linefeed\n end", "def config_value(key)\n value = data[key.to_s].to_s\n value.strip!\n value\n end", "def write_basic_line(io, line_key, line_value, validators)\n return if !line_value || line_value.empty?\n line_value.strip!\n valid = validate_line_value(line_value, validators)\n if valid || !@raise_on_invalid_data\n io.puts \"#{line_key}: #{line_value}\"\n elsif validators && !validators.empty? && @raise_on_invalid_data\n raise InvalidDataError, \"Invalid value: `#{line_value}` given for key `#{line_key}`\"\n end\n end", "def optional_newline\n \"\\n\"\n end", "def linebreak?\n [\"\\r\\n\", \"\\n\"].include? @value\n end", "def makeConfigPairString( key, val, indent=0 )\n\t\t\tstring = \"\"\n\n\t\t\tcase key\n\t\t\twhen Symbol, String\n\t\t\t\tleading = \" \" * indent\n\n\t\t\t\tcase val\n\t\t\t\twhen nil\n\t\t\t\t\tstring << leading + key.to_s + \"\\n\"\n\n\t\t\t\twhen String, Numeric\n\t\t\t\t\tstring << leading + %{#{key.to_s} #{val.to_s}\\n}\n\n\t\t\t\twhen TrueClass\n\t\t\t\t\tstring << leading + %{#{key.to_s} On\\n}\n\n\t\t\t\twhen FalseClass\n\t\t\t\t\tstring << leading + %{#{key.to_s} Off\\n}\n\n\t\t\t\twhen Array\n\t\t\t\t\tstring << val.collect {|v|\n\t\t\t\t\t\tleading + \"#{key.to_s} #{v}\"\n\t\t\t\t\t}.join(\"\\n\") << \"\\n\"\n\n\t\t\t\twhen Hash\n\t\t\t\t\tstring << val.collect {|k,v|\n\t\t\t\t\t\tleading + \"#{key.to_s} #{k.to_s} #{v}\"\n\t\t\t\t\t}.join(\"\\n\") << \"\\n\"\n\n\t\t\t\telse\n\t\t\t\t\tstring << \"# Error (unhandled config val type '%s'): %s\" %\n\t\t\t\t\t\t[ val.class.name, val.inspect ]\n\t\t\t\tend\n\n\t\t\twhen Section\n\t\t\t\tstring << makeSectionString( key, val, indent )\n\n\t\t\telse\n\t\t\t\traise TypeError, \"Unhandled key-type: %s\" % key.class.name\n\t\t\tend\n\n\t\t\treturn string\n\t\tend", "def keyAndValueForLine(line)\n key = line[0 .. line.index('=') - 1]\n value = line[line.index('=') + 1 .. line.length()-1]\n # remove \" from string\n value = value.gsub '\"', ''\n return key.strip, value.strip\n end", "def formatted_value_string(value)\n return value unless value.to_s.include?(\"\\n\") || value.to_s.include?(\"\\302\")\n\n # replaces all of \\n with a break line, but make sure it is escaped before marking as safe\n value = ERB::Util.html_escape(value)\n value.gsub!(\"\\n\", '<br>')\n value.html_safe # rubocop:disable Rails/OutputSafety\n end", "def app_setting_display(key, value = nil)\n value = AppSettings[key] if value.nil?\n value = nil if value == 'nil'\n empty = value.nil?\n value = value.inspect\n name = key.to_s\n v_id = css_randomize(name)\n l_id = \"label-#{v_id}\"\n\n l_opt = { id: l_id, class: 'setting line' }\n append_css!(l_opt, value) if empty\n append_css!(l_opt, 'condensed') if name.size > 25\n label = html_span(name, l_opt)\n\n v_opt = { id: v_id, class: 'text', 'aria-describedby': l_id }\n append_css!(v_opt, value) if empty\n value = html_div(value, v_opt)\n\n label << value\n end", "def get_key_val_override(line)\n\t\toverride_in_line = nil\n\t\tunless line.include?(\"=\")\n\t\t#not a key=value type of line.\n\t\t\treturn nil\n\t\tend\n\t\t\n\t\tif line.index(\"=\") == 0\n\t\t#= is the first character. 
not valid.\n\t\t\traise 'Invalid config file at line: ' << line\n\t\tend\n\t\t\n\t\tsplit_index = line.index(\"=\")\n\t\t\n\t\t#get key and value based on position of =\n\t\tkey = line[0..split_index - 1]\n\t\tvalue = line[split_index + 1..line.length]\n\t\tkey.strip!\n\t\tvalue.strip!\n\t\t\n\t\t#determine if key has an override.\n\t\tif key.include?(OVERRIDE_START) and key.include?(OVERRIDE_END)\n\t\t\toverride_start_index = key.index(OVERRIDE_START)\n\t\t\toverride_end_index = key.index(OVERRIDE_END)\n\t\t\toverride_in_line = key[override_start_index + 1..override_end_index - 1]\n\t\t\tkey = key[0..override_start_index - 1]\n\t\t\t\n\t\t\tkey.strip!\n\t\t\toverride_in_line.strip!\n\t\t\t\n\t\t\tif key.empty? or override_in_line.empty?\n\t\t\t#either key is empty or the data within <> is empty\n\t\t\t\traise 'Invalid config file at line: ' << line\n\t\t\tend\n\t\tend\n\t\t\n\t\t#return the tuple with key, value, override data\n\t\t{:key => key, :value => value, :override_in_line => override_in_line}\n\tend", "def val_for key\n split(\"\\n\").val_for(key)\n end", "def format(key, value)\n \"#{key}#{key_value_separator}#{format_value(value)}\"\n end", "def line_ending\r\n \"\\n\"\r\n end", "def quoted_newlines?\n val = @gapi.configuration.load.allow_quoted_newlines\n val = false if val.nil?\n val\n end", "def item_value(subsetting, subsetting_value)\n (subsetting || '') + (@key_val_separator || '') + (subsetting_value || '')\n end", "def key_value_sep\n @key_value_sep ||= ' ' # for now assume everyone wants newer less verbose style\n end", "def insert_inline_setting_line(result, section, complete_setting)\n line_num = result[:line_num]\n s = complete_setting\n lines.insert(line_num + 1, \"#{@indent_char * (@indent_width || section.indentation || 0)}#{s[:setting]}#{s[:separator]}#{s[:value]}\")\n end", "def line_ending=(value)\n @line_ending = value || \"\\r\\n\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This gets a game cliche from a list of cliches in a text file
def get_game_cliche
  #initialize variable
  chosen_line = nil

  #Get the cliche
  File.foreach("game_cliches.txt").each_with_index do |line, number|
    chosen_line = line if rand < 1.0/(number+1)
  end

  return chosen_line.chomp
end
[ "def readfile(filename, elf_attack = 3)\n max_x, max_y = readfile_coords(filename)\n grid = Array.new(max_x + 1) { Array.new(max_y + 1) }\n units = []\n unit_id = 0\n y = 0\n #File.readlines(filename).each do |line|\n lines.each do |line|\n x = 0\n line.chars.each do |c|\n if c == '#'\n grid[x][y] = '#'\n elsif c == 'G'\n grid[x][y] = '.'\n unit = {\n x: x,\n y: y,\n type: 'gob',\n display: 'G',\n id: unit_id,\n hp: 200,\n atk: 3,\n alive: true\n }\n unit_id += 1\n units.push unit\n elsif c == 'E'\n grid[x][y] = '.'\n unit = {\n x: x,\n y: y,\n type: 'elf',\n display: 'E',\n id: unit_id,\n hp: 200,\n atk: elf_attack,\n alive: true\n }\n unit_id += 1\n units.push unit\n elsif c == '.'\n grid[x][y] = '.'\n end\n x += 1\n end\n y += 1\n end\n {\n grid: grid,\n units: units,\n max_x: max_x,\n max_y: max_y,\n game_over: false,\n elf_died: false\n }\nend", "def get_game_genre\n\t\t#initialize variable\n\t\tchosen_line = nil\n\t\t\n\t\t#Get the cliche\n\t\tFile.foreach(\"game_genres.txt\").each_with_index do |line, number|\n\t\t\tchosen_line = line if rand < 1.0/(number+1)\n\t\tend\n\t\t\n\n\treturn chosen_line.chomp\n\tend", "def get_parts_from craft_file\n #get indexes of start and end lines for each PART\n start_indexes = craft_file.each_with_index.select{|l,ind| l =~ /^PART/}.map{|p| p[1]} #Parts start on lines with 'PART' at the start of a line\n end_indexes = craft_file.each_with_index.select{|l,ind| l =~ /^}/}.map{|p| p[1]} #Parts end on lines with '}' at the start of a line\n raise \"PART/braket mismatch\" unless start_indexes.count == end_indexes.count #just incase\n part_indexes = start_indexes.zip(end_indexes) #combine start and end indexes into pairs [[start_ind, end_ind], [start_ind, end_ind]...]\n\n part_indexes.map do |start_index, end_index|\n part_data = craft_file[start_index..end_index] #get all lines for a PART\n if @mode == :servo\n significant_lines = part_data[2..14]\n significant_lines.join\n else\n significant_lines = part_data.select{|line| PartKeys.include?(line.split(\"=\")[0].strip) } #select the lines with required keys \n #discard any data following an underscore (ID references), remove leading/trailing whitespace, new-line/tab chars, and join lines together. 
\n significant_lines.map{|line| line.split(\"_\")[0].strip }.join \n end\n\n end\n end", "def cards\n # Open and iterate on file contents to create individual cards, closes automatically\n File.open(filename, 'r') do |file|\n file.readlines.map do |line|\n question, answer, category = line.chomp.split(',')\n Card.new(question, answer, category.to_sym)\n end\n end\n end", "def read_attacks_file(path)\n ret_arr = []\n \n begin\n read_file_lines(path) {\n |pos_txt| \n pos_txt = pos_txt[1..-2].split(\",\")\n pos = Position.new(pos_txt[0],pos_txt[1])\n ret_arr.push(pos)\n }\n rescue \n return nil \n end \n return ret_arr\nend", "def get_crp(full_aln_path, full_crp_path)\n array_crps = []\n arr = []\n flag_para = false\n alns = get_aln(full_aln_path)\n index_aln = 0\n # File.open(DATA_PATH + \"/full_crp_kigoshi.txt\", 'r').each_with_index do |line, index|\n File.open(full_crp_path, 'r').each_with_index do |line, index| \n if (index%3 == 0)\n arr = []\n arr << line\n next\n end\n\n if (index - 1)%3 == 0\n arr << line\n next\n end\n\n if (index - 2)%3 == 0\n arr << line\n arr << alns[index_aln]\n array_crps << arr\n index_aln += 1\n end\n end\n return array_crps\n end", "def read_attacks_file(path)\n positionArray = Array.new()\n index = 0\n read_file_lines(path) {|string|\n if string =~ /^\\(([0-9]+),([0-9]+)\\)$/\n newPosition = Position.new($1.to_i, $2.to_i)\n positionArray[index] = newPosition\n index = index + 1\n end\n }\n return positionArray\nend", "def read (file)\n\tif (!File.exist?(file))\n\t\tputs \"File #{file} does not exist.\"\n\tend\n\n\ttriangle = []\n\n\tf = File.new(file)\n\n\tf.each do |line|\n\t\ttriangle << line.strip.split(\" \").collect{|i| i.to_i}\n\tend\n\n\treturn triangle\nend", "def identify_craft_in campaign_name\n dir = File.join(self.path, \"saves\", campaign_name, \"Ships\")\n Dir.chdir(dir)\n {\n :vab => Dir.glob(\"VAB/*.craft\").map{|craft| craft.gsub(\"VAB/\", \"\")}, \n :sph => Dir.glob(\"SPH/*.craft\").map{|craft| craft.gsub(\"SPH/\", \"\")}\n }\n end", "def choose_word\n open('word_list.txt') do |file|\n file_content = file.readlines\n file_content[rand(file_content.size)].strip\n end\n end", "def load_map_from_file(src_file)\n File.open(src_file, \"r\") { |f| \n f.lines.each { |line| \n cave_a = line.split[0]\n cave_b = line.split[1]\n \n add_cave(TaM::Cave.new(cave_a))\n add_cave(TaM::Cave.new(cave_b))\n \n tunnel = TaM::Tunnel.new(@tunnels.size, @caves[cave_a], @caves[cave_b]) \n add_tunnel(tunnel)\n @caves[cave_a].add_tunnel(tunnel)\n @caves[cave_b].add_tunnel(tunnel)\n }\n }\n end", "def load_char(character_name)\n # condition to check if user entered character name has associated file\n if File.exists?(\"#{character_name}.txt\")\n # open file and initialize hash\n character_file = File.open(\"#{character_name}.txt\")\n character_array = []\n # loop through file for array population\n while ! character_file.eof?\n # read each line from file\n line = character_file.gets.chomp\n\n # split each line on tab\n data_array = line.split(\",\")\n\n # populate array from character file\n character_array << data_array\n end\n\n # close file\n character_file.close\n\n return character_array\n end\nend", "def loadChar(fileName, set)\n # input file\n File.open(fileName, \"r\") do |inputFile|\n while line = inputFile.gets\n line.chomp!\n next if line.empty? || line[0,1] == '#'\n set << line\n end\n end\nend", "def census_reader(file)\n\t\t\tindex = 0 # Key value for hash table will be 0 - 78, There are 77 Neighbourhoods in Chicago. 
\n\t\t\tCSV.foreach(file) do |line| \n\t\t\t # Key = [0] COMMUNITY AREA NAME, [1] % AGED 25+ wo HS DIPLOMA, [2]PER CAPITA INCOME \n\t\t\t $census[index] = [line[1].strip.to_s, line[5].to_f, line[7].to_i]\n\t\t\t index += 1\n\t\t\tend\t\n\t\t\t$census.delete(78) #Delete last line of census file giving total data of Chicago.\n\t\t\treturn $census\n\t\tend", "def generate_words\n ret = []\n\n File.open('enable.txt').each do |line|\n new_line = line\n # We don't care for the new line character in the game of hangman.\n new_line = new_line.delete(\"\\n\")\n ret << new_line\n end\n\n return ret\nend", "def packets file\n Capp.open(file).loop\n end", "def new_game\n dictonary = File.new('dictonary.txt', 'r')\n cleaned_dictonary = dictonary.readlines(chomp: true).select { |word| word.length >= 5 && word.length <= 12 }\n dictonary.close\n word = cleaned_dictonary.sample\n set_up_game(word, '_' * word.length)\n end", "def read_ships_file(path)\n newBoard = GameBoard.new 10, 10\n numShips = 0\n read_file_lines(path) {|string| \n lineArray = string.split\n if numShips >= 5\n return newBoard\n end\n\n lineArray[1] = lineArray[1].delete(\",\")\n lineArray[2] = lineArray[2].delete(\",\")\n\n if lineArray[0] =~ /^\\(([0-9]+),([0-9]+)\\)/ && lineArray[1] =~ /^(Up|Down|Right|Left)$/ && lineArray[2] =~ /^[1-5]$/\n lineArray[0] =~ /^\\(([0-9]+),([0-9]+)\\)/\n row = $1.to_i\n column = $2.to_i\n newPosition = Position.new(row, column)\n newShip = Ship.new(newPosition, lineArray[1], lineArray[2].to_i)\n if newBoard.add_ship(newShip)\n numShips = numShips + 1\n end\n end \n }\n\n if numShips != 5 \n return nil\n end\n\n return newBoard\nend", "def initialize file\n word_set = Set.new\n\n File.open(file, \"r\") do |f|\n f.each_line do |raw_line|\n line = raw_line.strip.upcase\n\n if HangmanGame.acceptable_word? line\n word_set.add line\n end\n end\n end\n\n @words = word_set.to_a.freeze\n\n Random.srand\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This gets a business model from a list in a text file
def get_business_model #initialize variable chosen_line = nil #Get the cliche File.foreach("business_models.txt").each_with_index do |line, number| chosen_line = line if rand < 1.0/(number+1) end return chosen_line.chomp end
[ "def load_file(f)\n all_orgs = []\n f.lines do |line|\n next unless f.lineno > 3\n org = Organism.new\n fields = line.split(\"\\s\")\n org.taxid = fields[0]\n org.lineage = fields[1]\n org.name = fields[2].gsub(/\"/,\"\")\n org.rank = fields[3]\n org.number = fields[4]\n all_orgs.push(org)\n end\n return all_orgs\nend", "def generate_list\n File.open(@filename, 'r') do |f|\n f.lineno = 1\n f.each do |line|\n todo_arr = line.squeeze(' ').split('|')\n puts todo_arr\n todo = Item.new(todo_arr[1], :completed => todo_arr[2], :created_at => todo_arr[3])\n @todos << todo\n puts @todos.inspect\n end\n end\n end", "def list_of_food\n \n foodlist_file = File.open(\"./classes/foodlist.txt\", \"r\")\n foods = read_foodlist(foodlist_file)\n\n return foods\nend", "def define_models_from_file(file)\n define_models_from_json_string(File.read(file)) if file\n end", "def read_multi_hmm_file(filename)\n\t\tfile = File.new(filename, \"r\")\n\t\tmodels = {}\n\t\tcurrent_model = nil\n\n\t\twhile (line = file.gets)\n\t\t\tif line =~ /^~(.+) \"(.+)\"/\n\t\t\t\tcurrent_model = HMM.new\n\t\t\t\tmodels[$2] = current_model\n\t\t\telsif line =~ /\\<([A-Z]+)\\>(.+)$/\n\t\t\t\tif current_model\n\t\t\t\t\tcurrent_model.set($1, $2)\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tcurrent_model.update_last(line.split(\" \").map { |n| n.to_f }) if current_model\n\t\t\tend\n\t\tend\n\t\tfile.close\n\n\t\tmodels\n\tend", "def read_all\r\n people = []\r\n File.open('people', 'r') do |file|\r\n file.each do |line|\r\n name, age = line.split(', ')\r\n people << Person.new(name, age)\r\n end\r\n end\r\n\r\n people\r\nend", "def read_classifications(filepath)\n File.readlines(filepath).map do |line|\n label, *vector = line.split(',').map(&:to_i)\n Classification.new(label, vector)\n end\nend", "def read_file(master_list, fileName, sepRegex, reorder)\n\tFile.foreach(\"codetest_files/input_files/\"+fileName) do |x|\n\t\tnewS = x.gsub(/\\n/, \"\").split(sepRegex)\n\t\tnewO = {\n\t\t\t\"LastName\": newS[0],\n\t\t\t\"FirstName\": newS[1],\n\t\t\t\"Gender\": newS[reorder[0]][0] == \"M\" ? 
\"Male\" : \"Female\",\n\t\t\t\"DOB\": newS[reorder[1]].gsub(/-/, \"/\"),\n\t\t\t\"FavoriteColor\": newS[reorder[2]]\n\t\t}\n\t\t# pushing object into master array\n\t\tmaster_list << newO\n\tend\n\tmaster_list\nend", "def read_from_file\n File.readlines('todolist.txt').each do |line|\n add_item(line.chop)\n end\n end", "def split_model_file(file)\n code = File.read(file)\n pre = []\n start_fields = nil\n fields = []\n end_fields = nil\n post = []\n state = :pre\n line_no = 0\n field_block_end = nil\n code.each_line do |line|\n # line.chomp!\n line_no += 1\n case state\n when :pre\n if line =~ /^\\s*fields\\s+do(?:\\s(.+))?$/\n field_block_end = /^\\s*end(?:\\s(.+))?$/\n start_fields = line\n state = :fields\n elsif line =~ /^\\s*fields\\s+\\{(?:\\s(.+))?$/\n field_block_end = /^\\s*\\}(?:\\s(.+))?$/\n start_fields = line\n state = :fields\n else\n pre << line\n end\n when :fields\n if line =~ field_block_end\n end_fields = line\n state = :post\n else\n if line =~ /^\\s*field\\s+:(\\w+).+?(#.+)?$/\n name = $1\n comment = $2\n elsif line =~ /^\\s*field\\s+['\"](.+?)['\"].+?(#.+)?$/\n name = $1\n comment = $2\n elsif line =~ /^\\s*(\\w+).+?(#.+)?$/\n name = $1\n comment = $2\n else\n name = comment = nil\n end\n fields << [line, name, comment]\n end\n when :post\n post << line\n end\n end\n if !start_fields\n i = 0\n (0...pre.size).each do |i|\n break if pre[i] =~ /^\\s*class\\b/\n end\n raise \"Model declaration not found in #{file}\" unless i<pre.size\n post = pre[i+1..-1]\n pre = pre[0..i]\n pre << \"\\n\"\n start_fields = \" fields do\\n\"\n end_fields = \" end\\n\"\n post.unshift \"\\n\" unless post.first.strip.empty?\n fields = []\n end\n [pre,start_fields,fields,end_fields,post]\n end", "def read_from_file filename\n @list = []\n File.readlines(filename).each do |line|\n line = line.split(/\\t/)\n unless is_comment? line\n name = line[0].strip\n cidr = line[1].strip\n description = line[2].strip\n @list.push ExampleNetwork.new name, cidr, description\n end\n end\n File.delete(filename)\n self\n end", "def load_from_file(params = {})\n file = params[:file] || raise('File path required for load_from_file')\n list = List.new(params)\n dsl = ListDSL.new(list, params)\n dsl.instance_eval(File.read(file), File.realpath(file, Dir.pwd))\n list\n end", "def read_from_file(filename)\r\n\t\tIO.readlines(filename).each { |task| \r\n\t\t\tstatus, *description = task.split(' : ')\r\n\t\t\tadd(Task.new(description.join(' : ').strip, status.include?('X'))) }\r\n\tend", "def from(file_or_url)\n file = read_file_or_url(file_or_url)\n props = file.read.split(/\\n/)\n hash = {}\n props.each do |line|\n attr, value = line.split('=')\n attr = attr.to_sym\n if Types::ARRAY_ATTRIBUTES.include?(attr)\n hash[attr] = value.split(',')\n else\n hash[attr] = value\n end\n end\n ::Arduino::Library::Model.from_hash(hash)\n end", "def read_instances( model, file, options={} )\n instances = YAML::load_stream( File.open( file ) )\n instances.each do |i|\n model.new( i )\n end\n end", "def read_listfile(listfile)\n lines = File.open(listfile).readlines\n lines.last.chomp! 
unless lines.length == 0\n end", "def parse_file\n File.open(\"FoodDB.txt\", \"r\") do |f|\n f.each_line do |line|\n line.chomp!\n command = line.split(\",\")\n name = command[0]\n type = command[1]\n info = command[2]\n #switches on type\n case type\n when \"b\"\n addFood(name, info)\n when \"r\"\n length = command.length-1\n ingredients = command[2..length]\n addRecipe(name,ingredients)\n end\n end\n end\n end", "def read(file)\n i = 1\n File.foreach(file) do |line|\n row = line.strip.split(/\\s+/)\n raise Price::Error::InvalidLine, \"Line #{i} is invalid '#{line}'\" if row[0].nil? || row[1].nil?\n @items << OrderItem.new(row[0].to_i, row[1])\n i += 1\n end\n end", "def load_model(model_class)\n begin\n file_content = File.read(model_class.db_filename)\n json_data = JSON.parse(file_content)\n rescue Errno::ENOENT\n # The file does not exists\n json_data = []\n end\n json_data.each do |data_hash|\n new_object = model_class.from_hash(data_hash)\n new_object.save\n end\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This gets a game genre from a list in a text file
def get_game_genre #initialize variable chosen_line = nil #Get the cliche File.foreach("game_genres.txt").each_with_index do |line, number| chosen_line = line if rand < 1.0/(number+1) end return chosen_line.chomp end
[ "def genres\n @genres ||= File.readlines(txt('genres')).map(&:strip)\nend", "def load_genre(file_name)\n\t\tindata = []\n\t\tCSV.foreach(\"#{file_name}\", col_sep: \"|\") do |row|\n\t\t\tgenre_for_movie = [row[5].to_i,row[6].to_i,row[7].to_i,row[8].to_i,row[9].to_i,row[10].to_i,row[11].to_i,row[12].to_i,row[13].to_i,row[14].to_i,row[15].to_i,row[16].to_i,row[17].to_i,row[18].to_i,row[19].to_i,row[20].to_i,row[21].to_i,row[22].to_i,row[23].to_i ]\n\t\t\tindata.push({\"movie_id\"=>row[0].to_i,\"genres\"=>genre_for_movie})\n\t\tend\n\t\treturn indata\n\tend", "def list_genre\n genre = gets.chomp\n Genre.all.each do |a|\n if a.name == genre\n a.songs.collect { |s| puts \"#{s.artist.name} - #{s.name} - #{s.genre.name}\" }\n end\n end\n end", "def print_genres\n files.sort.each do |filename|\n puts Song.create_from_filename(filename).genre.name\n end\n end", "def list_genre\n list_songs\n puts \"\\n-Please choose a genre from the playlist by entering the name of the desired genre:\"\n genre_request = gets.strip.downcase\n\n if genre = Genre.find_by_name(genre_request)\n genre.songs.each do |song|\n puts \"#{song.artist.name} - #{song.name} - #{song.genre.name}\"\n end\n else\n puts \"\\n-Could not find a genre by the name of '#{genre_request}' in the playlist, please check the spelling or choose another genre from the playlist:\"\n end\n end", "def read_genre()\n\tcount = $genre_names.length\n\ti = 0\n\tputs 'Genre: '\n\twhile i < count\n\t\tputs \"#{i} \" + $genre_names[i]\n\t\ti += 1\n\tend\n\tselectedGenre = read_integer_in_range('Please select your album genre.', 0, count - 1)\n\tselectedGenre\nend", "def genre\n fetch('game.genre')\n end", "def genre(path)\n mp3 = first_of_type(path, \"mp3\")\n results = run_cmd(cmd('id3v2 -l', mp3)) if mp3\n return tag_value(\"Genre\", results)\n end", "def genre\n marc_genre_leader = Traject::TranslationMap.new(\"marc_genre_leader\")\n marc_genre_007 = Traject::TranslationMap.new(\"marc_genre_007\")\n\n results = marc_genre_leader[ record.leader.slice(6,2) ] ||\n marc_genre_leader[ record.leader.slice(6)] ||\n record.find_all {|f| f.tag == \"007\"}.collect {|f| marc_genre_007[f.value.slice(0)]}\n\n [results].flatten\n end", "def list_songs_by_genre\n puts \"Please enter the name of a genre:\"\n genreName = gets.strip\n \n # Stores song names to be sorted later\n songNames = []\n \n # Gets matching genres and stores song names that match.\n Song.all.each{|songInstance| \n if (songInstance.genre.name == genreName)\n songNames << songInstance.name\n end\n }\n \n # sort songNames\n songNames.sort!\n \n # Output list of songs matching genre\n songNames.each_with_index{|songName, index|\n puts \"#{index + 1}. #{Song.find_by_name(songName).artist.name} - #{Song.find_by_name(songName).name}\"\n }\n end", "def list_songs_by_genre\n # Get artist\n puts \"Please enter the name of a genre:\"\n user_input = gets.chomp\n\n #Set genre object from input\n genre = Genre.find_by_name(user_input)\n\n # If genre exists, grab songs and output alphabetized by song artist name\n if genre != nil\n genre.songs.sort_by { |song| song.name }.each.with_index(1) { |song, index| puts \"#{index}. #{song.artist.name} - #{song.name}\" }\n end\n end", "def genres\n to_array search_by_itemprop 'genre'\n end", "def parse_genre(input)\n genres = []\n # Split fields\n input.split(',').each do |genre|\n ##\n # Start with a single empty genre string. Split the genre by spaces\n # and process each component. 
If a component does not have a slash,\n # concatenate it to all genre strings present in +temp+. If it does\n # have a slash present, duplicate all genre strings, and concatenate\n # the first component (before the slash) to the first half, and the\n # last component to the last half. +temp+ now has an array of genre\n # combinations.\n #\n # 'Traditional Heavy/Power Metal' => ['Traditional Heavy', 'Traditional Power']\n # 'Traditional/Classical Heavy/Power Metal' => [\n # 'Traditional Heavy', 'Traditional Power',\n # 'Classical Heavy', 'Classical Power']\n #\n temp = ['']\n genre.downcase.split.reject { |g| ['(early)', '(later)', 'metal'].include? g }.each do |g|\n if g.include? '/'\n # Duplicate all WIP genres\n temp2 = temp.dup\n\n # Assign first and last components to temp and temp2 respectively\n split = g.split '/'\n temp.map! { |t| t.empty? ? split.first.capitalize : \"#{t.capitalize} #{split.first.capitalize}\" }\n temp2.map! { |t| t.empty? ? split.last.capitalize : \"#{t.capitalize} #{split.last.capitalize}\" }\n\n # Add both genre trees\n temp += temp2\n else\n temp.map! { |t| t.empty? ? g.capitalize : \"#{t.capitalize} #{g.capitalize}\" }\n end\n end\n genres += temp\n end\n genres.uniq\n end", "def list_songs_by_genre\n puts \"Please enter the name of a genre:\"\n user_input = gets.chomp\n\n if Genre.find_by_name(user_input)\n found_genre = Genre.find_by_name(user_input)\n\n found_genre.songs.sort_by {|song| song.name}.each_with_index do |song, index|\n puts \"#{index + 1}. #{song.artist.name} - #{song.name}\"\n end\n end\n end", "def genres\n search_by_text('жанры').split(', ')\n end", "def list_songs_by_genre\n puts \"Please enter the name of a genre:\"\n input = gets.chomp\n if genre = Genre.find_by_name(input)\n sort_by_name = genre.songs.sort_by do |genre|\n genre.name\n end\n sort_by_name.each.with_index(1) do |genre, i|\n puts \"#{i}. #{genre.artist.name} - #{genre.name}\"\n end\n end\n end", "def store_genre\n genres = RSpotify::Recommendations.available_genre_seeds\n genre = @prompt.select('Which genre would you like to add to your list?', genres, filter: true)\n genre_details = {\n 'name' => genre.capitalize,\n 'type' => 'genre'\n }\n @mylist << genre_details\n update_file\n end", "def scrapping_genre(url)\n doc = Nokogiri::HTML(open(url))\n\n # To create our genre database + links\n doc.css(\"div.genre\").each do |div|\n genre = div.text.tr('»', '').chop # hash\n id = Genre.create(name: genre).id\n link = 'http://everynoise.com/' + (div.css('a')[0][\"href\"])\n # scraps artists info\n scrapping_artists(link, id)\n end\n end", "def genre(genre)\r\n Dir.open(\"#{@rootDir}/#{genre}\").each() do |filename|\r\n artist(genre, filename) if valid_dir?(\"#{@rootDir}/#{genre}/#{filename}\");\r\n song(genre, nil, nil, filename) if mp3?(filename)\r\n end\r\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The top left of the checkerboard should always be filled with [r]. You should assume the size input is always an integer. You should not assume the size input is always a positive number. An input size of <= 0 will return an empty string.
def checkerboard(size) for i in 1..size do i % 2 == 0 ? checker=0 : checker=1 for j in 1..size do if checker == 1 print "[r]" checker = 0 else print "[b]" checker = 1 end end puts "\n" end end
[ "def checkerboard(size)\n return \"\" if size <= 0\n completed_board = \"\"\n\n first_line = size.times do |n|\n if n.odd?\n completed_board << \"[r]\"\n else\n completed_board << \"[b]\"\n end\n end\n\n # binding.pry\nend", "def checker_board(size)\n\n size.times do |i|\n puts \"X \" * size\n puts \" X\" * size\n end\n\n\n\nend", "def create_board_line(size, value = \"blank\")\n value == \"blank\" ? cell = \" \" : cell = \"-----\"\n value == \"blank\" ? divider = \"|\" : divider = \"+\"\n line = ''\n (1..size).each do |x|\n endcell = x % size != 0 ? divider : \"\\n\"\n line << cell + endcell\n end\n line\nend", "def generate_board(size)\n board = []\n size.times do |x1|\n board[x1] = []\n size.times do |y1|\n board[x1][y1] = BLANK_SPACE\n end\n end\n board\n end", "def board_size\n size = nil\n\n until size\n clear_screen \"Please enter the board size you wish to play on. \\nAny number is valid (though less than 3 is kinda dumb, trust me)\"\n size = gets.chomp.to_i\n end\n\n size\n end", "def empty_space\r\n\ti = @rng.rand((@size_x-1)*(@size_y-1) - @snek.size - 1) # i-ième case vide du board\r\n\t0.upto(@size_x) do |x|\r\n\t\t0.upto(@size_y) do |y|\r\n\t\t\tif i <= 0 && @board[x][y] == ' '\r\n\t\t\t\treturn [x,y]\r\n\t\t\telse\r\n\t\t\t\ti -= 1\r\n\t\t\tend\r\n\t\tend\r\n\tend\r\n\treturn nil # no more space\r\n end", "def board_string\n result = \" 0 1 2\\n\"\n (0...3).each do |row|\n result += \"#{row} \"\n (0...3).each do |col|\n value = maybe_colorize(row, col)\n result += value + '|'\n end\n result = result[0..-2] + \"\\n\"\n result += \" #{'-' * 5}\\n\" unless row == 2 # No horizontal divider after last row.\n end\n result\n end", "def board_to_string\n line = \"#{\"----\" * @size}\"\n board_as_string = \"#{line}\"\n @board.each do |columns|\n row = \"\\n\"\n columns.each do |element|\n element = \" \" if element.nil? # keeps alignment\n row = \"#{row}| #{element} \"\n end\n board_as_string << \"#{row}|\\n#{line}\"\n end\n return board_as_string\n end", "def build_square size\n #starting positions\n x,y = size/2,0\n\n # build square\n (1..size**2).inject(Array.new(size){[]}) do |arr,i|\n\n # store current number in square\n arr[y][x] = i\n\n # move up and left\n x = (x+1)%size\n y = (y-1)%size\n\n # undo move and move down if space is taken\n if arr[y][x]\n y = (y+2)%size\n x = (x-1)%size\n end\n arr\n end\n end", "def print_board\n puts ''\n puts ' |B| (black) = right color, right space'\n puts ' |W| (white) = right color, wrong space'\n puts ''\n puts ''\n line = ' ' + '----' * @board.holes\n puts line\n board_size = @board.decoding_board.size - 1\n\n board_size.downto(0) do |i|\n print format('%7s ', \"#{i + 1}. \")\n\n @board.holes.times do |j|\n print j.nil? ? 
'| |' : '|' + ' '.color(@board.decoding_board[i][j]) + '|'\n end\n print ' '\n\n @board.feedback_board[i].each { |j| print \"|#{j}|\" }\n print \"\\n\"\n puts line\n end\n\n puts ''\n end", "def construct_board\n board_dimensions = []\n puts \"Select board width between 1 and 500\".colorize(:green)\n board_width = gets.chomp\n puts \"Select board height between 1 and 500\".colorize(:green)\n board_height = gets.chomp\n puts sep = \"------------------------------------------------------\".colorize(:yellow)\n validate_width_is_number = board_width.to_i\n validate_height_is_number = board_height.to_i\n if validate_width_is_number <= 0 || validate_height_is_number > 500 && validate_height_is_number <= 0 || validate_height_is_number > 500\n puts \"Selection must be a number and between 1 and 500\".colorize(:red)\n return construct_board\n else \n validated_width = validate_width_is_number\n validated_height = validate_height_is_number\n board_size = validated_width.to_i * validated_height.to_i \n puts \"You have selected a board width of #{validated_width} and a board height of #{validated_height}.\".colorize(:green)\n puts \"Constructing a board with #{board_size} tiles.\".colorize(:green)\n if validated_width == validated_height\n board_dimensions.push(validated_width, validated_height)\n puts sep\n else\n puts \"Sorry, the board's width and height must be equal to each other. Try again\".colorize(:red)\n puts sep\n return construct_board\n end\n end\n return board_dimensions\nend", "def initializeBoard()\n #Init starting board according to size\n @board[@size/2-1][@size/2-1] = BLACK;\n @board[@size/2-1][@size/2] = WHITE;\n @board[@size/2][@size/2-1] = WHITE;\n @board[@size/2][@size/2] = BLACK;\n\n end", "def board_size()\n p \"What board size would you want? odd numbers besides 1 are valid choices; 3 is a normal 3 by 3 board and 5 would make a 5 by 5 board.\"\n choice = gets.chomp\n if choice.to_i.odd? == true && choice.to_i > 1 && choice.to_i < 10\n choice.to_i\n else\n \tp \"wrong size choice\"\n board_size()\n end\nend", "def display_board\n cell_one = \" \"\n cell_two = \" \"\n cell_three = \" \"\n cell_four = \" \"\n cell_five = \" \"\n cell_six = \" \"\n cell_seven = \" \"\n cell_eight = \" \"\n cell_nine = \" \"\n divider = \"|\"\n row = \"-----------\"\n puts cell_one + divider + cell_two + divider + cell_three\n puts row\n puts cell_four + divider + cell_five + divider + cell_six\n puts row\n puts cell_seven + divider + cell_eight + divider + cell_nine\nend", "def display_board\n cell = \" \"\n print cell+\"|\"+cell+\"|\"+cell+\"\\n-----------\\n\"+cell+\"|\"+cell+\"|\"+cell+\"\\n-----------\\n\"+cell+\"|\"+cell+\"|\"+cell+\"\\n\"\n\nend", "def create_board_indicator\n @row_of_first_empty_square = 0\n @col_of_first_empty_square = 0\n first_empty_square_found = false\n board_indicator = initialize_empty_board\n (MIN_ROW_POSITION..MAX_ROW_POSITION).each do |j| \n (MIN_COL_POSITION..MAX_COL_POSITION).each do |i|\n #board_indicator[j][i] = 1 if (@board[j][i] != 0 && @board[j][i] != '')\n if (@board[j][i] == 0 || @board[j][i] == '')\n if !first_empty_square_found\n puts 'first empty'\n first_empty_square_found = true\n @row_of_first_empty_square = j\n @col_of_first_empty_square = i\n end\n else\n board_indicator[j][i] = 1\n end\n end\n end\n board_indicator\n end", "def size\n @board.size\n end", "def blank_board\n 0b111111100000100000100000100000100000100000100000100000100000100000\nend", "def size\n\t\t@board.length\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The number of alarms. Returns an Integer.
def count @alarms.objects.find { |o| o.name == "count" }.val end
[ "def alarms\n data[:alarms]\n end", "def count\n @monkeys.count\n end", "def alarms\n data.alarms\n end", "def aps_notification_count_for_application(application_name)\n redis.llen(aps_application_queue_key(application_name)).to_i\n end", "def amountOfNotifications\r\n @notifications.length\r\n end", "def ripple_count\n resplashes.map(&:ripple_count).sum + resplashes.length\n end", "def num_missed_calls\n missed_contacts.count\n end", "def count\n @lights.reduce(0) { |memo, row| memo + row.count { |c| c } }\n end", "def get_at_words_count\n @redis.llen(\"store:ats\").to_i\n end", "def android_count\n return @android_count\n end", "def count\n ring.count\n end", "def notification_count\n @notifications.size\n end", "def alarm_arn\n data[:alarm_arn]\n end", "def countNumOnBoard()\n count = 0 ;\n @onBoardList.each{|demand|\n count += demand.numPassenger ;\n }\n return count ;\n end", "def getAlarms\n req = DaemonGetAlarmsRequest.new\n rc = nil\n sendAndRecv(req){ |resp|\n rc = resp.alarms\n }\n rc\n end", "def unread_notification_count\n unread_notifications.count\n end", "def count\n self.at('/RETS/COUNT')['Records'].to_i\n end", "def activities_count\n return 0 if @activities.nil?\n @activities.size\n end", "def requested_count\n notifications.count\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Query alarms. options - A Hash of options: :start - A DateTime instance describing the earliest time to query history for. :end - A DateTime instance describing the latest time to query history for.
def query options from = options.fetch :start to = options.fetch :end query = @alarms.objects.find { |o| o.name == "query" } filter = OBIX::Builder.new do obj do abstime name: "start", val: from.iso8601 abstime name: "end", val: to.iso8601 end end.object alarms = query.invoke filter alarms.objects.find { |o| o.name == "data" } end
[ "def query options\n from = options.fetch :start\n to = options.fetch :end\n\n query = @history.objects.find { |o| o.name == \"query\" }\n\n filter = OBIX::Builder.new do\n obj do\n abstime name: \"start\", val: from.iso8601\n abstime name: \"end\", val: to.iso8601\n end\n end.object\n\n history = query.invoke filter\n\n history.objects.find { |o| o.name == \"data\" }\n end", "def query options\n from = options.fetch :start\n to = options.fetch :end\n\n query = self.records.objects.find { |o| o.name == \"query\" }\n\n filter = OBIX::Builder.new do\n obj do\n abstime name: \"start\", val: from.iso8601\n abstime name: \"end\", val: to.iso8601\n end\n end.object\n\n filter_result = query.invoke filter\n filter_result.objects.find { |o| o.name == \"data\" }\n end", "def describe_alarms(options={})\n if alarm_names = options.delete('AlarmNames')\n options.merge!(AWS.indexed_param('AlarmNames.member.%d', [*alarm_names]))\n end\n request({\n 'Action' => 'DescribeAlarms',\n :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarms.new\n }.merge(options))\n end", "def describe_alarm_history(options={})\n request({\n 'Action' => 'DescribeAlarmHistory',\n :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarmHistory.new\n }.merge(options))\n end", "def describe_alarms_for_metric(options)\n if dimensions = options.delete('Dimensions')\n options.merge!(AWS.indexed_param('Dimensions.member.%d.Name', dimensions.map {|dimension| dimension['Name']}))\n options.merge!(AWS.indexed_param('Dimensions.member.%d.Value', dimensions.map {|dimension| dimension['Value']}))\n end\n request({\n 'Action' => 'DescribeAlarmsForMetric',\n :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarmsForMetric.new\n }.merge(options))\n end", "def query\n begin\n response = resource[\"/query/#{app}\"].post(:days => options[:days], :url => options[:url], :mode => options[:mode])\n rescue RestClient::InternalServerError\n display \"An error has occurred.\"\n end\n display response.to_s\n end", "def query_scheduled_messages(options = nil)\n require_relative 'scheduledmessage'\n @api.cursor(ScheduledMessage, get_base_api_path() + \"/scheduled\", options)\n end", "def find_future_events(options={})\n formatted_start_min = encode_time(Time.now)\n query = \"?timeMin=#{formatted_start_min}#{parse_options(options)}\"\n event_lookup(query)\n end", "def getAlarms\n req = DaemonGetAlarmsRequest.new\n rc = nil\n sendAndRecv(req){ |resp|\n rc = resp.alarms\n }\n rc\n end", "def alarms(tracker_id, options = {})\n get_json(path_user_version(\"/devices/tracker/#{tracker_id}/alarms\", options))\n end", "def all(options = {})\n constraints = [\n (\"SystemModstamp < #{options[:before].utc.iso8601}\" if options[:before]),\n (\"SystemModstamp >= #{options[:after].utc.iso8601}\" if options[:after]),\n *options[:conditions],\n ]\n\n DB.client.query(query(*constraints)).map do |record|\n instance_type.new(@record_type, record, @mapping)\n end\n end", "def query_broadcasts(options = nil)\n require_relative 'broadcast'\n @api.cursor(Broadcast, get_base_api_path() + \"/broadcasts\", options)\n end", "def query(metric_name, options={})\n key = build_key(metric_name)\n parse_time_range(options)\n key << assemble_for(options)\n if options[:id]\n key = \"dnid:#{options[:id]}\"\n real_key = DulyNoted.redis.get key\n if options[:meta_fields]\n options[:meta_fields].collect! 
{ |x| x.to_s }\n result = {}\n options[:meta_fields].each do |field|\n result[field] = DulyNoted.redis.hget real_key, field\n end\n results = [result]\n else\n results = [DulyNoted.redis.hgetall(real_key)]\n end\n else\n keys = find_keys(key)\n grab_results = Proc.new do |metric|\n if options[:meta_fields]\n options[:meta_fields].collect! { |x| x.to_s }\n result = {}\n options[:meta_fields].each do |field|\n result[field] = DulyNoted.redis.hget metric, field\n end\n result\n else\n DulyNoted.redis.hgetall metric\n end\n end\n results = []\n if options[:time_start] && options[:time_end]\n keys.each do |key|\n results += DulyNoted.redis.zrangebyscore(key, options[:time_start].to_f, options[:time_end].to_f).collect(&grab_results)\n end\n else\n keys.each do |key|\n results += DulyNoted.redis.zrange(key, 0, -1).collect(&grab_results)\n end\n end\n end\n return results\n end", "def get_query_call_recordings(account_id,\r\n from: nil,\r\n to: nil,\r\n min_start_time: nil,\r\n max_start_time: nil)\r\n # Prepare query url.\r\n _query_builder = config.get_base_uri(Server::VOICEDEFAULT)\r\n _query_builder << '/api/v2/accounts/{accountId}/recordings'\r\n _query_builder = APIHelper.append_url_with_template_parameters(\r\n _query_builder,\r\n 'accountId' => { 'value' => account_id, 'encode' => false }\r\n )\r\n _query_builder = APIHelper.append_url_with_query_parameters(\r\n _query_builder,\r\n 'from' => from,\r\n 'to' => to,\r\n 'minStartTime' => min_start_time,\r\n 'maxStartTime' => max_start_time\r\n )\r\n _query_url = APIHelper.clean_url _query_builder\r\n\r\n # Prepare headers.\r\n _headers = {\r\n 'accept' => 'application/json'\r\n }\r\n\r\n # Prepare and execute HttpRequest.\r\n _request = config.http_client.get(\r\n _query_url,\r\n headers: _headers\r\n )\r\n VoiceBasicAuth.apply(config, _request)\r\n _response = execute_request(_request)\r\n\r\n # Validate response against endpoint and global error codes.\r\n case _response.status_code\r\n when 400\r\n raise ApiErrorException.new(\r\n 'Something\\'s not quite right... Your request is invalid. Please' \\\r\n ' fix it before trying again.',\r\n _response\r\n )\r\n when 401\r\n raise APIException.new(\r\n 'Your credentials are invalid. Please use your Bandwidth dashboard' \\\r\n ' credentials to authenticate to the API.',\r\n _response\r\n )\r\n when 403\r\n raise ApiErrorException.new(\r\n 'User unauthorized to perform this action.',\r\n _response\r\n )\r\n when 404\r\n raise ApiErrorException.new(\r\n 'The resource specified cannot be found or does not belong to you.',\r\n _response\r\n )\r\n when 415\r\n raise ApiErrorException.new(\r\n 'We don\\'t support that media type. If a request body is required,' \\\r\n ' please send it to us as `application/json`.',\r\n _response\r\n )\r\n when 429\r\n raise ApiErrorException.new(\r\n 'You\\'re sending requests to this endpoint too frequently. Please' \\\r\n ' slow your request rate down and try again.',\r\n _response\r\n )\r\n when 500\r\n raise ApiErrorException.new(\r\n 'Something unexpected happened. 
Please try again.',\r\n _response\r\n )\r\n end\r\n validate_response(_response)\r\n\r\n # Return appropriate response type.\r\n decoded = APIHelper.json_deserialize(_response.raw_body)\r\n ApiResponse.new(\r\n _response,\r\n data: decoded.map { |element| CallRecordingMetadata.from_hash(element) }\r\n )\r\n end", "def query(options) # :nodoc:\n @next_token = options[:next_token]\n query_expression = build_conditions(options[:query_expression])\n # add sort_options to the query_expression\n if options[:sort_option]\n sort_by, sort_order = sort_options(options[:sort_option])\n sort_query_expression = \"['#{sort_by}' starts-with '']\"\n sort_by_expression = \" sort '#{sort_by}' #{sort_order}\"\n # make query_expression to be a string (it may be null)\n query_expression = query_expression.to_s\n # quote from Amazon:\n # The sort attribute must be present in at least one of the predicates of the query expression.\n if query_expression.blank?\n query_expression = sort_query_expression\n elsif !query_attributes(query_expression).include?(sort_by)\n query_expression += \" intersection #{sort_query_expression}\"\n end\n query_expression += sort_by_expression\n end\n # request items\n query_result = self.connection.query(domain, query_expression, options[:max_number_of_items], @next_token)\n @next_token = query_result[:next_token]\n items = query_result[:items].map do |name|\n new_item = self.new('id' => name)\n new_item.mark_as_old\n reload_if_exists(record) if options[:auto_load]\n new_item\n end\n items\n end", "def query_airtime_transactions(options = nil)\n require_relative 'airtimetransaction'\n @api.cursor(AirtimeTransaction, get_base_api_path() + \"/airtime_transactions\", options)\n end", "def alarms\n data.alarms\n end", "def alarms\n data[:alarms]\n end", "def get_query_metadata_for_account(account_id,\r\n from: nil,\r\n to: nil,\r\n min_start_time: nil,\r\n max_start_time: nil)\r\n # Prepare query url.\r\n _query_builder = config.get_base_uri(Server::VOICEDEFAULT)\r\n _query_builder << '/api/v2/accounts/{accountId}/recordings'\r\n _query_builder = APIHelper.append_url_with_template_parameters(\r\n _query_builder,\r\n 'accountId' => { 'value' => account_id, 'encode' => true }\r\n )\r\n _query_builder = APIHelper.append_url_with_query_parameters(\r\n _query_builder,\r\n 'from' => from,\r\n 'to' => to,\r\n 'minStartTime' => min_start_time,\r\n 'maxStartTime' => max_start_time\r\n )\r\n _query_url = APIHelper.clean_url _query_builder\r\n\r\n # Prepare headers.\r\n _headers = {\r\n 'accept' => 'application/json'\r\n }\r\n\r\n # Prepare and execute HttpRequest.\r\n _request = config.http_client.get(\r\n _query_url,\r\n headers: _headers\r\n )\r\n VoiceBasicAuth.apply(config, _request)\r\n _response = execute_request(_request)\r\n\r\n # Validate response against endpoint and global error codes.\r\n if _response.status_code == 400\r\n raise ApiErrorResponseException.new(\r\n 'Something\\'s not quite right... Your request is invalid. Please' \\\r\n ' fix it before trying again.',\r\n _response\r\n )\r\n elsif _response.status_code == 401\r\n raise APIException.new(\r\n 'Your credentials are invalid. 
Please use your Bandwidth dashboard' \\\r\n ' credentials to authenticate to the API.',\r\n _response\r\n )\r\n elsif _response.status_code == 403\r\n raise ApiErrorResponseException.new(\r\n 'User unauthorized to perform this action.',\r\n _response\r\n )\r\n elsif _response.status_code == 404\r\n raise ApiErrorResponseException.new(\r\n 'The resource specified cannot be found or does not belong to you.',\r\n _response\r\n )\r\n elsif _response.status_code == 415\r\n raise ApiErrorResponseException.new(\r\n 'We don\\'t support that media type. If a request body is required,' \\\r\n ' please send it to us as `application/json`.',\r\n _response\r\n )\r\n elsif _response.status_code == 429\r\n raise ApiErrorResponseException.new(\r\n 'You\\'re sending requests to this endpoint too frequently. Please' \\\r\n ' slow your request rate down and try again.',\r\n _response\r\n )\r\n elsif _response.status_code == 500\r\n raise ApiErrorResponseException.new(\r\n 'Something unexpected happened. Please try again.',\r\n _response\r\n )\r\n end\r\n validate_response(_response)\r\n\r\n # Return appropriate response type.\r\n decoded = APIHelper.json_deserialize(_response.raw_body)\r\n ApiResponse.new(\r\n _response,\r\n data: decoded.map { |element| RecordingMetadataResponse.from_hash(element) }\r\n )\r\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns the measure record associated with the device
def measure_for_device(device_id, measure_id) device = Device.where(:name => device_id) # use first on device to get record from association, then # use first again on measures to get measure record device.first.measures.where(:name => measure_id).first if device.any? end
[ "def definition\n if @sub_id\n QME::QualityMeasure.query_measures({'id' => @measure_id, 'sub_id' => @sub_id}, @bundle_id).first()\n else\n QME::QualityMeasure.query_measures({'id' => @measure_id}, @bundle_id).first()\n end\n end", "def measures\n instance.measures.values\n end", "def get_cell_measures(cell)\n query_cell_info '/cell/getMeasures', cell\n end", "def units_record() @records.get(GRT_UNITS); end", "def device_detail\n return @device_detail\n end", "def get_patient_result(patient_id)\n qm = QualityMeasure.new(@measure_id, @sub_id)\n measure = Builder.new(get_db, qm.definition, @parameter_values)\n records = get_db.collection('records')\n result = records.map_reduce(measure.map_function, \"function(key, values){return values;}\",\n :out => {:inline => true}, \n :raw => true, \n :finalize => measure.finalize_function,\n :query => {:medical_record_number => patient_id, :test_id => @parameter_values['test_id']})\n raise result['err'] if result['ok']!=1\n result['results'][0]['value']\n end", "def measure(name = :mandatory, opts = {}, &block)\n raise OEDLMissingArgumentException.new(:measure, :name) if name == :mandatory\n\n mDef = appDefinition.measurements[name]\n if (mDef == nil)\n raise \"Unknown measurement point '#{name}'\"\n end\n m = OMF::EC::OML::MStream.new(name, @appRef, opts, self, &block)\n @measurements << m\n return m\n end", "def mag_record() @records.get_data(GRT_MAG); end", "def registered_measures\n MeasureRegistry.measures\n end", "def measures(cube_unique_name)\n @measures = {} if @measures.nil?\n\n @measures[cube_unique_name] ||= cube(cube_unique_name).get_measures.map do |measure|\n {\n :unique_name => measure.get_unique_name,\n :name => measure.get_caption\n }\n end\n end", "def measures\n @measures ||= begin\n measures = StringyFi::Measures.new\n part = parts.first\n part.xpath('measure').each_with_index do |part_measure, m|\n measures[m] ||= []\n part_measure.xpath('note').each_with_object(measures[m]) do |note, memo|\n next unless note.xpath(\"staff\").text == \"1\"\n next unless note.xpath(\"voice\").text == \"1\"\n pitch = note.xpath(\"pitch\")\n duration = note.xpath(\"duration\").text.to_i\n actual_notes = note.xpath(\"actual-notes\").text.to_i\n normal_notes = note.xpath(\"normal-notes\").text.to_i\n if actual_notes > 0 and normal_notes > 0\n duration = duration * 1.0 * normal_notes / actual_notes\n end\n duration_type = note.xpath(\"type\").text\n memo << StringyFi::Note.new(\n pitch.xpath('step').text,\n pitch.xpath('octave').text,\n pitch.xpath('alter').text,\n duration,\n duration_type\n )\n end\n end\n measures\n end\n end", "def metric_devices\n return @metric_devices\n end", "def get_record_timing\r\n @record.get_timing\r\n end", "def measurements\n return @values['measurements'] if @values.key?('measurements')\n @values['measurements'] = {}\n @values['measurements']\n end", "def find_measure(instance_name)\n @items.find { |i| i.name == instance_name }\n end", "def measure(key, definition, cast=nil)\n @measures << MeasureDSL.new(key, definition, cast)\n end", "def get_user_defined_measures\n\t\tmeasures = ComparisonMeasure.find(:all,:conditions=>[\"comparison_id = ? 
AND measure_type = ?\",self.id,0])\n\t\treturn measures\n\tend", "def find_measure(measure_name)\n @measure_paths.each do |mp|\n measure_xml = File.join(mp, measure_name, 'measure.xml')\n measure_rb = File.join(mp, measure_name, 'measure.rb')\n if File.exist?(measure_xml) && File.exist?(measure_rb)\n measure_parsed = parse_measure_xml(measure_xml)\n return measure_parsed\n end\n end\n\n return nil\n end", "def get_metric start, stop, step\n\t\t@store.get_metric self, start, stop, step\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
write a function to search target in nums. If target exists, then return its index, otherwise return -1. Template 1 is the most basic and elementary form of Binary Search. It is the standard Binary Search Template that most high schools or universities use when they first teach students computer science. Template 1 is used to search for an element or condition which can be determined by accessing a single index in the array. Approach 1: Algorithm Initialise left and right pointers: left = 0, right = n - 1. While left <= right: Compare the middle element of the array nums[pivot] to the target value target. If the middle element is the target (target = nums[pivot]): return pivot. If the target is not yet found: If target < nums[pivot], continue the search on the left: right = pivot - 1. Else continue the search on the right: left = pivot + 1. Complexity Analysis Time complexity: O(log N). Space complexity: O(1) since it's a constant space solution.
def search(nums, target) left = 0 right = nums.length - 1 while left <= right pivot = left + (right - left) / 2 return pivot if nums[pivot] == target if target < nums[pivot] right = pivot - 1 else left = pivot + 1 end end -1 end
[ "def binary_search(arr, target, min_idx, max_idx)\n # Ensure valid arguments\n raise ArgumentError, \"The first argument must be an array.\" unless arr.is_a?(Array)\n raise ArgumentError, \"The array provided must contain all integer values.\" unless arr.all? {|el| el.is_a?(Integer)}\n raise ArgumentError, \"The target must be an integer value.\" unless target.is_a?(Integer)\n\n return nil if arr.size == 0\n return nil if target < arr[min_idx] || target > arr[max_idx]\n\n while min_idx <= max_idx do\n mid_idx = min_idx + (max_idx - min_idx)/2\n\n return mid_idx if arr[mid_idx] == target\n\n if target < arr[mid_idx]\n max_idx = mid_idx - 1\n else\n min_idx = mid_idx + 1\n end\n end\n\n nil\nend", "def first_pos(nums, target)\n start_ind = 0\n last_ind = nums.size #will return the size if not found such element\n\n while start_ind + 1 < last_ind do\n mid = start_ind + (last_ind - start_ind) / 2\n if nums[mid] < target\n start_ind = mid\n else\n last_ind = mid\n end\n end\n\n if nums[start_ind] >= target\n return start_ind\n end\n\n return last_ind\nend", "def sortAndSearch(arr, target)\n arr = arr.sort_by(&:to_i)\n\n startIndex = 0;\n endIndex = arr.size-1\n\n if (target < arr[startIndex] || target > arr[endIndex])\n return 'target is not in array'\n elsif (target === arr[startIndex])\n return startIndex\n elsif (target === arr[endIndex])\n return endIndex\n end\n\n while (startIndex < endIndex - 1)\n midIndex = (startIndex + endIndex) / 2\n\n if (arr[midIndex] === target)\n return midIndex\n elsif (target < arr[midIndex])\n endIndex = midIndex\n elsif (target > arr[midIndex])\n startIndex = midIndex\n end\n end\n\n return 'target is not in array'\nend", "def find_start nums, target, left, right\n if left + 1 >= right\n return left if nums[left] == target\n return right if nums[right] == target\n return -1\n end\n\n mid = left + (right - left) / 2\n\n if nums[mid] >= target\n right = mid\n else\n left = mid\n end\n\n find_start nums, target, left, right\nend", "def binary_search arr, target\n max_index = arr.length - 1\n min_index = 0\n\n while(max_index >= min_index)\n midpoint = (max_index + min_index)/2\n if arr[midpoint] > target\n max_index = midpoint - 1\n elsif arr[midpoint] < target\n min_index = midpoint + 1\n elsif arr[midpoint] == target\n return midpoint\n end\n end\n\n return nil\nend", "def binary_search(target, array)\r\n\t# index = 0\r\n\t# # mid = index\r\n\t# mid = array.length / 2\r\n\r\n\t# while array[mid] == target\r\n\t# \treturn mid\r\n\r\n\t# \tif target < array[mid]\r\n\t# \t\tmid2= (array[0..mid].length)/2\r\n\r\n\t# \t\tif array[mid2] == target\r\n\t# \t\t\treturn mid2\r\n\t# \t\telsif array[mid2] <= target\r\n\t\t\t\t\r\n\t# \t\t\treturn \r\n\t# \t\telsif target > mid\r\n\t# \t\tmid3 = (array[mid..-1].length)/2\r\n\t# \t\treturn mid3\r\n\t# \t\tend\r\n\t# \tend\r\n\t# index+=1\r\n\t# end\r\n\tstart_index = 0\r\n\tlast_index = array.length-1\r\n\twhile start_index < last_index\r\n\t\tmiddle = (last_index + start_index)/2\r\n\t\tif middle == target\r\n\t\t\treturn middle\r\n\t\telsif middle < target\r\n\t\t\tstart_index = middle + 1\r\n\t\telsif array[middle]> target\r\n\t\t\tlast_index = middle -1\r\n\t\tend\r\n\t\treturn -1\r\n\tend\r\nend", "def binary_search(arr, target)\n start = 0\n stop = arr.length - 1\n while start <= stop\n middle = (start + stop) / 2\n if arr[middle] == target\n return middle\n elsif arr[middle] > target\n stop = middle - 1\n else\n start = middle + 1\n end\n end\n -1\nend", "def binary_search(target, array)\r\n\t#Your code 
here\r\n\tindex = array.length / 2\r\n\tlo = 0\r\n\thi = array.length - 1\r\n\twhile array[index] != target && array.include?(target)\r\n\t\tif array[index] > target\r\n\t\t\thi = index - 1\r\n\t\t index = (lo + hi) / 2\r\n\t\telsif array[index] < target\r\n\t\t\tlo = index + 1\r\n\t\t\tindex = (lo + hi) / 2\r\n\t\tend\r\n\tend\r\n\tif array[index] == target\r\n\t\treturn index\r\n\telse\r\n\t\treturn -1\r\n\tend \r\nend", "def binary_search_rec arr, target, min_index = 0, max_index = arr.length - 1\n return nil if min_index > max_index\n\n midpoint = (max_index + min_index)/2\n\n if arr[midpoint] > target\n return binary_search_rec(arr, target, min_index, midpoint - 1)\n elsif arr[midpoint] < target\n return binary_search_rec(arr, target, midpoint + 1, max_index)\n elsif arr[midpoint] == target\n return midpoint\n end\nend", "def find_index(array, target, root = build_tree(array))\n return root.index if target == root.val\n if target < root.val\n if root.left\n find_index(array, target, root.left)\n else\n -1\n end\n else target > root.val\n if root.right\n find_index(array, target, root.right)\n else\n -1\n end\n end\nend", "def find_index nums, target\n idx = recurseHelper nums, target, 0, nums.length - 1\n return idx || - 1\nend", "def bsearch(arr, target)\n #base case\n return nil if arr != arr.sort\n\n if arr.length == 1\n return arr[0] == target ? 0 : nil\n end\n #inductive step\n mid = arr.length / 2\n result = mid\n\n if target == arr[mid]\n return result\n elsif target < arr[mid]\n bsearch(arr[0...mid], target)\n else #target > arr[mid]\n bsearch(arr[mid..-1], target)\n result += mid - 1\n end\n\n# class Array\n# def binary_search(target)\n# return nil if self - [target] == self\n#\n# m = length/2\n#\n# return m if self[m] == target\n#\n# case self[m] <=> target\n# when -1\n# self[m+1..-1].binary_search(target) + self[0..m].length\n# when 1\n# self[0...m].binary_search(target)\n# end\n# end\n# end\n\nend", "def binary_search(array, target)\n lower_bound = 0\n upper_bound = array.length - 1\n while lower_bound <= upper_bound\n midpoint = (lower_bound + upper_bound) / 2\n value_at_midpoint = array[midpoint]\n if target = value_at_midpoint\n return midpoint\n elsif target > value_at_midpoint\n lower_bound = midpoint + 1\n elsif target < value_at_midpoint\n upper_bound = midpoint - 1\n end\n end\n return nil\nend", "def bsearch(arr, target)\n return nil if !arr.include?(target) && arr.length <= 1\n\n mid_idx = arr.length / 2\n middle = arr[mid_idx]\n if target < middle\n return bsearch(arr[0...mid_idx], target) #either nil, or index on left side\n elsif target > middle\n index = bsearch(arr[(mid_idx+1)..-1], target) #either nil, or index on right side\n index == nil ? 
(return nil) : (return index + mid_idx + 1)\n else \n return mid_idx\n end\nend", "def search_insert(nums, target)\n if !nums.index(target).nil?\n nums.index(target)\n elsif nums.last > target\n new_target = nums.find do |num|\n num > target\n end\n nums.index(new_target)\n else\n nums.count\n end\nend", "def search_insert(nums, target)\n lo = 0\n hi = nums.length\n\n while lo < hi\n mid = (lo + hi) / 2\n\n if nums[mid] < target\n lo = mid + 1\n else\n hi = mid\n end\n end\n\n lo\nend", "def first_occurence(arr, target)\n idx = binary_search(arr, target)\n return nil unless idx \n \n while idx > 0 && arr[idx - 1] == target\n idx -= 1\n end\n\n idx\nend", "def bsearch(arr, target)\n return nil if arr.length < 1 \n\n mid_val = arr.length / 2\n\n if arr[mid_val] == target\n return mid_val\n elsif arr[mid_val] < target \n # mid_val + bsearch(arr[mid_val..-1], target)\n idx = bsearch(arr[mid_val+1..-1], target)\n if idx != nil\n mid_val + idx + 1\n else\n nil\n end\n else\n bsearch(arr[0...mid_val], target)\n end\nend", "def solution(nums, target)\n\tnums.each.with_index do |n, i|\n\t\tj = i + 1\n\t\twhile (j <= nums.size-1)\n\t\t\treturn [i,j] if target == (n + nums[j])\n\t\t\tj += 1\n\t\tend\n\tend\n\t-1\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a handler to the reactor. A handler is a callable taking a single argument, the message.
def add_handler(handler) @handlers << handler end
[ "def add_message_handler(&block)\n @message_handlers << block\n end", "def on_message &handler\n\t\t@message_handlers << handler\n\tend", "def register_handler(handler)\n @handler = handler\n self\n end", "def register_handler(new_handler)\n @@handlers << new_handler\n end", "def register_inbound_handler(handler)\n @inbound_handlers << handler\n end", "def add_handler(type, callable=nil, &b)\n callable = callable(callable, b)\n regex = regex_for(type)\n sync do\n id = @handler_serial+=1\n handlers << [id, regex, callable]\n id\n end\n end", "def register_inbound_handler(handler)\n\t\t@inbound_handlers << handler\n\tend", "def add(type, handler)\n if !handler.respond_to?(:call)\n raise ArgumentError.new(\"Lynr::Events handler must respond_to?(:call)\")\n end\n @semaphore.synchronize {\n subscribers = @backend.fetch(type, [])\n @backend.store(type, subscribers.dup << handler)\n }\n self\n end", "def on_message(&handler)\n @on_message_handler = handler\n end", "def add_handler(command, &block)\n # Make sure we have a handler hash\n if (!@handlers) then\n @handlers = Hash.new\n end\n if (!@handlers[command]) then\n @handlers[command] = Array.new\n end\n @handlers[command] << block\n end", "def push_handler(new_handler)\n new_handler = new_handler.new(self) if new_handler.is_a?(Class)\n @handler.push(new_handler)\n new_handler.resume\n nil\n end", "def push_handler\n handler.push_handler self\n end", "def register_handler(handler, run_setup = true)\n self.setup if run_setup\n self.subhandlers << handler unless handler.blank?\n end", "def register_handler(handler)\n handler.handled_events.each do |event_type|\n (event_handlers[event_type] ||= []) << handler\n Twitch::Bot::Logger.debug \"Registered #{handler} for #{event_type}\"\n end\n end", "def define_message_handler(message_name, handler)\n\t\t@message_handlers[message_name.to_s] = handler\n\tend", "def register_handler(klass, handler); end", "def add(handler,filter_opts={})\n if !filter_opts.empty?\n handler = Handlers::Filter.new(handler,filter_opts)\n end\n self.handlers << handler\n self\n end", "def __set_handler(handler)\n @__handler = handler\n end", "def set_message_handler(&block); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /recovery_passwords GET /recovery_passwords.json
def index @recovery_passwords = RecoveryPassword.all end
[ "def index\n @passwords = application.passwords.all\n end", "def index\n @passwords = current_user.passwords\n end", "def password\n respond_to do |format|\n format.html\n format.json { render json: { :password => get_password } }\n end\n end", "def forgot_password\n user = validate_user(params['forgot_username'])\n\n devices = get_mfa_devices(user.id)\n\n status = user ? :ok : :not_found\n\n render json: devices, status: status\n end", "def index\n @xmt_mgr_passwords = Xmt::Password.all\n end", "def show\n @password = password.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @password }\n end\n end", "def password_check(hash)\n get_resource('https://api.pwnedpasswords.com/range/'.concat(hash))\nend", "def create\n @password = application.passwords.new(password_params)\n\n respond_to do |format|\n if @password.save\n format.html { redirect_to [application, @password], notice: 'Password was successfully created.' }\n format.json { render action: 'show', status: :created, location: @password }\n else\n format.html { render action: 'new' }\n format.json { render json: @password.errors, status: :unprocessable_entity }\n end\n end\n end", "def retrieve_password_validation_rules()\n start.uri('/api/system-configuration/password-validation-rules')\n .get()\n .go()\n end", "def index\n @password_hashes = PasswordHash.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @password_hashes }\n end\n end", "def password_reset_hash\n db.get(\"User:reset_#{id}\")\n end", "def send_lost_password_request(data)\n post 'lostpassword', :login => data[:login], :email => data[:email]\n end", "def retrieve_password_validation_rules()\n startAnonymous.uri('/api/tenant/password-validation-rules')\n .get()\n .go()\n end", "def zone_passwords(zone_name)\n parameters = \"zone=#{zone_name}\"\n request(:get, \"/api/zone/passwords?#{parameters}\")\n end", "def show\n @password_hash = PasswordHash.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @password_hash }\n end\n end", "def index\r\n @password_recovery_tokens = PasswordRecoveryToken.all\r\n end", "def retrieve_two_factor_recovery_codes(user_id)\n start.uri('/api/user/two-factor/recovery-code')\n .url_segment(user_id)\n .get()\n .go()\n end", "def generate_recovery_codes\n code = Heroku::Auth.ask_for_second_factor\n\n recovery_codes = api.request(\n :expects => 200,\n :method => :post,\n :path => \"/account/two-factor/recovery-codes\",\n :headers => { \"Heroku-Two-Factor-Code\" => code }\n ).body\n\n display \"Recovery codes:\"\n recovery_codes.each { |c| display c }\n rescue RestClient::Unauthorized => e\n error Heroku::Command.extract_error(e.http_body)\n end", "def generate_new_password_for_guest(args = {}) \n post(\"/guestaccess.json/#{args[:guestId]}/newpassword\", args)\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /recovery_passwords POST /recovery_passwords.json
def create @recovery_password = RecoveryPassword.new(recovery_password_params) respond_to do |format| if @recovery_password.save format.html { redirect_to @recovery_password, notice: 'Recovery password ha sido creado.' } format.json { render :show, status: :created, location: @recovery_password } else format.html { render :new } format.json { render json: @recovery_password.errors, status: :unprocessable_entity } end end end
[ "def generate_new_password_for_guest(args = {}) \n post(\"/guestaccess.json/#{args[:guestId]}/newpassword\", args)\nend", "def create\n @password = application.passwords.new(password_params)\n\n respond_to do |format|\n if @password.save\n format.html { redirect_to [application, @password], notice: 'Password was successfully created.' }\n format.json { render action: 'show', status: :created, location: @password }\n else\n format.html { render action: 'new' }\n format.json { render json: @password.errors, status: :unprocessable_entity }\n end\n end\n end", "def send_lost_password_request(data)\n post 'lostpassword', :login => data[:login], :email => data[:email]\n end", "def create\n password_recovery.save\n render action: 'show'\n end", "def send_password_to_user(args = {}) \n put(\"/guestaccess.json/#{args[:guestId]}/sendpassword\", args)\nend", "def lost_password_recovery_success\n # Open the lost password form\n # Submit the lost password form\n # Check the e-mail\n # Submit the reset password token\n # Check that the password has changed, and the user can login\n end", "def set_password_lion\n @user['ShadowHashData'] = NSMutableArray.new\n @user['ShadowHashData'][0] = serialize_shadowhashdata(resource[:password_sha512], 'SALTED-SHA512')\n end", "def create\n @password = password.new(params[:password])\n\n respond_to do |format|\n if @password.save\n format.html { redirect_to @password, notice: 'password was successfully created.' }\n format.json { render json: @password, status: :created, location: @password }\n else\n format.html { render action: \"new\" }\n format.json { render json: @password.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\r\n @password_recovery_token = PasswordRecoveryToken.new(password_recovery_token_params)\r\n\r\n respond_to do |format|\r\n if @password_recovery_token.save\r\n format.html { redirect_to @password_recovery_token, notice: (t \"password_recovery.successfully_created\") }\r\n format.json { render action: 'show', status: :created, location: @password_recovery_token }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @password_recovery_token.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def resetpw\n\t\tuser = User.find_by(email: params[:email]) \n\t\tif !user.nil?\n\t\t\tif Time.now - user.validation_time < 60 * 60\n\t\t\t\tvalidation_code = params[:validation_code]\n\t\t \t\n\t \tif validation_code == user.validation_code\n\t\t \trtn = {\n\t\t \t\t\tstatus: \"201\"\n\t\t \t\t}\n\t\t\t\t\trender :json => rtn\n\t\t\t\t\tpsw_token = BCrypt::Engine.hash_secret(params[:new_password], user.password_salt)\n\t\t\t\t\tuser.update(password_digest: psw_token)\n\t\t\t\telse # validation code not correct\n\t\t\t\t\trtn = {\n\t \t\t\t\tstatus: \"401\"\n\t \t\t\t\t}\n\t\t\t\t\trender :json => rtn\n\t\t\t\tend\n\t\t\telse # validation code expired\n\t\t\t\trtn = {\n \t\t\t\tstatus: \"402\"\n \t\t\t}\n\t\t\t\trender :json => rtn\n\t\t\tend\n\t\telse # no such email found\n\t\t\trtn = {\n\t \t\t\tstatus: \"403\"\n\t \t\t}\n\t\t\trender :json => rtn\n\t\tend\n end", "def create\n @xmt_mgr_password = Xmt::Password.new(xmt_mgr_password_params)\n\n respond_to do |format|\n if @xmt_mgr_password.save\n format.html { redirect_to @xmt_mgr_password, notice: 'Password was successfully created.' 
}\n format.json { render :show, status: :created, location: @xmt_mgr_password }\n else\n format.html { render :new }\n format.json { render json: @xmt_mgr_password.errors, status: :unprocessable_entity }\n end\n end\n end", "def password_reset_request\n end", "def generate_recovery_codes\n code = Heroku::Auth.ask_for_second_factor\n\n recovery_codes = api.request(\n :expects => 200,\n :method => :post,\n :path => \"/account/two-factor/recovery-codes\",\n :headers => { \"Heroku-Two-Factor-Code\" => code }\n ).body\n\n display \"Recovery codes:\"\n recovery_codes.each { |c| display c }\n rescue RestClient::Unauthorized => e\n error Heroku::Command.extract_error(e.http_body)\n end", "def lost_password\n\n end", "def create\n @sulabh_password = SulabhPassword.new(sulabh_password_params)\n\n respond_to do |format|\n if @sulabh_password.save\n format.html { redirect_to @sulabh_password, notice: 'Sulabh password was successfully created.' }\n format.json { render :show, status: :created, location: @sulabh_password }\n else\n format.html { render :new }\n format.json { render json: @sulabh_password.errors, status: :unprocessable_entity }\n end\n end\n end", "def post(req)\n @errors = validate_change_password(@posted)\n render 'admin/account/password.erb' if has_errors?\n dealer = dealership(req).set({\n 'identity' => Lynr::Model::Identity.new(dealership(req).identity.email, posted['password'])\n })\n dealer_dao.save(dealer)\n redirect \"/admin/#{dealer.slug}/account?#{Helpers::PARAM}=success\"\n end", "def forgot_password\n user = validate_user(params['forgot_username'])\n\n devices = get_mfa_devices(user.id)\n\n status = user ? :ok : :not_found\n\n render json: devices, status: status\n end", "def request_new_password(name)\n values = {\n name: name\n }\n @client.make_request :post,\n auth_path('request_new_password'),\n values\n end", "def password\n respond_to do |format|\n format.html\n format.json { render json: { :password => get_password } }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /recovery_passwords/1 DELETE /recovery_passwords/1.json
def destroy @recovery_password.destroy respond_to do |format| format.html { redirect_to recovery_passwords_url, notice: 'Recovery password ha sido eliminado.' } format.json { head :no_content } end end
[ "def destroy\n @password.destroy\n respond_to do |format|\n format.html { redirect_to passwords_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @password = password.find(params[:id])\n \n @password.destroy\n\n respond_to do |format|\n format.html { redirect_to passwords_url }\n format.json { head :no_content }\n end\n end", "def delete_guest_access_portal(args = {}) \n delete(\"/guestaccess.json/gap/#{args[:portalId]}\", args)\nend", "def destroy\r\n @password_recovery_token.destroy\r\n respond_to do |format|\r\n format.html { redirect_to password_recovery_tokens_url }\r\n format.json { head :no_content }\r\n end\r\n end", "def destroy\n @xmt_mgr_password.destroy\n respond_to do |format|\n format.html { redirect_to xmt_mgr_passwords_url, notice: 'Password was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @sulabh_password.destroy\n respond_to do |format|\n format.html { redirect_to sulabh_passwords_url, notice: 'Sulabh password was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @secure_password.destroy\n respond_to do |format|\n format.html { redirect_to dashboard_index_path, notice: 'Secure password was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @password_setting.destroy\n respond_to do |format|\n format.html { redirect_to password_settings_url }\n format.json { head :no_content }\n end\n end", "def destroy\n app_id = @app_credential.app_id\n @app_credential.destroy\n respond_to do |format|\n format.html { redirect_to app_credentials_url(app_id: app_id), notice: 'App credential was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @password_hash = PasswordHash.find(params[:id])\n @password_hash.destroy\n\n respond_to do |format|\n format.html { redirect_to password_hashes_url }\n format.json { head :no_content }\n end\n end", "def delete!\n url = \"#{Rails.configuration.waste_exemplar_services_url}/registrations/#{uuid}.json\"\n Rails.logger.debug \"Registration: about to DELETE\"\n deleted = true\n begin\n response = RestClient.delete url\n\n # result = JSON.parse(response.body)\n self.uuid = nil\n save\n\n rescue => e\n Airbrake.notify(e)\n Rails.logger.error e.to_s\n deleted = false\n end\n deleted\n end", "def destroy\n @save_credential.destroy\n respond_to do |format|\n format.html { redirect_to save_credentials_url }\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def delete_mutant(m_id)\n\n request = API_URL + \"mutants/#{m_id}\"\n @response = RestClient.delete request\n log_mutant(\"deleted\")\n return\nend", "def destroy\n @password = Password.find(params[:id])\n @password.destroy\n\n respond_to do |format|\n format.html { redirect_to(passwords_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n delete_obj @keystore.url\n @keystore.destroy\n respond_to do |format|\n format.html { redirect_to keystores_url, notice: 'Keystore was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def cmd_delete argv\n setup argv\n uuid = @hash['uuid']\n response = @api.delete(uuid)\n msg response\n return response\n end", "def destroy\n @password_tier.destroy\n respond_to do |format|\n format.html { redirect_to password_tiers_url, notice: 'PasswordTier was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @secret.destroy\n respond_to do |format|\n format.html { redirect_to secrets_url, notice: 'El secreto se eliminó correctamente.' }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Constraints Chef libraries are evaluated before the recipe that places the chef_gem that it needs is put into place. This places two constraints on this library: 1) A 'require' must be done in a method. 2) This class cannot use 'Subclass < Superclass'. As Net::LDAP is a class, it cannot be included as a module.
def initialize require 'rubygems' require 'net-ldap' require 'cicphash' end
[ "def load_needed_dependencies!\n super\n if File.exist?(policyfile)\n debug(\"Policyfile found at #{policyfile}, using Policyfile to resolve dependencies\")\n Chef::Policyfile.load!(logger: logger)\n elsif File.exist?(berksfile)\n debug(\"Berksfile found at #{berksfile}, loading Berkshelf\")\n Chef::Berkshelf.load!(logger: logger)\n elsif File.exist?(cheffile)\n debug(\"Cheffile found at #{cheffile}, loading Librarian-Chef\")\n Chef::Librarian.load!(logger: logger)\n end\n end", "def setup_chef_class(run_context)\n Chef.set_run_context(run_context)\n end", "def required_if_used(*args)\n unless @required_gems\n [:included, :extended, :inherited].each do |method_name|\n define_method(method_name) do |klass|\n super if defined?(super)\n @required_gems.each { |gem| require gem.to_s }\n end\n end\n end\n @required_gems ||= []\n @required_gems |= args\n end", "def __load_codebase_initialize\n begin \n require 'initialize'\n rescue LoadError \n begin\n require 'service_manager'\n rescue LoadError \n raise Exception.new(\"Neither initialize or ServiceManager could be loaded for #{$service[:full_name]}\")\n end\n end\nend", "def require_chef_gem(name)\n require(name)\n rescue LoadError\n raise ChefGemLoadError.new(name)\n end", "def install_on klass\n klass.send :include, InitializeWrapper unless klass.include?(InitializeWrapper)\n end", "def load_needed_dependencies!\n if File.exist?(berksfile)\n debug(\"Berksfile found at #{berksfile}, loading Berkshelf\")\n Chef::Berkshelf.load!(logger)\n elsif File.exist?(cheffile)\n debug(\"Cheffile found at #{cheffile}, loading Librarian-Chef\")\n Chef::Librarian.load!(logger)\n end\n end", "def initialize(name, run_context = nil)\n super\n @provider = Chef::Provider::Package::Rubygems\n end", "def add_poolparty_base_requirements\n heartbeat\n haproxy\n ruby\n poolparty_base_packages\n realize_plugins!(true) # Force realizing of the plugins\n end", "def loadGems\n\tbegin\n \t\trequire \"eventmachine\" unless defined?(EventMachine)\n\trescue LoadError => detail\n\t\traise Puppet::Error, \"Could not load EventMachine gem: Please ensure the EventMachine gem is installed before using this provider.\"\n\tend\n\n\tbegin\n \t\trequire \"amqp\" unless defined?(AMQP)\n\trescue LoadError => detail\n\t\traise Puppet::Error, \"Could not load AMQP gem: Please ensure the AMQP gem is installed before using this provider.\"\n\tend\nend", "def add_poolparty_base_requirements\n # poolparty_base_heartbeat\n poolparty_base_ruby\n poolparty_base_packages \n end", "def hook_require!\n @hooking_require = true\n\n # There are two independent require methods. 
Joy!\n ::Kernel.module_eval do\n class << self\n orig_public_require = Kernel.public_method(:require)\n define_method(:require) do |path, *args|\n ::Bumbler::Hooks.handle_require(path) do\n orig_public_require.call(path, *args)\n end\n end\n end\n\n orig_instance_require = self.instance_method(:require)\n define_method(:require) do |path, *args|\n ::Bumbler::Hooks.handle_require(path) do\n orig_instance_require.bind(self).call(path, *args)\n end\n end\n end\n\n @hooking_require = nil\n end", "def init\n clone_appd_cookbook\n chef_gem \"install berkshelf\"\n end", "def require!\n super do\n gem @gem_name, @version if @version\n end\n end", "def cabar_comp_require name, version = nil\n path = File.expand_path(File.dirname(__FILE__) + \"../../../../../../comp/#{name}/#{version}/lib/ruby\")\n $:.insert(0, path) unless $:.include?(path)\n # $stderr.puts \"#{$:.inspect} #{path.inspect}\"\n require name\nend", "def add_poolparty_base_requirements \n heartbeat\n haproxy\n ruby\n poolparty\n end", "def setup_class_dsl(tool_class)\n return if tool_class.name.nil? || tool_class.is_a?(DSL::Tool)\n class << tool_class\n alias_method :super_include, :include\n end\n tool_class.extend(DSL::Tool)\n end", "def require_engine\n return if defined? ::RedCloth\n require_library 'redcloth'\n end", "def load_inspec_libs\r\n require 'inspec'\r\n if Inspec::VERSION != node['audit']['inspec_version'] && node['audit']['inspec_version'] !='latest'\r\n Chef::Log.warn \"Wrong version of inspec (#{Inspec::VERSION}), please \"\\\r\n 'remove old versions (/opt/chef/embedded/bin/gem uninstall inspec).'\r\n else\r\n Chef::Log.warn \"Using inspec version: (#{Inspec::VERSION})\"\r\n end\r\n require 'bundles/inspec-compliance/api'\r\n require 'bundles/inspec-compliance/http'\r\n require 'bundles/inspec-compliance/configuration'\r\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Bind This method should not be used directly. It is used to bind to the directory server. The databag_name is the name of the databag that is used for looking up connection credentials. It returns a connected ruby Net::LDAP object
def bind( host, port, credentials, databag_name, use_tls ) # :yields: host, port, credentials, databag_name, use_tls credentials = credentials.kind_of?(Hash) ? credentials.to_hash : credentials.to_s unless databag_name.kind_of?(String) or databag_name.kind_of?(Symbol) raise "Invalid databag_name: #{databag_name}" end if credentials.kind_of?(String) and credentials.length > 0 # Pull named credentials from the databag require 'chef/data_bag_item' require 'chef/encrypted_data_bag_item' secret = Chef::EncryptedDataBagItem.load_secret credentials = Chef::EncryptedDataBagItem.load( databag_name.to_s, credentials, secret ).to_hash end unless credentials.kind_of?(Hash) and credentials.key?('bind_dn') and credentials.key?('password') raise "Invalid credentials: #{credentials}" end args = { host: host, port: port, auth: { method: :simple, username: credentials['bind_dn'], password: credentials['password'] } } args[:encryption] = :simple_tls if use_tls @ldap = Net::LDAP.new args raise "Unable to bind: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message == 'Success' @ldap end
[ "def bind\n conn = Net::LDAP.new :host => @config[:server],\n :port => @config[:port],\n :base => @config[:base]\n if @config[:encryption]\n conn.encryption @config[:encryption]\n end\n \n raise \"Anonymous Bind is disabled\" if @config[:password] == \"\" && !(@config[:anonymous_bind])\n \n conn.auth \"#{@config[:username]}@#{@config[:domain]}\", @config[:password]\n \n begin\n Timeout::timeout(10){\n if conn.bind\n return conn\n else\n raise 'Query User Rejected'\n end\n }\n rescue Timeout::Error\n raise 'Unable to connect to LDAP Server'\n rescue Errno::ECONNRESET\n if @config[:allow_fallback]\n @config[:port] = @config[:allow_fallback]\n @config[:encryption] = false\n return Adauth::Connection.new(@config).bind\n end\n end\n end", "def connection(which_ldap)\n Net::LDAP.new(:host => host(which_ldap), :port => port(which_ldap), :encryption => (:simple_tls if ssl?(which_ldap)))\n end", "def bind( user_dn, password )\n\t\tuser_dn = user_dn.dn if user_dn.respond_to?( :dn )\n\n\t\tself.log.info \"Binding with connection %p as: %s\" % [ self.conn, user_dn ]\n\t\tself.conn.bind( user_dn.to_s, password )\n\t\t@bound_user = user_dn.to_s\n\tend", "def ldap\n Net::LDAP.new :host => SETTING[ENV['RACK_ENV']]['ldapserver'],\n :port => 389,\n :auth => {\n :method => :simple,\n :username => [SETTING[ENV['RACK_ENV']]['admin_cn'], SETTING[ENV['RACK_ENV']]['ldap_dn']].join(','),\n :password => SETTING[ENV['RACK_ENV']]['passwd']\n }\n end", "def connect\n @conn ||= Net::LDAP.new\n @conn.host = @host\n @conn.port = @port\n @conn.auth \"#{@login_dn},#{@base_dn}\", @password\n\n if @conn.bind\n return true\n else\n return false\n end\n end", "def ldap_connect\n ldap = Net::LDAP.new(host: host, port: Net::LDAP::DefaultPort)\n ldap\n end", "def open(host, user = nil, password = nil)\n @ldap = Net::LDAP.new\n @ldap.host = host\n @ldap.port = 389\n @ldap.auth(user, password)\n @ldap.bind\n end", "def adapter\n Net::LDAP\n end", "def new_net_ldap()\n params = {\n :host => host,\n :auth => authentication_information,\n :port => 636, \n :encryption => {:method =>:simple_tls}\n }\n @net_ldap = Net::LDAP.new(params)\n @net_ldap.bind || raise(BindFailedException)\n @net_ldap\n rescue Net::LDAP::LdapError => e\n raise(BindFailedException)\n end", "def connector(_host='localhost', _port=389, _rootdn='', _passdn='')\n begin \n if not $connection then\n output \"connecting to #{_host} on port : #{_port}\" if $verbose\n $connection = LDAP::Conn.new(_host,_port)\n $connection.set_option(LDAP::LDAP_OPT_PROTOCOL_VERSION, 3)\n end\n if _rootdn.empty? and not $bind then\n output 'Anonymous binding' if $verbose \n $connection = $connection.bind\n $bind = true\n elsif not _rootdn.empty? 
and not $authenticated then\n output 'Authenticated binding' if $verbose\n $connection.unbind if $connection.bound?\n $connection = $connection.bind(\"#{_rootdn}\", \"#{_passdn}\")\n $authenticated = true\n end\n return $connection\n rescue Exception\n raise LdapmapperConnectionError\n end\n end", "def bind_as(username, password, search_root = Chef::Config[:ldap_base_root],\n user_attribute = Chef::Config[:ldap_user_attribute],\n user_preprocess = Chef::Config[:ldap_user_preprocess])\n if bind\n search_root = LDAPConnection.call_if_proc(search_root, username)\n actual_username = LDAPConnection.call_if_proc(user_preprocess, username, username)\n search_filter = LDAPConnection.call_if_proc(user_attribute, actual_username, \"(#{user_attribute}=#{LDAPConnection.ldap_escape(actual_username)})\")\n Chef::WebUIUser::LDAPUser.load(@conn.bind_as(:base => search_root, :password => password, :filter => search_filter))\n else\n raise ArgumentError, \"Unable to bind to any LDAP server\" \n end\n end", "def new_net_ldap\n params = {\n :host => host,\n :auth => authentication_information,\n :port => 636,\n :encryption => { :method => :simple_tls }\n }\n @net_ldap = Net::LDAP.new(params)\n @net_ldap.bind || raise(BindFailedException)\n @net_ldap\n rescue Net::LDAP::Error => e\n raise(BindFailedException)\n end", "def ldap_connection\n key = \"__#{self}__ldap_connection\".to_sym\n Thread.current[key] = Net::LDAP.new(self.updates_to_ldap_options[:ldap_spec]) unless Thread.current[key]\n Thread.current[key]\n end", "def ldap\n\t\tif !@ldap\n\t\t\tif self.options.config && (uri = self.options.config.ldapuri)\n\t\t\t\t@ldap = Treequel.directory( uri )\n\t\t\telse\n\t\t\t\t@ldap = Treequel.directory_from_config\n\t\t\tend\n\n\t\t\tself.log.info \"Authentication will use: %s\" % [ @ldap.uri ]\n\t\tend\n\n\t\treturn @ldap\n\tend", "def bind_for_rails(bind_file = \"#{Rails.root}/config/ldap.yml\", environment = RAILS_ENV)\n bind(bind_file, environment)\n end", "def bind auth=@auth\n if @open_connection\n @result = @open_connection.bind auth\n else\n conn = Connection.new( :host => @host, :port => @port , :encryption => @encryption)\n @result = conn.bind @auth\n conn.close\n end\n\n @result == 0\n end", "def connect\n # Connect to LDAP\n @conn = LDAP::SSLConn.new( 'ldap.ucdavis.edu', 636 )\n @conn.set_option( LDAP::LDAP_OPT_PROTOCOL_VERSION, 3 )\n @conn.bind(dn = LDAP_SETTINGS['base_dn'], password = LDAP_SETTINGS['base_pw'] )\n end", "def client\n Net::LDAP.new({\n host: Settings.ldap.host,\n port: Settings.ldap.port,\n encryption: { method: :simple_tls },\n auth: {\n method: :simple,\n username: Settings.ldap.application_bind,\n password: Settings.ldap.application_password\n }\n })\n end", "def bind_datagram(bind_addr, auth)\n\t\t\tsock = case @options[:sb_type]\n\t\t\twhen :xmpp\n\t\t\t\tDatagram::XMPPSocket.new(bind_addr, auth)\n\t\t\telse\n\t\t\t\traise \"invalid side band type: #{@options[:sb_type].inspect}\"\n\t\t\tend\n\n\t\t\tsock.on_bind do \n\t\t\t\t@log.info \"bound datagram to addr: #{sock.addr.inspect}\"\n\t\t\tend\n\t\t\tsock.bind\n\n\t\t\treturn sock\n\t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Search This method is used to search the directory server. It accepts the connection resource object described above along with the basedn to be searched. Optionally it also accepts an LDAP filter and scope. The default filter is objectClass=* and the default scope is 'base'. It returns a list of entries.
def search( c, basedn, *constraints ) # :yields: connection_info, basedn, filter, scope self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap raise "Must specify base dn for search" unless basedn ( filter, scope, attributes ) = constraints filter = filter.nil? ? Net::LDAP::Filter.eq( 'objectClass', '*' ) : filter case scope when 'base' scope = Net::LDAP::SearchScope_BaseObject when 'one' scope = Net::LDAP::SearchScope_SingleLevel else scope = Net::LDAP::SearchScope_WholeSubtree end scope = scope.nil? ? Net::LDAP::SearchScope_BaseObject : scope attributes = attributes.nil? ? [ '*' ] : attributes entries = @ldap.search( base: basedn, filter: filter, scope: scope, attributes: attributes ) raise "Error while searching: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message =~ /(Success|No Such Object)/ return entries end
[ "def query_ldap(session_handle, base, scope, filter, fields)\n vprint_status(\"Searching LDAP directory\")\n search = wldap32.ldap_search_sA(session_handle, base, scope, filter, nil, 0, 4)\n vprint_status(\"search: #{search}\")\n\n if search['return'] == LDAP_SIZELIMIT_EXCEEDED\n print_error(\"LDAP_SIZELIMIT_EXCEEDED, parsing what we retrieved, try increasing the MAX_SEARCH value [0:LDAP_NO_LIMIT]\")\n elsif search['return'] != Error::SUCCESS\n print_error(\"No results\")\n wldap32.ldap_msgfree(search['res'])\n return\n end\n\n search_count = wldap32.ldap_count_entries(session_handle, search['res'])['return']\n\n if search_count == 0\n print_error(\"No entries retrieved\")\n wldap32.ldap_msgfree(search['res'])\n return\n end\n\n print_status(\"Entries retrieved: #{search_count}\")\n\n pEntries = []\n entry_results = []\n\n if datastore['MAX_SEARCH'] == 0\n max_search = search_count\n else\n max_search = [datastore['MAX_SEARCH'], search_count].min\n end\n\n 0.upto(max_search - 1) do |i|\n\n if(i==0)\n pEntries[0] = wldap32.ldap_first_entry(session_handle, search['res'])['return']\n end\n\n if(pEntries[i] == 0)\n print_error(\"Failed to get entry\")\n wldap32.ldap_msgfree(search['res'])\n return\n end\n\n vprint_status(\"Entry #{i}: 0x#{pEntries[i].to_s(16)}\")\n\n entry = get_entry(pEntries[i])\n\n # Entries are a linked list...\n if client.arch == ARCH_X64\n pEntries[i+1] = entry[4]\n else\n pEntries[i+1] = entry[3]\n end\n\n ber = get_ber(entry)\n\n field_results = []\n fields.each do |field|\n vprint_status(\"Field: #{field}\")\n\n values = get_values_from_ber(ber, field)\n\n values_result = \"\"\n values_result = values.join(',') if values\n vprint_status(\"Values #{values}\")\n\n field_results << {:type => 'unknown', :value => values_result}\n end\n\n entry_results << field_results\n end\n\n return {\n :fields => fields,\n :results => entry_results\n }\n end", "def search(dn = @base, *attributes)\n (filter, scope, attrs) = attributes\n filter = filter.nil? ? Net::LDAP::Filter.eq('objectClass', '*') : filter\n\n scope = case\n when scope == 'base'\n Net::LDAP::SearchScope_BaseObject\n when scope == 'one'\n Net::LDAP::SearchScope_SingleLevel\n else\n Net::LDAP::SearchScope_WholeSubtree\n end\n\n scope = scope.nil? ? Net::LDAP::SearchScope_BaseObject : scope\n attrs = attrs.nil? ? 
['*'] : attrs\n @ldap.search(base: dn, filter: filter, scope: scope, attributes: attrs)\n end", "def search( base, scope=:subtree, filter='(objectClass=*)', options={} )\n\t\tcollectclass = nil\n\n\t\t# If the base argument is an object whose class knows how to create instances of itself\n\t\t# from an LDAP::Entry, use it instead of Treequel::Branch to wrap results\n\t\tif options.key?( :results_class )\n\t\t\tcollectclass = options.delete( :results_class )\n\t\telse\n\t\t\tcollectclass = base.class.respond_to?( :new_from_entry ) ?\n\t\t\t\tbase.class :\n\t\t\t\tself.results_class\n\t\tend\n\n\t\t# Format the arguments in the way #search_ext2 expects them\n\t\tbase_dn, scope, filter, searchopts =\n\t\t\tself.normalize_search_parameters( base, scope, filter, options )\n\n\t\t# Unwrap the search options from the hash in the correct order\n\t\tself.log.debug do\n\t\t\tattrlist = SEARCH_PARAMETER_ORDER.inject([]) do |list, param|\n\t\t\t\tlist << \"%s: %p\" % [ param, searchopts[param] ]\n\t\t\tend\n\t\t\t\"searching with base: %p, scope: %p, filter: %p, %s\" %\n\t\t\t\t[ base_dn, scope, filter, attrlist.join(', ') ]\n\t\tend\n\t\tparameters = searchopts.values_at( *SEARCH_PARAMETER_ORDER )\n\n\t\t# Wrap each result in the class derived from the 'base' argument\n\t\tself.log.debug \"Searching via search_ext2 with arguments: %p\" % [[\n\t\t\tbase_dn, scope, filter, *parameters\n\t\t]]\n\n\t\tresults = []\n\t\tself.conn.search_ext2( base_dn, scope, filter, *parameters ).each do |entry|\n\t\t\tbranch = collectclass.new_from_entry( entry, self )\n\t\t\tbranch.include_operational_attrs = true if\n\t\t\t\tbase.respond_to?( :include_operational_attrs? ) &&\n\t\t\t\tbase.include_operational_attrs?\n\n\t\t\tif block_given?\n\t\t\t\tresults << yield( branch )\n\t\t\telse\n\t\t\t\tresults << branch\n\t\t\tend\n\t\tend\n\n\t\treturn results\n\trescue RuntimeError => err\n\t\tconn = self.conn\n\n\t\t# The LDAP library raises a plain RuntimeError with an incorrect message if the\n\t\t# connection goes away, so it's caught here to rewrap it\n\t\tcase err.message\n\t\twhen /no result returned by search/i\n\t\t\traise LDAP::ResultError.new( LDAP.err2string(conn.err) )\n\t\telse\n\t\t\traise\n\t\tend\n\tend", "def search(attr,query)\n # Get configuration ready.\n server = @config['server']\n port = @config['port']\n auth = { :method => :simple,\n :username => @config['username'],\n :password => @config['password']\n }\n base = @config['basedn']\n if (!@config['encryption'].nil?)\n encryption = @config['encryption'].to_sym\n end\n\n result = Net::LDAP::Entry.new()\n\n # Perform the search.\n Net::LDAP.open(:host => server, :port => port, :auth => auth,\n :encryption => encryption, :base => base) do |ldap|\n if (!ldap.bind())\n result = false\n else\n filter = Net::LDAP::Filter.eq(attr,query)\n result = ldap.search(:filter => filter)\n end\n end\n\n return result\n end", "def search args = {}\n search_filter = (args && args[:filter]) || Filter.eq( \"objectclass\", \"*\" )\n search_filter = Filter.construct(search_filter) if search_filter.is_a?(String)\n search_base = (args && args[:base]) || \"dc=example,dc=com\"\n search_attributes = ((args && args[:attributes]) || []).map {|attr| attr.to_s.to_ber}\n return_referrals = args && args[:return_referrals] == true\n\n attributes_only = (args and args[:attributes_only] == true)\n scope = args[:scope] || Net::LDAP::SearchScope_WholeSubtree\n raise LdapError.new( \"invalid search scope\" ) unless SearchScopes.include?(scope)\n\n # An interesting value for the size limit 
would be close to A/D's built-in\n # page limit of 1000 records, but openLDAP newer than version 2.2.0 chokes\n # on anything bigger than 126. You get a silent error that is easily visible\n # by running slapd in debug mode. Go figure.\n rfc2696_cookie = [126, \"\"]\n result_code = 0\n\n loop {\n # should collect this into a private helper to clarify the structure\n\n request = [\n search_base.to_ber,\n scope.to_ber_enumerated,\n 0.to_ber_enumerated,\n 0.to_ber,\n 0.to_ber,\n attributes_only.to_ber,\n search_filter.to_ber,\n search_attributes.to_ber_sequence\n ].to_ber_appsequence(3)\n \n controls = [\n [\n LdapControls::PagedResults.to_ber,\n false.to_ber, # criticality MUST be false to interoperate with normal LDAPs.\n rfc2696_cookie.map{|v| v.to_ber}.to_ber_sequence.to_s.to_ber\n ].to_ber_sequence\n ].to_ber_contextspecific(0)\n\n pkt = [next_msgid.to_ber, request, controls].to_ber_sequence\n @conn.write pkt\n\n result_code = 0\n controls = []\n\n while (be = @conn.read_ber(AsnSyntax)) && (pdu = LdapPdu.new( be ))\n case pdu.app_tag\n when 4 # search-data\n yield( pdu.search_entry ) if block_given?\n when 19 # search-referral\n if return_referrals\n if block_given?\n se = Net::LDAP::Entry.new\n se[:search_referrals] = (pdu.search_referrals || [])\n yield se\n end\n end\n #p pdu.referrals\n when 5 # search-result\n result_code = pdu.result_code\n controls = pdu.result_controls\n break\n else\n raise LdapError.new( \"invalid response-type in search: #{pdu.app_tag}\" )\n end\n end\n\n # When we get here, we have seen a type-5 response.\n # If there is no error AND there is an RFC-2696 cookie,\n # then query again for the next page of results.\n # If not, we're done.\n # Don't screw this up or we'll break every search we do.\n more_pages = false\n if result_code == 0 and controls\n controls.each do |c|\n if c.oid == LdapControls::PagedResults\n more_pages = false # just in case some bogus server sends us >1 of these.\n if c.value and c.value.length > 0\n cookie = c.value.read_ber[1]\n if cookie and cookie.length > 0\n rfc2696_cookie[1] = cookie\n more_pages = true\n end\n end\n end\n end\n end\n\n break unless more_pages\n } # loop\n\n result_code\n end", "def ldapsearch\n ldap_settings = {\n :port => 389,\n :auth => { :method => :anonymous }\n \n }\n if params[:host].blank? or params[:filter].blank? 
or params[:base].blank?\n render :status=>400, :json=>{:message=>\"Missing parameter\"}\n return\n end\n ldap_settings[:host] = params[:host].to_s\n ldap_settings[:port] = params[:port].to_s unless params[:port].blank?\n unless params[:username].blank?\n ldap_settings[:auth] = {\n :method => :simple,\n :username => params[:username].to_s,\n :password => params[:password].to_s\n }\n end\n ldap_settings[:encryption] = :simple_tls if params[:encryption].to_s == 'simple_tls'\n\n begin\n ldap = Net::LDAP.new ldap_settings\n query_filter = Net::LDAP::Filter.construct params[:filter].to_s\n basedn = params[:base].to_s\n\n results = []\n ldap.search(:base => basedn, :filter => query_filter) do |entry|\n results << entry\n# puts \"DN: #{entry.dn}\"\n entry.each do |attribute, values|\n# puts \" #{attribute}:\"\n values.each do |value|\n# puts \" --->#{value}\"\n end\n end\n end\n rescue Exception => e\n render :status=>400, :json => { message: e.message }\n return\n end\n\n# puts ldap.get_operation_result\n\n render :status=>200, :json => results\n end", "def ldap()\n\n\t\tldap = Net::LDAP.new :host => $k.get('server_ldap'),\n\t\t :port => $k.get('port_ldap'),\n\t\t :auth => {\n\t\t\t :method => :simple,\n\t\t\t :username => $k.get('user_ldap'),\n\t\t\t :password => $k.get('password_ldap')\n\t\t \t\t}\n\n\t\tfilter = Net::LDAP::Filter.eq( \"objectClass\", \"*\" )\n\t\ttreebase = \"subdata=services,uid=f4994c2a-783a-4962-bf3a-5003d7b8,ds=SUBSCRIBER,o=DEFAULT,DC=C-NTDB\"\n\n\t\tldap.search( :base => treebase, :filter => filter ) do |entry|\n\t\t puts \"\"\n\t\t logger.debug \"##{entry.dn}\"\n\t\t entry.each do |attribute, values|\n\t\t values.each do |value|\n\t\t puts \"#{attribute}: #{value}\"\n\t\t end\n\t\t end\n\t\tend\n\n\t\t# p ldap.get_operation_result\n\tend", "def search( scope=:subtree, filter='(objectClass=*)', parameters={}, &block )\n\t\treturn self.directory.search( self, scope, filter, parameters, &block )\n\tend", "def search(options, &block)\n options[:base] = @base_name\n options[:attributes] ||= []\n options[:paged_searches_supported] = true\n\n @ldap.search(options, &block)\n end", "def lookup(terms = {})\n # Format the CWID if that's what we're getting.\n if terms[:cwid]\n terms = format(terms.values.first)\n else\n terms = terms.values.first\n end\n\n # Make the request to the LDAP server\n req = self.connection.get do |r|\n r.url self.configuration.search_path + terms\n end\n\n # Return nil if there's a nil directory return\n if req.body['directory'].nil?\n return []\n end\n\n # Return nil if there aren't any results\n if req.body['directory'] && (req.body['directory'].empty? || req.body['directory'].nil?)\n return []\n\n # Otherwise, create a new Person out of it\n else\n results = []\n\n if req.body['directory']['person'].is_a?(Hash)\n results << Person.new(req.body['directory']['person'])\n else\n req.body['directory']['person'].each do |r|\n results << Person.new(r)\n end\n end\n\n return results\n end\n end", "def get_entry( c, dn ) # :yields: connection_info, distinguished_name\n \n self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap\n \n entry = @ldap.search( \n base: dn, \n filter: Net::LDAP::Filter.eq( 'objectClass', '*' ),\n scope: Net::LDAP::SearchScope_BaseObject,\n attributes: [ '*' ]\n )\n \n raise \"Error while searching: #{@ldap.get_operation_result.message}\" unless @ldap.get_operation_result.message =~ /(Success|No Such Object)/\n return entry ? 
entry.first : entry\n end", "def search(query = '', type: 'account', domain: nil, **options)\n options[:limit] ||= Zimbra::Directory::SEARCH_LIMIT\n DirectoryService.search(query, type.to_sym, domain, options)\n end", "def find(params = {})\n unless params.include?(:dn)\n LdapAdmin::Logger.send('Error in find. No dn parameter provided')\n return false\n end\n\n @ldap.search(:base => params[:dn])\n\n end", "def search(args = {})\n super(args).tap do |result|\n if result.is_a?(Array)\n result.map!{ |e| Cul::LDAP::Entry.new(e) }\n end\n end\n end", "def search( directory=nil )\n\t\tdirectory ||= self.model_class.directory\n\t\tbases = self.model_bases\n\t\tobjectclasses = self.model_objectclasses\n\n\t\traise Treequel::ModelError, \"%p has no search criteria defined\" % [ self ] if\n\t\t\tbases.empty? && objectclasses.empty?\n\n\t\tTreequel.log.debug \"Creating search for %p using model class %p\" %\n\t\t\t[ self, self.model_class ]\n\n\t\t# Start by making a Branchset or BranchCollection for the mixin's bases. If\n\t\t# the mixin doesn't have any bases, just use the base DN of the directory\n\t\t# to be searched\n\t\tbases = [directory.base_dn] if bases.empty?\n\t\tsearch = bases.\n\t\t\tmap {|base| self.model_class.new(directory, base).branchset }.\n\t\t\tinject {|branch1,branch2| branch1 + branch2 }\n\n\t\tTreequel.log.debug \"Search branch after applying bases is: %p\" % [ search ]\n\n\t\treturn self.model_objectclasses.inject( search ) do |branchset, oid|\n\t\t\tTreequel.log.debug \" adding filter for objectClass=%s to %p\" % [ oid, branchset ]\n\t\t\tbranchset.filter( :objectClass => oid )\n\t\tend\n\tend", "def search\r\n @instance.search @query, @enterprise_filter\r\n end", "def search(options)\n connection.search(options.merge(base: search_base))\n end", "def get_users(host, base='dc=griddynamics,dc=net', scope=LDAP::LDAP_SCOPE_SUBTREE, filter='(objectclass=person)')\n\n puts \"Getting users from #{host} with base #{base} with filter #{filter}\"\n\n attrs = ['uid', 'mail', 'sn', 'givenName' ,'cn', 'sshPublicKey']\n\n conn = LDAP::Conn.new(host)\n\n puts \"Connection received: #{conn.inspect}\"\n\n conn.set_option(LDAP::LDAP_OPT_PROTOCOL_VERSION, 3)\n\n puts conn.bind('','')\n\n conn.perror(\"bind\")\n\n begin\n users = Hash.new\n\n conn.search(base, scope, filter, attrs) { |entry|\n groups = []\n entry.dn.split(',').each { |dn|\n tmp = dn.split('=')\n if tmp[0] == 'ou'\n groups << tmp[1]\n end\n }\n\n if groups.include?('people') and groups.include?('griddynamics') and not groups.include?('deleted')\n users[entry.vals('uid')[0].dup] = {\n :email => entry.vals('mail')[0].dup,\n :name => entry.vals('givenName')[0].dup,\n :last_name => entry.vals('sn')[0].dup,\n :full_name => entry.vals('cn')[0].dup,\n :ssh_public_key => entry.vals('sshPublicKey').nil? ? nil : entry.vals('sshPublicKey')[0].dup,\n :groups => groups.dup\n }\n end\n }\n return users\n rescue LDAP::ResultError\n conn.perror(\"search\")\n exit\n end\n conn.perror(\"search\")\n conn.unbind\nend", "def search(options)\n search_results = []\n referral_entries = []\n\n search_results = connection.search(options) do |entry|\n if entry && entry[:search_referrals]\n referral_entries << entry\n end\n end\n\n unless referral_entries.empty?\n entry = referral_entries.first\n referral_string = entry[:search_referrals].first\n if GitHub::Ldap::URL.valid?(referral_string)\n referral = Referral.new(referral_string, admin_user, admin_password, port)\n search_results = referral.search(options)\n end\n end\n\n Array(search_results)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Get Entry This method accepts a connection resource object. It is intended to be used with Chef::Resource::LdapEntry objects that will also have a .dn method indicating the Distinguished Name to be retrieved. It returns a single entry.
def get_entry( c, dn ) # :yields: connection_info, distinguished_name self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap entry = @ldap.search( base: dn, filter: Net::LDAP::Filter.eq( 'objectClass', '*' ), scope: Net::LDAP::SearchScope_BaseObject, attributes: [ '*' ] ) raise "Error while searching: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message =~ /(Success|No Such Object)/ return entry ? entry.first : entry end
[ "def retrieve_entry(dn)\n entries = @ldap.search(base: dn, scope: Net::LDAP::SearchScope_BaseObject, return_result: true)\n if entries\n entries.first\n end\n end", "def entry(dn = @base)\n entry = @ldap.search(\n base: dn,\n filter: Net::LDAP::Filter.eq('objectClass', '*'),\n scope: Net::LDAP::SearchScope_BaseObject,\n attributes: ['*']\n )\n entry ? entry.first : entry\n end", "def get_entry( branch )\n\t\tself.log.debug \"Looking up entry for %p\" % [ branch.dn ]\n\t\treturn self.conn.search_ext2( branch.dn, SCOPE[:base], '(objectClass=*)' ).first\n\trescue LDAP::ResultError => err\n\t\tself.log.info \" search for %p failed: %s\" % [ branch.dn, err.message ]\n\t\treturn nil\n\tend", "def get_entry(entryid)\n call_api('feed/entry/%s' % URI.encode(entryid))['entries'].first\n end", "def get_entry(id)\n Entry.find(id)\n end", "def get_entry(entry_id)\n fetcher.get_acl_entry(service_id, id, entry_id)\n end", "def get_extended_entry( branch )\n\t\tself.log.debug \"Looking up entry (with operational attributes) for %p\" % [ branch.dn ]\n\t\treturn self.conn.search_ext2( branch.dn, SCOPE[:base], '(objectClass=*)', %w[* +] ).first\n\trescue LDAP::ResultError => err\n\t\tself.log.info \" search for %p failed: %s\" % [ branch.dn, err.message ]\n\t\treturn nil\n\tend", "def entry\n @connection.remote_entry.get\n end", "def entry\n @entry ||= store.get entry_id\n end", "def get_entry(entry)\n selected_entry = find_entry(entry)\n raise Errno::ENOENT, entry if selected_entry.nil?\n\n selected_entry\n end", "def get_entry\n id = params[:id]\n if /\\A\\d+\\z/.match(id)\n @entry = Entry.find(id)\n else\n @entry = Entry.find_by_slug(id)\n end\n raise ActiveRecord::RecordNotFound if @entry.nil?\n end", "def entry\n Entry.new\n end", "def find_entry(uid)\n # this is necessary because NetLDAP's \"search\" doesn't natively accept spaces in strings\n filter = Net::LDAP::Filter.eq('uid', uid)\n entry = @ldap.search( :base => $ldap_usersdn, :filter=> filter )\n entry.first if entry\n end", "def get_related (entry, type)\n related_entry_key = entry.related[type] unless entry.related.nil?\n self.entries[related_entry_key]\n end", "def find_entry(pack_id, entry_type, entry_id)\n case entry_type.downcase\n when 'contributable' \n return PackContributableEntry.find(:first, :conditions => [\"id = ? AND pack_id = ?\", entry_id, pack_id])\n when 'remote'\n return PackRemoteEntry.find(:first, :conditions => [\"id = ? AND pack_id = ?\", entry_id, pack_id])\n else\n return nil\n end\n end", "def get_acl_entry(opts = {})\n data, _status_code, _headers = get_acl_entry_with_http_info(opts)\n data\n end", "def query_entry(sql, *sqlargs)\n row = @db.get_first_row(sql, *sqlargs)\n return Entry.new.load_from_database_row(row)\n end", "def read_communication_entry(domain_id, communication_entry_id, opts = {})\n data, _status_code, _headers = read_communication_entry_with_http_info(domain_id, communication_entry_id, opts)\n return data\n end", "def get_dn(domain,dn,return_attrs = [])\n scope = Net::LDAP::SearchScope_BaseObject\n ldap = ldap_connect(domain)\n results = ldap.search(:base => dn, :scope => scope, :attributes => return_attrs)\n if (results.nil? || results.length == 0)\n return nil\n else\n results.each do |entry|\n return entry\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Add Entry This method accepts a connection resource object, a distinguished name, and the attributes for the entry to be added.
def add_entry( c, dn, attrs ) # :yields: connection_info, distinguished_name, attributes self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap # Ensure no duplicates by casting as a case insensitive, case preserving hash attrs = CICPHash.new.merge(attrs) # Ensure relativedn is included in the attribute list relativedn = dn.split(/,(?!([\w -]+=[\w -]+,?){1,}\")/).first attrs.merge!(Hash[*relativedn.split('=', 2).flatten]) @ldap.add dn: dn, attributes: attrs raise "Unable to add record: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message == 'Success' end
[ "def add_subscription_entry(name, entry)\n\t\tend", "def add(entry)\n @entries[entry.tag] = entry\n end", "def add(dn, attributes)\n attributes = normalize_attributes(attributes)\n log_dispatch(:add, dn, attributes)\n adapter.add(dn, attributes)\n end", "def add_entry\n File.open(self[:conf_file], 'a') do |file|\n file.puts comment\n file.puts serialized_entry\n end\n end", "def add_entry(entry)\n @entries << entry\n end", "def add(account) \n entry = Keybox::HostAccountEntry.new(account, account)\n\n if @options.use_password_hash_for_url then\n begin \n account_uri = URI.parse(account) \n if not account_uri.scheme.nil? then\n entry = Keybox::URLAccountEntry.new(account,account)\n end\n rescue ::URI::InvalidURIError\n # just ignore it, we're fine with the Host\n # Account Entry\n end\n\n end\n new_entry = gather_info(entry)\n hsay \"Adding #{new_entry.title} to database.\", :information\n @db << new_entry\n end", "def addEntry(iEntry)\n @Entries << iEntry\n end", "def add(dn = @base, attributes)\n relativedn = dn.split(',').first\n attributes.merge!(Hash[*relativedn.split('=').flatten])\n @ldap.add dn: dn, attributes: attributes\n end", "def add(resource, entry_name = nil)\n if resource.instance_of?(ROSRS::Resource)\n contents << ROSRS::FolderEntry.create(self, entry_name, resource.uri)\n else\n contents << ROSRS::FolderEntry.create(self, entry_name, resource)\n end\n\n end", "def add_connection(identifier:, connection:)\n puts \"adding connection: \"\n puts identifier\n connections[identifier] = [] if connections[identifier].nil?\n connections[identifier] << connection\n connection_map[connection.object_id] = identifier\n end", "def add entry\n existing_entry = @entries[entry.msgid]\n if existing_entry\n entry = existing_entry.merge(entry)\n end\n @entries[entry.msgid] = entry\n end", "def create_entry options = {}\n client.create_network_acl_entry(entry_options(options))\n nil\n end", "def add_connection(resource, connection)\n @data['powerConnections'] << {\n 'connectionUri' => resource['uri'],\n 'deviceConnection' => connection,\n 'sourceConnection' => connection\n }\n end", "def add_resource(*resource)\n add_resource(*resource[0..-2]) if resource.length > 1\n resource = resource.pop\n raise ArgumentError, \"Can only add objects that respond to :ref, not instances of #{resource.class}\" unless resource.respond_to?(:ref)\n fail_on_duplicate_type_and_title(resource)\n title_key = title_key_for_ref(resource.ref)\n\n @transient_resources << resource if applying?\n @resource_table[title_key] = resource\n\n # If the name and title differ, set up an alias\n\n if resource.respond_to?(:name) and resource.respond_to?(:title) and resource.respond_to?(:isomorphic?) 
and resource.name != resource.title\n self.alias(resource, resource.uniqueness_key) if resource.isomorphic?\n end\n\n resource.catalog = self if resource.respond_to?(:catalog=)\n add_vertex(resource)\n @relationship_graph.add_vertex(resource) if @relationship_graph\n end", "def add_object(_dn, _record, _host='localhost',_port=389, _rootdn='', _passdn='')\n _record.delete('dn') \n _data = _record\n _data.each{|_key,_value|\n _data[_key] = _value.to_a \n }\n begin\n connector(_host,_port,_rootdn,_passdn).add(\"#{_dn}\", _data)\n return true\n rescue LDAP::ResultError\n raise LdapmapperAddRecordError \n return false\n end\n end", "def add(entry)\n _check_open!\n ::Dnet.route_add(@handle, entry)\n end", "def add(name, resource, attributes = {})\n resources[name] = resource\n resource.update_attributes(attributes) if attributes.any?\n resource\n end", "def addentry(entryname, entry)\n if entry.class.eql?(Directory)\n entry.setparent(self) \n else #file\n entry.setdir(self)\n end\n @contents[entryname] = entry\n end", "def add(resource)\n get_resource(resource.type).add(resource.value)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Modify Entry Accepts a connection resource object as the first argument, followed by an Array of ldap operations. It is intended to be used with Chef::Resource::LdapEntry objects that will also have a .dn method that returns the DN of the entry to be modified. Each ldap operation in the ldap operations list is an Array object with the following items: 1. LDAP operation ( e.g. :add, :delete, :replace ) 2. Attribute name ( String or Symbol ) 3. Attribute Values ( String or Symbol, or Array of Strings or Symbols ) So an example of an operations list to be passed to this method might look like this: [ [ :add, 'attr1', 'value1' ], [ :replace, :attr2, [ :attr2a, 'attr2b', :attr2c ] ], [ :delete, 'attr3' ], [ :delete, :attr4, 'value4' ] ] Note that none of the values passed can be Integers. They must be STRINGS ONLY! This is a limitation of the ruby netldap library.
def modify_entry( c, dn, ops ) # :yields: connection_info, distinguished_name, operations entry = self.get_entry( c, dn ) @ldap.modify dn: dn, operations: ops raise "Unable to modify record: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message =~ /(Success|Attribute or Value Exists)/ end
[ "def update_entry(entry, ldap_attrs, user_attrs, ldap_key, user_key)\n if user_attrs.has_key?(user_key)\n if ldap_attrs.has_key?(ldap_key)\n if user_attrs[user_key] != ldap_attrs[ldap_key].first\n entry << LDAP.mod(LDAP::LDAP_MOD_REPLACE, ldap_key, user_attrs[user_key].is_a?(String) ? [ user_attrs[user_key] ] : user_attrs[user_key] )\n end\n else\n entry << LDAP.mod(LDAP::LDAP_MOD_ADD, ldap_key, user_attrs[user_key].is_a?(String) ? [ user_attrs[user_key] ] : user_attrs[user_key] )\n end\n else\n if ldap_attrs.has_key?(ldap_key)\n entry << LDAP.mod(LDAP::LDAP_MOD_DELETE, ldap_key, [ ])\n end\n end\n end", "def modify args\n modify_dn = args[:dn] or raise \"Unable to modify empty DN\"\n modify_ops = []\n a = args[:operations] and a.each {|op, attr, values|\n # TODO, fix the following line, which gives a bogus error\n # if the opcode is invalid.\n op_1 = {:add => 0, :delete => 1, :replace => 2} [op.to_sym].to_ber_enumerated\n modify_ops << [op_1, [attr.to_s.to_ber, values.to_a.map {|v| v.to_ber}.to_ber_set].to_ber_sequence].to_ber_sequence\n }\n\n request = [modify_dn.to_ber, modify_ops.to_ber_sequence].to_ber_appsequence(6)\n pkt = [next_msgid.to_ber, request].to_ber_sequence\n @conn.write pkt\n\n (be = @conn.read_ber(AsnSyntax)) && (pdu = LdapPdu.new( be )) && (pdu.app_tag == 7) or raise LdapError.new( \"response missing or invalid\" )\n pdu.result_code\n end", "def mod_object(_dn, _mod, _host='localhost',_port=389, _rootdn='', _passdn='')\n# begin\n _mod.delete('dn')\n _data = _mod\n _data.each{|_key,_value|\n _data[_key] = _value.to_a\n }\n connector(_host,_port,_rootdn,_passdn).modify(\"#{_dn}\", _data)\n return true\n# rescue LDAP::ResultError\n# raise LdapmapperModRecordError\n# return false\n# end\n end", "def modify( branch, mods )\n\t\tif mods.first.respond_to?( :mod_op )\n\t\t\tself.log.debug \"Modifying %s with LDAP mod objects: %p\" % [ branch.dn, mods ]\n\t\t\tself.conn.modify( branch.dn, mods )\n\t\telse\n\t\t\tnormattrs = normalize_attributes( mods )\n\t\t\tself.log.debug \"Modifying %s with: %p\" % [ branch.dn, normattrs ]\n\t\t\tself.conn.modify( branch.dn, normattrs )\n\t\tend\n\tend", "def add_entry( c, dn, attrs ) # :yields: connection_info, distinguished_name, attributes\n \n self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap\n \n # Ensure no duplicates by casting as a case insensitive, case preserving hash\n attrs = CICPHash.new.merge(attrs)\n # Ensure relativedn is included in the attribute list\n relativedn = dn.split(/,(?!([\\w -]+=[\\w -]+,?){1,}\\\")/).first\n attrs.merge!(Hash[*relativedn.split('=', 2).flatten])\n @ldap.add dn: dn, attributes: attrs\n raise \"Unable to add record: #{@ldap.get_operation_result.message}\" unless @ldap.get_operation_result.message == 'Success'\n end", "def diff_with_entry( attribute, values )\n\t\tmods = []\n\t\tattribute = attribute.to_s\n\t\tentry = self.entry || {}\n\t\tentry_values = entry.key?( attribute ) ? entry[attribute] : []\n\n\t\t# Workaround for the fact that Time has a #to_ary, causing it to become an\n\t\t# Array of integers when cast via Array().\n\t\tvalues = [ values ] if values.is_a?( Time )\n\n\t\tvalues = Array( values ).compact.\n\t\t\tcollect {|val| self.get_converted_attribute(attribute, val) }\n\t\tself.log.debug \" comparing %s values to entry: %p vs. 
%p\" %\n\t\t\t[ attribute, values, entry_values ]\n\n\t\t# If the attributes on the server are the same as the local ones,\n\t\t# it's a NOOP.\n\t\tif values.sort == entry_values.sort\n\t\t\tself.log.debug \" no change.\"\n\t\t\treturn nil\n\n\t\t# If the directory doesn't have this attribute, but the local\n\t\t# object does, it's an ADD\n\t\telsif entry_values.empty?\n\t\t\tself.log.debug \" ADD %s: %p\" % [ attribute, values ]\n\t\t\treturn LDAP::Mod.new( LDAP::LDAP_MOD_ADD, attribute, values )\n\n\t\t# ...or if the local value doesn't have anything for this attribute\n\t\t# but the directory does, it's a DEL\n\t\telsif values.empty?\n\t\t\tself.log.debug \" DELETE %s\" % [ attribute ]\n\t\t\treturn LDAP::Mod.new( LDAP::LDAP_MOD_DELETE, attribute )\n\n\t\t# ...otherwise it's a REPLACE\n\t\telse\n\t\t\tself.log.debug \" REPLACE %s: %p with %p\" %\n\t\t\t\t[ attribute, entry_values, values ]\n\t\t\treturn LDAP::Mod.new( LDAP::LDAP_MOD_REPLACE, attribute, values )\n\t\tend\n\n\tend", "def modify_object(config_id, attr_key_val_list, _bin_directory = new_resource.bin_dir)\n cmd = \"AdminConfig.modify('#{config_id}', #{attr_key_val_list})\"\n Chef::Log.debug(\"Modifying #{config_id}' to #{attr_key_val_list}\")\n wsadmin_exec(\"modify config_id: #{config_id}\", cmd)\n save_config\n end", "def update_with_entry(entry)\n self.meta_data.smart_update_attributes entry.instance_values.symbolize_keys\n self.smart_update_attributes entry.instance_values.symbolize_keys\n end", "def cmd_modify argv\n setup argv\n json = @hash['json']\n e = @hash['element']\n response = @api.modify(json, e)\n msg response\n return response\n end", "def update_entry(entry)\n fetcher.update_acl_entry(entry)\n end", "def domain_update(args)\n if args.key?(:chg) && args[:chg].key?(:registrant)\n raise ArgumentError, 'You need to do a trade or recover operation to change the registrant'\n end\n has_contacts = args.key?(:add) && args[:add].key?(:contacts) || args.key?(:add) && args[:add].key?(:contacts)\n has_ns = args.key?(:add) && args[:add].key?(:ns) || args.key?(:add) && args[:add].key?(:ns)\n has_other = args.key?(:add) && args[:add].key?(:status) || args.key?(:add) && args[:add].key?(:status) || args.key?(:chg) && args[:chg].key?(:authInfo)\n if [has_contacts, has_ns, has_other].count { |v| v } > 1\n raise ArgumentError, \"You can't update all that at one time\"\n end\n [:add, :rem].each do |ar|\n if args.key?(ar) && args[ar].key?(:ns) && args[ar][:ns].first.is_a?(String)\n args[ar][:ns] = args[ar][:ns].map { |ns| { :hostName => ns } }\n end\n end\n super\n end", "def send( conn )\n if @change_type == :MODRDN\n # TODO: How do we deal with 'newsuperior'?\n # The LDAP API's ldap_modrdn2_s() function doesn't seem to use it.\n return conn.modrdn( @dn, @attrs['newrdn'], @attrs['deleteoldrdn'] )\n end\n\n # Mask out the LDAP_MOD_BVALUES bit, as it's irrelevant here.\n case @change_type & ~LDAP_MOD_BVALUES\n when LDAP_MOD_ADD\n @controls == [] ? conn.add( @dn, @attrs ) :\n conn.add_ext( @dn, @attrs, @controls, [] )\n when LDAP_MOD_DELETE\n @controls == [] ? conn.delete( @dn ) :\n conn.delete_ext( @dn, @controls, [] )\n when LDAP_MOD_REPLACE\n @controls == [] ? 
conn.modify( @dn, @mods ) :\n conn.modify_ext( @dn, @mods, @controls, [] )\n end\n\n self\n end", "def ldap_mod_replace( attribute, *values )\n\t\treturn LDAP::Mod.new( LDAP::LDAP_MOD_REPLACE, attribute.to_s, values.flatten )\n\tend", "def update\n @ldap_entry = LdapEntry.find(params[:id])\n\n respond_to do |format|\n if @ldap_entry.update_attributes(ldap_entry_params)\n format.html { redirect_to ldap_entries_path, notice: 'Ldap entry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ldap_entry.errors, status: :unprocessable_entity }\n end\n end\n end", "def delete_entry( c, dn ) # :yields: connection_info, distinguished_name\n \n self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap\n @ldap.delete dn: dn\n raise \"Unable to remove record: #{@ldap.get_operation_result.message}\" unless @ldap.get_operation_result.message =~ /(Success|No Such Object)/\n end", "def ldappassword\n\n$HOST = ''\n$PORT = LDAP::LDAP_PORT\n$SSLPORT = LDAP::LDAPS_PORT\nbase = 'dc=, dc='\nldapadmin = 'cn=, dc=, dc='\nldapadminpass = ''\nscope = LDAP::LDAP_SCOPE_SUBTREE\nattrs = ['sn', 'cn']\n\n#hash the password for ldap change\ne_password = \"{SHA}\" + Base64.encode64(Digest::SHA1.digest(@newpasswd)).chomp\n\nconn = LDAP::Conn.new($HOST, $PORT)\nreset = [\n LDAP.mod(LDAP::LDAP_MOD_REPLACE, \"userPassword\", [e_password]),\n]\n\n conn.bind(ldapadmin,ldapadminpass)\n begin\n conn.search(base, scope, \"uid=#{@authex.username}\", attrs) { |entry|\n $USERDN = entry.dn\n }\n rescue LDAP::ResultError\n conn.perror(\"search\")\n exit\n end\n\n begin\n conn.modify(\"#{$USERDN}\", reset)\n puts $USERDN\n rescue LDAP::ResultError => msg\n puts \"Can't change password: \" + msg\n exit 0\n rescue LDAP::Error => errcode\n puts \"Can't change password: \" + LDAP.err2string(errcode)\n exit 0\n end\n\n\n\nend", "def patch_communication_entry(domain_id, communication_map_id, communication_entry_id, communication_entry, opts = {})\n patch_communication_entry_with_http_info(domain_id, communication_map_id, communication_entry_id, communication_entry, opts)\n nil\n end", "def ldap_mod_add( attribute, *values )\n\t\treturn LDAP::Mod.new( LDAP::LDAP_MOD_ADD, attribute.to_s, values.flatten )\n\tend", "def change_collection_permissions(id_list_path)\r\nwork_ids = IO.readlines(id_list_path)\r\n\twork_ids.each do |i|\r\n\ti = i.strip #trailing white space, line ends etc will cause a faulty uri error\t\r\n\t\tc = Object::Collection.find(i) \r\n\t\tputs \"got collection for \" + i\r\n\t\tc.permissions = [Hydra::AccessControls::Permission.new({:name=> \"york\", :type=>\"group\", :access=>\"read\"}), Hydra::AccessControls::Permission.new({:name=>\"admin\", :type=> \"group\", :access => \"edit\"})]\r\n\t\tc.save!\t\t\r\n\tend\r\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
== Delete Entry Expects a connection resource object, along with a .dn method that returns the Distinguished Name of the entry to be deleted.
def delete_entry( c, dn ) # :yields: connection_info, distinguished_name self.bind( c.host, c.port, c.credentials, c.databag_name, c.use_tls ) unless @ldap @ldap.delete dn: dn raise "Unable to remove record: #{@ldap.get_operation_result.message}" unless @ldap.get_operation_result.message =~ /(Success|No Such Object)/ end
[ "def delete(dn)\n @conn.delete :dn => dn\n end", "def delete_entry(entry)\n fetcher.delete_acl_entry(entry)\n end", "def delete_entry(entry)\n @address_book.entries.delete(entry)\n puts \"#{entry.name} has been deleted\"\n end", "def delete_entry(entry)\n address_book.entries.delete(entry)\n puts \"#{entry.name} has been deleted\"\n end", "def destroy\n begin\n self.class.delete(dn)\n @new_entry = true\n rescue Error\n raise DeleteError.new(_(\"Failed to delete LDAP entry: %s\") % dn)\n end\n end", "def delete_entry(entryid)\n require_api_login\n call_api('entry/delete', nil, {\n 'entry' => entryid,\n })\n end", "def delete(entry)\n entries.delete(entry)\n end", "def delete(entry)\n raise(ArgumentError, 'Must be an Entry') unless entry.is_a?(PoParser::Entry)\n\n @entries.delete(entry)\n end", "def destroy\n @dnis_entry = DnisEntry.find(params[:id])\n @dnis_entry.destroy\n\n respond_to do |format|\n format.html { redirect_to dnis_entries_url }\n format.json { head :no_content }\n end\n end", "def delete_entry(edit_uri)\n delete_resource(edit_uri)\n end", "def delete_object(_dn, _host='localhost',_port=389, _rootdn='', _passdn='')\n begin\n connector(_host,_port,_rootdn,_passdn).delete(\"#{_dn}\")\n return true\n rescue LDAP::ResultError\n raise LdapmapperDeleteRecordError\n return false\n end\n end", "def delete(entry)\n _check_open!\n ::Dnet.arp_delete(@handle, entry)\n end", "def destroy\n dns_entry_response = RestClient.delete('https://api.cloudflare.com/client/v4/zones/:zone_identifier/dns_records/:identifier',:content_type => :json, :accept => :json, :'x-auth-key' => session[:key] :'x-auth-email' => session[:email])\n @dns_entry.destroy\n respond_to do |format|\n format.html { redirect_to dns_entries_url, notice: \"Dns entry was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def delete_entry(cache_id:, request:)\n {\n method: \"CacheStorage.deleteEntry\",\n params: { cacheId: cache_id, request: request }.compact\n }\n end", "def remove\n domain = args.shift.downcase rescue nil\n fail(\"Usage: heroku dns:add DOMAIN\") unless domain\n result = resource(\"/dns/#{domain}\").delete\n display \"Deleting #{domain}\"\n end", "def delete_entry\n\t\t#delete product from cards\n\t\t\tstatement=@connection.prepare(\"delete from cards where card_no=?\")\n\t\t\tstatement.execute(@card_no)\n\t\t#delete product from inline_products\n\t\t\tstatement1=@connection.prepare(\"delete from inline_products where card_no=?\")\n\t\t\tstatement1.execute(@card_no)\n\tend", "def delete_domain!(name)\n sdb_query({:Action => 'DeleteDomain', 'DomainName' => name})\n end", "def destroy\n @account_entry = AccountEntry.find(params[:id])\n @account_entry.destroy\n\n respond_to do |format|\n format.html { redirect_to(account_entries_url) }\n format.xml { head :ok }\n end\n end", "def entry_deleted\n if Entry.exists? feed_id: self.feed_id, guid: self.guid\n Rails.logger.warn \"Failed attempt to mark as deleted existing entry - guid: #{self.try :guid}, published: #{self.try :published}, feed_id: #{self.feed_id}, feed title: #{self.feed.title}\"\n errors.add :guid, 'entry not deleted'\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Just an idea: instead of localStorage, save the current project in Redis as a hash. Not implemented.
def cache(user)
  "#{user.id}_project"
end
[ "def cache_key\n super + I18n.locale.to_s\n end", "def cache_key_for(path, options)\n \"#{path}:#{I18n.locale}:#{options[:bundle] ? '1' : '0'}\"\n end", "def current_locale\n Thread.current[:\"localite:locale\"] || base\n end", "def translations_hash; end", "def project_key; frozen_value_or_default(:project_key, project.blank? ? nil : project.key); end", "def cached_translations(locale, key)\n return unless self.translations and self.translations[locale]\n self.translations[locale][key]\n end", "def contents_locales_hash\n Cmsino::Content.where(umbrella: self.name).inject({}){|res,c| res[c.locale] = c; res}\n end", "def store_translations(locale, data, options = {})\n locale = locale.to_sym\n translations[locale] ||= {}\n data = data.deep_symbolize_keys\n translations[locale].deep_merge!(data)\n end", "def locale = @lock.with_read_lock { @context.locale(true).code }", "def set_current_project\n\n # Grab the project name\n @current_project = Project.current = Project.where(:name => request.cookies['project']).first\n\n # Create a project cookie if none exists\n unless @current_project\n @current_project = Project.current = Project.create(:name => \"default\")\n cookies.permanent[:project] = Project.current.name\n end\n end", "def git_cache\n self[KEY]\n end", "def current_locale_code\n Utility.locale_code\n end", "def i18n_key; end", "def store_translations(locale, data)\n flatten_data(locale => data).each do |key, value|\n self[key] = value\n end\n end", "def build_translations_hash(locale=I18n.default_locale)\n loc_model = Locale.find_by(name: locale) # The model object for the given locale\n \n TranslationKey.all.map do |tkey|\n [\n tkey.name, \n tkey.translation(loc_model).try(:value)\n ]\n end.to_h\n end", "def local_cache(name = {}, options = {}, &block)\n if (name.is_a? String) || (name.is_a? Array)\n name = Array(name)\n name << I18n.locale\n end\n cache name, options, &block\n end", "def load_cache(locale)\n\n\t\t\t\t\t\t# Initialize cache structures\n\t\t\t\t\t\t@o2t = {} if @o2t.nil?\n\t\t\t\t\t\t@t2o = {} if @t2o.nil?\n\t\t\t\t\t\t\n\t\t\t\t\t\t# Fill cache if empty\n\t\t\t\t\t\tif @o2t[locale.to_sym].nil? || @t2o[locale.to_sym].nil?\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t# Preset\n\t\t\t\t\t\t\t@o2t[locale.to_sym] = {}\n\t\t\t\t\t\t\t@t2o[locale.to_sym] = {}\n\n\t\t\t\t\t\t\t# Static data from config\n\t\t\t\t\t\t\tif RicUrl.static_slugs\n\t\t\t\t\t\t\t\tRicUrl.static_slugs.each do |item|\n\t\t\t\t\t\t\t\t\tif item[:locale].to_s == locale.to_s\n\t\t\t\t\t\t\t\t\t\ttranslation_as_key = item[:translation]\n\t\t\t\t\t\t\t\t\t\ttranslation_as_key = translation_as_key.downcase if RicUrl.downcase_translations == true\n\t\t\t\t\t\t\t\t\t\tif RicUrl.use_filter\n\t\t\t\t\t\t\t\t\t\t\tif RicUrl.current_app_filter.to_s == item[:filter] # Slug belongs to current application\n\t\t\t\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item[:original]] = item[:translation]\n\t\t\t\t\t\t\t\t\t\t\t\t@t2o[locale.to_sym][translation_as_key] = item[:original]\n\t\t\t\t\t\t\t\t\t\t\telsif !item[:filter].blank? 
# Slug belongs to other application\n\t\t\t\t\t\t\t\t\t\t\t\turl = RicUrl.available_filter_urls[item[:filter].to_sym]\n\t\t\t\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item[:original]] = url.trim(\"/\") + item[:translation] if !url.blank?\n\t\t\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item[:original]] = item[:translation]\n\t\t\t\t\t\t\t\t\t\t\t@t2o[locale.to_sym][translation_as_key] = item[:original]\n\t\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\tend\n\n\t\t\t\t\t\t\t# Dynamic data from DB\n\t\t\t\t\t\t\tdata = where(locale: locale.to_s)\n\t\t\t\t\t\t\tdata.each do |item|\n\t\t\t\t\t\t\t\ttranslation_as_key = item.translation\n\t\t\t\t\t\t\t\ttranslation_as_key = translation_as_key.downcase if RicUrl.downcase_translations == true\n\t\t\t\t\t\t\t\tif RicUrl.use_filter\n\t\t\t\t\t\t\t\t\tif RicUrl.current_app_filter.to_s == item.filter # Slug belongs to current application\n\t\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item.original] = item.translation\n\t\t\t\t\t\t\t\t\t\t@t2o[locale.to_sym][translation_as_key] = item.original\n\t\t\t\t\t\t\t\t\telsif !item.filter.blank? # Slug belongs to other application\n\t\t\t\t\t\t\t\t\t\turl = RicUrl.available_filter_urls[item.filter.to_sym]\n\t\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item.original] = url.trim(\"/\") + item.translation if !url.blank?\n\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t\t@o2t[locale.to_sym][item.original] = item.translation\n\t\t\t\t\t\t\t\t\t@t2o[locale.to_sym][translation_as_key] = item.original\n\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\tend\n\n\t\t\t\t\t\tend\n\n\t\t\t\t\tend", "def cache(name = {}, options = {}, &block)\n options ||= {}\n super([I18n.locale, name], options, &block)\n end", "def update_backend\n if Idioma.configuration.redis_backend\n if i18n_value.present?\n Idioma::RedisBackend.update_phrase(self)\n else\n Idioma::RedisBackend.delete_phrase(self)\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
turn an href into an absolute path
def make_path(base, href)
  return href if href[0] == '/'
  base + href
end
[ "def to_absolute( root, link )\n\n parsed = URI.parse( link )\n if parsed.scheme == 'file'\n parsed.scheme = nil\n return parsed.to_s\n end\n\n return link if URI.parse( link ).host\n\n begin\n # remove anchor\n link = URI.encode( link.to_s.gsub( /#[a-zA-Z0-9_-]*$/,'' ) )\n\n base_url = URI.parse( URI.encode( root ) )\n relative = URI.parse( link )\n absolute = base_url.merge( relative )\n\n absolute.path = '/' if absolute.path && absolute.path.empty?\n\n return absolute.to_s\n rescue Exception => e\n $stderr.puts e\n $stderr.puts e.backtrace.join( \"\\n\" )\n return nil\n end\n end", "def url_from_href(href) #:nodoc:\n scheme, host, path = $1, $2, $3 if URL_PARTS.match href\n\n scheme = uri.scheme if scheme.nil? or scheme.empty? and uri.respond_to? :scheme\n\n host = uri.host if host.nil? or host.empty? and uri.respond_to? :host\n\n path = (\n (/\\/$/.match(uri.path)) ?\n '%s%s' % [uri.path,path] :\n '%s/%s' % [File.dirname(uri.path),path]\n ) unless /^\\//.match path\n\n '%s://%s%s' % [scheme, host, path]\n end", "def to_absolute(link)\n return nil if link.nil?\n\n # remove anchor\n link = URI.encode(URI.decode(link.to_s.gsub(/#[a-zA-Z0-9_-]*$/,'')))\n\n relative = URI(link)\n absolute = base ? base.merge(relative) : URI(url).merge(relative)\n\n absolute.path = '/' if absolute.path.empty?\n\n return absolute\n end", "def to_absolute(link)\n return nil if link.nil?\n\n # remove anchor\n link = URI.encode(URI.decode(link.to_s.gsub(/#[a-zA-Z0-9_-]*$/,'')))\n\n relative = URI(link)\n absolute = @url.merge(relative)\n\n absolute.path = '/' if absolute.path.empty?\n\n return absolute\n end", "def relativize(href, path, absolute_base, root_dir)\n # href = actual href string on page\n # path = actual current location / file path of current page\n # absolute_base = the base url for the site\n\n href_url = URI.join(URI.encode(absolute_base), URI.encode(href))\n path_url = URI.join(absolute_base, URI.encode(path))\n relative_url = path_url.route_to(href_url).to_s\n url_out = test_index(relative_url, href_url, absolute_base, root_dir)\n if href.match(/^#/)\n url_out = href\n end\n url_out\nend", "def to_absolute(page, link)\n # remove anchor\n link = URI.encode(link.to_s.gsub(/#[a-zA-Z0-9_-]*$/,''))\n\n relative = URI(link)\n absolute = page.url.merge(relative)\n\n absolute.path = '/' if absolute.path.empty?\n\n return absolute\n end", "def relative_link(link)\n # REMEMBER TO ADD ROUTES TO HANDLE THESE LINKS\n link.gsub!(@base, \"\").gsub!(\".html\", \"\").gsub!(\"-\", \"_\")\n end", "def resolve_url(a_href, abs_url)\n unless [a_href, abs_url].all?{ |a| [String, Addressable::URI, URI].any?{|c| a.is_a?(c) } }\n raise ArgumentError, 'Arguments must either be Strings or URIs' \n end\n\n absolute_url = Addressable::URI.parse(abs_url).normalize \n raise ArgumentError, \"#{absolute_url} must be absolute\" unless absolute_url.absolute?\n href = Addressable::URI.parse(a_href).normalize \n \n # return :href if :href is already absolute\n return href.to_s if href.absolute?\n\n return absolute_url.join(href).to_s\n end", "def standardize_hrefs\n nodes do |node|\n if node['href']\n node['href'] = escape_path( node['href'] )\n end\n end\n end", "def process_href(href)\n \"https://en.wikipedia.org\" + href\n end", "def url_for original\n (@wuala_dir + \"./#{original.path}\").cleanpath.to_s\n end", "def path_to_link(path)\n return '/home' if path == 'home.textile'\n els = path.split('/')\n dir_name = els[0]\n file_name = els[1]\n dir_name = dir_name.split('-')[1..dir_name.split('-').size].join('-')\n\n els2 = 
file_name.split('-')\n if (els2[0] == els2[1]) && (els2[0] == '')\n return \"/#{dir_name}\"\n else\n file_name = els2[1..els2.size].join('-').split('.')[0]\n return \"/#{dir_name}/#{file_name}\"\n end\nend", "def resolve_link_paths(document)\n document.gsub(/<(?:a[^>]+href)=[\"'](\\/)/) do |match|\n match.chomp('/') + base_url + '/'\n end\n end", "def as_href(from_path)\n RDoc::Markup::ToHtml.gen_relative_url from_path, path\n end", "def build_url(absolute_path)\n absolute_path.gsub(self.fs_path, '').gsub(/^\\//, '').downcase\n end", "def link_to_file(link, file)\n link_file = link.split('#').first\n if link_file == ''\n file\n elsif link_file.start_with?('/')\n link_file[1..]\n else\n File.expand_path(\"#{File.dirname(file)}/#{link_file}\").gsub(\"#{Dir.pwd}/\", '')\n end\nend", "def absolutify_url(url)\n url =~ /^\\w*\\:/i ? url : File.join(@url,url)\n end", "def convert_link(url, link)\n url.match /([a-z]+.)(\\/\\/[^\\/]+)/\n schema = $1\n authority = $2\n path = url.gsub schema+authority, ''\n path = path.gsub(/[a-z]{1}\\.html/, '')\n\n # puts \"schema, #{schema}\"\n # puts \"authority, #{authority}\"\n # puts \"path, #{path}\"\n\n case link\n when /^(\\/{1})/\n schema + authority + link\n when /^(\\.{2})/\n result = link.split('/').inject(path) do |memo, element|\n # if element is .., we want to remove one level of the current path\n if element == '..'\n memo.split('/')[0..-2].join('/')\n else\n # if the element is not .., we want to add it to the schema\n memo += ('/' + element)\n end\n end\n schema + authority + result\n when /^([a-z]+:)/\n link\n else\n schema + authority + path + link\n end\nend", "def build_href(path, collection: false)\n if propstat_relative_path\n request.path_for path, collection: collection\n else\n request.url_for path, collection: collection\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
reduce any '../', './', and '//' in a path or uri
def reduce_path(path)
  if path =~ /^(https?:\/\/.+)(\/.*)/
    prefix = $1
    path = $2
    relative = false
  else
    prefix = nil
    relative = path[0] != '/'
  end
  while path.sub!(/\/*[^\/]+\/+\.\./, ''); end
  while path.sub!(/\/+\.\/+/, '/'); end
  path = path[2..-1] if path[0..1] == './'
  while path.sub!(/\/\//, '/'); end
  path = path[1..-1] if relative and path[0] == '/'
  path = prefix + path if prefix
  path
end
[ "def normalize_uri(*strs)\n new_str = strs * \"/\"\n\n new_str = new_str.gsub!(\"//\", \"/\") while new_str.index(\"//\")\n\n # Makes sure there's a starting slash\n unless new_str[0,1] == '/'\n new_str = '/' + new_str\n end\n\n new_str\n end", "def normalize_uri(*strs)\n new_str = strs * '/'\n new_str = new_str.gsub!('//', '/') while new_str.index('//')\n new_str\n end", "def split_uri(uri); end", "def rootify_url *paths\n '/' << EUtils.normalize_path(paths.compact.join('/')).gsub(/\\A\\/+|\\/+\\Z/, '')\n end", "def humanish(uri)\n uri.path.split('/').last.gsub('.git', '')\n end", "def normalize_path(path); end", "def path; URI.parse(pathname).path; end", "def rootify_url *paths\n '/' << normalize_path(paths.compact.join('/')).gsub(/\\A\\/+|\\/+\\Z/, '')\n end", "def split_file_uri(uri); end", "def split_file_uri(uri)\n scheme, _, host, _, _, path, _, query, _ = URI.split(uri)\n\n path = URI::Generic::DEFAULT_PARSER.unescape(path)\n path.force_encoding(Encoding::UTF_8)\n\n # Hack for parsing Windows \"/C:/Users/IEUser\" paths\n if File::ALT_SEPARATOR && path[2] == ':'\n path = path[1..-1]\n end\n\n [scheme, host || '', path, query]\n end", "def concatenate_paths(*paths)\n return nil if paths.nil? || paths.empty?\n path = paths.join('/')\n uri = URI.parse(path)\n scheme = uri.scheme.nil? ? 'http' : uri.scheme\n path = path.split(/([file|https|http]+\\:\\/{2})/).last\n path.gsub!(/\\/{2,}/, '/') # Remove any // occurences that might have come along\n \"#{scheme}://#{path}\"\n end", "def host_and_path(uri)\n uri = URI.parse(uri) unless uri.is_a? URI\n host = uri.host\n path = uri.path.gsub(/\\/$/, '')\n path = path.empty? ? '/' : uri.path\n URI.parse(\"#{host}#{path}\")\n end", "def normalize_path(path)\n path.sub(%r{^/}, '').tr('', '')\n end", "def normalize_path(path)\n if path.start_with?('/')\n File.join(relative_url_root, path)\n else\n path\n end\n end", "def blindly_relativize_path(maybe_abspath)\n (maybe_abspath.split('').drop_while {|ch| ch=='/'}).join\n end", "def normalize_path(path)\n path = \"/#{path}\"\n path.squeeze!('/')\n path.sub!(%r{/+\\Z}, '')\n path = '/' if path == ''\n path\n end", "def normalized_path; end", "def simplifyPath(path)\n stack = []\n path.scan(%r{ [^/] * }x) do |name|\n case name\n when '.', ''\n # pass\n when '..'\n stack.pop\n else\n stack << name\n end\n end\n '/' + stack.join('/')\nend", "def normalize_url(path)\n @known_pairs ||= {}\n @public_directories_regex ||= Regexp.new(Bones.public_directories.join('|'))\n \n if v = @known_pairs[path]\n return v\n else\n value = case\n when path =~ /^(\\w{3,}:\\/\\/|mailto)/\n # don't do anything to this type of URL\n return path\n when path =~ @public_directories_regex\n path\n when File.directory?('pages' / path)\n path\n else\n # don't add .html if there's already an extension\n path =~ /\\..+/ ? path : path + '.html'\n end\n \n @known_pairs[path] = options.base / value\n end \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create an empty page with necessary assets for project +p+
def create_empty_page(p)
  cli.say 'Creating project page'
  FileUtils.mkdir_p(browse_file(p, '.'))
  %w[favicon-32.png style.css].each do |i|
    FileUtils.cp(template_file(i), browse_file(p, i))
  end
  write_file(p, 'about.html') do
    build_from_template('about.html', citation: MiGA::MiGA.CITATION)
  end
end
[ "def generate_project_page(p)\n # Redirect page\n write_file(p, '../index.html') { build_from_template('redirect.html') }\n\n # Summaries\n summaries = Dir[\"#{p.path}/*.tsv\"].map do |i|\n b = File.basename(i, '.tsv')\n generate_summary_page(i, p)\n \"<li><a href='s-#{b}.html'>#{format_name(b)}</a></li>\"\n end.join('')\n\n # Project index page\n data = {\n project_active: 'active',\n information: format_metadata(p),\n summaries: summaries.empty? ? 'None' : \"<ul>#{summaries}</ul>\",\n results: format_results(p)\n }\n write_file(p, 'index.html') { build_from_template('index.html', data) }\n end", "def create(p)\n puts \"Creating page #{p}\"\n Dir.mkdir p unless File.exists? p\n Content.new(\"#{@dir}\", @extension).page p\n end", "def page(p)\n page_name = p.split('/')[1]\n \n f = File.new(\"#{p}/index.html\", 'w+')\n f.puts \"---\"\n f.puts \"layout: page\"\n f.puts \"title: #{page_name.titleize}\"\n f.puts \"---\"\n \n self.copy f, \"#{@dir}/images/#{page_name}.#{@extension}\"\n f.close\n end", "def create_base_project\n puts \"creating base project\"\n directory \"templates\", \"#{app_path}\"\n end", "def add_blank_page\n new_page(true, false)\n end", "def generate\n \n e = ERB.new(open('template/webpage.html.erb').read)\n\n root_page = write_page(@root_page, e)\n\n root_page\n end", "def create_default_template_file\n unless self.description.redirection_required?\n self.localizations.each do |page_localization|\n file_path = File.join(Rails.root, \"app\", \"views\" , \"pages\" , \"#{self.view}.#{page_localization.locale.slug}.html.haml\" )\n unless File.exists?(file_path)\n file = File.new(file_path, \"w\")\n\n page_localization.contents.each do |content|\n file.puts(\"= @page.easy_contents(:#{content.section_name})\")\n end\n file.close\n end\n end\n end\n end", "def generate_page file\n setup\n\n template_file = @template_dir + 'page.rhtml'\n\n out_file = @outputdir + file.path\n debug_msg \" working on %s (%s)\" % [file.full_name, out_file]\n rel_prefix = @outputdir.relative_path_from out_file.dirname\n search_index_rel_prefix = rel_prefix\n search_index_rel_prefix += @asset_rel_path if @file_output\n\n current = file\n asset_rel_prefix = rel_prefix + @asset_rel_path\n\n @title = \"#{file.page_name} - #{@options.title}\"\n\n debug_msg \" rendering #{out_file}\"\n render_template template_file, out_file do |io|\n here = binding\n # suppress 1.9.3 warning\n here.local_variable_set(:current, current)\n here.local_variable_set(:asset_rel_prefix, asset_rel_prefix)\n here\n end\n end", "def generate_main_page_with(specs)\n main_page = Amy::Model::Main.new\n specs['resources'].each_pair { |resource, options|\n main_page.add_resource( { 'resource' => resource, 'title' => options['title'] } )\n }\n main_page.links = specs['links'] || []\n main_page.version = specs['api_version']\n main_page.base_url = specs['base_url']\n @generator.do(\"#{Amy::BASE_DIR}/views/main.erb.html\", main_page)\n end", "def create\n flash[:notice] = 'The page was successfully created.' if page.save\n respond_with(page, location: project_page_path(page.project, page))\n end", "def new_page(path, title=nil)\n\n filename = File.join(CONFIG[:pages], \"#{path}\")\n filename = File.join(filename, \"index.#{CONFIG[:ext]}\") if File.extname(filename) == \"\"\n title = File.basename(filename, File.extname(filename)).gsub(/[\\W\\_]/, \" \").gsub(/\\b\\w/){$&.upcase} if title.nil?\n if File.exist?(filename)\n return false unless agree(loud \"#{filename} already exists. 
Do you want to overwrite?\")\n end\n \n FileUtils.mkdir_p File.dirname(filename)\n page = HP::YamlDoc.new\n page.yaml[\"layout\"] = 'page'\n page.yaml[\"title\"] = \"#{title.gsub(/-/,' ')}\"\n page.yaml[\"description\"] = \"\"\n page.puts \"{% include JB/setup %}\"\n puts \"Creating new page: #{filename}\"\n page.write_to_file(filename)\n return true\n end", "def create_project\n empty_directory(project)\n end", "def new\n @page = @site.pages.new\n @page.parts << PagePart.new(:name => \"body\")\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new_project\n if @@actual_configuration_options['new']\n remove_file \"public/index.html\"\n insert_into_file \"config/routes.rb\", \"root :to => 'welcome#index'\", :after => \"resources :users\\n\"\n template \"app/controllers/welcome_controller.erb\", \"app/controllers/welcome_controller.rb\"\n empty_directory \"app/views/welcome\"\n template \"app/views/welcome/welcome_index.erb\", \"app/views/welcome/index.html.erb\"\n insert_into_file \"config/routes.rb\", \"root :to => 'welcome#index'\", :after => \"resources :users\\n\"\n rep_str = load_erb_string('partials/_application_flash.html.erb')\n insert_into_file \"app/views/layouts/application.html.erb\", rep_str, :after => \"<%= render 'shared/admin_nav' %>\\n\" \n end\n end", "def add_template_pages; end", "def seed_page_basics!\n page_yml_filenames = [\n \"sell.yml\", \"about.yml\", \"buy.yml\",\n \"rent.yml\", \"home.yml\", \"legal_notice.yml\",\n \"contact.yml\", \"privacy_policy.yml\",\n ]\n\n page_yml_filenames.each do |page_yml_filename|\n seed_page page_yml_filename\n end\n end", "def setup_launching_soon_page\n @css_file = LAUNCHING_SOON_CONFIG[:css_file_name]\n @launching_date = Time.zone.parse(LAUNCHING_SOON_CONFIG[:launching_date]).utc\n render :template => File.join('launching_soon', LAUNCHING_SOON_CONFIG[:html_file_name]), :layout => \"launching_soon\"\n end", "def load_index_page\n @page ||= Language.current_root_page\n render template: \"alchemy/welcome\", layout: false if signup_required?\n end", "def create_base_project\n raise GeneratorArgumentsError if app_path.nil?\n puts \"creating base project\"\n directory \"templates\", \"#{app_path}\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create landing page for project +p+
def generate_project_page(p)
  # Redirect page
  write_file(p, '../index.html') { build_from_template('redirect.html') }

  # Summaries
  summaries = Dir["#{p.path}/*.tsv"].map do |i|
    b = File.basename(i, '.tsv')
    generate_summary_page(i, p)
    "<li><a href='s-#{b}.html'>#{format_name(b)}</a></li>"
  end.join('')

  # Project index page
  data = {
    project_active: 'active',
    information: format_metadata(p),
    summaries: summaries.empty? ? 'None' : "<ul>#{summaries}</ul>",
    results: format_results(p)
  }
  write_file(p, 'index.html') { build_from_template('index.html', data) }
end
[ "def create_empty_page(p)\n cli.say 'Creating project page'\n FileUtils.mkdir_p(browse_file(p, '.'))\n %w[favicon-32.png style.css].each do |i|\n FileUtils.cp(template_file(i), browse_file(p, i))\n end\n write_file(p, 'about.html') do\n build_from_template('about.html', citation: MiGA::MiGA.CITATION)\n end\n end", "def new_project\n if @@actual_configuration_options['new']\n remove_file \"public/index.html\"\n insert_into_file \"config/routes.rb\", \"root :to => 'welcome#index'\", :after => \"resources :users\\n\"\n template \"app/controllers/welcome_controller.erb\", \"app/controllers/welcome_controller.rb\"\n empty_directory \"app/views/welcome\"\n template \"app/views/welcome/welcome_index.erb\", \"app/views/welcome/index.html.erb\"\n insert_into_file \"config/routes.rb\", \"root :to => 'welcome#index'\", :after => \"resources :users\\n\"\n rep_str = load_erb_string('partials/_application_flash.html.erb')\n insert_into_file \"app/views/layouts/application.html.erb\", rep_str, :after => \"<%= render 'shared/admin_nav' %>\\n\" \n end\n end", "def create\n @project = Project.new(project_params)\n if @project.save\n redirect_to \"/charity_lp/landingpage\", notice: 'Research Form was successfully created.'\n # format.json { render :show, status: :created, location: @project }\n else\n render 'new'\n\n # format.html {redirect_to \"/projects/new\", notice: 'Errors in submition, please ensure all fields are filled in correctly' }\n # format.json { render json: @project.errors, status: :unprocessable_entity }\n end\n end", "def landing_page\n end", "def setup_launching_soon_page\n @css_file = LAUNCHING_SOON_CONFIG[:css_file_name]\n @launching_date = Time.zone.parse(LAUNCHING_SOON_CONFIG[:launching_date]).utc\n render :template => File.join('launching_soon', LAUNCHING_SOON_CONFIG[:html_file_name]), :layout => \"launching_soon\"\n end", "def load_index_page\n @page ||= Language.current_root_page\n render template: \"alchemy/welcome\", layout: false if signup_required?\n end", "def page(p)\n page_name = p.split('/')[1]\n \n f = File.new(\"#{p}/index.html\", 'w+')\n f.puts \"---\"\n f.puts \"layout: page\"\n f.puts \"title: #{page_name.titleize}\"\n f.puts \"---\"\n \n self.copy f, \"#{@dir}/images/#{page_name}.#{@extension}\"\n f.close\n end", "def create_research_project\n create_thing\n unless url_error_element.visible?\n self.button(:id=>\"group_create_new_area\", :class=>\"s3d-button s3d-header-button s3d-popout-button\").wait_until_present\n ResearchIntro.new @browser\n end\n end", "def project\n lambda { |text| render( '{{> portfolio/' + slug + '}}' ) }\n end", "def create\n if developer_signed_in?\n @project = Project.createproject(params[:project],current_developer.id)\n respond_to do |format|\n if @project.save\n format.html { redirect_to \"/developers/projects\",\n notice: I18n.t('views.project.flash_messages.project_was_successfully_created') }\n format.json { render json: @project, status: :created, location: @project }\n else\n format.html { render action: \"new\" }\n format.json { render json: @project.errors, status: :unprocessable_entity }\n end\n end\n else\n developer_unauthorized\n render 'pages/home'\n end\n end", "def create\n flash[:notice] = 'The page was successfully created.' 
if page.save\n respond_with(page, location: project_page_path(page.project, page))\n end", "def generate_project_html\n @projects = working_binder.projects\n \n @p = %Q{\n <img src=\"https://s3.amazonaws.com/homebinderstatic/pdf/images/projects-header.png\"/>\n \n <div class=\"padme\">\n <table class=\"table\">\n <tr>\n <th>Name</th>\n <th>Completion Date</th>\n <th>Status</th>\n </tr>\n <tr><td style=\"height:25px;\">&nbsp;</td></tr>\n }\n \n @projects.each do |project|\n @p += %Q{\n <tr>\n <td>#{project.name}</td>\n <td>#{project.end_date.strftime(\"%m/%d/%Y\") unless project.end_date.nil?}</td>\n <td>#{project.get_status.titleize unless project.get_status.nil?}</td>\n </tr>\n }\n end\n \n @p += %Q{\n </table>\n </div>\n <p style=\"page-break-after:always;\"></p> \n } \n end", "def homepage(page)\n @project.homepage = page\n end", "def create_village_project\n iframe_src = \"<iframe src='\" + root_url.sub(/\\/$/, '') + embed_project_path(@project) + \"' width='575' height='485'></iframe><p>This project created with <b><a href='\" + root_url + \"'>#{ENV[\"APP_NAME\"]}</a></b> and updated on \" + @project.updated_at.strftime(\"%A, %B %-d at %-I:%M %p\") +\"</p>\"\n iframe_src = iframe_src.html_safe.to_str\n village_user_ids = @project.village_user_ids\n response = access_token.post(\"/api/projects\", params: {project: {name: @project.title, project_type_id: 15, source: \"original\", description: iframe_src, thumbnail_file_id: 769508, user_ids: village_user_ids} })\n village_project_id = response.parsed[\"project\"][\"id\"]\n @project.update_attributes(:village_id => village_project_id)\n end", "def contribute\n \trender layout: \"developer_home_template\"\n end", "def new\n @root = \"projects\"\n @branch = \"new\"\n \n @project = Project.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @project }\n end\n end", "def create(p)\n puts \"Creating page #{p}\"\n Dir.mkdir p unless File.exists? p\n Content.new(\"#{@dir}\", @extension).page p\n end", "def generate_new_website\n\n page = Page.new \n page.title = \"Homepage\"\n page.body = \"Welcome to my homepage\"\n page.save\n\n site = Site.new\n site.name = \"New Website\"\n site.description = \"Welcome to my brand new website\"\n site.home_page_slug = page.slug\n site.save\n\n index\n\n end", "def create\n visit(CentralAdmin).create_institutional_proposal\n on ProposalLogLookup do |look|\n look.proposal_number.set @proposal_number\n look.search\n look.select_item @proposal_number\n end\n on InstitutionalProposal do |create|\n create.expand_all\n @document_id=create.document_id\n @doc_header=create.doc_title\n @proposal_number=create.institutional_proposal_number\n fill_out create, :proposal_type, :award_id, :activity_type, :project_title, :description\n set_sponsor_code\n create.save\n end\n if @proposal_log && $current_page.errors.size==0\n pi = make ProjectPersonnelObject, principal_name: @proposal_log.principal_investigator,\n full_name: @proposal_log.pi_full_name,\n document_id: @document_id,\n lookup_class: @lookup_class,\n search_key: @search_key,\n doc_header: @doc_header\n @project_personnel << pi\n view :contacts\n @project_personnel.principal_investigator.set_up_units\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create page for the summary +path+ in project +p+
def generate_summary_page(path, p)
  b = File.basename(path, '.tsv')
  table = '<table class="table table-hover table-responsive">'
  File.open(path, 'r') do |fh|
    fh.each do |ln|
      r = ln.chomp.split("\t")
      if $. == 1
        table += '<thead><tr>' +
          r.map { |i| "<th scope=col>#{format_name(i)}</th>" }.join(' ') +
          '</tr></thead><tbody>'
      else
        table += "<tr><th scope=row>#{r.shift}</th>" +
          r.map { |i| "<td>#{i}</td>" }.join(' ') + "</tr>"
      end
    end
  end
  table += '</tbody></table>'
  write_file(p, "s-#{b}.html") do
    build_from_template(
      'summary.html',
      file: "#{b}.tsv", name: format_name(b), table: table
    )
  end
end
[ "def generate_project_page(p)\n # Redirect page\n write_file(p, '../index.html') { build_from_template('redirect.html') }\n\n # Summaries\n summaries = Dir[\"#{p.path}/*.tsv\"].map do |i|\n b = File.basename(i, '.tsv')\n generate_summary_page(i, p)\n \"<li><a href='s-#{b}.html'>#{format_name(b)}</a></li>\"\n end.join('')\n\n # Project index page\n data = {\n project_active: 'active',\n information: format_metadata(p),\n summaries: summaries.empty? ? 'None' : \"<ul>#{summaries}</ul>\",\n results: format_results(p)\n }\n write_file(p, 'index.html') { build_from_template('index.html', data) }\n end", "def create_empty_page(p)\n cli.say 'Creating project page'\n FileUtils.mkdir_p(browse_file(p, '.'))\n %w[favicon-32.png style.css].each do |i|\n FileUtils.cp(template_file(i), browse_file(p, i))\n end\n write_file(p, 'about.html') do\n build_from_template('about.html', citation: MiGA::MiGA.CITATION)\n end\n end", "def create(p)\n puts \"Creating page #{p}\"\n Dir.mkdir p unless File.exists? p\n Content.new(\"#{@dir}\", @extension).page p\n end", "def page(p)\n page_name = p.split('/')[1]\n \n f = File.new(\"#{p}/index.html\", 'w+')\n f.puts \"---\"\n f.puts \"layout: page\"\n f.puts \"title: #{page_name.titleize}\"\n f.puts \"---\"\n \n self.copy f, \"#{@dir}/images/#{page_name}.#{@extension}\"\n f.close\n end", "def create_summary\r\n puts \"Creating a summary...\"\r\n #@lists.disp_summary\r\n create_html_summary\r\n file = File.open('test.html','w')\r\n file << @html\r\n file.close\r\n end", "def new_page(path, title=nil)\n\n filename = File.join(CONFIG[:pages], \"#{path}\")\n filename = File.join(filename, \"index.#{CONFIG[:ext]}\") if File.extname(filename) == \"\"\n title = File.basename(filename, File.extname(filename)).gsub(/[\\W\\_]/, \" \").gsub(/\\b\\w/){$&.upcase} if title.nil?\n if File.exist?(filename)\n return false unless agree(loud \"#{filename} already exists. Do you want to overwrite?\")\n end\n \n FileUtils.mkdir_p File.dirname(filename)\n page = HP::YamlDoc.new\n page.yaml[\"layout\"] = 'page'\n page.yaml[\"title\"] = \"#{title.gsub(/-/,' ')}\"\n page.yaml[\"description\"] = \"\"\n page.puts \"{% include JB/setup %}\"\n puts \"Creating new page: #{filename}\"\n page.write_to_file(filename)\n return true\n end", "def write_page(page, path = '')\n self.output_resource_op page\n\n # Note: we assume the current locale is the default one\n page.translated_in.each do |locale|\n default_locale = locale.to_sym == self.mounting_point.default_locale.to_sym\n\n # we do not need the localized version of the filepath\n filepath = page.fullpath.dasherize\n\n Locomotive::Mounter.with_locale(locale) do\n # we assume the filepath is already localized\n self.write_page_to_fs(page, filepath, default_locale ? nil : locale)\n end\n end\n\n self.output_resource_op_status page\n\n # also write the nested pages\n (page.children || []).each do |child|\n self.write_page(child, page.depth == 0 ? 
'' : page.slug)\n end\n end", "def generate_dataset_page(p, d)\n data = {\n unmiga_name: d.name.unmiga_name,\n information: format_metadata(d),\n results: format_results(d)\n }\n write_file(p, \"d_#{d.name}.html\") do\n build_from_template('dataset.html', data)\n end\n end", "def new_page_info\n args = Webby.site.args\n\n # TODO: maybe even get rid of this method altogether\n raise \"Usage: webby #{args.rake.first} 'path'\" if args.raw.empty?\n\n [args.page, args.title, args.dir]\n end", "def generate_page\n self.each(&:get_result)\n \n path = File.expand_path(\"#{WebServiceDocumenter.output_dir}/#{@endpoint.gsub(/\\/$/,'').gsub(/\\.\\w*$/,'')}.html\")\n # create the path if necessary\n FileUtils.mkdir_p(File.dirname(path))\n # write out our file\n File.open(path, 'w+') do |f|\n erb = ::ERB.new(File.read(File.expand_path('../templates/service.html.erb', __FILE__)))\n f.write(erb.result(binding))\n end\n path\n end", "def page title, story\n page = {'title' => title, 'story' => story, 'journal' => [create(title)]}\n File.open(\"../pages/#{slug(title)}\", 'w') do |file| \n file.write JSON.pretty_generate(page)\n end\nend", "def parse_project(p)\n @builder.outline(\"text\" => p[\"name\"], \"type\" => \"link\", \"url\" => p[\"url\"], \"created\" => p[\"created\"]) do\n p[\"tasks\"].each { |t| parse_task(t) }\n end\n end", "def generate_project_html\n @projects = working_binder.projects\n \n @p = %Q{\n <img src=\"https://s3.amazonaws.com/homebinderstatic/pdf/images/projects-header.png\"/>\n \n <div class=\"padme\">\n <table class=\"table\">\n <tr>\n <th>Name</th>\n <th>Completion Date</th>\n <th>Status</th>\n </tr>\n <tr><td style=\"height:25px;\">&nbsp;</td></tr>\n }\n \n @projects.each do |project|\n @p += %Q{\n <tr>\n <td>#{project.name}</td>\n <td>#{project.end_date.strftime(\"%m/%d/%Y\") unless project.end_date.nil?}</td>\n <td>#{project.get_status.titleize unless project.get_status.nil?}</td>\n </tr>\n }\n end\n \n @p += %Q{\n </table>\n </div>\n <p style=\"page-break-after:always;\"></p> \n } \n end", "def create_help_tree(arypath) \n # Page#find_by_url raises an exception when page not found\n begin \n page = Page.find_by_url(arypath.join(\"/\"))\n return page if page\n rescue\n end \n last_page = arypath.inject(nil) do |parent, slug|\n page = Page.find_by_slug_and_parent_id(slug, parent)\n unless page\n page = Page.new(:title => slug.titleize, \n :slug => slug, \n :breadcrumb => slug.titleize, \n :content => slug , \n :parent => parent)\n page.save!\n page.published!\n end\n page\n end\n send_internal_message(last_page)\n last_page\n end", "def create\n @descriptive_page = DescriptivePage.new(descriptive_page_params)\n @descriptive_page.Project_id = session[:current_project_id]\n\n respond_to do |format|\n if @descriptive_page.save\n format.html { redirect_to @descriptive_page, notice: 'Descriptive page was successfully created.' 
}\n format.json { render :show, status: :created, location: @descriptive_page }\n else\n format.html { render :new }\n format.json { render json: @descriptive_page.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_page(path, body, status=200, headers={})\n @pages << {\n 'path' => path,\n 'body' => body,\n 'status' => status,\n 'headers' => headers\n }\n end", "def write_the_html_file(mdpath, name)\n DbgMgr.put \"out_html\", \"writing #{name}html\"\n file = File.join @doc, (name + \"html\")\n VersionedFile.new_version file, true, bak: @doc_bak, keep: @keep\n cmd = \"pandoc -p -s -S -f markdown -t html5 --toc --toc-depth=4 \" +\n \"--self-contained --css #{@css} -o #{file} #{mdpath}\"\n DbgMgr.put \"out_html\", cmd\n output = %x[#{cmd}]\n [$?.exitstatus, output]\n\n end", "def documentation_page store, generator, path, req, res\n text_name = path.chomp '.html'\n name = text_name.gsub '/', '::'\n\n if klass = store.find_class_or_module(name) then\n res.body = generator.generate_class klass\n elsif page = store.find_text_page(name.sub(/_([^_]*)\\z/, '.\\1')) then\n res.body = generator.generate_page page\n elsif page = store.find_text_page(text_name.sub(/_([^_]*)\\z/, '.\\1')) then\n res.body = generator.generate_page page\n else\n not_found generator, req, res\n end\n end", "def format_for_summary_report(work)\n \"\\n#{work.creator.first}. #{work.title.first} (#{document_path(work)})\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create page for dataset +d+ within project +p+
def generate_dataset_page(p, d)
  data = {
    unmiga_name: d.name.unmiga_name,
    information: format_metadata(d),
    results: format_results(d)
  }
  write_file(p, "d_#{d.name}.html") do
    build_from_template('dataset.html', data)
  end
end
[ "def generate_datasets_index(p)\n cli.say 'Creating index pages'\n data = format_dataset_index(p)\n data.each do |k, v|\n write_file(p, \"#{k}_datasets.html\") do\n v[:list] = 'None' if v[:list] == ''\n build_from_template(\n 'datasets.html',\n v.merge(:\"#{k}_datasets_active\" => 'active')\n )\n end\n end\n end", "def generate_project_page(p)\n # Redirect page\n write_file(p, '../index.html') { build_from_template('redirect.html') }\n\n # Summaries\n summaries = Dir[\"#{p.path}/*.tsv\"].map do |i|\n b = File.basename(i, '.tsv')\n generate_summary_page(i, p)\n \"<li><a href='s-#{b}.html'>#{format_name(b)}</a></li>\"\n end.join('')\n\n # Project index page\n data = {\n project_active: 'active',\n information: format_metadata(p),\n summaries: summaries.empty? ? 'None' : \"<ul>#{summaries}</ul>\",\n results: format_results(p)\n }\n write_file(p, 'index.html') { build_from_template('index.html', data) }\n end", "def generate(site)\n # page_gen-dirs is a global option which determines whether we want to\n # generate index pages (name/index.html) or HTML files (name.html) for\n # all sets\n index_files = site.config['page_gen-dirs'] == true\n\n # data contains the specification of all the datasets for which we want\n # to generate individual pages (look at the README file for its documentation)\n data = site.config['page_gen']\n if data\n data.each do |data_spec|\n index_files_for_this_data = data_spec['index_files'] != nil ? data_spec['index_files'] : index_files\n template = data_spec['template'] || data_spec['data']\n name = data_spec['name']\n name_expr = data_spec['name_expr']\n title = data_spec['title']\n title_expr = data_spec['title_expr']\n dir = data_spec['dir'] || data_spec['data']\n extension = data_spec['extension'] || \"html\"\n page_data_prefix = data_spec['page_data_prefix']\n debug = data_spec['debug']\n \n if not site.layouts.key? template\n puts \"error (datapage-gen). could not find template #{template}. 
Skipping dataset #{name}.\"\n else\n # records is the list of records for which we want to generate\n # individual pages\n records = nil\n\n data_spec['data'].split('.').each do |level|\n if records.nil?\n records = site.data[level]\n else\n records = records[level]\n end\n end\n if (records.kind_of?(Hash))\n records = records.values\n end\n\n # apply filtering conditions:\n # - filter requires the name of a boolean field\n # - filter_condition evals a ruby expression which can use =record= as argument\n records = records.select { |record| record[data_spec['filter']] } if data_spec['filter']\n records = records.select { |record| eval(data_spec['filter_condition']) } if data_spec['filter_condition']\n\n # we now have the list of all records for which we want to generate individual pages\n # iterate and call the constructor\n records.each do |record|\n site.pages << DataPage.new(site, site.source, index_files_for_this_data, dir, page_data_prefix, record, name, name_expr, title, title_expr, template, extension, debug)\n end\n end\n end\n end\n end", "def initialize(site, base, index_files, dir, page_data_prefix, data, name, name_expr, title, title_expr, template, extension, debug)\n @site = site\n @base = base\n\n if debug\n puts \"debug (datapage-gen) Record read:\"\n puts \">> #{data}\"\n\n puts \"debug (datapage-gen) Configuration variables:\"\n [:index_files, :dir, :page_data_prefix, :name, :name_expr, :title, :title_expr, :template, :extension].each do |variable|\n puts \">> #{variable}: #{eval(variable.to_s)}\"\n end\n end\n\n # @dir is the directory where we want to output the page\n # @name is the name of the page to generate\n # @name_expr is an expression for generating the name of the page\n #\n # the value of these variables changes according to whether we\n # want to generate named folders or not\n if name_expr\n record = data\n raw_filename = eval(name_expr)\n if raw_filename == nil\n puts \"error (datapage-gen). name_expr '#{name_expr}' generated an empty value in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using name_expr: '#{raw_filename}' (sanitized) will be used as the filename\" if debug\n else\n raw_filename = data[name]\n if raw_filename == nil\n puts \"error (datapage-gen). empty value for field '#{name}' in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using name field: '#{raw_filename}' (sanitized) will be used as the filename\" if debug\n end\n\n if title_expr\n record = data\n raw_title = eval(title_expr)\n if raw_title == nil\n puts \"error (datapage-gen). title_expr '#{title_expr}' generated an empty value in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using title_expr: '#{raw_title}' will be used the page title\" if debug\n else\n raw_title = data[title]\n if raw_title == nil\n raw_title = raw_filename # for backwards compatibility\n puts \"debug (datapage-gen). empty title field: falling back to filename for the page title\" if debug\n end\n puts \"debug (datapage-gen). will use '#{raw_title}' as the page title\" if debug\n end\n\n filename = sanitize_filename(raw_filename).to_s\n\n @dir = dir + (index_files ? \"/\" + filename + \"/\" : \"\")\n @name = (index_files ? \"index\" : filename) + \".\" + extension.to_s\n\n self.process(@name)\n\n if @site.layouts[template].path.end_with? 'html'\n @path = @site.layouts[template].path.dup\n else\n @path = File.join(@site.layouts[template].path, @site.layouts[template].name)\n end\n\n base_path = @site.layouts[template].path\n base_path.slice! 
@site.layouts[template].name\n self.read_yaml(base_path, @site.layouts[template].name)\n\n self.data['title'] = raw_title\n\n # add all the information defined in _data for the current record to the\n # current page (so that we can access it with liquid tags)\n if page_data_prefix\n self.data[page_data_prefix] = data\n else\n if data.key?('name')\n data['_name'] = data['name']\n end\n self.data.merge!(data)\n end\n\n end", "def create(p)\n puts \"Creating page #{p}\"\n Dir.mkdir p unless File.exists? p\n Content.new(\"#{@dir}\", @extension).page p\n end", "def generate_project_html\n @projects = working_binder.projects\n \n @p = %Q{\n <img src=\"https://s3.amazonaws.com/homebinderstatic/pdf/images/projects-header.png\"/>\n \n <div class=\"padme\">\n <table class=\"table\">\n <tr>\n <th>Name</th>\n <th>Completion Date</th>\n <th>Status</th>\n </tr>\n <tr><td style=\"height:25px;\">&nbsp;</td></tr>\n }\n \n @projects.each do |project|\n @p += %Q{\n <tr>\n <td>#{project.name}</td>\n <td>#{project.end_date.strftime(\"%m/%d/%Y\") unless project.end_date.nil?}</td>\n <td>#{project.get_status.titleize unless project.get_status.nil?}</td>\n </tr>\n }\n end\n \n @p += %Q{\n </table>\n </div>\n <p style=\"page-break-after:always;\"></p> \n } \n end", "def generatePage(y=nil, m=nil)\n\n\t# Set file name and title.\n\tid = 'index'\n\tname = 'All data'\n\tid = name = sprintf('%04d', y) if !y.nil?\n\tid = name += '-' + sprintf('%02d', m) if !m.nil?\n\n\t# Get pageviews for given period.\n\tpageviews = 0\n\tif (y.nil?)\n\t\ts = $db.prepare 'SELECT SUM(`count`) FROM `pageviews`'\n\telsif (m.nil?)\n\t\ts = $db.prepare 'SELECT SUM(`count`) FROM `pageviews` WHERE `year` = ?'\n\t\ts.bind_params(y)\n\telse\n\t\ts = $db.prepare 'SELECT SUM(`count`) FROM `pageviews` WHERE `year` = ? AND `month` = ?'\n\t\ts.bind_params(y, m)\n\tend\n\tr = s.execute\n\tr.each do |row|\n\t\tpageviews = row[0]\n\tend\n\n\t# Get unique visitors for given period.\n\tvisitors = 0\n\tif (y.nil?)\n\t\ts = $db.prepare 'SELECT COUNT(*) FROM `visitors`'\n\telsif (m.nil?)\n\t\ts = $db.prepare 'SELECT COUNT(*) FROM `visitors` WHERE `year` = ?'\n\t\ts.bind_params(y)\n\telse\n\t\ts = $db.prepare 'SELECT COUNT(*) FROM `visitors` WHERE `year` = ? 
AND `month` = ?'\n\t\ts.bind_params(y, m)\n\tend\n\tr = s.execute\n\tr.each do |row|\n\t\tvisitors = row[0]\n\tend\n\n\t# Generate HTML page.\n\tFile.open(WEBPATH + '/' + id + '.html', 'w') do |f|\n\n\t\tf.write('<!DOCTYPE HTML>' + \"\\n\")\n\t\tf.write('<html>' + \"\\n\")\n\t\tf.write('<head>' + \"\\n\")\n\t\tf.write('<title>' + WEBSITENAME + ' - ' + name + '</title>' + \"\\n\")\n\t\tf.write('<link rel=\"stylesheet\" type=\"text/css\" href=\"main.css\" />' + \"\\n\")\n\t\tf.write('<script type=\"text/javascript\" src=\"main.js\"></script>' + \"\\n\")\n\t\tf.write('</head>' + \"\\n\")\n\t\tf.write('<body>' + \"\\n\\n\")\n\n\t\tf.write('<h1>' + WEBSITENAME + ' - ' + name + '</h1>' + \"\\n\")\n\t\tf.write('<canvas id=\"pagegraph\" class=\"pagegraph\" width=\"700\" height=\"200\"></canvas>' + \"\\n\")\n\t\tf.write('<div class=\"pagesummary\">' + \"\\n\")\n\t\tf.write('<div>' + formatnumber(pageviews) + ' page views</div>' + \"\\n\")\n\t\tf.write('<div>' + formatnumber(visitors) + ' unique visitors</div>' + \"\\n\")\n\t\tf.write('</div>' + \"\\n\\n\")\n\n\t\t# Generate table for pages.\n\t\tf.write('<div class=\"pagestable\">' + \"\\n\")\n\t\tf.write('<div><div onclick=\"sortbydata(event)\">Page</div><div onclick=\"sortbyviews(event)\">Views</div></div>' + \"\\n\")\n\t\tmax = 0\n\t\tif (y.nil?)\n\t\t\ts = $db.prepare 'SELECT `page`, SUM(`count`) FROM `pages` GROUP BY `page` ORDER BY SUM(`count`) DESC'\n\t\telsif (m.nil?)\n\t\t\ts = $db.prepare 'SELECT `page`, SUM(`count`) FROM `pages` WHERE `year` = ? GROUP BY `page` ORDER BY SUM(`count`) DESC'\n\t\t\ts.bind_params(y)\n\t\telse\n\t\t\ts = $db.prepare 'SELECT `page`, SUM(`count`) FROM `pages` WHERE `year` = ? AND `month` = ? GROUP BY `page` ORDER BY SUM(`count`) DESC'\n\t\t\ts.bind_params(y, m)\n\t\tend\n\t\tr = s.execute\n\t\tr.each do |row|\n\t\t\tmax = row[1].to_f / pageviews\n\t\t\tbreak\n\t\tend\n\t\tr.reset()\n\t\tr.each do |row|\n\t\t\tf.write('<div><div>' + row[0] + '</div><div><span style=\"width:' + (row[1].to_f / pageviews / max * 100).round.to_s + '%;\"></span></div><div>' + formatnumber(row[1]) + '</div></div>' + \"\\n\")\n\t\tend\n\t\tf.write('</div>' + \"\\n\")\n\t\tf.write('<div class=\"pagemore\" id=\"pagemore\" onclick=\"showmore(event);\"><a>More...</a></div>' + \"\\n\\n\")\n\n\t\t# Generate table for countries.\n\t\tf.write('<div class=\"pagestable\">' + \"\\n\")\n\t\tf.write('<div><div onclick=\"sortbydata(event)\">Country</div><div onclick=\"sortbyviews(event)\">Views</div></div>' + \"\\n\")\n\t\tmax = 0\n\t\tif (y.nil?)\n\t\t\ts = $db.prepare 'SELECT `country`, SUM(`count`) FROM `countries` GROUP BY `country` ORDER BY SUM(`count`) DESC'\n\t\telsif (m.nil?)\n\t\t\ts = $db.prepare 'SELECT `country`, SUM(`count`) FROM `countries` WHERE `year` = ? GROUP BY `country` ORDER BY SUM(`count`) DESC'\n\t\t\ts.bind_params(y)\n\t\telse\n\t\t\ts = $db.prepare 'SELECT `country`, SUM(`count`) FROM `countries` WHERE `year` = ? AND `month` = ? GROUP BY `country` ORDER BY SUM(`count`) DESC'\n\t\t\ts.bind_params(y, m)\n\t\tend\n\t\tr = s.execute\n\t\tr.each do |row|\n\t\t\tmax = row[1].to_f / pageviews\n\t\t\tbreak\n\t\tend\n\t\tr.reset()\n\t\tr.each do |row|\n\t\t\tcountry = $countryname.has_key?(row[0]) ? 
$countryname[row[0]] : row[0]\n\t\t\tf.write('<div><div>' + country + '</div><div><span style=\"width:' + (row[1].to_f / pageviews / max * 100).round.to_s + '%;\"></span></div><div>' + formatnumber(row[1]) + '</div></div>' + \"\\n\")\n\t\tend\n\t\tf.write('</div>' + \"\\n\")\n\t\tf.write('<div class=\"pagemore\" id=\"pagemore\" onclick=\"showmore(event);\"><a>More...</a></div>' + \"\\n\\n\")\n\n\t\t# Display menu for available dates.\n\t\t($aye..$ays).each do |yi|\n\t\t\tf.write('<div class=\"pagedates\" id=\"pagedates\">' + \"\\n\")\n\t\t\tf.write('<div>' + \"\\n\")\n\t\t\ta = (yi == y and m.nil? ) ? ' class=\"active\"' : ''\n\t\t\tf.write('<a' + a + ' href=\"' + sprintf('%04d', yi) + '.html\">' + sprintf('%04d', yi) + '</a>' + \"\\n\")\n\t\t\tf.write('</div><div>' + \"\\n\")\n\t\t\tmsi = $ays == yi ? $ams : 1\n\t\t\tmei = $aye == yi ? $ame : 12\n\t\t\t(msi..mei).each do |mi|\n\t\t\t\ta = (yi == y and mi == m) ? ' class=\"active\"' : ''\n\t\t\t\tf.write('<a' + a + ' href=\"' + sprintf('%04d', yi) + '-' + sprintf('%02d', mi) + '.html\">' + sprintf('%04d', yi) + '-' + sprintf('%02d', mi) + '</a>' + \"\\n\")\n\t\t\tend\n\t\t\tf.write('</div>' + \"\\n\")\n\t\t\tf.write('</div>' + \"\\n\\n\")\n\t\tend\n\n\t\t# Generate page views data for the graph.\n\t\tf.write('<script>' + \"\\n\")\n\t\tf.write('graphdata={' + \"\\n\")\n\t\tar = {}\n\t\tif (y.nil?)\n\t\t\t# Only show data from full months.\n\t\t\tys = $ays\n\t\t\tms = $ams\n\t\t\tds = $ads\n\t\t\tye = $aye\n\t\t\tme = $ame\n\t\t\tde = $ade\n\t\t\tif ds != 1 and (ys < ye or ms < me)\n\t\t\t\tms += 1\n\t\t\t\tds = 1\n\t\t\tend\n\t\t\tif de != calendardays(ye, me) and (ys < ye or ms < me)\n\t\t\t\tme -= 1\n\t\t\t\tde = calendardays(ye, me)\n\t\t\tend\n\t\t\t# Provide no data unless there's 2 full months of data or more.\n\t\t\tif ys != ye or ms != me\n\t\t\t\t(ys..ye).each do |yi|\n\t\t\t\t\tmsi = ys == yi ? ms : 1\n\t\t\t\t\tmei = ye == yi ? me : 12\n\t\t\t\t\t(msi..mei).each do |mi|\n\t\t\t\t\t\tar[sprintf('%04d', yi) + '-' + sprintf('%02d', mi)] = '0'\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\ts = $db.prepare 'SELECT `year`, `month`, SUM(`count`) FROM `pageviews` WHERE `year` >= ? AND `year` <= ? AND `month` >= ? AND `month` <= ? GROUP BY `year`, `month`'\n\t\t\t\ts.bind_params(ys, ye, ms, me)\n\t\t\t\tr = s.execute\n\t\t\t\tr.each do |row|\n\t\t\t\t\tar[sprintf('%04d', row[0]) + '-' + sprintf('%02d', row[1])] = row[2].to_s\n\t\t\t\tend\n\t\t\t\ts.close\n\t\t\tend\n\t\telsif (m.nil?)\n\t\t\tms = 1\n\t\t\tme = 12\n\t\t\t(ms..me).each do |mi|\n\t\t\t\tar[sprintf('%04d', y) + '-' + sprintf('%02d', mi)] = '0'\n\t\t\tend\n\t\t\ts = $db.prepare 'SELECT `month`, SUM(`count`) FROM `pageviews` WHERE `year` = ? GROUP BY `month`'\n\t\t\ts.bind_params(y)\n\t\t\tr = s.execute\n\t\t\tr.each do |row|\n\t\t\t\tar[sprintf('%04d', y) + '-' + sprintf('%02d', row[0])] = row[1].to_s\n\t\t\tend\n\t\t\ts.close\n\t\telse\n\t\t\tds = 1\n\t\t\tde = calendardays(y, m)\n\t\t\t(ds..de).each do |di|\n\t\t\t\tar[sprintf('%04d', y) + '-' + sprintf('%02d', m) + '-' + sprintf('%02d', di)] = '0'\n\t\t\tend\n\t\t\ts = $db.prepare 'SELECT `day`, SUM(`count`) FROM `pageviews` WHERE `year` = ? AND `month` = ? 
GROUP BY `day`'\n\t\t\ts.bind_params(y, m)\n\t\t\tr = s.execute\n\t\t\tr.each do |row|\n\t\t\t\tar[sprintf('%04d', y) + '-' + sprintf('%02d', m) + '-' + sprintf('%02d', row[0])] = row[1].to_s\n\t\t\tend\n\t\t\ts.close\n\t\tend\n\t\tar.each do |k, v|\n\t\t\tf.write('\\'' + k + '\\':' + v + ',' + \"\\n\")\n\t\tend\n\t\tf.write('}' + \"\\n\")\n\t\tf.write('drawgraph();' + \"\\n\")\n\t\tf.write('</script>' + \"\\n\\n\")\n\t\n\t\tf.write('</body>' + \"\\n\")\n\t\tf.write('</html>')\n\n\tend\n\nend", "def initialize(site, base, index_files, dir, data, name, title, template, extension, defaults = {})\n @site = site\n @base = base\n\n # @dir is the directory where we want to output the page\n # @name is the name of the page to generate\n #\n # the value of these variables changes according to whether we\n # want to generate named folders or not\n if data[name] == nil\n puts \"error (datapage_gen). empty value for field '#{name}' in record #{data}\"\n else\n filename = sanitize_filename(data[name]).to_s\n\n @dir = dir + (index_files ? \"/\" + filename + \"/\" : \"\")\n @name = (index_files ? \"index\" : filename) + \".\" + extension.to_s\n\n self.process(@name)\n self.read_yaml(File.join(base, '_layouts'), template + \".html\")\n\n # original method to set page title to data[name]\n # self.data['title'] = data[name]\n\n if title\n self.data['title'] = data[title]\n elsif data['publiccode'] && data['publiccode']['name']\n self.data['title'] = data['publiccode']['name'] + ' - ' + defaults['title_suffix']\n else\n self.data['title'] = data[name]\n end\n\n self.data.merge!(defaults)\n # add all the information defined in _data for the current record to the\n # current page (so that we can access it with liquid tags)\n self.data.merge!(data)\n end\n end", "def generate(site)\n # page_gen_dirs determines whether we want to generate index pages\n # (name/index.html) or standard files (name.html). This information\n # is passed to the DataPage constructor, which sets the @dir variable\n # as required by this directive\n index_files = site.config['page_gen-dirs'] == true\n\n # data contains the specification of the data for which we want to generate\n # the pages (look at the README file for its specification)\n data = site.config['page_gen']\n if data\n data.each do |data_spec|\n index_files_for_this_data = data_spec['index_files'] != nil ? data_spec['index_files'] : index_files\n template = data_spec['template'] || data_spec['data']\n name = data_spec['name']\n title = data_spec['title']\n dir = data_spec['dir'] || data_spec['data']\n extension = data_spec['extension'] || \"html\"\n\n if site.layouts.key? template\n # records is the list of records defined in _data.yml\n # for which we want to generate different pages\n records = nil\n data_spec['data'].split('.').each do |level|\n if records.nil?\n records = site.data[level]\n else\n records = records[level]\n end\n end\n\n # apply filtering conditions:\n # - filter requires the name of a boolean field\n # - filter_condition evals a ruby expression\n records = records.select { |r| r[data_spec['filter']] } if data_spec['filter']\n records = records.select { |record| eval(data_spec['filter_condition']) } if data_spec['filter_condition']\n\n records.each do |record|\n site.pages << DataPage.new(site, site.source, index_files_for_this_data, dir, record, name, title, template, extension, data_spec['defaults'])\n end\n else\n puts \"error (datapage_gen). could not find template #{template}\" if not site.layouts.key? 
template\n end\n end\n end\n end", "def selection_page_data\n @soda_client.get(\n Figaro.env.select_page_dataset\n )\n end", "def show_dataset(name)\n return if project.nil?\n v.new_window(self) { |_, nv| nv.dataset(name) }\n end", "def create_page_jsonld\n mentions = mentions_as_lod\n page_name = \"Chapter #{@page.data['chapter']}: #{@page.data['title']}\"\n lod_mentions = {\n '@id' => create_page_id,\n 'name': page_name,\n '@type' => 'WebPage',\n 'mentions' => mentions\n }\n lod = { '@context' => @context, '@graph' => lod_mentions }\n compact_jsonld(@context, lod)\n end", "def generate(site)\n\n # page_gen_dirs determines whether we want to generate index pages\n # (name/index.html) or standard files (name.html). This information\n # is passed to the DataPage constructor, which sets the @dir variable\n # as required by this directive\n\n index_files = site.config['page_gen-dirs'] == true\n\n # data contains the specification of the data for which we want to generate\n # the pages (look at the README file for its specification)\n data = site.config['page_gen']\n types = site.config['data_types']\n if data\n data.each do |data_spec|\n # template = data_spec['template'] || data_spec['data']\n name = data_spec['name']\n # dir = data_spec['dir'] || data_spec['data']\n # Added 2 lines: Set context and type for JSON-LD \n context = data_spec['context'] || \"http://schema.org/\"\n # type = data_spec['type'] || \"Thing\"\n extension = data_spec['extension'] || \"html\"\n\n # records is the list of records defined in _data.yml\n # for which we want to generate different pages\n records = nil\n data_spec['data'].split('.').each do |level|\n if records.nil?\n records = site.data[level]\n else\n records = records[level]\n end\n end\n records.each do |record|\n # Added 3 lines: Add context and type for JSON-LD to each record\n collection = record[\"collection\"]\n dir = types[collection][\"dir\"] || collection\n template = types[collection][\"template\"]\n type = types[collection][\"type\"]\n record[\"@context\"] = context\n record[\"data\"][\"@type\"] = type\n record[\"data\"][\"name\"] = record[\"name\"]\n site.pages << DataPage.new(site, site.source, index_files, dir, record, name, template, extension)\n end\n end\n end\n end", "def generate(site)\n # page_gen_dirs determines whether we want to generate index pages\n # (name/index.html) or standard files (name.html). This information\n # is passed to the DataPage constructor, which sets the @dir variable\n # as required by this directive\n puts \"debug\"\n index_files = site.config['page_gen-dirs']\n index_files = true if index_files.nil?\n\n # config contains the specification of the data for which we want to generate\n # the pages (look at the README file for its specification)\n config = site.config['data_gen']\n\n # default configuration: get all data files, use the 'data_page.html' template,\n # output to /data\n path = nil\n template = 'data_page'\n dir = 'data'\n \n \n if config\n path = config['path'] || path\n template = config['template'] || template\n dir = config['dir'] || dir\n end\n\n if site.layouts.key? template\n data_files = path.nil? ? site.data : site.data[path]\n\n data_files.each do |name, record|\n site.pages << DataPage.new(site, site.source, index_files, dir, record, name, template, \"html\")\n end\n else\n puts \"DataPageGenerator error. 
could not find template #{template}\"\n end\n end", "def create\n @data_page = DataPage.new(data_page_params)\n\n respond_to do |format|\n if @data_page.save\n format.html { redirect_to @data_page, notice: 'Data page was successfully created.' }\n format.json { render :show, status: :created, location: @data_page }\n else\n format.html { render :new }\n format.json { render json: @data_page.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_page\n @outfile.puts <<'EOF'\n<!DOCTYPE html>\n<html>\n <head>\n <title>Fun Fun Fun</title>\n <meta charset=\"utf-8\">\nEOF\n\n include_stylesheets\n include_javascript\n add_data(@batch)\n @outfile.puts <<'EOF'\n </head>\n <body>\n <svg class=\"chart\">\n </svg>\n </body>\n</html>\nEOF\n end", "def new\n @page_title = \"Data Sets\"\n @data_set = DataSet.new\n add_crumb(\"Admin\", '/admin')\n add_crumb(\"Data Sets\", '/data_sets')\n add_crumb(\"New\")\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @data_set }\n end\n end", "def create\n @descriptive_page = DescriptivePage.new(descriptive_page_params)\n @descriptive_page.Project_id = session[:current_project_id]\n\n respond_to do |format|\n if @descriptive_page.save\n format.html { redirect_to @descriptive_page, notice: 'Descriptive page was successfully created.' }\n format.json { render :show, status: :created, location: @descriptive_page }\n else\n format.html { render :new }\n format.json { render json: @descriptive_page.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_project_data_set\n @data_set =\"\"\n parent.managed_repository do\n if params[:name]\n if Voeis::DataSet.first(:name => params[:name]).nil?\n if params[:type].nil? || params[:type].empty?\n params[:type] = \"default\"\n end\n if params[:description].nil?\n params[:description] = \"\"\n end\n debugger\n @data_set = Voeis::DataSet.create(:name=>params[:name], :type=>params[:type], :description=>params[:description])\n else\n @data_set = {\"error\" => \"The name: #{params[:name]} already exists as data set.\"}\n end\n else\n @data_set = {\"error\" => \"The name parameter is required to create a new data set.\"}\n end\n end\n respond_to do |format|\n format_response(@data_set, format)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create pages for reference and query dataset indexes
def generate_datasets_index(p) cli.say 'Creating index pages' data = format_dataset_index(p) data.each do |k, v| write_file(p, "#{k}_datasets.html") do v[:list] = 'None' if v[:list] == '' build_from_template( 'datasets.html', v.merge(:"#{k}_datasets_active" => 'active') ) end end end
[ "def index_pages\n debug_msg \" generating pages search index\"\n\n pages = @files.select do |file|\n file.text?\n end\n\n pages.each do |page|\n debug_msg \" #{page.page_name}\"\n record = page.search_record\n @index[:searchIndex] << search_string(record.shift)\n @index[:longSearchIndex] << ''\n record.shift\n @index[:info] << record\n end\n end", "def process_index\n bindings = {\n :url => @definition.get_url,\n :name => @definition.get_name,\n :resources => @definition.resources,\n :description => @definition.get_description,\n :version => @definition.get_version\n }\n\n page = Calamum::DocGenerator.new(:index)\n page.save_template('index.html', bindings)\n end", "def create_indices\n destination = File.join(@config[:site][:root], @config[:site][:posts], @config[:site][:index])\n Dir.mkdir(destination) if !Dir.exists?(destination)\n\n # Clear out the indices before making them\n Dir.entries(destination).each do |f|\n index = File.join(destination, f)\n File.delete(index) if File.file?(index)\n end\n\n temp_dir = File.join(\"templates\")\n template = Template.new(temp_dir, 'index.html', post_location=@config[:site][:posts])\n indices = []\n\n # Segment the posts into groups of 5\n @posts.each_slice(5) { |posts|\n indices << posts\n }\n\n # Create the indices and save them\n indices.length.times { |i|\n p_pg = nil\n n_pg = nil\n\n # Find the relative location (to the site) of the index\n rel_index = File.join(\"/\", @config[:site][:posts], @config[:site][:index])\n\n # Figure out the previous/next pages, if they exist\n p_pg = File.join(rel_index, i.to_s) if i > 0\n n_pg = File.join(rel_index, (i+2).to_s) if i + 1 < indices.length\n\n # Render the index page\n indices[i] = template.render(indices[i], prev_page=p_pg, next_page=n_pg)\n\n # Save the index page\n index_file = File.join(destination, (i+1).to_s)\n File.open(index_file, 'w') do |f|\n f.print(indices[i])\n end\n }\n end", "def pages_index\n @pages.inject([]) { |h, p|\n h << {\n :title => p.to_s,\n :url => p.url,\n :type => p.data['group'],\n :parent => (urls.index(p.parent.path) if p.parent?) }\n h\n }\n end", "def build_index\n\n\t\t# Names the file based on date and time for uniqueness and ability to find which one you want later\n\t\tt = Time.now\n\t\t@file_time = t.strftime(\"%Y.%b.%d_%H.%M.%S\")\n\t\t@filename = \"quilt_pages/#{@needed_rows}x#{@needed_columns}_#{@file_time}.html\"\n\n\t\t# Store the quilt page template in a variable\n\t\tquilt_template = File.read \"templates/quilt_template.erb\"\n\t\t# Start a new ERB\n\t\terb_template = ERB.new quilt_template\n\t\t# Pull it all together and put info into one variable\n\t\tquilt_page = erb_template.result(binding)\n\n\t\t# Makes the directory for the quilt pages if there isn't one\n\t\tDir.mkdir(\"quilt_pages\") unless Dir.exists? \"quilt_pages\"\n\n\t\t# Opens the file and saves (actually writes) the quilt info\n\t\tFile.open(@filename, 'w') do |file|\n\t\t\tfile.puts quilt_page\n\t\tend\n\n\t\tsystem(\"open #{@filename}\")\n\tend", "def generate(site)\n # page_gen-dirs is a global option which determines whether we want to\n # generate index pages (name/index.html) or HTML files (name.html) for\n # all sets\n index_files = site.config['page_gen-dirs'] == true\n\n # data contains the specification of all the datasets for which we want\n # to generate individual pages (look at the README file for its documentation)\n data = site.config['page_gen']\n if data\n data.each do |data_spec|\n index_files_for_this_data = data_spec['index_files'] != nil ? 
data_spec['index_files'] : index_files\n template = data_spec['template'] || data_spec['data']\n name = data_spec['name']\n name_expr = data_spec['name_expr']\n title = data_spec['title']\n title_expr = data_spec['title_expr']\n dir = data_spec['dir'] || data_spec['data']\n extension = data_spec['extension'] || \"html\"\n page_data_prefix = data_spec['page_data_prefix']\n debug = data_spec['debug']\n \n if not site.layouts.key? template\n puts \"error (datapage-gen). could not find template #{template}. Skipping dataset #{name}.\"\n else\n # records is the list of records for which we want to generate\n # individual pages\n records = nil\n\n data_spec['data'].split('.').each do |level|\n if records.nil?\n records = site.data[level]\n else\n records = records[level]\n end\n end\n if (records.kind_of?(Hash))\n records = records.values\n end\n\n # apply filtering conditions:\n # - filter requires the name of a boolean field\n # - filter_condition evals a ruby expression which can use =record= as argument\n records = records.select { |record| record[data_spec['filter']] } if data_spec['filter']\n records = records.select { |record| eval(data_spec['filter_condition']) } if data_spec['filter_condition']\n\n # we now have the list of all records for which we want to generate individual pages\n # iterate and call the constructor\n records.each do |record|\n site.pages << DataPage.new(site, site.source, index_files_for_this_data, dir, page_data_prefix, record, name, name_expr, title, title_expr, template, extension, debug)\n end\n end\n end\n end\n end", "def generate_indexes\n @indexes.each do |index|\n html_out = File.open \"#{@sitepath}/#{index}.html\",\"w\"\n layout_engine = Haml::Engine.new(\n File.read(\"#{@basepath}/_layouts/#{index}.haml\"))\n payload = layout_engine.render(Object.new,:posts=>@posts)\n html_out.write payload\n html_out.close\n end\n end", "def index\r\n # constants that never change for the database (unless of course the DB schema itself changes)\r\n initialize_attributes\r\n update_model_page_numbers\r\n\r\n if self.table_name\r\n restore_page_number_on_nav # restores the page number when navigating back along the breadcrumbs\r\n self.last_page_num = self.model_page_nums[self.table_name+'_page'] # preserve the page number so selected navigation records are selected from the correct page.\r\n @data_table = load_DDO(self.table_name, self.last_page_num, self.qualifier, MAIN_TABLE_ROWS)\r\n add_hidden_index_values(@data_table)\r\n load_navigators(self.table_name)\r\n @parent_dataset = load_fk_tables(@parent_tables, self.parent_qualifiers)\r\n @child_dataset = load_fk_tables(@child_tables, self.child_qualifiers)\r\n # Update the parent tab index based on the existence and value of the selected_parent_table_index parameter\r\n update_parent_child_tab_indices\r\n end\r\n end", "def create_sub_index(data, folder_num)\n create_partials data[:sub_file], data\n data[:header] = read_file(get_header_path(data[:sub_file]))\n data[:footer] = read_file(get_footer_path(data[:sub_file]))\n data[:stylesheetloc] = sub_file_stylesheet_locs\n system \"mkdir page#{folder_num}\"\n write_data data, 'data'\n system \"erb _templates/_index.html.erb > page#{folder_num}/index.html\"\n end", "def index\n\t\t# @data_pages = DataPage.all\n\t\t@data_pages = DataPage.page params[:page]\n\tend", "def create_contents_and_index\n contents = []\n index = []\n\n (@files+@classes).sort.each do |entry|\n\tcontent_entry = { \"c_name\" => entry.name, \"ref\" => entry.path }\n\tindex << { \"name\" => entry.name, 
\"aref\" => entry.path }\n\n\tinternals = []\n\n\tmethods = entry.build_method_summary_list(entry.path)\n\n\tcontent_entry[\"methods\"] = methods unless methods.empty?\n contents << content_entry\n\tindex.concat methods\n end\n\n values = { \"contents\" => contents }\n template = TemplatePage.new(RDoc::Page::CONTENTS)\n File.open(\"contents.hhc\", \"w\") do |f|\n\ttemplate.write_html_on(f, values)\n end\n\n values = { \"index\" => index }\n template = TemplatePage.new(RDoc::Page::CHM_INDEX)\n File.open(\"index.hhk\", \"w\") do |f|\n\ttemplate.write_html_on(f, values)\n end \n end", "def build_fake_index\n\n # PDF book toc, index and extras will be entered manually to add entries to the index\n @document.toc = {}\n @document.index = {}\n @document.extras = {}\n\n sections = @cookbook.sections\n page = 8\n\n # Need to show duplicate names in extra pages and recipes\n # If two extra pages (or recipes) have the same name, they must appear two times in the index file.\n # => Using unique label name: recipe_name{{page_number}}\n sections.each do |section|\n if section.has_children?\n\n @document.toc[section.name] = page = page+1\n\n section.recipes.each do |recipe|\n page_num = page = page+recipe.pages.round\n @document.index[\"#{recipe.name}{{#{page_num}}}\"] = page_num\n end\n\n section.extra_pages.each do |extra_page|\n page_num = page = page+extra_page.pages.round\n if extra_page.index_as_recipe?\n @document.index[\"#{extra_page.name}{{#{page_num}}}\"] = page_num\n else \n @document.extras[\"#{extra_page.name}{{#{page_num}}}\"] = page_num\n end\n end\n end\n end\n end", "def initialize(site, base, index_files, dir, page_data_prefix, data, name, name_expr, title, title_expr, template, extension, debug)\n @site = site\n @base = base\n\n if debug\n puts \"debug (datapage-gen) Record read:\"\n puts \">> #{data}\"\n\n puts \"debug (datapage-gen) Configuration variables:\"\n [:index_files, :dir, :page_data_prefix, :name, :name_expr, :title, :title_expr, :template, :extension].each do |variable|\n puts \">> #{variable}: #{eval(variable.to_s)}\"\n end\n end\n\n # @dir is the directory where we want to output the page\n # @name is the name of the page to generate\n # @name_expr is an expression for generating the name of the page\n #\n # the value of these variables changes according to whether we\n # want to generate named folders or not\n if name_expr\n record = data\n raw_filename = eval(name_expr)\n if raw_filename == nil\n puts \"error (datapage-gen). name_expr '#{name_expr}' generated an empty value in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using name_expr: '#{raw_filename}' (sanitized) will be used as the filename\" if debug\n else\n raw_filename = data[name]\n if raw_filename == nil\n puts \"error (datapage-gen). empty value for field '#{name}' in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using name field: '#{raw_filename}' (sanitized) will be used as the filename\" if debug\n end\n\n if title_expr\n record = data\n raw_title = eval(title_expr)\n if raw_title == nil\n puts \"error (datapage-gen). title_expr '#{title_expr}' generated an empty value in record #{data}\"\n return\n end\n puts \"debug (datapage-gen). using title_expr: '#{raw_title}' will be used the page title\" if debug\n else\n raw_title = data[title]\n if raw_title == nil\n raw_title = raw_filename # for backwards compatibility\n puts \"debug (datapage-gen). empty title field: falling back to filename for the page title\" if debug\n end\n puts \"debug (datapage-gen). 
will use '#{raw_title}' as the page title\" if debug\n end\n\n filename = sanitize_filename(raw_filename).to_s\n\n @dir = dir + (index_files ? \"/\" + filename + \"/\" : \"\")\n @name = (index_files ? \"index\" : filename) + \".\" + extension.to_s\n\n self.process(@name)\n\n if @site.layouts[template].path.end_with? 'html'\n @path = @site.layouts[template].path.dup\n else\n @path = File.join(@site.layouts[template].path, @site.layouts[template].name)\n end\n\n base_path = @site.layouts[template].path\n base_path.slice! @site.layouts[template].name\n self.read_yaml(base_path, @site.layouts[template].name)\n\n self.data['title'] = raw_title\n\n # add all the information defined in _data for the current record to the\n # current page (so that we can access it with liquid tags)\n if page_data_prefix\n self.data[page_data_prefix] = data\n else\n if data.key?('name')\n data['_name'] = data['name']\n end\n self.data.merge!(data)\n end\n\n end", "def index\n @page_title = \"Implmentations of VDW Datasets\"\n @dataset_implementations = DatasetImplementation.all(:order => \"site_id, dataset_id\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @dataset_implementations }\n end\n end", "def gen_main_index\n template = RDoc::TemplatePage.new @template::INDEX\n\n open 'index.html', 'w' do |f|\n classes = @classes.sort.map { |klass| klass.value_hash }\n\n values = {\n 'main_page' => @main_page,\n 'initial_page' => main_url,\n 'style_url' => style_url('', @options.css),\n 'title' => CGI.escapeHTML(@options.title),\n 'charset' => @options.charset,\n 'classes' => classes,\n }\n\n values['inline_source'] = @options.inline_source\n\n template.write_html_on f, values\n end\n end", "def create_index_anchor_wizard(params)\r\n print \"LOG: begin: create_index_anchor_wizard \\n\" if @log_name \r\n print \"LOG: params: #{params} \\n\" if @log_param \r\n\r\n path = params['paths'].select{|x| x['key'] = params['option']}.first[\"pathItems\"]\r\n properties_path = '';\r\n index = 0;\r\n path.each{|item|\r\n properties_path = \"#{properties_path}#{params['ontology']}::#{item['propertiesNames'][params['options'][index]]}.\"\r\n index += 1\r\n }\r\n\r\n index_key = \"#{path.first['className']}_for_#{params['mainclass']}_IndexAnchor\"\r\n index_position = @global_var[index_key][0] || 1\r\n name = \"#{index_key}_#{index_position}\"\r\n\r\n function_params = {'name' => name, 'title' => name,\r\n 'query' => \"#{params['ontology'].upcase}::#{path.first['className']}.find_all.select{ |x| context_param.#{properties_path}include? 
x}\"}\r\n values = create_context_wizard(function_params)[:result]\r\n \r\n function_params = {'name' => 'context_param', 'context_id' => values['context']}\r\n create_parameter_for_context_wizard(function_params)\r\n\r\n function_params = {'name' => path.first['className'], 'index_id' => params['index_id'],\r\n 'index_navigation_attribute_index' => values['defaultIndex']}\r\n create_index_attribute_for_index_wizard(function_params)\r\n \r\n val = get_context_attr_wizard({:id => values['defaultIndex']})[:result]\r\n function_params = {'index_id' => val['rows'][0]['id'], 'name' => 'context_param', 'expression' => 'parameters[:context_param]'}\r\n create_attribute_context_parameters_wizard(function_params)\r\n\r\n @global_var[index_key][0] = index_position + 1\r\n\r\n return {:status => true, :result => {}}\r\n\r\n end", "def index\n get_own_documents\n if @page > @pages_amount && @pages_amount != 0\n @page = @pages_amount\n get_own_documents\n end\n render_js_or_html_index\n end", "def index(root_page_number)\n Innodb::Index.new(self, root_page_number)\n end", "def setup_page_traffic_data(document_count:)\n document_count.times.each do |i|\n insert_document(\"page-traffic_test\", { rank_14: i }, id: \"/path/#{i}\", type: \"page-traffic\")\n end\n commit_index(\"page-traffic_test\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Format +obj+ metadata as a table
def format_metadata(obj) '<table class="table table-sm table-responsive">' + obj.metadata.data.map do |k, v| case k when /^run_/, :plugins, :user next when :web_assembly_gz v = "<a href='#{v}'>#{v[0..50]}...</a>" when :datasets v = v.size end "<tr><td class='text-right pr-4'><b>#{format_name(k)}</b></td>" \ "<td>#{v}</td></tr>" end.compact.join('') + '</table>' end
[ "def info_table(record = @record)\n model = record.class\n\n data = model.exposable_attributes(:pdf, human: true).map do |attr, method|\n [model.human_attribute_name(attr), record.send(method).to_s]\n end\n\n table data\n gap\n end", "def metadata_table metric\n\t\tt = metric.map{|k,v| [k,v]}\n\n\t\ttable = \"\"\n\t\tt.each {|a|\n\t\t\ttable += \"<tr><td>#{a[0]}</td><td> = #{a[1]}</td></tr>\"\n\t\t}\n\n\t\theader = \"<tr><td>#{@origin_id}</td><td> - #{self.class.name}</td></tr>\"\n\t\treturn \"<table style='text-align: left'>#{header}#{table}</table>\"\n\tend", "def to_atd\n object.inject({\n \"@id\" => (id.to_s if id),\n \"@type\" => \"AnnotatedTable\",\n \"url\" => self.url.to_s,\n \"tableSchema\" => (tableSchema.to_atd if tableSchema),\n }) do |memo, (k, v)|\n memo[k.to_s] ||= v\n memo\n end.delete_if {|k,v| v.nil? || v.is_a?(Metadata) || k.to_s == \"@context\"}\n end", "def dump_table(io, table_obj)\n create_data = table_obj.data.clone\n create_data.delete(:name)\n create_data[:return_sql] = true\n\n # Get SQL for creating table and add it to IO.\n sqls = @export_db.tables.create(table_obj.name, **create_data)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n\n # Try to find a primary column in the table.\n prim_col = nil\n table_obj.columns do |col|\n if col.primarykey?\n prim_col = col\n break\n end\n end\n\n\n debug \"Dumping data for table: #{table_obj.name}\"\n\n # Set up rows and way to fill rows.\n rows = []\n\n\n @db.select(table_obj.name, nil, unbuffered: true) do |row|\n rows << row\n @rows_count += 1\n\n if rows.length >= 1000\n update_status\n dump_insert_multi(io, table_obj, rows)\n end\n end\n\n\n # Dump the last rows if any.\n dump_insert_multi(io, table_obj, rows) unless rows.empty?\n end", "def object_csv(hash, namespaces)\n namespace = hash.dig(\"metadata\", \"namespace\")\n team, repo = namespace_team_repo(namespace, namespaces)\n\n [\n hash.fetch(\"kind\"),\n hash.fetch(\"apiVersion\"),\n hash.dig(\"metadata\", \"name\"),\n namespace,\n team,\n repo\n ].join(\", \")\nend", "def format_list\n config = TABLE[@vobject][:columns]\n CLIHelper::ShowTable.new do\n column :DATACENTER,\n 'Object datacenter',\n :size => config[:DATACENTER] || 15 do |d|\n d[:datacenter]\n end\n\n column :IMID, 'identifier for ...', :size=>config[:IMID] || 4 do |d|\n d[:import_id]\n end\n\n column :REF, 'ref', :left, :adjust, :size=>config[:REF] || 15 do |d|\n d[:ref] || d[:cluster_ref]\n end\n\n column :NAME, 'Name', :left, :expand,\n :size=>config[:NAME] || 20 do |d|\n d[:name] || d[:simple_name]\n end\n\n column :CLUSTERS, 'CLUSTERS', :left,\n :size=>config[:CLUSTERS] || 10 do |d|\n d = d[:clusters] if d[:clusters]\n d[:one_ids] || d[:cluster].to_s\n end\n\n column :PATH, 'PATH', :left, :expand,\n :size=>config[:PATH] || 10 do |d|\n d[:path]\n end\n\n default(*config.keys)\n end\n end", "def format_object(obj)\n if obj.kind_of? Exception\n return \"Caught #{obj.class}: #{obj.message}\\n\\t\" +\n (obj.backtrace.nil? ? [] : obj.backtrace[0...@depth]).join(\"\\n\\t\")\n elsif obj.kind_of? 
String\n return obj\n else # inspect the object\n return \"#{obj.class}: #{obj.inspect}\"\n end\n end", "def format_obj( obj )\n case obj\n when String; obj\n when Exception\n str = \"<#{obj.class.name}> #{obj.message}\"\n if @backtrace && !obj.backtrace.nil?\n str << \"\\n\\t\" << obj.backtrace.join(\"\\n\\t\")\n end\n str\n when nil; \"<#{obj.class.name}> nil\"\n else\n str = \"<#{obj.class.name}> \"\n str << case @obj_format\n when :inspect; obj.inspect\n when :yaml; try_yaml(obj)\n when :json; try_json(obj)\n else obj.to_s end\n str\n end\n end", "def dump_table(io, table_obj)\n #Get SQL for creating table and add it to IO.\n sqls = @args[:db].tables.create(table_obj.name, table_obj.data, :return_sql => true)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n \n #Try to find a primary column in the table.\n prim_col = nil\n table_obj.columns do |col|\n if col.primarykey?\n prim_col = col\n break\n end\n end\n \n \n #Set up rows and way to fill rows.\n rows = []\n block_data = proc do |row|\n rows << row\n @rows_count += 1\n \n if rows.length >= 1000\n self.update_status\n self.dump_insert_multi(io, table_obj, rows)\n end\n end\n \n \n #If a primary column is found then use IDQuery. Otherwise use cloned unbuffered query.\n args = {:idquery => prim_col.name.to_sym} if prim_col\n \n \n #Clone the connecting with array-results and execute query.\n @args[:db].clone_conn(:result => \"array\") do |db|\n db.select(table_obj.name, nil, args, &block_data)\n end\n \n \n #Dump the last rows if any.\n self.dump_insert_multi(io, table_obj, rows) if !rows.empty?\n end", "def parse_model_attributes(object, attributes = [])\n return \"\" unless attributes.length > 0\n\n i = 1\n data = \"<table class='attributes-table'>\"\n attributes.each do |a|\n unless object[a.to_sym].blank?\n i.modulo(2) == 0 ? 
style = \"even\" : style = \"odd\"\n data << \"<tr class='#{style}'>\"\n\n descriptor = a.humanize\n\n # Custom descriptor substitutions\n descriptor.gsub!(/(G|g)(C|c)/, 'GC')\n descriptor.gsub!(/accession/, 'NCBI accession')\n descriptor.gsub!(/(N|n)cbi/, 'NCBI')\n descriptor.gsub!(/(G|g)rin/, 'GRIN')\n descriptor.gsub!(/(S|s)elf incompatibility/, 'Mating system')\n\n ## Column One ##\n data << \"<td>#{descriptor}</td>\"\n\n # Custom values\n ## Column Two ##\n case a.to_sym\n when :ncbi_taxon_id\n data << \"<td>#{link_to object[a.to_sym], NCBI_URL + object[a.to_sym]}</td>\"\n when :grin_taxon_id\n data << \"<td>#{link_to object[a.to_sym], GRIN_URL + object[a.to_sym]}</td>\"\n when :gc_content_genome\n data << \"<td>#{object[a.to_sym]}%</td>\"\n when :gc_content_transcriptome\n data << \"<td>#{object[a.to_sym]}%</td>\"\n when :genome_size\n data << \"<td>#{object[a.to_sym]} (Mbp)</td>\"\n when :chloroplast_genome_size\n data << \"<td>#{object[a.to_sym]} (kbp)</td>\"\n when :mitochondria_genome_size\n data << \"<td>#{object[a.to_sym]} (kbp)</td>\"\n else\n data << \"<td>#{object[a.to_sym]}</td>\"\n end\n\n information = \"#{a}_information\".to_sym\n # If the attribute has additional information, add the dialog box.\n ## Column Three ##\n unless object[information].blank?\n data << \"<td>\"\n details = object[information]\n title = information.to_s.humanize.gsub(/(G|g)(C|c)/, 'GC')\n # Text container\n data << \"<div id='#{a}_information_dialog' title='#{title}'>#{details}</div>\"\n data << \" <a id='#{a}_information_opener' href='#'>Read More &raquo;</a>\"\n # jQuery dialog box\n data << \"<script>$j(function($){$('##{a}_information_dialog').dialog(\" +\n \"{autoOpen:false,show:'blind',hide:'blind',width:450});\" +\n \"$('##{a}_information_opener').click(function(){\" +\n \"$('##{a}_information_dialog').dialog('open');return false;});});\" +\n \"</script>\"\n data << \"</td>\"\n else\n data << \"<td></td>\"\n end\n data << \"</tr>\"\n i += 1\n end\n end\n data << \"</table>\"\n\n data.html_safe\n end", "def to_atd\n object.inject({\n \"@id\" => (id.to_s if id),\n \"@type\" => \"AnnotatedTableGroup\",\n \"tables\" => Array(self.tables).map(&:to_atd)\n }) do |memo, (k, v)|\n memo[k.to_s] ||= v\n memo\n end.delete_if {|k,v| v.nil? 
|| v.is_a?(Metadata) || k.to_s == \"@context\"}\n end", "def hash_to_HTML(object)\n res=''\n parity_class=0\n object.each do |k,v|\n parity_class=( parity_class+1 ) % 2\n res <<= %{<tr class=\"r#{parity_class}\"><td>} + html_repr(k) + \n '</td><td>'+ html_repr(v) +'</td></tr>'\n end\n '<table>'+res+'</table>'\n end", "def print_attrs(obj, point=\"*\", splitter=\":\", endline=\"\\n\")\n result = \"\"\n if obj.is_a?(Hash)\n obj.each do |arg, value|\n result += \"#{point} #{arg}#{splitter} \"\n result += case value\n when Hash\n \"#{endline}#{print_attrs(value, point[0,1]+point, splitter, endline)}\"\n when Array\n \"#{value.join(\", \")}#{endline}\"\n else\n \"#{value}#{endline}\"\n end\n end\n end\n return result\n end", "def format_table_datasource\n unless @format_table\n @format_table = TTY::Table.new(header: %w(Duration: Size: Title:))\n @format_table << [format_duration, format_size, format_title]\n end\n @format_table\n end", "def logs_table(obj, args = {})\n html = args[:out] || $stdout\n\n html << \"<table class=\\\"list hayabusa_log_table\\\">\"\n html << \"<thead>\"\n html << \"<tr>\"\n html << \"<th>ID</th>\"\n html << \"<th>Message</th>\"\n html << \"<th style=\\\"width: 130px;\\\">Date &amp; time</th>\"\n html << \"<th>Tag</th>\"\n html << \"<th>Objects</th>\" if args[:ob_use]\n html << \"<th>IP</th>\" if args[:show_ip]\n html << \"</tr>\"\n html << \"</thead>\"\n html << \"<tbody>\"\n\n count = 0\n @ob.list(:Log_link, {\"object_class\" => obj.class.name, \"object_id\" => obj.id, \"limit\" => 500, \"orderby\" => [[\"id\", \"desc\"]]}) do |link|\n count += 1\n log = link.log\n\n msg_lines = log.text.split(\"\\n\")\n first_line = msg_lines[0].to_s\n\n classes = [\"hayabusa_log\", \"hayabusa_log_#{log.id}\"]\n classes << \"hayabusa_log_multiple_lines\" if msg_lines.length > 1\n\n html << \"<tr class=\\\"#{classes.join(\" \")}\\\">\"\n html << \"<td>#{log.id}</td>\"\n html << \"<td>#{first_line.html}</td>\"\n html << \"<td>#{log.date_saved_str}</td>\"\n html << \"<td>#{log.tag.html}</td>\"\n\n if args[:ob_use]\n begin\n html << \"<td>#{log.objects_html(args[:ob_use])}</td>\"\n rescue => e\n html << \"<td>#{e.message.html}</td>\"\n end\n end\n\n html << \"<td>#{log.ip}</td>\" if args[:show_ip]\n html << \"</tr>\"\n end\n\n if count <= 0\n html << \"<tr>\"\n html << \"<td colspan=\\\"2\\\" class=\\\"error\\\">No logs were found for that object.</td>\"\n html << \"</tr>\"\n end\n\n html << \"</tbody>\"\n html << \"</table>\"\n\n return nil\n end", "def row_class_for(obj)\n s = ''\n if obj\n s += ' inactive' if obj.respond_to?(:active) && obj.active==false\n s += ' adjusted' if obj.respond_to?(:adjusted) && obj.adjusted\n s += ' unreleased' if obj.respond_to?(:released) && obj.released==false\n end # if obj\n return s\n end", "def prep_data_for_table(object, column)\n case column\n when \"type_id\"\n type_output(object)\n when \"amount\"\n amount_output(nil_to_zero(object))\n when \"interest\"\n interest_output(object) \n when \"liquid\"\n liquid_output(object)\n else\n object[column]\n end\n end", "def get_table(object)\n raise NotImplementedError, \"Subclasses must implement private method get_table\"\n end", "def table_style_info; end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Format +obj+ results as cards
def format_results(obj) o = '' obj.each_result do |key, res| links = format_result_links(res) stats = format_result_stats(res) next unless links || stats name = format_name(key) url_doc = 'http://manual.microbial-genomes.org/part5/workflow#' + key.to_s.tr('_', '-') o += <<~CARD <div class="col-md-6 mb-4"> <h3>#{name}</h3> <div class='border-left p-3'> #{stats} #{links} </div> <div class='border-top p-2 bg-light'> <a target=_blank href="#{url_doc}" class='p-2'>Learn more</a> </div> </div> CARD end "<div class='row'>#{o}</div>" end
[ "def render(obj)\n # We can't use a case statement here becuase \"when Hash\" doesn't work for\n # ActiveSupport::OrderedHash - respond_to?(:values) is a more reliable\n # indicator of hash-like behavior.\n if NilClass === obj\n print(\"null\")\n \n elsif TrueClass === obj\n print(\"true\")\n \n elsif FalseClass === obj\n print(\"false\")\n \n elsif String === obj\n print(escape_json_string(obj))\n \n elsif Symbol === obj\n print(\"\\\"#{obj}\\\"\")\n \n elsif Numeric === obj\n print(obj.to_s)\n \n elsif Time === obj\n print(obj.to_s)\n \n elsif obj.respond_to?(:keys)\n print(\"{\")\n indent_out\n last_key = obj.keys.last\n obj.each do |(key, val)|\n render(key)\n case val\n when Hash, Array\n indent_out\n print(\":\\n#{indent}\")\n render(val)\n indent_in\n else\n print(\": \")\n render(val)\n end\n print(\",\\n#{indent}\") unless key == last_key\n end\n indent_in\n print(\"}\")\n \n elsif Array === obj\n print(\"[\")\n indent_out\n last_index = obj.size - 1\n obj.each_with_index do |elem, index|\n render(elem)\n print(\",\\n#{indent}\") unless index == last_index\n end\n indent_in\n print(\"]\")\n \n else\n raise \"unrenderable object: #{obj.inspect}\"\n end\n end", "def pretty_print(cards)\n return if cards.nil?\n\n cards = [cards] unless cards.instance_of?(Array)\n pretty_table = table do |t|\n t.headings = cards.first.keys\n cards.each do |card|\n t << card.values\n end\n end\n puts pretty_table\n end", "def inspect\n \"[ #{@cards.map(&:inspect).join ', '} ]\"\n end", "def SplitJsonObj(jsonObj, sortedBy)\n\tjsonObj.each do |color|\n\t\tputs \"\\n---\" + color[sortedBy] + \"---\\n\"\n\t\tputs color[\"cards\"]\n\tend\nend", "def plain_format_object(object, html=true, &block)\n if block_given?\n object = yield object\n end\n case object.class.name\n when 'Array'\n formatted_objects = object.map {|o| format_object(o, html)}\n html ? safe_join(formatted_objects, ', ') : formatted_objects.join(', ')\n when 'Time'\n format_time(object)\n when 'Date'\n format_date(object)\n when 'Fixnum'\n object.to_s\n when 'Float'\n sprintf \"%.2f\", object\n when 'User'\n object.to_s\n when 'Project'\n object.to_s\n when 'Version'\n object.to_s\n when 'TrueClass'\n l(:general_text_Yes)\n when 'FalseClass'\n l(:general_text_No)\n when 'Issue'\n \"##{object.id}\"\n when 'Attachment'\n object.filename\n when 'CustomValue', 'CustomFieldValue'\n if object.custom_field\n f = object.custom_field.format.formatted_custom_value(self, object, html)\n if f.nil? || f.is_a?(String)\n f\n else\n format_object(f, html, &block)\n end\n else\n object.value.to_s\n end\n else\n html ? 
h(object) : object.to_s\n end\n end", "def card_display(i)\n Card.card_to_s(@cards[i])\n end", "def to_s\n result = ''\n @cards.each do |card|\n result = result + card.to_s + \"\\n\"\n end\n return result\n end", "def display_stream_object(obj, current_user_id)\n separator = \"\\n\"\n dbl_separator = \"\\n\\n\"\n meth = __method__\n if obj.is_a?(Tw::Tweet) then\n lastTweet = obj\n self.renderer.display([obj], @options.format(), separator: separator, current_user_id: current_user_id)\n elsif obj.is_a?(Tw::DMTweet) then\n self.renderer.display([obj], @options.format(), separator: separator, current_user_id: current_user_id)\n elsif obj.is_a?(Tw::Stream::Message) then\n self.renderer.display_stream_message(obj, @options.format, separator: separator)\n elsif obj.is_a?(Hash) && obj.size > 0 then\n if obj[:friends] then\n self.renderer.display_stream_message(obj, @options.format, separator: separator)\n else\n # Unknown data\n self.renderer.display_stream_message(obj, @options.format(), separator: separator)\n end\n else\n # do_nothing()\n end\n end", "def summarize(cards)\n cards.collect do |card|\n summarize_card(card)\n end\n end", "def _print_obj # print_obj object\r\n obj_id = @operands[0]\r\n\r\n text = parse_zchar_sequence peek_zchar_text (zobj obj_id).name_addr\r\n @screen.print text\r\n\r\n dbg :print { text }\r\n end", "def display_deck\n @deck_array.each do |x|\n x.display_card\n end\n end", "def pretty(object)\n PP.pp(object, out)\n end", "def display object\n # stringify symbols in YAML output for better readability\n puts object.to_yaml.gsub(/^([[:blank:]]*(- )?):(?=@?\\w+: )/, '\\1')\n rescue\n require 'pp'\n pp object\n end", "def cards_print(cards)\n print_this = cards.each {|x, y| print x.to_s, ', '}\n end", "def op5PrintObjects(type,objects)\n logIt(\"* Entering: #{thisMethod()}\", DEBUG)\n# pp objects\n objects.each do |object|\n case type\n when OBJECT_TYPE_HOST\n printf \"name: %-25s alias: %-40s address: %-15s state: %i\\n\", object[\"name\"], object[\"alias\"], object[\"address\"], object[\"state\"] if (! object[\"name\"].nil?)\n when OBJECT_TYPE_HOSTGROUP, OBJECT_TYPE_CONTACTGROUP, OBJECT_TYPE_SERVICEGROUP\n\n printf \"name: %-25s alias: %-40s members: \", object[\"name\"], object[\"alias\"] if (! object[\"name\"].nil?)\n printCount=0\n object[\"members\"].each do | member |\n printf \",\" if printCount > 0\n printf \"%s\", (member.class.to_s == \"Array\") ? member[0] + \";\" + member[1] : member\n printCount += 1\n end\n printf \"\\n\"\n \n when OBJECT_TYPE_SERVICE\n printf \"host_name: %-25s description: %-40s\\n\", object[\"host\"][\"name\"], object[\"description\"] if (! object[\"description\"].nil?)\n when OBJECT_TYPE_CONTACT\n printf \"name: %-25s alias: %-30s email: %-35s contactgroups: \", object[\"name\"], object[\"alias\"], object[\"email\"] if (! 
object[\"name\"].nil?)\n\n printCount=0\n object[\"contactgroups\"].each do |contactgroup|\n printf \",\" if printCount > 0\n printf \"%s\", contactgroup[0][\"name\"]\n printCount += 1\n end\n printf \"\\n\"\n end\n end\nend", "def format_obj( obj )\n case obj\n when String; obj\n when Exception\n str = \"<#{obj.class.name}> #{obj.message}\"\n if @backtrace && !obj.backtrace.nil?\n str << \"\\n\\t\" << obj.backtrace.join(\"\\n\\t\")\n end\n str\n when nil; \"<#{obj.class.name}> nil\"\n else\n str = \"<#{obj.class.name}> \"\n str << case @obj_format\n when :inspect; obj.inspect\n when :yaml; try_yaml(obj)\n when :json; try_json(obj)\n else obj.to_s end\n str\n end\n end", "def vcard_to_html card\n\thtml = '<div class=\"vcard\">' + \"\\n\"\n\thtml += write_category card.categories\n\thtml += write_name card.name\n\thtml += write_telephone card.telephones\n\thtml += write_email card.emails\n\thtml += write_url card.urls\n\thtml += write_birthday card.birthday\n\thtml += write_address card.addresses\n\thtml += '</div>' + \"\\n\"\n\treturn html\nend", "def print_objects\n @header_object.each_pair do |key,val|\n puts \"#{key}: #{val[0]} #{val[1]} #{val[2]}\"\n end\n end", "def to_s\n # Cadena de texto con las cartas de la HAND (Se usa para mostrar las cartas)\n # Con el método \"to_s\" en la clase CARD mostramos el valor y la pinta de las cartas que cumple con una función similar al \"to_s\" creado para HAND aca.\n str = \"\"\n @cards.each do |card|\n str += \"#{card} \"\n end\n str.strip\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Write +file+ within the browse folder of project +p+ using the passed block output as content
def write_file(p, file) File.open(browse_file(p, file), 'w') { |fh| fh.print yield } end
[ "def write(block)\n @filemgr.write(block, @contents)\n end", "def set_write_file(&block)\n @write_proc = block\n end", "def write_content\n File.open(absolute_path,'w') do |file|\n file << content if content\n end\n # TODO git functionality\n end", "def to_pwdump_file(path, &block)\n exporter = Metasploit::Credential::Exporter::Pwdump.new(workspace: workspace)\n\n output_file = File.open(path, 'w') do |file|\n file << exporter.rendered_output\n end\n output_file.path\n end", "def open_output_file(&block)\r\n outfile = options[:outfile]\r\n if outfile\r\n File.open(outfile, 'w') { |f| block.call(f) }\r\n else\r\n block.call(STDOUT)\r\n end\r\n end", "def with_io(&block)\n if self.output_file\n require 'fileutils'\n FileUtils.mkdir_p(File.dirname(self.output_file))\n File.open(self.output_file, \"w\", &block)\n else\n block.call(environment.output_buffer)\n end\n end", "def save_output(content, dir, file='index.html')\n open(\"#{dir}/#{file}\", \"w\") do |f|\n f.puts content\n end\n end", "def write_file(path, content = \"---\\n---\\n\\n# test\")\n path = in_source_dir(path)\n delete_file path\n FileUtils.mkdir_p File.dirname(path)\n File.write path, content\n JekyllAdmin.site.process\n path\nend", "def write_to_output filename, content = nil # :yields: file\n self.create_output_file(filename) do |output_path|\n File.open(output_path, 'w') do |f|\n if content\n f.write(content)\n else\n yield f\n end\n end\n end\n end", "def create_output_file(content)\n\t\t\tf = File.new(@params[:output_file], 'w')\n\t\t\tf << content\n\t\t\tf.close\n\t\tend", "def create_and_write_to_file(filename, content)\n File.open(\"public/gallery.html\", \"w\") do |file|\n file.write(top_html)\n file.write(image_tag_list(ARGV))\n file.write(bottom_html)\n end\nend", "def save_content(title, content)\n File.open(\"pages/#{title}.txt\", \"w\") do |file| # title as argument\n file.print(content) # content as a string puts on the file\n end\nend", "def browse_file(p, file)\n File.join(p.path, 'browse', file)\n end", "def write\n path = \"#{site.root}/_site/#{output_file}\"\n File.open(path, 'w') do |f|\n f.write(@output)\n end\n end", "def writeWebPage\n File.open(@dest, \"w\") do |output|\n @out = output\n @out.puts @@templatefront\n writeHtmlBody\n @out.puts @@templateend\n end\n end", "def write(out=nil)\n out ||= project.path(:output, path)\n FileUtils.mkdir_p File.dirname(out)\n\n if tilt?\n File.open(out, 'w') { |f| f.write to_html({}, :build => true) }\n else\n FileUtils.cp file, out\n end\n end", "def create_HTML_file(loc_name, content, out_dir)\n File.open(\"#{out_dir}/#{loc_name}.html\", 'w') do |f|\n f.write(HEADER_TEXT)\n f.write(\"<h1>Lonely Planet: #{loc_name}</h1>\")\n f.write(NAV_TITLE)\n f.write($navigation_html)\n f.write(BLOCK_TITLE)\n f.write(\"<h1><li class='first'><a href='#'>#{loc_name}</a></li></h1>\")\n f.write(MAIN_BLOCK)\n f.write(content)\n f.write(CLOSE_HTML)\n end\nend", "def write!\n File.write(path, file_template)\n end", "def log_and_stream(output)\n write_file output, @filename if @filename\n @block.call(output)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Use a +template+ file to generate content with a hash of +data+ over the layout page if +layout+ is true
def build_from_template(template, data = {}, layout = true) cont = File.read(template_file(template)).miga_variables(data) return cont unless layout build_from_template( 'layout.html', data.merge(content: cont, project_name: cli.load_project.name), false ) end
[ "def mustache(template, args={}, layout=true)\n args = args.update(:site => site, :site_tags => tags, :current_user => current_user)\n layout_class = UserApp::Views::Layout\n layout_class.template = design.layout\n view_class = UserApp::Views.const_get(template.to_s.classify)\n view_class.template = design.send(template)\n view_initialized = view_class.new(args)\n view_rendered = view_initialized.render\n if layout\n if view_initialized.respond_to?(:title)\n args.update(:page_title => view_initialized.title)\n end\n layout_class.new(args).render(:yield => view_rendered)\n else\n view_rendered\n end\n end", "def load_layout_template\n\t\ttemplate = self.load_template( 'layout.tmpl' )\n\n\t\ttemplate.files = @files\n\t\ttemplate.classes = @classes\n\t\ttemplate.methods = @methods\n\t\ttemplate.modsort = @modsort\n\t\ttemplate.rdoc_options = @options\n\n\t\ttemplate.rdoc_version = RDoc::VERSION\n\t\ttemplate.fivefish_version = Fivefish.version_string\n\n\t\treturn template\n\tend", "def analyze_template(filename)\n tmp = \"\"\n collect = { :data => false, :css => false, :js => false }\n item = \"\"\n name = \"application\"\n load = { :css => true, :js => true }\n \n load[:css] = !File.exists?(\"#{@www}#{name}.css\")\n load[:js] = !File.exists?(\"#{@www}#{name}.js\")\n \n if not File.exists? filename\n filename = \"#{@www}home.#{@lang}.soli\"\n #exit 404\n end\n IO.readlines(filename).each do |line|\n if line[0..6] == \":layout\"\n analyze_template \"#{@www}#{line[7..-1].strip}\"\n end\n if line[0..4] == \":page\"\n @layout = IO.read \"#{@www}#{line[5..-1].strip}\" rescue \"\"\n end\n if line[0..4] == \":name\"\n name = line[5..-1].strip\n load[:css] = !File.exists?(\"#{@www}#{name}.css\")\n load[:js] = !File.exists?(\"#{@www}#{name}.js\")\n end\n \n if [\"#\", \":\", \"@\"].include?(line[0])\n unless item == \"\"\n #\n @layout.gsub!(\"<!-- #{item} -->\", tmp)\n item = \"\"\n tmp = \"\"\n end\n if collect[:css]\n Dir.mkdir \"#{@www}cache/\" rescue nil\n IO.write \"#{@www}cache/#{name}.css\", tmp unless File.exists? \"#{@www}cache/#{name}.css\"\n @layout.gsub!(\"<!-- :css -->\", \"<link rel='stylesheet' type='text/css' href='/cache/#{name}.css' />\")\n tmp = \"\"\n collect[:css] = false\n end\n if collect[:js]\n Dir.mkdir \"#{@www}cache/\" rescue nil\n IO.write \"#{@www}cache/#{name}.js\", tmp unless File.exists? \"#{@www}cache/#{name}.js\"\n @layout.gsub!(\"<!-- :js -->\", \"<script src='/cache/#{name}.js'></script>\")\n tmp = \"\"\n collect[:js] = false\n end\n end\n \n if line[0..3] == \":css\"\n collect[:css] = true\n end\n\n if line[0..2] == \":js\"\n collect[:js] = true\n end\n \n if line[0] == \"#\"\n item = line.strip\n collect[:data] = true\n end\n\n if not [\"#\", \":\", \"@\"].include?(line[0]) and collect[:data]\n tmp += line\n end\n if not [\"#\", \":\", \"@\"].include?(line[0]) and (collect[:css] or collect[:js]) and line.strip.size > 0\n tmp += IO.read \"#{@www}#{line.strip}\" rescue \"\"\n tmp += \"\\n\"\n end\n end\nend", "def inside_layout(layout, &block)\n layout = layout.to_s\n layout = layout.include?('/') ? 
layout : \"layouts/#{layout}\"\n @template.instance_variable_set('@content_for_layout', capture(&block))\n concat (\n @template.render(:file => layout, :use_full_path => true)\n ) \n end", "def render_layout(output, layout, info); end", "def render_template(view, template, layout_name, locals); end", "def layout\n return @layout if @layout\n return if no_layout?\n\n @layout = site.layouts[data.layout].tap do |layout|\n unless layout\n Bridgetown.logger.warn \"Generated Page:\", \"Layout '#{data.layout}' \" \\\n \"requested via #{relative_path} does not exist.\"\n end\n end\n end", "def create_layout(content, attributes, identifier)\n # Get filenames\n base_path = 'layouts' + identifier[0..-2]\n meta_filename = base_path + '.yaml'\n content_filename = base_path + '.html'\n\n # Notify\n Nanoc3::NotificationCenter.post(:file_created, meta_filename)\n Nanoc3::NotificationCenter.post(:file_created, content_filename)\n\n # Create files\n FileUtils.mkdir_p(File.dirname(meta_filename))\n File.open(meta_filename, 'w') { |io| io.write(YAML.dump(attributes.stringify_keys)) }\n File.open(content_filename, 'w') { |io| io.write(content) }\n end", "def layout(name=:layout, &b)\n template(name, &b)\n end", "def render_with_layout(template)\n @template.instance_variable_set(\"@relative_path_to_root\", \"#{calculate_relative_path_to_root(full_template_path(template))}\")\n content_for_layout = render(template)\n \n @template.instance_variable_set(\"@current_page\", template)\n @template.instance_variable_set(\"@content_for_layout\", content_for_layout)\n \n layout = @template.instance_variable_get(\"@layout\")\n \n # Clean @layout variable for next request\n @template.instance_variable_set(\"@layout\", nil)\n \n layout ||= \"site\"\n \n render(\"layouts/#{layout}\")\n end", "def render_with_layout(view_file_name,variables={})\n # read the file everytime for development ease\n erubis = Erubis::Eruby.new(File.read(File.join(VIEW_PATH,\"#{view_file_name}.html.erb\")))\n @layout.render({:content_body => erubis.result(variables) })\n end", "def render\n template_body = Tilt.new(@template).render(self)\n if @layout\n layout = Dir[File.join(File.dirname(@template), @layout) + '*'].first\n raise \"#{Guinness::EMOJI} Guinness : Unable to locate layout at: '#{@layout}'\" unless layout\n @body = Tilt.new(layout).render(self) { template_body }\n end\n @body || template_body\n end", "def render (template_name, locals = {})\n puts \" Rendering using template <#{template_name}>...\"\n template = File.read(TEMPLATES_PATH + '/' + template_name + '.haml')\n haml_engine = Haml::Engine.new(template)\n output = haml_engine.render Data_helper.new(@points, @lasyers, @whs), locals\n File.open(OUT_PATH + '/' + template_name + '.html', 'w') {|f| f.write(output) }\n puts ' rendered ok.'\nend", "def load_template()\n file = @site\n .liquid_renderer\n .file(template_path(@template_name))\n\n content = template_content(@template_name)\n\n template = Hash.new\n data = get_front_matter(content)\n markup = strip_front_matter(content)\n\n if content\n template[\"data\"] = data\n template[\"template\"] = file.parse(markup)\n template\n end\n end", "def inside_layout(layout, &block)\n binding = block.binding if BINDING_REQUIRED\n\n layout = Dir.entries('app/views/layouts').detect { |a| /#{layout}/.match(a) }\n @template.instance_variable_set('@content_for_layout', capture(&block))\n concat(\n @template.render(:file => \"#{RAILS_ROOT}/app/views/layouts/#{layout}\", :user_full_path => true),\n binding\n )\n end", "def 
load_layout_template\n\t\treturn nil unless ( lt_path = self.class.layout_template )\n\t\tenc = Encoding.default_internal || Encoding::UTF_8\n\t\treturn Inversion::Template.load( lt_path, encoding: enc )\n\tend", "def layout(name = :layout, &block)\n template name, &block\n end", "def layout_contents\n @layout = <<-LAYOUT\n blueprint-deploy:\n layout: #{spec_bp_name} # name of the layout file we use\n blueprint : #{spec_bp_name} # name of the default blueprint\n servers:\n - server1:\n name: util\n applications:\n - app1:\n - app2:\n - server2:\n name: review\n applications:\n - app1:\n - app3:\n LAYOUT\n @layout\n end", "def set_layout_file_and_view_string\n view_string = File.read(\"#{@@project_vars[:template_path]}/#{@@project_vars[:view_file]}\")\n if RUBY_VERSION < \"1.9\"\n \n else\n view_string = view_string.force_encoding(\"UTF-8\")\n end\n match = view_string.match(/=layout :(.*?)\\n/m)\n \n if match == nil\n layout_file = \"none\"\n else\n layout_file = match[1].strip\n end\n \n if layout_file != \"none\"\n if File.exist?(\"#{@@project_vars[:template_path]}/layouts/#{layout_file}.haml\")\n @@project_vars[:layout_file] = layout_file + '.haml'\n @@project_vars[:view_string] = view_string.gsub(\"=layout :#{layout_file}\\n\",'')\n else\n raise Haml::Error.new(\"Layout file specified in #{@@project_vars[:view_file]} does not exist.\")\n end\n else\n @@project_vars[:layout_file] = \"\"\n @@project_vars[:view_string] = view_string\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Path to the template browse file
def template_file(file) File.join( MiGA::MiGA.root_path, 'lib', 'miga', 'cli', 'action', 'browse', file ) end
[ "def template_path\n File.expand_path('../templates', __FILE__)\n end", "def template_path\n File.join(File.dirname(__FILE__), \"templates\", \"Cloudfile.erb\")\n end", "def template_path(path)\n File.join(@site.source.to_s, TEMPLATE_DIR, path.to_s)\n end", "def rtfile_template_path\n File.join(repo_set_parent_path, \"repositext/templates/Rtfile.erb\")\n end", "def source_path\n \"#{template_path}/#{source_filename}\"\n end", "def full_template_path\n @template_root.join(\"#{template}.erb\").to_s.squeeze(\"/\")\n end", "def template_preview_path\n File.join(template_folder_path,\"assets\",\"preview.png\")\n end", "def template_path\n return File.join(File.dirname(__FILE__), \"../../../templates\", platform)\n end", "def template_path\n return File.expand_path(File.join(File.dirname(__FILE__), \"../../../templates/user/\", platform))\n end", "def template_path() 'demo/admin/billing.erb' end", "def template_path()\n 'admin/account.erb'\n end", "def template_path\n self.views_path.each do |path|\n template = Dir[File.join(path, self.class.name.underscore.split(\"/\").last, \"#{@mail_name}.*\")].first\n return template if template\n end\n end", "def template_path()\n 'demo/auth/signup.erb'\n end", "def template_path\n exact_path = File.join(root, request.path)\n with_erb = File.join(root, \"#{request.path}.html.erb\")\n with_index = File.join(root, File.dirname(request.path), 'index.html.erb')\n\n [ exact_path, with_erb, with_index ].find { |f| File.file?(f) }\n end", "def scaffold_path(template_name)\n File.join(self.class.scaffold_template_dir, \"#{template_name}.rhtml\")\n end", "def scaffold_path(template_name)\n File.join(scaffold_template_dir, template_name+'.rhtml')\n end", "def template_base_path\n @template_base_path ||= Inkblot.vendor_path('templates')\n end", "def template_path(name)\n name = name.to_s\n if name.include?('/') # Specific path like 'representers/somethingorother/foo.haml' given.\n name\n else\n File.join(self.class.representer_path, name)\n end\n end", "def templates_path\n File.join((File.expand_path '..', File.dirname(__FILE__)), 'templates')\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Path to the browse file in the project
def browse_file(p, file) File.join(p.path, 'browse', file) end
[ "def path_to_current_file\n @path\n end", "def path\n project.path\n end", "def path\n @file.path\n end", "def control_file\n File.join(AimsProject::CONTROL_DIR, self.control)\n end", "def template_file(file)\n File.join(\n MiGA::MiGA.root_path,\n 'lib', 'miga', 'cli', 'action', 'browse', file\n )\n end", "def get_project_path\n return File.absolute_path File.join(root_dir, src)\n end", "def file\n locator.file\n end", "def path\n File.join Dubya.root_path, 'vendor/wiki'\n end", "def selected_absolute_path\n selected_file_name = @files[@selected].first\n # This may not be the absolute path (e.g. file_name may be '.')\n selected_full_path = File.join(@current_path, selected_file_name)\n File.absolute_path(selected_full_path)\n end", "def show_browse_form(p_starting_path = nil)\n\t\t\n\t\t\t# If available as argument, select the path within the Tree View\n\t\t\t@dir_browse_ui.select_path(p_starting_path) unless p_starting_path.nil?\n\t\t\n\t\t\t# Show Browse form\n\t\t\t@dir_browse_form.show\n\t\t\t@dir_browse_form.activateWindow\t\t\n\t\tend", "def project_file(fname)\n \"#{@project_path}/#{fname}\"\nend", "def browse_loan_file_portfolio(file_path)\n\t@browser.frame(:name, \"content\").file_field(:id, 'loanFile').set file_path\nend", "def relative_file_name; end", "def files_path\n File.expand_path(\"#{Config.project_root}/files\")\n end", "def path\n File.join(Dir.pwd, \"Cloudfile\")\n end", "def file_path\n file.current_path\n end", "def src_path(path)\n app_path('src/' + path)\n end", "def project_path\n project.path.relative_path_from(Pathname.pwd)\n end", "def browse (dir, file)\n if dir != \".\"\n file=\"#{dir}/#{file}\"\n if File.isdirectory? file\n system \"browse #{file} &\"\n else\n if File.isfile? file\n\tif ENV['EDITOR']\n\t system format(\"%s %s&\", ENV['EDITOR'], file)\n\telse\n\t sysmte \"xedit #{file}&\"\n\tend\n else\n\tSTDERR.print \"\\\"#{file}\\\" isn't a directory or regular file\"\n end\n end\n end\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
determine the future value of an investment of present_value with given interest rate over time. periods: number of periods (years) that the interest is cumulated; rate: the annual rate of return; present_value: the value at the start of the period; compound_frequency: number of compounds / period (year)
def future_value(rate,periods,present_value,compound_frequency = 1) compound_frequency = resolve_compound_frequency!(compound_frequency) if compound_frequency == :continuous return continuous_compound_fv(rate,periods,present_value) end future_value = present_value * (1+rate/compound_frequency)** (periods * compound_frequency) end
[ "def present_value(rate,periods,future_value)\n present_value = future_value / (1+rate)**periods\n end", "def compound_return(periods,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n n = periods.to_d\n rate = ((fv / pv)**(1/n))-1\n end", "def future_cost(initial_cost, number_of_years, interest_rate)\n initial_cost * (1 + interest_rate) ** number_of_years\n end", "def nominal_due_given_efective(effective_rate, periods)\n\t\t\t((((1 + (effective_rate.to_f/100))**(1/periods.to_f)-1)*periods.to_f)*100).round(4)\n end", "def present_value(rate,nper,pmt,fv=0,type = 0)\n #present value of the payments\n pv = pmt * pv_compounding(rate,nper)\n \n end", "def investment_horizon(rate,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n periods = Math.log(fv/pv) / Math.log(1+rate)\n end", "def effective_annual_rate(rate,compound_frequency)\n compound_frequency = resolve_compound_frequency!(compound_frequency)\n if compound_frequency == :continuous\n return continuous_effective_annual_rate(rate)\n end\n m= compound_frequency\n e_rate = (1 + rate/m)**m -1\n end", "def current_compound_interest_rate() 4.3 end", "def pmt(interest_rate, number_of_payments, present_value)\n interest_rate_monthly = interest_rate/12\n interest_rate_monthly_plus_one = interest_rate_monthly+1\n monthly_payment = present_value*interest_rate_monthly*interest_rate_monthly_plus_one**(number_of_payments)/(interest_rate_monthly_plus_one**number_of_payments-1)\n monthly_payment.round(2)\n\n# The equation for calculating the mortgage payment is as follows\n# - Monthly payment C\n# - interest_rate must be converted into a monthly interest rate\n# -- interest_rate / 12 / 100\n# - Number_of_payments must be converted into months number_of_payments * 12\n# - present_value requires no changes.\n# - The final equation is an equation for an annuity Pr(1+r)^N / (1+r)^N-1\n\nend", "def calculate_payment\n x = @periodic_rate * @principal * ((1 + @periodic_rate)**@periods)\n y = ((1 + @periodic_rate)**@periods) - 1\n (x / y).round(2)\n end", "def net_present_value\n lambda do |x|\n relative_payments.reduce(0) do |sum, relative_payment|\n sum + relative_payment.amount * (1 + x)**-relative_payment.offset\n end\n end\n end", "def installment_value(interest_rate, financing_time_months, loan_value)\n @installment_calculator.calculate(interest_rate, financing_time_months, loan_value)\n end", "def nominal_anticipated_given_efective(effective_rate, periods)\n nominalRate = (1+(effective_rate.to_f/100))**(1/periods.to_f)-1\n toAnticipated = nominalRate / (1+nominalRate)\n (toAnticipated * periods.to_f * 100).round(4)\n end", "def calculate_years(principal, interest, tax, desired)\n i = 0\n return i if principal == desired\n \n while principal < desired\n principal += (principal * interest) - (principal * interest * tax)\n i += 1\n end\n \n i\nend", "def amounts_owed_on_date_assuming_scheduled_payments(date)\n raise Exception.new('Loan must be funded to calculate future interest owed assuming scheduled payments') unless ['active','collections','garnishments'].include? 
aasm_state and funded_on\n raise Exception.new('Cannot calculate anticipated interest for a date in the past') if date < Date.today\n raise Exception.new('Date for interest calculation must be on or after the funding date.') if date < self.funded_on\n if amounts_owed_updated_on < Date.today\n update_amounts_owed\n end\n\n # Iterate over scheduled payments from date balance last updated through the desired date\n hypothetical_principal = principal_owed\n hypothetical_principal_last_changed_on = amounts_owed_updated_on\n total_interest = 0\n interest_payments = 0\n hypothetical_fees_owed = fees_owed\n fee_payments = 0\n puts \"Initial principal #{hypothetical_principal} on #{hypothetical_principal_last_changed_on}\"\n puts \"Daily interest rate: #{daily_interest_rate}\"\n scheduled_payments.each do |scheduled_payment|\n break if scheduled_payment.draft_date >= date\n interest_payments += scheduled_payment.interest\n fee_payments += scheduled_payment.fees\n if scheduled_payment.principal != 0\n days = scheduled_payment.draft_date - hypothetical_principal_last_changed_on\n interest_for_span = (daily_interest_rate * hypothetical_principal * days.to_f).to_money\n total_interest += interest_for_span\n puts \" Principal reduced by #{scheduled_payment.principal} on #{scheduled_payment.draft_date}\"\n puts \" Interest on #{hypothetical_principal} for #{days} days = #{interest_for_span}\"\n hypothetical_principal -= scheduled_payment.principal # All amounts on scheduled payments are >= 0\n hypothetical_principal_last_changed_on = scheduled_payment.draft_date\n end\n end\n\n # Calculate interest since last date hypothetical principal changed until target date\n days = date - hypothetical_principal_last_changed_on\n puts \"#{days} days from date of last change in hypothetical principal to target date\"\n puts \"Multiplying: #{daily_interest_rate} * #{hypothetical_principal.to_f} * #{days.to_f}\"\n interest_for_span = (daily_interest_rate * hypothetical_principal.to_f * days.to_f).to_money\n puts \"Interest since last change in hypothetical principal: #{interest_for_span}\"\n total_interest += interest_for_span\n\n interest_owed = (total_interest - interest_payments).to_money\n puts \"Interest expected to be owing on #{date}: #{interest_owed}\"\n hypothetical_fees_owed -= fee_payments\n { :total => hypothetical_principal+interest_owed+hypothetical_fees_owed, :principal => hypothetical_principal, :interest => interest_owed, :fees => hypothetical_fees_owed}\n end", "def calculate_years(principal, interest, tax, desired)\n years = 0\n total = principal\n while principal < desired\n gain = principal * ( interest)\n tax_paid = ( gain ) * tax\n principal = principal - tax_paid + gain\n years += 1\n end\n return years\nend", "def interest(loan_amount, nominal_rate, days_in_month = 30 , days_in_year = 360)\n interest_in_cents = ((loan_amount * nominal_rate * days_in_month) / days_in_year)# To convert cents to euro by dividing it by 100 and round off decimal by 2\n interest_in_euro = (interest_in_cents.to_f/100).round(2)\n end", "def annual_ror(initial_value, final_value, years)\n if years <= 0\n 0\n elsif initial_value == 0\n # BigDecimal::INFINITY\n Float::INFINITY\n else\n 100.to_d * if final_value < 0 # fudge if final value is less than zero\n (((initial_value.to_d - final_value.to_d) / initial_value.to_d) ** (1 / years.to_d)) * -1\n else\n ((final_value.to_d / initial_value.to_d) ** (1 / years.to_d)) - 1\n end\n end\n end", "def net_present_value_derivative\n lambda do |x|\n relative_payments.reduce(0) do 
|sum, relative_payment|\n sum + relative_payment.amount * -relative_payment.offset * (1 + x)**(-relative_payment.offset - 1)\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
determine the present value required to achieve a future value with a given interest rate over time. periods: number of periods (years) that the interest is cumulated; rate: the annual rate of return; future_value: the value at the end of the period
def present_value(rate,periods,future_value) present_value = future_value / (1+rate)**periods end
[ "def compound_return(periods,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n n = periods.to_d\n rate = ((fv / pv)**(1/n))-1\n end", "def future_value(rate,periods,present_value,compound_frequency = 1)\n compound_frequency = resolve_compound_frequency!(compound_frequency)\n if compound_frequency == :continuous\n return continuous_compound_fv(rate,periods,present_value)\n end\n future_value = present_value * (1+rate/compound_frequency)** (periods * compound_frequency)\n end", "def future_cost(initial_cost, number_of_years, interest_rate)\n initial_cost * (1 + interest_rate) ** number_of_years\n end", "def investment_horizon(rate,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n periods = Math.log(fv/pv) / Math.log(1+rate)\n end", "def calculate_years(principal, interest, tax, desired)\n i = 0\n return i if principal == desired\n \n while principal < desired\n principal += (principal * interest) - (principal * interest * tax)\n i += 1\n end\n \n i\nend", "def annual_ror(initial_value, final_value, years)\n if years <= 0\n 0\n elsif initial_value == 0\n # BigDecimal::INFINITY\n Float::INFINITY\n else\n 100.to_d * if final_value < 0 # fudge if final value is less than zero\n (((initial_value.to_d - final_value.to_d) / initial_value.to_d) ** (1 / years.to_d)) * -1\n else\n ((final_value.to_d / initial_value.to_d) ** (1 / years.to_d)) - 1\n end\n end\n end", "def calculate_years(principal, interest, tax, desired)\n years = 0\n total = principal\n while principal < desired\n gain = principal * ( interest)\n tax_paid = ( gain ) * tax\n principal = principal - tax_paid + gain\n years += 1\n end\n return years\nend", "def calculate_years(principal, interest, tax, desired)\n total = principal\n years = 0\n loop do\n break if total >= desired\n total_interest = total*interest\n total += total_interest\n total -= total_interest*tax\n years += 1\n end\n years\nend", "def net_present_value\n lambda do |x|\n relative_payments.reduce(0) do |sum, relative_payment|\n sum + relative_payment.amount * (1 + x)**-relative_payment.offset\n end\n end\n end", "def nominal_due_given_efective(effective_rate, periods)\n\t\t\t((((1 + (effective_rate.to_f/100))**(1/periods.to_f)-1)*periods.to_f)*100).round(4)\n end", "def required_annual_savings\n needed_amount_less_savings / years_to_retirement\n end", "def follow_up_interval\n if should_take_low_intensity_questionnaire?\n 0\n elsif low_intensity? or recent_loss?\n 6.months\n else\n 3.months\n end\n end", "def call(year)\n previous_year = Population.previous_known(year)\n\n return previous_year.population if previous_year.year == year # year entered is known\n\n next_year = Population.next_known(year)\n\n # there is no next year - unable to calculate\n return nil if next_year.nil? \n\n # calculate the percentage that year is between next and previous years\n mod_percentage = (year - previous_year.year).to_f / (next_year.year - previous_year.year).to_f\n delta_population = next_year.population - previous_year.population\n\n (delta_population * mod_percentage).to_i + previous_year.population\n end", "def amounts_owed_on_date_assuming_scheduled_payments(date)\n raise Exception.new('Loan must be funded to calculate future interest owed assuming scheduled payments') unless ['active','collections','garnishments'].include? 
aasm_state and funded_on\n raise Exception.new('Cannot calculate anticipated interest for a date in the past') if date < Date.today\n raise Exception.new('Date for interest calculation must be on or after the funding date.') if date < self.funded_on\n if amounts_owed_updated_on < Date.today\n update_amounts_owed\n end\n\n # Iterate over scheduled payments from date balance last updated through the desired date\n hypothetical_principal = principal_owed\n hypothetical_principal_last_changed_on = amounts_owed_updated_on\n total_interest = 0\n interest_payments = 0\n hypothetical_fees_owed = fees_owed\n fee_payments = 0\n puts \"Initial principal #{hypothetical_principal} on #{hypothetical_principal_last_changed_on}\"\n puts \"Daily interest rate: #{daily_interest_rate}\"\n scheduled_payments.each do |scheduled_payment|\n break if scheduled_payment.draft_date >= date\n interest_payments += scheduled_payment.interest\n fee_payments += scheduled_payment.fees\n if scheduled_payment.principal != 0\n days = scheduled_payment.draft_date - hypothetical_principal_last_changed_on\n interest_for_span = (daily_interest_rate * hypothetical_principal * days.to_f).to_money\n total_interest += interest_for_span\n puts \" Principal reduced by #{scheduled_payment.principal} on #{scheduled_payment.draft_date}\"\n puts \" Interest on #{hypothetical_principal} for #{days} days = #{interest_for_span}\"\n hypothetical_principal -= scheduled_payment.principal # All amounts on scheduled payments are >= 0\n hypothetical_principal_last_changed_on = scheduled_payment.draft_date\n end\n end\n\n # Calculate interest since last date hypothetical principal changed until target date\n days = date - hypothetical_principal_last_changed_on\n puts \"#{days} days from date of last change in hypothetical principal to target date\"\n puts \"Multiplying: #{daily_interest_rate} * #{hypothetical_principal.to_f} * #{days.to_f}\"\n interest_for_span = (daily_interest_rate * hypothetical_principal.to_f * days.to_f).to_money\n puts \"Interest since last change in hypothetical principal: #{interest_for_span}\"\n total_interest += interest_for_span\n\n interest_owed = (total_interest - interest_payments).to_money\n puts \"Interest expected to be owing on #{date}: #{interest_owed}\"\n hypothetical_fees_owed -= fee_payments\n { :total => hypothetical_principal+interest_owed+hypothetical_fees_owed, :principal => hypothetical_principal, :interest => interest_owed, :fees => hypothetical_fees_owed}\n end", "def installment_value(interest_rate, financing_time_months, loan_value)\n @installment_calculator.calculate(interest_rate, financing_time_months, loan_value)\n end", "def current_compound_interest_rate() 4.3 end", "def calculate_years(principal, interest, tax, desired)\n\tyear = 0\n\t while principal < desired\n\t year += 1\n\t income = principal * interest\n\t principal += income - income * tax\n\t end\n\t year\nend", "def amount_after_years(q0, t)\n q0 * Math.exp(1.0 / $h * Math.log(1.0/2.0) * t)\nend", "def calculate_interest(current_period, previous_period=0)\n previous_balance = @txns[previous_period][:balance]\n period_of_interest = current_period - previous_period\n @interest += (previous_balance * daily_interest * period_of_interest).round(2)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
determine the rate of return over a period. periods: number of periods (years) that the interest is cumulated; present_value: the value at the start of the period; future_value: the value at the end of the period
def compound_return(periods,present_value,future_value) pv = present_value.to_d fv = future_value.to_d n = periods.to_d rate = ((fv / pv)**(1/n))-1 end
[ "def present_value(rate,periods,future_value)\n present_value = future_value / (1+rate)**periods\n end", "def future_value(rate,periods,present_value,compound_frequency = 1)\n compound_frequency = resolve_compound_frequency!(compound_frequency)\n if compound_frequency == :continuous\n return continuous_compound_fv(rate,periods,present_value)\n end\n future_value = present_value * (1+rate/compound_frequency)** (periods * compound_frequency)\n end", "def investment_horizon(rate,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n periods = Math.log(fv/pv) / Math.log(1+rate)\n end", "def nominal_due_given_efective(effective_rate, periods)\n\t\t\t((((1 + (effective_rate.to_f/100))**(1/periods.to_f)-1)*periods.to_f)*100).round(4)\n end", "def calculate_years(principal, interest, tax, desired)\n years = 0\n total = principal\n while principal < desired\n gain = principal * ( interest)\n tax_paid = ( gain ) * tax\n principal = principal - tax_paid + gain\n years += 1\n end\n return years\nend", "def calculate_years(principal, interest, tax, desired)\n i = 0\n return i if principal == desired\n \n while principal < desired\n principal += (principal * interest) - (principal * interest * tax)\n i += 1\n end\n \n i\nend", "def annual_ror(initial_value, final_value, years)\n if years <= 0\n 0\n elsif initial_value == 0\n # BigDecimal::INFINITY\n Float::INFINITY\n else\n 100.to_d * if final_value < 0 # fudge if final value is less than zero\n (((initial_value.to_d - final_value.to_d) / initial_value.to_d) ** (1 / years.to_d)) * -1\n else\n ((final_value.to_d / initial_value.to_d) ** (1 / years.to_d)) - 1\n end\n end\n end", "def calculate_payment\n x = @periodic_rate * @principal * ((1 + @periodic_rate)**@periods)\n y = ((1 + @periodic_rate)**@periods) - 1\n (x / y).round(2)\n end", "def calculate_years(principal, interest, tax, desired)\n total = principal\n years = 0\n loop do\n break if total >= desired\n total_interest = total*interest\n total += total_interest\n total -= total_interest*tax\n years += 1\n end\n years\nend", "def calculate_years(principal, interest, tax, desired)\n\tyear = 0\n\t while principal < desired\n\t year += 1\n\t income = principal * interest\n\t principal += income - income * tax\n\t end\n\t year\nend", "def required_annual_savings\n needed_amount_less_savings / years_to_retirement\n end", "def net_present_value\n lambda do |x|\n relative_payments.reduce(0) do |sum, relative_payment|\n sum + relative_payment.amount * (1 + x)**-relative_payment.offset\n end\n end\n end", "def nominal_anticipated_given_efective(effective_rate, periods)\n nominalRate = (1+(effective_rate.to_f/100))**(1/periods.to_f)-1\n toAnticipated = nominalRate / (1+nominalRate)\n (toAnticipated * periods.to_f * 100).round(4)\n end", "def nb_year(p0, percent, aug, p)\n years = 0\n cumulativePopulation = p0\n while cumulativePopulation < p do \n years += 1\n cumulativePopulation += (cumulativePopulation * ((percent.to_f)/100)).to_i + aug\n end\n return years\nend", "def nb_year(p0, percent, aug, p)\n years = 0\n while p >= p0 do\n p0 += (percent/100.0 * p0) + aug\n years += 1\n end\n p years\nend", "def nb_year(p0, percent, aug, p)\n year_count = 0\n until p0 >= p do\n p0 += (p0 * (percent/100.to_f)) + aug\n year_count += 1\n # binding.pry\n end\n year_count\n # binding.pry\nend", "def follow_up_interval\n if should_take_low_intensity_questionnaire?\n 0\n elsif low_intensity? 
or recent_loss?\n 6.months\n else\n 3.months\n end\n end", "def portfolio_return\r\n \r\n num_stocks = @stock_names.length # number of assets in portfolio\r\n openval = @stock_open.map { |price| 1/price }\r\n closevals = @stock_close\r\n num_years = @period_actual / 365.25\r\n \r\n final_val = openval.zip(closevals).inject(0) do |dp,(openval,closevals)| dp + openval*closevals end\r\n \r\n if (num_stocks==0)\r\n return 0\r\n else\r\n if num_years >= 1.0\r\n return (((final_val / num_stocks) ** ( 1 / num_years)) - 1) * 100\r\n else\r\n return (final_val / num_stocks - 1 ) * 100\r\n end\r\n end\r\n end", "def annualized_rate_of_return(rate)\n\t\t\t(1+rate)**12 - 1\n\t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
determine the investment horizon (length of time) required to create a future value given a present value and interest rate. present_value: the value at the start of the period; future_value: the value at the end of the period; rate: the annual rate of return
def investment_horizon(rate,present_value,future_value) pv = present_value.to_d fv = future_value.to_d periods = Math.log(fv/pv) / Math.log(1+rate) end
[ "def present_value(rate,periods,future_value)\n present_value = future_value / (1+rate)**periods\n end", "def compound_return(periods,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n n = periods.to_d\n rate = ((fv / pv)**(1/n))-1\n end", "def future_value(rate,periods,present_value,compound_frequency = 1)\n compound_frequency = resolve_compound_frequency!(compound_frequency)\n if compound_frequency == :continuous\n return continuous_compound_fv(rate,periods,present_value)\n end\n future_value = present_value * (1+rate/compound_frequency)** (periods * compound_frequency)\n end", "def future_increase\n future - present\n end", "def future_cost(initial_cost, number_of_years, interest_rate)\n initial_cost * (1 + interest_rate) ** number_of_years\n end", "def follow_up_interval\n if should_take_low_intensity_questionnaire?\n 0\n elsif low_intensity? or recent_loss?\n 6.months\n else\n 3.months\n end\n end", "def future_expiry_phase(pair, days_remain)\n\t\t\tif days_remain >= 200\n\t\t\t\traise \"days_remain too large #{days_remain} maybe an error? #{pair}\"\n\t\t\telsif days_remain >= 70\n\t\t\t\treturn 1\n\t\t\telsif days_remain >= 20\n\t\t\t\treturn 2\n\t\t\telsif days_remain >= 2\n\t\t\t\treturn 5\n\t\t\telse\n\t\t\t\treturn 6\n\t\t\tend\n\t\tend", "def net_present_value\n lambda do |x|\n relative_payments.reduce(0) do |sum, relative_payment|\n sum + relative_payment.amount * (1 + x)**-relative_payment.offset\n end\n end\n end", "def nominal_due_given_efective(effective_rate, periods)\n\t\t\t((((1 + (effective_rate.to_f/100))**(1/periods.to_f)-1)*periods.to_f)*100).round(4)\n end", "def installment_value(interest_rate, financing_time_months, loan_value)\n @installment_calculator.calculate(interest_rate, financing_time_months, loan_value)\n end", "def present_value\n # Payoff amount = 0, we’re assuming a fully amortizing loan\n payoff_amount = 0\n end", "def set_estimate_from_effort\n estimated_effort = self.story_points #custom_value.value\n unless estimated_effort.blank?\n self.estimated_hours = case estimated_effort.to_i\n when 1 then 4\n when 2 then 8\n when 3 then 12\n when 5 then 20\n when 8 then 32\n end\n end\n end", "def pmt(interest_rate, number_of_payments, present_value)\n interest_rate_monthly = interest_rate/12\n interest_rate_monthly_plus_one = interest_rate_monthly+1\n monthly_payment = present_value*interest_rate_monthly*interest_rate_monthly_plus_one**(number_of_payments)/(interest_rate_monthly_plus_one**number_of_payments-1)\n monthly_payment.round(2)\n\n# The equation for calculating the mortgage payment is as follows\n# - Monthly payment C\n# - interest_rate must be converted into a monthly interest rate\n# -- interest_rate / 12 / 100\n# - Number_of_payments must be converted into months number_of_payments * 12\n# - present_value requires no changes.\n# - The final equation is an equation for an annuity Pr(1+r)^N / (1+r)^N-1\n\nend", "def amounts_owed_on_date_assuming_scheduled_payments(date)\n raise Exception.new('Loan must be funded to calculate future interest owed assuming scheduled payments') unless ['active','collections','garnishments'].include? 
aasm_state and funded_on\n raise Exception.new('Cannot calculate anticipated interest for a date in the past') if date < Date.today\n raise Exception.new('Date for interest calculation must be on or after the funding date.') if date < self.funded_on\n if amounts_owed_updated_on < Date.today\n update_amounts_owed\n end\n\n # Iterate over scheduled payments from date balance last updated through the desired date\n hypothetical_principal = principal_owed\n hypothetical_principal_last_changed_on = amounts_owed_updated_on\n total_interest = 0\n interest_payments = 0\n hypothetical_fees_owed = fees_owed\n fee_payments = 0\n puts \"Initial principal #{hypothetical_principal} on #{hypothetical_principal_last_changed_on}\"\n puts \"Daily interest rate: #{daily_interest_rate}\"\n scheduled_payments.each do |scheduled_payment|\n break if scheduled_payment.draft_date >= date\n interest_payments += scheduled_payment.interest\n fee_payments += scheduled_payment.fees\n if scheduled_payment.principal != 0\n days = scheduled_payment.draft_date - hypothetical_principal_last_changed_on\n interest_for_span = (daily_interest_rate * hypothetical_principal * days.to_f).to_money\n total_interest += interest_for_span\n puts \" Principal reduced by #{scheduled_payment.principal} on #{scheduled_payment.draft_date}\"\n puts \" Interest on #{hypothetical_principal} for #{days} days = #{interest_for_span}\"\n hypothetical_principal -= scheduled_payment.principal # All amounts on scheduled payments are >= 0\n hypothetical_principal_last_changed_on = scheduled_payment.draft_date\n end\n end\n\n # Calculate interest since last date hypothetical principal changed until target date\n days = date - hypothetical_principal_last_changed_on\n puts \"#{days} days from date of last change in hypothetical principal to target date\"\n puts \"Multiplying: #{daily_interest_rate} * #{hypothetical_principal.to_f} * #{days.to_f}\"\n interest_for_span = (daily_interest_rate * hypothetical_principal.to_f * days.to_f).to_money\n puts \"Interest since last change in hypothetical principal: #{interest_for_span}\"\n total_interest += interest_for_span\n\n interest_owed = (total_interest - interest_payments).to_money\n puts \"Interest expected to be owing on #{date}: #{interest_owed}\"\n hypothetical_fees_owed -= fee_payments\n { :total => hypothetical_principal+interest_owed+hypothetical_fees_owed, :principal => hypothetical_principal, :interest => interest_owed, :fees => hypothetical_fees_owed}\n end", "def future_intervals_for_user(user)\n future_intervals = []\n periods = user.critical_periods.order_by(:from => 'desc').limit(@to_consider).all.to_a\n if periods.length > 0\n average_period_values = average_period_values(periods)\n average_cycle = average_period_values[:average_cycle]\n average_length = average_period_values[:average_length]\n\n future_period_from = periods[0].from\n @to_predict.times do ||\n future_period_from = future_period_from + average_cycle.days\n future_period_to = future_period_from + average_length.days\n future_intervals.push(from: future_period_from, to: future_period_to)\n end\n end\n future_intervals\n end", "def extrapolateData( key, startYear, startMonth, endYear, endMonth, annualGrowthRate )\n y = startYear\n m = startMonth\n # annual growth rate as monthly compound\n monthlyGrowthRate =( 1.0 + ( annualGrowthRate / 100.0 )) ** ( 1.0 / 12.0)\n puts \"growth rate #{annualGrowthRate} monthlyGrowthRate #{monthlyGrowthRate}\"\n loop do\n tm = m - 1\n ty = y\n if( tm == 0 )then\n tm = 12;\n ty = ty - 1\n end;\n date 
= Date::new( ty, tm, 1 );\n last = getByD( key, date )\n new = last * monthlyGrowthRate\n puts \"y=#{y} m=#{m} last=#{last} new=#{new} growthRate=#{monthlyGrowthRate}\"\n setByM( key, y, m, new ); \n break if y == endYear and m == endMonth \n m += 1\n if( m == 13 )then\n m = 1\n y += 1\n end\n end\n end", "def future_pe\n if $pe\n $pe.to_i\n elsif growth_rate || historical_pe\n pe = [growth_rate ? growth_rate * 200.0 : nil,\n Moscalc.ema(historical_pe + [current_pe]),\n Max_Future_PE\n ].compact.min\n pe < 0 ? current_pe : pe\n else\n current_pe\n end\n end", "def forecast_finish_date(basis_hours)\n if complete_ev(basis_hours) == 100.0\n @ev.keys.max\n elsif today_spi(basis_hours) == 0.0\n @pv.keys.max\n else\n if @issue_max_date < @basis_date\n rest_days = (@pv[@pv.keys.max] - @ev[@ev.keys.max]) / today_spi(basis_hours) / basis_hours\n @basis_date + rest_days\n else\n rest_days = @pv.count { |key, _value| key > @basis_date }\n @pv.keys.max - (rest_days - (rest_days / today_spi(basis_hours)))\n end\n end\n end", "def nominal_anticipated_given_efective(effective_rate, periods)\n nominalRate = (1+(effective_rate.to_f/100))**(1/periods.to_f)-1\n toAnticipated = nominalRate / (1+nominalRate)\n (toAnticipated * periods.to_f * 100).round(4)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
determine the effective annual rate for a given simple interest rate compounded over a given number of periods. rate: simple annual rate; compound_per_period: compounds per period
def effective_annual_rate(rate,compound_frequency) compound_frequency = resolve_compound_frequency!(compound_frequency) if compound_frequency == :continuous return continuous_effective_annual_rate(rate) end m= compound_frequency e_rate = (1 + rate/m)**m -1 end
[ "def current_compound_interest_rate() 4.3 end", "def nominal_anticipated_given_efective(effective_rate, periods)\n nominalRate = (1+(effective_rate.to_f/100))**(1/periods.to_f)-1\n toAnticipated = nominalRate / (1+nominalRate)\n (toAnticipated * periods.to_f * 100).round(4)\n end", "def interest(loan_amount, nominal_rate, days_in_month = 30 , days_in_year = 360)\n interest_in_cents = ((loan_amount * nominal_rate * days_in_month) / days_in_year)# To convert cents to euro by dividing it by 100 and round off decimal by 2\n interest_in_euro = (interest_in_cents.to_f/100).round(2)\n end", "def annual_salary(hourly_rate)\n hourly_rate * work_hours_per_year / employee_cost_factor\n end", "def compound_return(periods,present_value,future_value)\n pv = present_value.to_d\n fv = future_value.to_d\n n = periods.to_d\n rate = ((fv / pv)**(1/n))-1\n end", "def annualized_rate_of_return(rate)\n\t\t\t(1+rate)**12 - 1\n\t\tend", "def nominal_due_given_efective(effective_rate, periods)\n\t\t\t((((1 + (effective_rate.to_f/100))**(1/periods.to_f)-1)*periods.to_f)*100).round(4)\n end", "def pmt(interest_rate,payments,principal)\n numerator =interest_rate*principal*(1 + interest_rate)**payments\n denominator= (1+ interest_rate)**payments - 1\n return numerator/denominator.to_f\nend", "def calculate_annuity(loan_amount, interest_percent_rate, months)\n interest_rate = interest_percent_rate.to_f / 100.0\n monthly_interest_rate = interest_rate / 12.0\n\n numerator = monthly_interest_rate * loan_amount.to_f\n denominator = 1.0 - ((1.0 + monthly_interest_rate)**-months)\n\n (numerator / denominator)\nend", "def calculate_payment\n x = @periodic_rate * @principal * ((1 + @periodic_rate)**@periods)\n y = ((1 + @periodic_rate)**@periods) - 1\n (x / y).round(2)\n end", "def hourly_rate(annual_salary)\n employee_cost_factor * annual_salary / work_hours_per_year\n end", "def rate(rate_period_date, benefit_begin_date, birth_date)\n age = Ager.new(birth_date).age_as_of(benefit_begin_date)\n premiums = Collections::Premiums.new(self.premium_tables).for_date(rate_period_date).for_age(age)\n premiums.to_a.first\n end", "def interestRate \n # easy variables to use\n firstTrans = self.transactions[0].sum\n secondTrans = self.transactions[1].sum\n\n # finding which set of transactions are greater\n if firstTrans > secondTrans\n # subtract the smaller trans\n diff = firstTrans - secondTrans\n\n # return the interest rate needed to return to zero\n return (diff * 1.0)/secondTrans\n elsif firstTrans < secondTrans \n # subtract the smaller trans\n diff = secondTrans - firstTrans\n\n # return the interest rate needed to return to zero\n return (diff * 1.0)/firstTrans\n else\n return 0\n end\n end", "def compound_interest\n\tp \"What is the principal amount?\"#\n\tprincipal = gets.chomp.to_i\n\tp \"What is the rate?\"\n\trate = gets.chomp.to_f\n\tp \"What is the number of years?\"\n\tterm = gets.chomp.to_i\n\tp \"What is the number of time the interest in compounded per year?\"\n\tcompounded = gets.chomp.to_i\n\t\n\tnew_rate = ((rate / compounded)/100) + 1\n\ttotal = principal\n\t(term * compounded).times do\n\t\ttotal = total * new_rate\n\tend\n\t\n\tp \"$#{principal} invested at #{rate}% for #{term} years compounded #{compounded} times per year is #{total.round(2)}\"\n\t\nend", "def efective_given_nominal_anticipated(nominal_rate, term)\n (((1/((1-((nominal_rate.to_f/100)/term))**term))-1)*100).round(4)\n end", "def debt_rate\n ten_year_treasury + bps(200)\n end", "def calculateAnnuity\n rate = @i /100.0\n @pmt*((1 - (1 / ((1 + 
rate)**@n))) / rate)\n end", "def initialize(rate, type=:yearly, options = {})\n if type.class.to_s == 'Float'\n @rate = convert_period(1/type, rate)\n elsif type.to_sym == :apr\n options = {:compounded => :yearly}.merge(options)\n factor = @@periods[options[:compounded]]\n @rate = (1 + (rate*factor.to_f))**(1/factor.to_f) - 1\n else\n @rate = convert_period(1/@@periods[type], rate)\n end\n end", "def calculateInterest\n\t\tinterest = 0\n\t\t@transactions.each do |trans|\n\t\t\tinterest += -1 * ((31 - trans[0]) * trans[-1] * @APR / 365)\n\t\tend\n\t\tinterest < 0 ? 0 : interest.round(2)\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrating this to `authorizer#homerooms`
def authorized_homerooms if EnvironmentVariable.is_true('ENABLE_HOMEROOM_AUTHORIZATION_V2') authorizer.homerooms else authorizer.allowed_homerooms_DEPRECATED(acknowledge_deprecation: true) end end
[ "def authorizes\n @authorizes.constantize\n end", "def authorize\n end", "def valid_author_headers\n {\n \"Authorization\" => author_token_generator(author.id)\n }\n end", "def documentation_authorizer\n @documentation_authorizer ||= Documentation.config.authorizer.new(controller)\n end", "def author_index; end", "def apply_authorities(env)\n DogBiscuits.config.authorities_add_new.each do |authority_name|\n term = authority_name.to_s.singularize.to_sym\n next unless env.attributes.key? term\n env.attributes[term].each do |attr|\n add_new(authority_name.to_s, attr)\n end\n end\n end", "def index\n @authorities = current_user.authorities\n end", "def subauthorities\n []\n end", "def normalize_author(hsh)\n str = hsh['author']\n tokens = repair_and_tokenize_author_text(str)\n authors = []\n current_auth = []\n begin_auth = 1\n tokens.each {|tok|\n if tok =~ /^(&|and)$/i\n if !current_auth.empty?\n auth = normalize_author_name(current_auth)\n authors << auth\n end\n current_auth = []\n begin_auth = 1\n next\n end\n if begin_auth > 0\n current_auth << tok\n begin_auth = 0\n next\n end\n if tok =~ /,$/\n current_auth << tok\n if !current_auth.empty?\n auth = normalize_author_name(current_auth)\n authors << auth\n current_auth = []\n begin_auth = 1\n end\n else\n current_auth << tok\n end\n }\n if !current_auth.empty?\n auth = normalize_author_name(current_auth)\n authors << auth unless auth.strip == \"-\"\n end\n hsh['authors'] = authors\n hsh\n end", "def author_roles\n @author_roles ||= Sappiamo::API::AuthorRoles.new(credentials)\n end", "def ach_authorizations(entity_id)\n API::request(:get, \"entities/#{entity_id}/ach_authorizations\")\n end", "def homerooms(options = {})\n # Query for all students up front, then pass through authorizer.\n unsafe_all_students = Student.active.includes(:homeroom).to_a\n students = authorized { unsafe_all_students }\n\n # In memory, group each list by homeroom.\n unsafe_students_by_homeroom = unsafe_all_students.group_by(&:homeroom)\n authorized_students_by_homeroom = students.group_by(&:homeroom)\n\n # In the deprecated method, it would include access to Homerooms that\n # had no students. In the new method, this doesn't make sense (and there's\n # no real point). 
But this allows an option to force using the older method\n # within tests, which will over-include homerooms with zero students.\n potential_homerooms = if options.fetch(:force_search_all_homerooms, false)\n Homeroom.all.includes(:students)\n else\n authorized_students_by_homeroom.keys.compact\n end\n\n # Iterate in memory, and find only homerooms where educator\n # can access all students.\n homerooms = []\n potential_homerooms.each do |homeroom|\n authorized_students_in_homeroom = authorized_students_by_homeroom[homeroom]\n unsafe_students_in_homeroom = unsafe_students_by_homeroom[homeroom]\n next if Set.new(unsafe_students_in_homeroom) != Set.new(authorized_students_in_homeroom)\n homerooms << homeroom\n end\n homerooms\n end", "def authorize_leader!\n raise CanCan::AccessDenied if current_user.managed_chapters.count.zero?\n end", "def authorize\n @analytics.authorization = GaAuthorizer.token\n end", "def create_authorizable_getter\n define_singleton_method(authorizable_getter_name) { @authorizable }\n end", "def auth_policies=(_arg0); end", "def ach_authorizations(entity_id)\n API::request(:get, FundAmerica.base_uri + \"entities/#{entity_id}/ach_authorizations\")\n end", "def host_authorization; end", "def authorized_students(homeroom)\n authorized do\n homeroom.students\n .active\n .includes(:event_notes, :interventions, :homeroom)\n .to_a\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Query for students through homeroom, but scoped within a student-level authorization check. This is essentially double-wrapping the authorization checks; the homeroom authorization check should separately only allow access when the educator can access all students in the homeroom.
def authorized_students(homeroom) authorized do homeroom.students .active .includes(:event_notes, :interventions, :homeroom) .to_a end end
[ "def homerooms(options = {})\n # Query for all students up front, then pass through authorizer.\n unsafe_all_students = Student.active.includes(:homeroom).to_a\n students = authorized { unsafe_all_students }\n\n # In memory, group each list by homeroom.\n unsafe_students_by_homeroom = unsafe_all_students.group_by(&:homeroom)\n authorized_students_by_homeroom = students.group_by(&:homeroom)\n\n # In the deprecated method, it would include access to Homerooms that\n # had no students. In the new method, this doesn't make sense (and there's\n # no real point). But this allows an option to force using the older method\n # within tests, which will over-include homerooms with zero students.\n potential_homerooms = if options.fetch(:force_search_all_homerooms, false)\n Homeroom.all.includes(:students)\n else\n authorized_students_by_homeroom.keys.compact\n end\n\n # Iterate in memory, and find only homerooms where educator\n # can access all students.\n homerooms = []\n potential_homerooms.each do |homeroom|\n authorized_students_in_homeroom = authorized_students_by_homeroom[homeroom]\n unsafe_students_in_homeroom = unsafe_students_by_homeroom[homeroom]\n next if Set.new(unsafe_students_in_homeroom) != Set.new(authorized_students_in_homeroom)\n homerooms << homeroom\n end\n homerooms\n end", "def student_ranking_per_school\n @courses ||= Course.all\n @students ||= Student.all\n @exam_groups ||= ExamGroup.result_published\n authorize! :read, @exam_groups.first\n end", "def school_students(query={})\n self.simple_client.get(\"/api/v1/schools/my/students?#{query.to_query}\")\n end", "def view_students\n requires({'role'=>'admin'})\n @students = User.find_all_by_role_and_deleted_at(:student, nil)\n end", "def index\n authorize Highschool\n @highschools = Highschool.order(:name).search(params[:search]).page(params[:page])\n end", "def students\n User.joins(course_enrollments: :course_role).where(\n course_enrollments: {\n course_offering_id: id,\n course_role_id: CourseRole.student\n })\n end", "def index\n if params[:course_id]\n begin\n course = Course.find(params[:course_id])\n must_be_a_teacher_of(params[:token], course)\n\n participations = course.participations.where(role: ROLE_STUDENT)\n @students = User.none\n participations.each do |p|\n @students <<= p.user\n end\n rescue ActiveRecord::RecordNotFound\n json_failed(REASON_RESOURCE_NOT_FOUND)\n end\n elsif params[:lesson_id]\n json_failed(REASON_NOT_IMPLEMENTED)\n else\n json_failed(REASON_INVALID_OPERATION)\n end\n end", "def why_authorized_for_student?(student, options = {})\n begin\n should_consider_sections = options.fetch(:should_consider_sections, PerDistrict.new.should_consider_sections_for_student_level_authorization?)\n return :districtwide if @educator.districtwide_access?\n # As a performance optimization, this check excludes `dynamic_labels`, since they're a bit more\n # expensive to compute, and here we only need to check one specific label that we know\n # is static. Also, PerfTest shows that although it might seem like moving the env check first would\n # speed this up by avoiding the labels query, this order is consistently faster when the ENV value\n # is not set.\n return :housemaster if @educator.labels(exclude_dynamic_labels: true).include?('high_school_house_master') && student.grade == '8' && EnvironmentVariable.is_true('HOUSEMASTERS_AUTHORIZED_FOR_GRADE_8')\n\n return nil if @educator.restricted_to_sped_students && !(student.program_assigned.in? 
['Sp Ed', 'SEIP'])\n return nil if @educator.restricted_to_english_language_learners && student.limited_english_proficiency == 'Fluent'\n return nil if @educator.school_id.present? && student.school_id.present? && @educator.school_id != student.school_id\n\n return :schoolwide if @educator.schoolwide_access? || @educator.admin? # Schoolwide admin\n return :grade_level if @educator.has_access_to_grade_levels? && student.grade.in?(@educator.grade_level_access) # Grade level access\n\n # The next two checks call `#to_a` as a performance optimization.\n # In loops with `authorized { Student.active }`, without forcing this\n # to an eagerly evaluated array, repeated calls will keep making the\n # same queries. This seems unexpected to me, but adding `to_a` at\n # the end results in Rails caching these queries across the repeated\n # calls in the loop. So we can do that here, and let callers who\n # are calling this in a loop get the optimization without having to\n # optimize themselves.\n return :homeroom if student.in?(@educator.students.to_a) # Homeroom level access\n\n return :section if should_consider_sections && student.in?(@educator.section_students.to_a) # Section level access\n rescue ActiveModel::MissingAttributeError => err\n # We can't do authorization checks on models with `#select` that are missing\n # fields. If this happens, it's probably because the developer is trying to\n # to optimize a query. The authorization layer could only recover by making more\n # queries, so instead we raise and force the developer to figure out how to resolve.\n #\n # See `Authorizer.student_fields_for_authorization` and `Authorizer.educator_field_for_authorization`\n # to see what fields are required on each model.\n raise err\n end\n nil\n end", "def students\n if current_user.is_admin?\n @students= User.find(:all, :conditions => \"is_teacher = '0' and is_admin = '0'\")\n respond_to do |format|\n format.xml { render :xml => @students }\n end\n else\n respond_to do |format|\n format.xml { render :text => \"error\" }\n end\n end\n end", "def authorized_homerooms\n if EnvironmentVariable.is_true('ENABLE_HOMEROOM_AUTHORIZATION_V2')\n authorizer.homerooms\n else\n authorizer.allowed_homerooms_DEPRECATED(acknowledge_deprecation: true)\n end\n end", "def list_students\n if current_user.is_admin?\n @course = Course.find(params[:id])\n else\n @course = current_user.courses.find(params[:id])\n end\n @users = @course.users\n @students = Array.new\n @users.each do |x|\n if !x.is_teacher?\n @students << x\n end\n end\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @students }\n end\n rescue ActiveRecord::RecordNotFound => e\n prevent_access(e)\n end", "def set_hs_students_from_housing\n high_school_students_from_housing = CeqrData::ScaHousingPipelineByBoro.version(\n data_package.table_for(\"sca_housing_pipeline_by_boro\")\n ).high_school_students_from_new_housing_by_boro(project.borough)\n\n hs_students = high_school_students_from_housing.map{|s| s[:hs_students]}\n\n self.hs_students_from_housing = hs_students.first\nend", "def index\n \n parent_user = User.find( params[:parent_id] )\n\n\t# check that the issuer of the request has both the username and ID of the parent, prevent attack\n if params[:parent_login].gsub(/ /,'') != parent_user.login.gsub(/ /,'')\n \tlog_attack \"Student index() for parent \" + params[:parent_id] + \" : \" + params[:parent_login] + \" - parent_user.login = \" + parent_user.login \t\n respond_to do |format|\n format.xml { render :xml => 
errorRsp( \"Security error\") }\n end\n \treturn\n end\n \n \n if params[:parent_id] != nil\n @students = Student.find_all_by_parent_id(params[:parent_id], \n :conditions => { :status => [ \"active\" ] } )\n end\n \n \n if params[:admin_list] != nil\n @students = Student.find_all_by_status( [ \"active\", \"suspended\" ] )\n end\n \n respond_to do |format|\n format.xml { render :xml => @students }\n end\n end", "def index\n authorize! :read, GradeSection\n @grade_level = GradeLevel.find(params[:grade_level_id])\n @grade_sections = @grade_level.grade_sections.includes([:academic_year, :homeroom])\n end", "def student_granted_organizations\n granted_organizations_for :student\n end", "def index\n if current_user.is_company_admin?\n if current_user.company.reg_status == 3\n @students = Student.scoped\n else\n @students = []\n end\n #elsif current_user.is_department_admin?\n # @students = current_user.department.students\n else\n @students = Student.scoped\n end\n\n if params.keys.include? \"event_id\"\n @students = @students.joins(:registered_events).where(\"event_id = ?\", params[:event_id])\n end\n\n unless current_user.is_department_admin?\n @students.select! { |s| s.is_active? }\n end\n\n respond_with @students\n end", "def get_user_children(bearer) \n return get_info(bearer, \"v2/my/students\")\n end", "def elective\n @subject = Subject.shod(params[:id])\n @students ||= @subject.elective_group.batch.students\n authorize! :read, @student\n end", "def hals\n @course = Course.find(params[:id])\n # don't show any private hals\n @hals = @course.hals.not_private\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Serializes a Student into a hash with other fields joined in (that are used to perform filtering and slicing in the UI). This may be slow if you're doing it for many students without eager includes.
def fat_student_hash(student) HashWithIndifferentAccess.new(student.as_json({ except: [ :primary_phone, :primary_email, :student_address ] }).merge({ has_photo: student.has_photo, discipline_incidents_count: student.most_recent_school_year_discipline_incidents_count, absences_count: student.most_recent_school_year_absences_count, tardies_count: student.most_recent_school_year_tardies_count, homeroom_name: student.try(:homeroom).try(:name), event_notes_without_restricted: student.event_notes_without_restricted, interventions: student.interventions, sped_data: sped_data(student) })) end
[ "def flat_row_hash(student)\n # Remove some fields by default, these are likely to be misleading.\n # Allow callers to remove other fields (eg, address) for other uses,\n # to safelist with `only` instead.\n as_json_options = @options.fetch(:as_json, {\n except: [:created_at, :updated_at]\n })\n student_fields = student.as_json(as_json_options)\n\n # optionally include other fields\n student_fields\n .merge(additional_student_fields(student))\n .merge(service_fields(student))\n .merge(event_note_fields(student))\n .stringify_keys!\n end", "def students\n object.group.students.map do |student|\n Student.serialize(student)\n end\n end", "def include_students(fields)\n if fields.include?(:student_memberships)\n {student_memberships: {include: :user}}\n end\n end", "def get_student_section_columns_hash(students)\n students.inject({}) do |map, student|\n map[student[:student_id]] = section_columns_hash(student[:sections])\n map\n end\n end", "def flat_row_hash(student, all_service_types)\n student_fields = student.as_json.except(*[\n 'created_at',\n 'updated_at',\n 'student_address',\n 'hispanic_latino',\n 'race'\n ])\n\n additional_student_fields = {\n student_risk_level: student.student_risk_level.level,\n discipline_incidents_count: student.most_recent_school_year.discipline_incidents.count,\n absences_count: student.most_recent_school_year.absences.count,\n tardies_count: student.most_recent_school_year.tardies.count,\n homeroom_name: student.try(:homeroom).try(:name)\n }\n\n # unroll all service types\n student_service_type_ids = student.services.active.map(&:service_type_id)\n service_fields = all_service_types.reduce({}) do |hash, service_type|\n service = student.services.active.find {|service| service.service_type_id == service_type.id }\n value = if service.nil? then '' else service.date_started.strftime(\"%Y-%m-%d\") end\n hash[\"#{service_type.name} (active_service_date_started)\"] = value\n hash\n end\n\n # Unroll all event note types.\n # This will include the presence of restricted notes, but only the date and\n # no content.\n all_event_note_types = EventNoteType.all\n event_note_type_ids = student.event_notes.map(&:event_note_type_id)\n event_note_fields = all_event_note_types.reduce({}) do |hash, event_note_type|\n event_note = student.event_notes.find {|event_note| event_note.event_note_type_id == event_note_type.id }\n value = if event_note.nil? 
then '' else event_note.recorded_at.strftime(\"%Y-%m-%d\") end\n hash[\"#{event_note_type.name} (last_event_note_recorded_at)\"] = value\n hash\n end\n\n student_fields\n .merge(additional_student_fields)\n .merge(service_fields)\n .merge(event_note_fields)\n .stringify_keys!\n end", "def profile\n student = Student.find(params[:id])\n chart_data = StudentProfileChart.new(student).chart_data\n @serialized_data = {\n current_educator: current_educator,\n student: student.serialized_data,\n notes: student.student_notes.map { |note| serialize_student_note(note) },\n feed: student_feed(student),\n chart_data: chart_data,\n intervention_types_index: intervention_types_index,\n educators_index: educators_index,\n attendance_data: {\n discipline_incidents: student.most_recent_school_year.discipline_incidents,\n tardies: student.most_recent_school_year.tardies,\n absences: student.most_recent_school_year.absences\n }\n }\n end", "def sample_students_json\n seed = params.fetch(:seed, '42').to_i\n n = params.fetch(:n, '40').to_i\n authorized_sample_students = authorized do\n Student.active.sample(n, random: Random.new(seed))\n end\n sample_students_json = authorized_sample_students.as_json({\n only: [:id, :grade, :first_name, :last_name],\n include: {\n school: {\n only: [:id, :name, :school_type]\n }\n }\n })\n render json: {\n sample_students: sample_students_json\n }\n end", "def merge_mutable_fields_for_slicing(student_hashes)\n student_ids = student_hashes.map {|student_hash| student_hash[:id] }\n summer_service_type_ids = ServiceType.where(summer_program: true).pluck(:id)\n all_event_notes = EventNote.where(student_id: student_ids)\n all_active_services = Service.where(student_id: student_ids).active\n all_interventions = Intervention.where(student_id: student_ids)\n all_summer_services = Service.where(student_id: student_ids)\n .where(service_type_id: summer_service_type_ids)\n .where(\"date_started > ?\", 1.year.ago)\n\n student_hashes.map do |student_hash|\n for_student = {\n event_notes: all_event_notes.select {|event_note| event_note.student_id == student_hash[:id] },\n active_services: all_active_services.select {|service| service.student_id == student_hash[:id] },\n summer_services: all_summer_services.select {|service| service.student_id == student_hash[:id] },\n interventions: all_interventions.select {|intervention| intervention.student_id == student_hash[:id] }\n }\n student_hash.merge({\n event_notes: for_student[:event_notes].map {|x| serialize_event_note_without_attachments(x) },\n active_services: for_student[:active_services].map {|x| serialize_service(x) },\n summer_services: for_student[:summer_services].map {|x| serialize_service(x) },\n interventions: for_student[:interventions].map {|x| serialize_intervention(x) },\n })\n end\n end", "def hash_builder(student)\n student_hash = { id: student.id, name: student.first_name + ' ' + student.s_last_name,\n other_interventions: student.num_other_programs,\n tutor: student.vol_name, first_attempt_average: @acumen_one,\n second_attempt_average: @acumen_two,\n hug_gain: (@acumen_two - @acumen_one).round(2),\n last_year_dra_gains: @last_year_dra_gains,\n fall_dra: @student_record[:fall_dra], winter_dra: @student_record[:winter_dra],\n mid_year_dra_gain: @student_record[:mid_year_dra_gain],\n spring_dra: @student_record[:spring_dra], end_year_dra_gain: @student_record[:end_year_dra_gain],\n fall_rit: @student_record[:fall_rit], winter_rit: @student_record[:winter_rit],\n mid_year_rit_gain: @student_record[:mid_year_rit_gain],\n 
spring_rit: @student_record[:spring_rit], end_year_rit_gain: @student_record[:end_year_rit_gain],\n fall_rank: @student_record[:fall_rank], winter_rank: @student_record[:winter_rank],\n mid_year_rank_gain: @student_record[:mid_year_rank_gain],\n spring_rank: @student_record[:spring_rank], end_year_rank_gain: @student_record[:end_year_rank_gain],\n fall_lexile: @student_record[:fall_lexile], winter_lexile: @student_record[:winter_lexile],\n spring_lexile: @student_record[:spring_lexile] }\n student_hash\n end", "def students_by_section\n class_map = Hash.new{ |h,k| h[k] = [] }\n self.followers.includes([:section, :student_user]).each do |f|\n class_map[f.section] << f.student_user\n end\n class_map\n end", "def to_jaxb_json_hash\n _h = {}\n if !students.nil?\n _ha = Array.new\n students.each { | _item | _ha.push _item.to_jaxb_json_hash }\n _h['students'] = _ha\n end\n return _h\n end", "def sample_students_json\n raise Exceptions::EducatorNotAuthorized unless current_educator.can_set_districtwide_access?\n\n seed = params.fetch(:seed, '42').to_i\n n = params.fetch(:n, '40').to_i\n authorized_sample_students = authorized do\n Student.active.sample(n, random: Random.new(seed))\n end\n sample_students_json = authorized_sample_students.as_json({\n only: [:id, :grade, :first_name, :last_name],\n include: {\n school: {\n only: [:id, :name, :school_type]\n }\n }\n })\n render json: {\n sample_students: sample_students_json\n }\n end", "def student_feed(student, restricted_notes: false)\n {\n event_notes: student.event_notes\n .select {|note| note.is_restricted == restricted_notes}\n .map {|event_note| serialize_event_note(event_note) },\n services: {\n active: student.services.active.map {|service| serialize_service(service) },\n discontinued: student.services.discontinued.map {|service| serialize_service(service) }\n },\n deprecated: {\n interventions: student.interventions.map { |intervention| serialize_intervention(intervention) }\n }\n }\n end", "def add_attributes_to_students\n Student.all.each do |student|\n attributes = Scraper.scrape_profile_page(BASE_PATH + student.profile_url)\n student.add_student_attributes(attributes)\n end\n end", "def get_student_attributes\n old_loc = I18n.locale\n I18n.locale = :cs\n self.uic = student.uic\n self.lastname = student.lastname\n self.firstname = student.firstname\n self.birthname = student.birthname\n self.birth_number = student.birth_number\n self.sex = student.sex\n self.created_on = student.created_on\n self.title_before = student.title_before.label if student.title_before\n self.title_after = student.title_after.label if student.title_after\n self.birth_place = student.birth_place\n self.birth_on = student.birth_on\n self.email = student.email\n self.phone = student.phone\n self.citizenship = student.citizenship\n self.qualif_citizenship = Country.qualified_code(student.citizenship)\n self.permaddress_street = student.street\n self.permaddress_housenr = student.desc_number\n self.permaddress_housenrguid = student.orient_number\n self.permaddress_city = student.city\n self.permaddress_zip = student.zip\n self.contact_street = student.postal_street\n self.contact_housenr = student.postal_desc_number\n self.contact_housenrguid = student.postal_orient_number\n self.contact_city = student.postal_city\n self.contact_zip = student.postal_zip\n self.marital_status = student.marital_status\n # TODO isn't it here only cause of specing?\n if student.index.try(:account_number)\n self.bank_branch = student.index.account_number_prefix\n self.bank_account = 
student.index.account_number\n self.bank_code = student.index.account_bank_number\n end\n I18n.locale = old_loc\n return self\n end", "def students_with_low_grades_json(time_now, time_threshold, grade_threshold)\n all_assignments = assignments(time_now, time_threshold, grade_threshold)\n by_student = all_assignments.group_by(&:student)\n json = by_student.map do |student, assignments|\n {\n student: student.as_json(:only => [:id, :email, :first_name, :last_name, :grade, :house]),\n assignments: assignments.map {|assignment| serialize_assignment(assignment) }\n }\n end\n json.as_json\n end", "def student\n\n\t\tif(params[:student_user_id].eql?('null') || params[:classroom_id].eql?('null') )\n\t\t\trender json: {status: \"error\", error: \"invalid-student-user-or-classroom\"}\n\t\telse\n\t\t\tstudent = StudentUser.joins(:classrooms)\n\t\t\t\t.joins(\"inner join teacher_users t on classrooms.teacher_user_id = t.id\")\n\t\t\t\t.where(\"t.id = ?\", @current_teacher_user.id)\n\t\t\t\t.where(\"classrooms.id = ?\", params[:classroom_id])\n\t\t\t\t.where(\"student_users.id = ?\" , params[:student_user_id])\n\t\t\t\t.first\n\n\t\t\tif(!student.nil?)\n\t\t\t\tstudent = student.as_json\n\t\t\t\tstudent.delete(\"salt\")\n\t\t student.delete(\"password_digest\")\n\t\t student.delete(\"oauth_expires_at\")\n\t\t student.delete(\"oauth_token\")\n\t\t student.delete(\"updated_at\")\n\t\t student.delete(\"create_at\")\n\n\t\t\t\trender json: {status: \"success\", student: student}\n\t\t\t\n\t\t\telse\n\n\t\t\t\trender json: {status: \"error\", error: \"invalid-student-user-or-classroom\"}\n\n\t\t\tend\n\t\tend\n\t\t\n\t\t\n\tend", "def get_student_section_association(school_id, section_id, begin_date)\n {:student => @id, :ed_org_id => school_id, :unique_section_code => section_id, :begin_date => begin_date}\n end", "def students\n if usernames\n data = {}\n usernames.each {|u| data[u.username] = u.students}\n render json: data and return\n elsif current_user\n render json: {status: \"success\", response: current_user.students} and return\n else\n render json: {status: StatusCode::FAILURE, reason: \"no data provided\"}\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
SpEd Data as defined by Somerville Schools
def sped_data(student) { sped_level: sped_level(student), sped_tooltip_message: sped_tooltip_message(student), sped_bubble_class: sped_bubble_class(student) } end
[ "def sped_data\n {\n sped_level: sped_level,\n sped_tooltip_message: sped_tooltip_message,\n sped_bubble_class: sped_bubble_class\n }\n end", "def parse_schools_data(schools)\n parsed_schools = schools\n return parsed_schools\n end", "def summer_olympics_sport; end", "def summer_paralympics_sport; end", "def stahp\n end", "def spm\n @spm\n end", "def sgf\n @sgf ||= data[\"sgf\"]\n end", "def visible_sis_data(driver, student)\n #\n # Note: 'row_index' is position of student in list. For each student listed, the page has two hidden span elements\n # useful in determining (1) 'row_index' if you know the SID, or (2) SID if you know the 'row_index':\n #\n # <span id=\"row-index-of-{student.sid}\">{{ rowIndex }}</span>\n # <span id=\"student-sid-of-row-{rowIndex}\">{{ student.sid }}</span>\n #\n wait_until(Utils.medium_wait) { player_link_elements.any? }\n level_el = div_element(xpath: \"#{student_row_xpath student}//div[contains(@id,\\\"student-level\\\")]\")\n major_els = driver.find_elements(xpath: \"#{student_row_xpath student}//span[contains(@id,\\\"student-major\\\")]\")\n grad_term_el = div_element(xpath: \"#{student_row_xpath student}//div[contains(@id,\\\"student-grad-term\\\")]\")\n sports_els = driver.find_elements(xpath: \"#{student_row_xpath student}//span[contains(@id,\\\"student-team\\\")]\")\n gpa_el = span_element(xpath: \"#{student_row_xpath student}//span[contains(@id,\\\"student-cumulative-gpa\\\")]\")\n term_units_el = div_element(xpath: \"#{student_row_xpath student}//div[contains(@id,\\\"student-enrolled-units\\\")]\")\n cumul_units_el = div_element(xpath: \"#{student_row_xpath student}//div[contains(@id,\\\"cumulative-units\\\")]\")\n class_els = driver.find_elements(xpath: \"#{student_row_xpath student}//div[contains(@id,\\\"student-enrollment-name\\\")]\")\n {\n :level => (level_el.text.strip if level_el.exists?),\n :majors => (major_els.map &:text if major_els.any?),\n :grad_term => ((\"#{grad_term_el.text.split[1]} #{grad_term_el.text.split[2]}\") if grad_term_el.exists?),\n :sports => (sports_els.map &:text if sports_els.any?),\n :gpa => (gpa_el.text.gsub('No data', '').chomp if gpa_el.exists?),\n :term_units => (term_units_el.text if term_units_el.exists?),\n :units_cumulative => ((cumul_units_el.text.gsub('No data', '').chomp == '--' ? 
'0' : cumul_units_el.text) if cumul_units_el.exists?),\n :classes => class_els.map(&:text)\n }\n end", "def pcode4\n school.sierra_code\n end", "def sinfo\n sinfo = []\n parse(SINFO_REGEX) do |title_id, section_id, code, info|\n code = RakeMKV::Code[code]\n title = title_id.to_i\n section = section_id.to_i\n sinfo[title] ||= Array.new\n sinfo[title][section] ||= Hash.new\n sinfo[title][section][code] = info\n end\n sinfo\n end", "def sea\n fetch('one_piece.seas')\n end", "def winter_olympics_sport; end", "def add_snp_entry(data, indels, col_individual)\n snp_entry = Struct.new(:chr, :coor, :ori, :genotype)\n # Get the columns we need \n se = snp_entry.new(data[1], data[2], data[3], data[col_individual])\n @h_snps[se.chr + se.coor] = se\n end", "def spa\n return @spa\n end", "def national_sport; end", "def sis_data(all_students)\n set_global_configs all_students\n set_default_cohort\n set_max_cohort_members CONFIG['sis_data_max_users']\n end", "def set_sersol_number(id_data)\n the001 = id_data['001']\n if the001.start_with?('ss')\n the001.sub!('sseb', 'ssib')\n the001.sub!('sse', 'ssj')\n return the001\n end\n end", "def spanish_foreign_citizen_number; end", "def parse_semesters\n response = @networker.fetch_page_containing_semester_data\n document = Nokogiri::HTML(response) # parse the document from the HTTP response\n get_semesters_from_option_values(document).compact\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /featured_clients GET /featured_clients.json
def index @featured_clients = FeaturedClient.all end
[ "def index\n @clients = current_user.clients\n render json: @clients\n end", "def get_featured\n render json: Event.where(is_featured: true), status: :ok\n end", "def create\n @featured_client = FeaturedClient.new(featured_client_params)\n\n respond_to do |format|\n if @featured_client.save\n format.html { redirect_to @featured_client, notice: 'Featured client was successfully created.' }\n format.json { render :show, status: :created, location: @featured_client }\n else\n format.html { render :new }\n format.json { render json: @featured_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n @agency_client = AgencyClient.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @agency_client }\n end\n end", "def show\n @client_service = ClientService.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @client_service }\n end\n end", "def index\n @clientes = Cliente.all\n render json: @clientes\n end", "def show\n @clientsOffers = ClientsOffers.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @clientsOffers }\n end\n end", "def update\n respond_to do |format|\n if @featured_client.update(featured_client_params)\n format.html { redirect_to @featured_client, notice: 'Featured client was successfully updated.' }\n format.json { render :show, status: :ok, location: @featured_client }\n else\n format.html { render :edit }\n format.json { render json: @featured_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n\n @client_spotlight = ClientSpotlight.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @client_spotlight }\n end\n end", "def featured\n\t\t@concerts = Concert.where(featured: true)\n\t\trender(\"index\")\n\tend", "def show\n \t@pickup_list = PickupList.find(params[:id])\n \t@client = @pickup_list.client\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pickup_list }\n end\n end", "def clients\n @clients = Vendor.find(params[:id]).clients\n end", "def index\n @client_services = ClientService.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @client_services }\n end\n end", "def show\n @featured_item = FeaturedItem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @featured_item }\n end\n end", "def index\n get_clientes\n end", "def featured_games\n FeaturedGameList.new perform_request api_url \"featured-games\"\n end", "def show\n #using find_by for if else conditional\n selected_client = Client.find_by(id: params[:id])\n if selected_client\n render json: selected_client, status: :created\n else\n render json: {error: \"Show has failed, in tragedy\"}, status: 422\n end\n end", "def show\n @my_studio_client = MyStudio::Client.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @my_studio_client }\n end\n end", "def show\n @client_need = ClientNeed.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @client_need }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /featured_clients POST /featured_clients.json
def create @featured_client = FeaturedClient.new(featured_client_params) respond_to do |format| if @featured_client.save format.html { redirect_to @featured_client, notice: 'Featured client was successfully created.' } format.json { render :show, status: :created, location: @featured_client } else format.html { render :new } format.json { render json: @featured_client.errors, status: :unprocessable_entity } end end end
[ "def update_features(client_id)\n response = self.class.put(\"https://app.klipfolio.com/api/1.0/clients/#{client_id}/features\", basic_auth: @auth, headers: { \"Content-Type\" => \"application/json\" },\n body: {\n features:[{\"name\":\"public_dashboards\",\"enabled\":true},\n {\"name\":\"published_dashboards\",\"enabled\":true},\n {\"name\":\"downloadable_reports\",\"enabled\":true},\n {\"name\":\"scheduled_emails\",\"enabled\":true}]\n }.to_json)\n puts response.body\n puts \"Client's features were updated.\" if response.success?\n end", "def index\n @featured_clients = FeaturedClient.all\n end", "def update\n respond_to do |format|\n if @featured_client.update(featured_client_params)\n format.html { redirect_to @featured_client, notice: 'Featured client was successfully updated.' }\n format.json { render :show, status: :ok, location: @featured_client }\n else\n format.html { render :edit }\n format.json { render json: @featured_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @client = current_firm.clients.build(params[:client])\n\n respond_to do |format|\n if @client.save\n format.html { redirect_to firm_client_path(current_firm, @client), notice: 'Client was successfully created.' }\n format.json { render json: @client, status: :created, location: @client }\n else\n format.html { render action: \"new\" }\n format.json { render json: @client.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @client = current_firm.clients.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @client }\n end\n end", "def create\n @client = Client.new(client_params)\n\n respond_to do |format|\n if @client.save\n format.html { redirect_to clients_url, notice: 'El cliente se creó correctamente' }\n format.json { render :index, status: :created, location: @client }\n else\n format.html { render :new }\n format.json { render json: @client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @client = current_user.clients.build(client_params)\n\n respond_to do |format|\n if @client.save\n format.html { redirect_to @client, notice: 'Client was successfully created.' }\n format.json { render :show, status: :created, location: @client }\n else\n format.html { render :new }\n format.json { render json: @client.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @featured_client.destroy\n respond_to do |format|\n format.html { redirect_to featured_clients_url, notice: 'Featured client was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def create\n @related_client = RelatedClient.new(related_client_params)\n\n respond_to do |format|\n if @related_client.save\n format.html { redirect_to related_clients_url, notice: 'Related client was successfully created.' }\n # format.json { render :show, status: :created, location: @related_client }\n else\n format.html { render :new }\n format.json { render json: @related_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @invoice_client = InvoiceClient.new(invoice_client_params)\n\n respond_to do |format|\n if @invoice_client.save\n format.html { redirect_to @invoice_client, notice: 'Invoice client was successfully created.' 
}\n format.json { render :show, status: :created, location: @invoice_client }\n else\n format.html { render :new }\n format.json { render json: @invoice_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @client_post = ClientPost.new(params[:client_post])\n @client_post.staff_id = @staff_login.id\n\n respond_to do |format|\n if @client_post.save\n format.html { redirect_to client_posts_path(:cl_id => params[:cl_id]), notice: 'Заметка добавлена.' }\n format.json { render json: @client_post, status: :created, location: @client_post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @client_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @enabled_client = EnabledClient.new(enabled_client_params)\n\n respond_to do |format|\n if @enabled_client.save\n format.html { redirect_to @enabled_client, notice: 'Enabled client was successfully created.' }\n format.json { render :show, status: :created, location: @enabled_client }\n else\n format.html { render :new }\n format.json { render json: @enabled_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @client = Client.find(params[:client_id])\n @fleet = @client.fleets.create(fleet_params)\n\n respond_to do |format|\n if @fleet.save!\n format.html { redirect_to client_fleets_path(@client), notice: 'Fleet was successfully created.' }\n format.json { render 'success', status: :created, location: @fleet }\n format.js\n else\n format.html { render :new }\n format.json { render json: @fleet.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_client(name, facebook_id, twitter_handle)\n puts name, facebook_id, twitter_handle\n # Point the HTTP POST method at the clients endpoint of Klipfolio's API.\n response = self.class.post(\"https://app.klipfolio.com/api/1.0/clients\", basic_auth: @auth, headers: { \"Content-Type\" => \"application/json\" },\n body: {\n \"name\": name,\n \"description\": \"\",\n \"seats\": 5,\n \"status\": \"active\"\n }.to_json)\n puts response.body\n puts \"Client was successfully created.\" if response.success?\n\n # Extract the new client's ID from the HTTP response so that it can be passed to the update_features & update_resources methods.\n client_id = response[\"meta\"][\"location\"]\n client_id.slice!(\"/clients/\")\n p client_id\n\n update_resources(client_id)\n update_features(client_id)\n update_company_properties(client_id, facebook_id, twitter_handle)\n create_group(client_id)\n share_dashboard(client_id)\n end", "def create\n @agency_client = AgencyClient.new(params[:agency_client])\n\n respond_to do |format|\n if @agency_client.save\n format.html { redirect_to @agency_client, notice: 'Agency client was successfully created.' }\n format.json { render json: @agency_client, status: :created, location: @agency_client }\n else\n format.html { render action: \"new\" }\n format.json { render json: @agency_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @live_client = LiveClient.new(live_client_params)\n\n respond_to do |format|\n if @live_client.save\n format.html { redirect_to @live_client, notice: 'Live client was successfully created.' 
}\n format.json { render :show, status: :created, location: @live_client }\n else\n format.html { render :new }\n format.json { render json: @live_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\r\n @natural_client = NaturalClient.new(natural_client_params)\r\n\r\n respond_to do |format|\r\n if @natural_client.save\r\n format.html { redirect_to @natural_client, notice: 'Natural client was successfully created.' }\r\n format.json { render action: 'show', status: :created, location: @natural_client }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @natural_client.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def create\n @vkontakte_client = VkontakteClient.new(params[:vkontakte_client])\n\n respond_to do |format|\n if @vkontakte_client.save\n format.html { redirect_to @vkontakte_client, notice: 'Vkontakte client was successfully created.' }\n format.json { render json: @vkontakte_client, status: :created, location: @vkontakte_client }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vkontakte_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @client = Client.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @client }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /featured_clients/1 PATCH/PUT /featured_clients/1.json
def update respond_to do |format| if @featured_client.update(featured_client_params) format.html { redirect_to @featured_client, notice: 'Featured client was successfully updated.' } format.json { render :show, status: :ok, location: @featured_client } else format.html { render :edit } format.json { render json: @featured_client.errors, status: :unprocessable_entity } end end end
[ "def update_features(client_id)\n response = self.class.put(\"https://app.klipfolio.com/api/1.0/clients/#{client_id}/features\", basic_auth: @auth, headers: { \"Content-Type\" => \"application/json\" },\n body: {\n features:[{\"name\":\"public_dashboards\",\"enabled\":true},\n {\"name\":\"published_dashboards\",\"enabled\":true},\n {\"name\":\"downloadable_reports\",\"enabled\":true},\n {\"name\":\"scheduled_emails\",\"enabled\":true}]\n }.to_json)\n puts response.body\n puts \"Client's features were updated.\" if response.success?\n end", "def update\n @client = set_client\n @client.update(client_params)\n render json: @client\n end", "def update\n resource.update(client_params)\n respond_with resource\n end", "def update\n @client = current_client\n\n respond_to do |format|\n if @client.update_attributes(params[:client])\n format.html { redirect_to firm_client_path(current_firm, @client), notice: 'Client was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @agency_client = AgencyClient.find(params[:id])\n\n respond_to do |format|\n if @agency_client.update_attributes(params[:agency_client])\n format.html { redirect_to @agency_client, notice: 'Agency client was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @agency_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @api_client = ApiClient.find(params[:id])\n\n respond_to do |format|\n if @api_client.update_attributes(params[:api_client])\n format.html { redirect_to @api_client, notice: 'Api client was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @api_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @vkontakte_client = VkontakteClient.find(params[:id])\n\n respond_to do |format|\n if @vkontakte_client.update_attributes(params[:vkontakte_client])\n format.html { redirect_to @vkontakte_client, notice: 'Vkontakte client was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @vkontakte_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @live_client.update(live_client_params)\n format.html { redirect_to @live_client, notice: 'Live client was successfully updated.' }\n format.json { render :show, status: :ok, location: @live_client }\n else\n format.html { render :edit }\n format.json { render json: @live_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @potential_client.update(potential_client_params)\n format.html { redirect_to @potential_client, notice: 'Potential client was successfully updated.' }\n format.json { render :show, status: :ok, location: @potential_client }\n else\n format.html { render :edit }\n format.json { render json: @potential_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @client_service = ClientService.find(params[:id])\n\n respond_to do |format|\n if @client_service.update_attributes(params[:client_service])\n format.html { redirect_to @client_service, notice: 'Client service was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @client_service.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @crm_client.update(crm_client_params)\n format.html { redirect_to @crm_client, notice: 'Client was successfully updated.' }\n format.json { render :show, status: :ok, location: @crm_client }\n else\n format.html { render :edit }\n format.json { render json: @crm_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @carrierwave_client = CarrierwaveClient.find(params[:id])\n @client = @carrierwave_client\n\n respond_to do |format|\n if @carrierwave_client.update_attributes(params[:carrierwave_client])\n format.html { redirect_to(@carrierwave_client, :notice => 'Carrierwave client was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render 'clients/edit' }\n format.xml { render :xml => @carrierwave_client.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\r\n respond_to do |format|\r\n if @natural_client.update(natural_client_params)\r\n format.html { redirect_to @natural_client, notice: 'Natural client was successfully updated.' }\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: 'edit' }\r\n format.json { render json: @natural_client.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def update\n @my_studio_client = MyStudio::Client.find(params[:id])\n\n respond_to do |format|\n if @my_studio_client.update_attributes(params[:my_studio_client])\n format.html { redirect_to @my_studio_client, notice: 'Client was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @my_studio_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_resources(client_id)\n response = self.class.put(\"https://app.klipfolio.com/api/1.0/clients/#{client_id}/resources\", basic_auth: @auth, headers: { \"Content-Type\" => \"application/json\" },\n body: {\n \"resources\": [{\"name\":\"dashboard.tabs.total\", \"value\":1}]\n }.to_json)\n puts response.body\n puts \"Client's resources were updated.\" if response.success?\n end", "def update\n respond_to do |format|\n if @todo_client.update(todo_client_params)\n format.html { redirect_to @todo_client, notice: 'Todo client was successfully updated.' }\n format.json { render :show, status: :ok, location: @todo_client }\n else\n format.html { render :edit }\n format.json { render json: @todo_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @emails_of_client = EmailsOfClient.find(params[:id])\n\n respond_to do |format|\n if @emails_of_client.update_attributes(params[:emails_of_client])\n format.html { redirect_to @emails_of_client, notice: 'Emails of client was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @emails_of_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @qa_client = QaClient.find(params[:id])\n\n respond_to do |format|\n if @qa_client.update_attributes(params[:qa_client])\n format.html { redirect_to @qa_client, notice: 'Qa client was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @qa_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @carrier_client.update(carrier_client_params)\n format.html { redirect_to @carrier_client, notice: 'Carrier client was successfully updated.' }\n format.json { render :show, status: :ok, location: @carrier_client }\n else\n format.html { render :edit }\n format.json { render json: @carrier_client.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /featured_clients/1 DELETE /featured_clients/1.json
def destroy @featured_client.destroy respond_to do |format| format.html { redirect_to featured_clients_url, notice: 'Featured client was successfully destroyed.' } format.json { head :no_content } end end
[ "def destroy\n http_api.delete(\"clients/#{@name}\")\n end", "def destroy\n render json: @api_v1_client if @api_v1_client.destroy\n end", "def destroy\n @client = Client.find(params[:id])\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to clients_url }\n format.json { head :ok }\n end\n end", "def destroy\n @agency_client = AgencyClient.find(params[:id])\n @agency_client.destroy\n\n respond_to do |format|\n format.html { redirect_to agency_clients_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @client = current_client\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to current_firm }\n format.json { head :ok }\n end\n end", "def destroy\n @my_studio_client = MyStudio::Client.find(params[:id])\n @my_studio_client.destroy\n\n respond_to do |format|\n format.html { redirect_to my_studio_clients_url }\n format.json { head :ok }\n end\n end", "def destroy\r\n @client1.destroy\r\n respond_to do |format|\r\n format.html { redirect_to client1s_url }\r\n format.json { head :no_content }\r\n end\r\n end", "def destroy\n @api_client = ApiClient.find(params[:id])\n @api_client.destroy\n\n respond_to do |format|\n format.html { redirect_to api_clients_url }\n format.json { head :no_content }\n end\n end", "def delete(client_id)\n id = client_id.to_s\n Client.collection.filter(:id => id).delete\n AuthRequest.collection.filter(:client_id => id).delete\n AccessGrant.collection.filter(:client_id => id).delete\n AccessToken.collection.filter(:client_id => id).delete\n end", "def destroy\n @qa_client = QaClient.find(params[:id])\n @qa_client.destroy\n\n respond_to do |format|\n format.html { redirect_to qa_clients_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @client = Client.find(params[:id])\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_clients_url) }\n format.xml { head :ok }\n end\n end", "def destroy\r\n @natural_client.destroy\r\n respond_to do |format|\r\n format.html { redirect_to natural_clients_url }\r\n format.json { head :no_content }\r\n end\r\n end", "def destroy\n @otg_client.destroy\n respond_to do |format|\n format.html { redirect_to otg_clients_url, notice: 'Otg client was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @client_info.destroy\n respond_to do |format|\n format.html { redirect_to client_infos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @live_client.destroy\n respond_to do |format|\n format.html { redirect_to live_clients_url, notice: 'Live client was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @client = Client.find(params[:id])\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to(clients_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @client = Client.find(params[:id])\n define_path\n ServerFileOperation.delete(@client.home_directory,@public_path)\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to(clients_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @client = Client.find(params[:id])\n @client.destroy\n\n respond_to do |format|\n format.html { redirect_to(clients_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @client_service = ClientService.find(params[:id])\n @client_service.destroy\n\n respond_to do |format|\n format.html { redirect_to client_services_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert value to bit array to_multi_bit( '0b101010', 6 ) => [1,0,1,0,1,0] to_multi_bit( '0b101010', 10 ) => [1,0,1,0,1,0,0,0,0,0] to_multi_bit( '0b101010', 3 ) => [1,0,1] to_multi_bit( '0b01010' ) => [1,0,1,0] minimum array size for val to_multi_bit( '' ) => [0] null string is equivalent to 0
def to_multi_bit( val, array_size=-1 ) array_size = Integer(array_size) unless array_size.kind_of?(Integer) if val == '' then val = '0' end begin val = Integer(val).to_s(2) rescue => e raise ParameterError, "#{__method__} contains invalid string for number" end arr = val.scan(/./).map{ |b| b.to_i(2) } return arr if array_size < 0 return _fit_array_length( arr, array_size ) end
[ "def to_bit(**options) = convert_to('bit', **options)", "def input_to_bitstring( value )\r\n value\r\n end", "def number_to_bit_array(number, minimum_binary_places = 0)\n assert_non_negative(number)\n array = []\n while number > 0\n array << (number & 1)\n number >>= 1\n end\n array.reverse!\n zero_pad_count = minimum_binary_places - array.size\n zero_pad_count.times { array.unshift(0) }\n array\n end", "def bits(n)\n x = []\n n.times {x << bit}\n x\n end", "def set_bits\n bits = []\n 0.upto(63) {|i| bits << i if set?(i)}\n bits\n end", "def int_to_binary(value); end", "def to_7bit(value)\n [value & 127, (value >> 7) & 127]\n end", "def translate_to_binary(array_of_hex)\n array_of_binary = []\n array_of_hex.each do |num|\n array_of_binary << sprintf(\"%b\", num).rjust(32, '0')\n end\n array_of_binary\n end", "def to_bits(base10_num)\n bits = @bit_vals.map do |b|\n if base10_num >= b\n base10_num -= b\n '1'\n else\n '0'\n end\n end\n bits.join\n end", "def read_bit_array(length)\n data = reader.read((length+7)/8)\n data.unpack(\"b*\").first. # Unpack into a string of \"10101\"\n split(\"\").map { |c| c == \"1\" }.shift(length) # Return true/false array\n end", "def binary_string_to_bit_array(string, minimum_binary_places = 0)\n number = binary_string_to_number(string)\n number_to_bit_array(number, minimum_binary_places)\n end", "def to_set_bit_position_array\n BitMapping.number_to_set_bit_positions_array(number)\n end", "def encode integer_array\n integer_array = integer_array.clone\n bits = BitArray.new\n integer_array.each do |x|\n q = x/@M\n q.times {bits.push 1}\n bits.push 0\n r = x % @M\n (@b-1).downto(0){|i| bits.push r[i]}\n end\n bits\n end", "def bool_to_binary(val); end", "def to_bin(number)\n number = Integer(number);\n if(number == 0)\n return 0;\n end\n ret_bin = \"\";\n ## Untill val is zero, convert it into binary format\n while(number != 0)\n ret_bin = String(number % 2) + ret_bin;\n number = number / 2;\n end\n return ret_bin;\n end", "def encode_bits(bits)\n [bits.map { |b| b ? '1' : '0' }.join].pack('b*')\n end", "def parse_bits(str)\n is_single = str.split(' ').size == 1\n\n # Constant gate (likely vcc or gnd)\n if is_single\n input_bits = []\n output_bit = str\n else\n # Parse strings that look like \"0001 1\"\n (input_bits, output_bit) = str.split(' ')\n input_bits = input_bits.split('')\n end\n\n [input_bits, output_bit]\n end", "def convert_bits(chunks, from_bits:, to_bits:, pad:)\n output_mask = (1 << to_bits) - 1\n buffer_mask = (1 << (from_bits + to_bits - 1)) - 1\n\n buffer = 0\n bits = 0\n\n output = []\n chunks.each do |chunk|\n buffer = ((buffer << from_bits) | chunk) & buffer_mask\n bits += from_bits\n while bits >= to_bits\n bits -= to_bits\n output << ((buffer >> bits) & output_mask)\n end\n end\n\n output << ((buffer << (to_bits - bits)) & output_mask) if pad && bits > 0\n\n return nil if !pad && (bits >= from_bits || ((buffer << (to_bits - bits)) & output_mask) != 0)\n\n output\n end", "def bits\n \"1\" * @prefix + \"0\" * (128 - @prefix)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
class_exsits?("String") => ture class_exists?("djfakf20dak") => false
def class_exists?(classname) str = classname.to_s eval("defined?(#{str}) && #{str}.is_a?(Class)") == true end
[ "def class_exists? string\n\tc = Object.const_get string\n\treturn c.is_a? Class\nrescue NameError\n\treturn false\nend", "def class_exists?(class_name_str)\r\n begin\r\n true if class_name_str.constantize\r\n rescue NameError\r\n false\r\n end\r\n end", "def class_exists?(name)\n get_class(name) != nil\n end", "def class_exists?(class_name)\n eval(\"defined?(#{class_name}) && #{class_name}.is_a?(Class)\") == true\n end", "def has_class?(name)\n @class_syms ||= classes.map(&:to_sym)\n @class_syms.include?(name.to_sym)\n end", "def class?(has_class)\n attribute('class').include? has_class\n end", "def custom_class_present?(cls)\n custom_class.to_s.split.include?(cls)\n end", "def class_variable_defined?(arg0)\n end", "def class_variable_defined?(sym) end", "def isa? classname\n\t\t\tinit_classlist\n\t\t\t@cf.classlist.isa? classname\n\t\tend", "def findExactClassMatch(name)\n fname = name.tr(':', '_')\n return ClassIndex.classExists?(fname)\n end", "def can_get_class?(klass); true; end", "def has_class?(a_class)\n class_names.include?(a_class)\n end", "def is_class(resource)\n if resource.instance_of? String\n return false\n end \n resource.type.each do |type|\n if type.localname.downcase == 'class'\n return true\n end\n\n end\n return false\n end", "def system_class?(arg)\n return true ? arg == system_class : false\nend", "def is_strclass?(); @type == GRT_STRCLASS; end", "def contains_class? exp\n todo = [exp]\n\n until todo.empty?\n current = todo.shift\n\n if node_type? current, :class\n return true\n elsif sexp? current\n todo = current[1..-1].concat todo\n end\n end\n\n false\n end", "def test_class?(data)\r\n return data.include?('.new')\r\n end", "def string_class_name?(class_pair)\n class_pair.children[1].str_type?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns info for the specified venue.
def venue(id) options = { :venue_id => id } get('/venue_info', options) end
[ "def venue_info(venue_id)\n venue = self.class.foursquare.venue(venue_id)\n info = {}\n info[:category_id] = venue.categories.first.id\n info[:category_name] = venue.categories.first.name\n info[:total_checkins] = venue.stats.checkinsCount\n info\n end", "def venue(id_or_slug, opts={})\n check_id_arg('venue', id_or_slug)\n perform_get_request(format('venues/%s.json', id_or_slug.to_s), opts)\n end", "def venue(vid, options = {})\n options.merge!({ :query => { :key => @api_key } })\n self.class.get(\"/venues/#{vid}\", options)\n end", "def getEventBriteVenue(eventInfo)\n venueID = eventInfo[\"venue_id\"]\n uri = 'https://www.eventbriteapi.com/v3/venues/' + venueID + \"/?token=\" + ENV[\"eventbritePersonal\"]\n require 'open-uri'\n response = open(uri).read\n venueInfo = JSON.parse(response)\n return venueInfo\n end", "def venue_id\n response[\"venueId\"]\n end", "def venue\n @venue ||= Zvents::Venue.find(@venue_id)\n end", "def show\n # @venues = Venue.find_by(id: params[:id])\n end", "def venue\n result = nil\n events_array = events_happening_at\n if !events_array.blank?\n venues_array = events_array[0].venues_held_at\n result = venues_array[0] if !venues_array.blank?\n end\n \n result\n \n end", "def location\n if @event_info[\"venue\"]\n return @event_info[\"venue\"][\"name\"]\n else \n return \"TBD\"\n end\n end", "def venue_name\n venue ? venue.name : \"\"\n end", "def events_at_venue(venue)\n venue_events[venue.id]\n end", "def show\n @venue_visitor = VenueVisitor.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @venue_visitor }\n end\n end", "def index\n @venue_infos = VenueInfo.all\n end", "def venue_stats(id, options = {})\n get(\"venues/#{id}/stats\", options)\n end", "def show\n @venue_user = VenueUser.find(params[:id])\n end", "def vinfo_fmt venue, lat, lon\n dnow = Time.now\n\n gmap_str = ''\n dist_str = ''\n dist = nil\n location = venue['location'] || {}\n vlat = location['lat']\n vlon = location['lng']\n if vlat and vlon\n # Add static map image to the page.\n gmap_str = map_image(vlat, vlon)\n\n dist = distance lat, lon, vlat, vlon\n compass = bearing lat, lon, vlat, vlon\n dist_str = '(%.1f mi %s)<br>' % [dist, compass]\n end\n\n s = \"<p>#{escapeHTML venue['name']} #{venue_cmds venue, dist}<br>#{addr_fmt venue}\"\n s += dist_str\n\n url = venue['url']\n s += \"<br><a href=\\\"#{url}\\\">#{escapeHTML url}</a>\" if url\n\n s += gmap_str\n\n cats = venue['categories'] || []\n s += cats.map { |c| category_fmt c }.join ''\n\n tags = venue['tags'] || []\n s += \"<p>Tags: #{escapeHTML tags.join(', ')}\" unless tags.empty?\n\n stats = venue['stats']\n s += \"<p>Checkins: #{escapeHTML stats['checkinsCount']} <br>Users: #{escapeHTML stats['usersCount']}\" if stats \n beenhere = venue['beenHere']\n s += \"<br>Your checkins: #{escapeHTML beenhere['count']}\" if beenhere \n\n herenow = venue['hereNow']\n s += \"<br>Here now: #{escapeHTML herenow['count']}\" if herenow \n\n venue_mayor = venue['mayor']\n mayor = venue_mayor ? 
venue_mayor['user'] : nil\n\n s += \"<p><img src=\\\"#{mayor['photo']}\\\" alt=\\\"\\\" class=\\\"usericon\\\" style=\\\"float:left\\\">#{name_fmt mayor} (#{escapeHTML venue_mayor['count']}x) from #{escapeHTML mayor['homeCity']} is the mayor<br style=\\\"clear:both\\\">\" if mayor\n\n if herenow and herenow['count'] > 0\n s += '<p><b>Checked in here:</b>'\n s += (herenow['groups'] || []).map { |g|\n (g['items'] || []).map { |c| venue_checkin_fmt c, dnow }.join ''\n }.join ''\n end\n\n s += tips_fmt(venue['tips'] || [])\n s += specials_fmt(venue['specials'] || [])\n s += specials_fmt(venue['specialsNearby'] || [], true)\n\n photos = venue['photos']\n count = (photos ? photos['count'] : 0) || 0\n\n s += if count == 0\n '<p>-- No photos --'\n else\n photos['groups'].map { |group|\n \"<p>-- #{escapeHTML group['name']}: #{escapeHTML group['count']} --\" + group['items'].map { |p|\n photo_fmt p, dnow, :venue_id => venue['id']\n }.join('')\n }.join ''\n end\n\n s += <<-EOM\n<p>\n<form style=\\\"margin:0; padding:0;\\\" enctype=\\\"multipart/form-data\\\" action=\\\"/addphoto\\\" method=\\\"post\\\">\n<input type=\\\"file\\\" name=\\\"photo\\\"><br>\n<input type=\\\"hidden\\\" value=\\\"#{escapeHTML venue['id']}\\\" name=\\\"venid\\\">\n<input type=\\\"submit\\\" value=\\\"Add JPEG photo\\\"><br>\n</form>\n EOM\n\n s\nend", "def show\n @venue_type = VenueType.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @venue_type }\n end\n end", "def for_venue\n result = Event.search query: { match: { 'venue.name' => params[:v] } }\n\n render json: { events: result, total: result.total }\n end", "def venue_by_number\n all_venues = Venue.all.find_each.with_index do |venue, index|\n puts \"#{index + 1}. #{venue.name}\"\n all_venues\n end\n puts \"\\n #{\"*\" * 23}\"\n puts \"-- Enter a Venue's number for more info:\".colorize(:blue)\n num_select = gets.chomp.to_i\n if num_select == Venue.all.find_by(id: num_select).id\n ticket_info = Venue.all.find_by(id: num_select).tickets.first.band_name\n venue_name = Venue.all.find_by(id: num_select).name\n puts \"#{venue_name} has the following shows: #{ticket_info}.\".colorize(:blue)\n puts \"Would you like to see who has tickets? Y/N\".colorize(:blue)\n view_ticketholder = gets.chomp\n if view_ticketholder.downcase == \"y\"\n user_id = Venue.all.find_by(id: num_select).tickets.first.user_id\n puts \"-- #{User.find(user_id).name} has an extra ticket. We're working to let you message them soon!\".colorize(:blue)\n puts \" \"\n else\n puts \"It looks like there aren't any extra tickets to upcoming shows at #{venue_name}.\".colorize(:blue)\n end\n self.user_menu(user)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /communities/new GET /communities/new.json
def new @community = Community.new respond_to do |format| format.html # new.html.erb format.json { render json: @community } end end
[ "def new\n @community = @district.communities.new\n\n respond_to do |format|\n format.html\n format.json { render json: @community }\n end\n end", "def create\n @community = current_user.communities.new(community_params)\n\n respond_to do |format|\n if @community.save\n format.html { redirect_to @community, notice: 'Community was successfully created.' }\n format.json { render :show, status: :created, location: @community }\n else\n format.html { render :new }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @community = @district.communities.new(params[:community])\n\n respond_to do |format|\n if @community.save\n format.html { redirect_to admin_district_communities_url, notice: 'Community was successfully created.' }\n format.json { render json: @community, status: :created, location: @community }\n else\n format.html { render action: \"new\" }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @community_type = CommunityType.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @community_type }\n end\n end", "def new\n @community_level = CommunityLevel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @community_level }\n end\n end", "def new\n @communities_user = CommunitiesUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @communities_user }\n end\n end", "def new\n @community = Community.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @community }\n end\n end", "def new\n @commune = Commune.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @commune }\n end\n end", "def new\n @new_comm = NewComm.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @new_comm }\n end\n end", "def new\n @comm = Comm.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @comm }\n end\n end", "def new\n @communication = Communication.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @communication }\n end\n end", "def new\n @community_post = CommunityPost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @community_post }\n end\n end", "def new\n @social_network = SocialNetwork.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @social_network }\n end\n end", "def create\n @community = current_user.communities.build(community_params)\n respond_to do |format|\n if @community.save\n @community.reindex\n format.html { redirect_to @community, notice: 'Community was successfully created.' }\n format.json { render :show, status: :created, location: @community }\n else\n format.html { render :new }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @communicatable = Communicatable.new(communicatable_params)\n\n respond_to do |format|\n if @communicatable.save\n format.html { redirect_to @communicatable, notice: 'Communicatable was successfully created.' 
}\n format.json { render :show, status: :created, location: @communicatable }\n else\n format.html { render :new }\n format.json { render json: @communicatable.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @colaboration = Colaboration.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @colaboration }\n end\n end", "def new\n @network = Network.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @network }\n end\n end", "def new\n @network = Network.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @network }\n end\n end", "def new\n @committees_voivodeship = CommitteesVoivodeship.new\n\t@committees = Committee.all.map do |commi|\n\t\t[commi.id]\n\tend\n\t@voivodeships = Voivodeship.all.map do |voi|\n\t\t[voi.id]\n\tend\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @committees_voivodeship }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /communities/1 PUT /communities/1.json
def update @community = Community.find(params[:id]) respond_to do |format| if @community.update_attributes(params[:community]) format.html { redirect_to @community, notice: 'Community was successfully updated.' } format.json { head :no_content } else format.html { render action: "edit" } format.json { render json: @community.errors, status: :unprocessable_entity } end end end
[ "def update\n @community = Community.find(params[:id])\n\n if @community.update(community_params(params[:community]))\n head :no_content\n else\n render json: @community.errors, status: :unprocessable_entity\n end\n end", "def update\n @community = current_user.own_communities.find(params[:id])\n flash[:notice] = 'Community was successfully updated.' if @community.update_attributes(update_params)\n respond_with(@community)\n end", "def update\n @community = Community.find(params[:id])\n\n respond_to do |format|\n if @community.update_attributes(params[:community])\n format.html { redirect_to admin_district_communities_url, notice: 'Community was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n respond_to do |format|\n if @community.update!(community_params)\n format.html { redirect_to super_communities_path, notice: 'Community was successfully updated.' }\n else\n format.html { render :edit}\n end\n end\n end", "def update\n respond_to do |format|\n if @communicatable.update(communicatable_params)\n format.html { redirect_to @communicatable, notice: 'Communicatable was successfully updated.' }\n format.json { render :show, status: :ok, location: @communicatable }\n else\n format.html { render :edit }\n format.json { render json: @communicatable.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @community = Community.find(params[:id])\n\n respond_to do |format|\n if @community.update_attributes(params[:community])\n format.html { redirect_to(@community, :notice => 'Community was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @community.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @community = current_user.communities.build(community_params)\n respond_to do |format|\n if @community.save\n @community.reindex\n format.html { redirect_to @community, notice: 'Community was successfully created.' }\n format.json { render :show, status: :created, location: @community }\n else\n format.html { render :new }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @community = current_user.communities.new(community_params)\n\n respond_to do |format|\n if @community.save\n format.html { redirect_to @community, notice: 'Community was successfully created.' }\n format.json { render :show, status: :created, location: @community }\n else\n format.html { render :new }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @user_community.update(user_community_params)\n format.html { redirect_to @user_community, notice: 'User community was successfully updated.' }\n format.json { render :show, status: :ok, location: @user_community }\n else\n format.html { render :edit }\n format.json { render json: @user_community.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @discipleship_community.update(discipleship_community_params)\n format.html { redirect_to @discipleship_community, notice: 'Discipleship community was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @discipleship_community }\n else\n format.html { render :edit }\n format.json { render json: @discipleship_community.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @communities_joining.update(communities_joining_params)\n format.html { redirect_to @community, notice: 'メンバーを変更しました' }\n format.json { render :show, status: :ok, location: @communities_joining }\n else\n format.html { render :edit }\n format.json { render json: @communities_joining.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @communities_user = CommunitiesUser.find(params[:id])\n\n respond_to do |format|\n if @communities_user.update_attributes(params[:communities_user])\n format.html { redirect_to(communities_users_path, :notice => 'Communities user was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @communities_user.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @_community_poin.update(_community_poin_params)\n format.html { redirect_to @_community_poin, notice: 'Community poin was successfully updated.' }\n format.json { render :show, status: :ok, location: @_community_poin }\n else\n format.html { render :edit }\n format.json { render json: @_community_poin.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @communities = Community.all\n render json: {items: @communities}\n end", "def update\n if @community_umkm.update(community_umkm_params)\n render :show, status: :ok, location: @community_umkm\n else\n render json: @community_umkm.errors, status: :unprocessable_entity\n end\n end", "def destroy\n @community = Community.find(params[:id])\n @community.destroy\n\n respond_to do |format|\n format.html { redirect_to communities_url }\n format.json { head :ok }\n end\n end", "def create\n @community = @district.communities.new(params[:community])\n\n respond_to do |format|\n if @community.save\n format.html { redirect_to admin_district_communities_url, notice: 'Community was successfully created.' }\n format.json { render json: @community, status: :created, location: @community }\n else\n format.html { render action: \"new\" }\n format.json { render json: @community.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @community_service.update(community_service_params)\n format.html { redirect_to @community_service, notice: 'Community service was successfully updated.' }\n format.json { render :show, status: :ok, location: @community_service }\n else\n format.html { render :edit }\n format.json { render json: @community_service.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @community = Community.find(params[:id])\n @community.destroy\n\n respond_to do |format|\n format.html { redirect_to communities_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /communities/1 DELETE /communities/1.json
def destroy @community = Community.find(params[:id]) @community.destroy respond_to do |format| format.html { redirect_to communities_url } format.json { head :no_content } end end
[ "def destroy\n @community = Community.find(params[:id])\n @community.destroy\n\n respond_to do |format|\n format.html { redirect_to communities_url }\n format.json { head :ok }\n end\n end", "def destroy\n @community.destroy\n respond_to do |format|\n format.html { redirect_to communities_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @community.destroy\n respond_to do |format|\n format.html { redirect_to communities_url, notice: '작성한 게시글이 삭제되었습니다.' }\n format.json { head :no_content }\n end\n end", "def destroy\n\n @community.destroy\n respond_to do |format|\n format.html { redirect_to admin_communities_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @community = Community.find(params[:id])\n @community.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_district_communities_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @community = Community.find(params[:id])\n @community.destroy\n\n respond_to do |format|\n format.html { redirect_to(communities_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @discipleship_community.destroy\n respond_to do |format|\n format.html { redirect_to discipleship_communities_url, notice: 'Discipleship community was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @communities_joining.destroy\n respond_to do |format|\n format.html { redirect_to community_joinings_path(params[:community_id]), notice: 'メンバーは削除されました' }\n format.json { head :no_content }\n end\n end", "def destroy\n @commune = Commune.find(params[:id])\n @commune.destroy\n\n respond_to do |format|\n format.html { redirect_to communes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @communicatable.destroy\n respond_to do |format|\n format.html { redirect_to communicatables_url, notice: 'Communicatable was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @user_community.destroy\n respond_to do |format|\n format.html { redirect_to user_communities_url, notice: 'User community was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @communities_user = CommunitiesUser.find(params[:id])\n @communities_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(communities_users_url) }\n format.xml { head :ok }\n end\n end", "def delete \n\t@community = Community.find(params[:id])\nend", "def destroy\n DspaceCommunity.destroy(@dspace_community.id)\n respond_to do |format|\n format.html { redirect_to dspace_communities_url, notice: 'Dspace community was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @communication = Communication.find(params[:id])\n @communication.destroy\n\n respond_to do |format|\n format.html { redirect_to communications_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @_community_poin.destroy\n respond_to do |format|\n format.html { redirect_to _community_poins_url, notice: 'Community poin was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @community_member.destroy\n respond_to do |format|\n format.html { redirect_to community_members_url, notice: 'Community member was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @community_content.destroy\n respond_to do |format|\n format.html { redirect_to community_contents_url, notice: 'Community content was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @community_config.destroy\n respond_to do |format|\n format.html { redirect_to community_configs_url, notice: 'Community config was successfully destroyed.' }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load appium.txt (toml format) into system ENV the basedir of this file + appium.txt is what's used
def load_appium_txt opts raise 'opts must be a hash' unless opts.kind_of? Hash opts.each_pair { |k,v| opts[k.to_s.downcase.strip.intern] = v } opts = {} if opts.nil? file = opts.fetch :file, nil raise 'Must pass file' unless file verbose = opts.fetch :verbose, false # Check for env vars in .txt parent_dir = File.dirname file toml = File.expand_path File.join parent_dir, 'appium.txt' puts "appium.txt path: #{toml}" if verbose # @private def update data, *args args.each do |name| var = data[name] ENV[name] = var if var end end toml_exists = File.exists? toml puts "Exists? #{toml_exists}" if verbose data = nil if toml_exists require 'toml' puts "Loading #{toml}" if verbose # bash requires A="OK" # toml requires A = "OK" # # A="OK" => A = "OK" data = File.read toml data = data.split("\n").map do |line| line.sub /([^\s])\=/, "\\1 = " end.join "\n" data = TOML::Parser.new(data).parsed ap data unless data.empty? if verbose update data, 'APP_PATH', 'APP_APK', 'APP_PACKAGE', 'APP_ACTIVITY', 'APP_WAIT_ACTIVITY', 'DEVICE' # ensure app path is resolved correctly from the context of the .txt file ENV['APP_PATH'] = Appium::Driver.absolute_app_path ENV['APP_PATH'] end # return list of require files as an array # nil if require doesn't exist if data && data['require'] r = data['require'] r = r.kind_of?(Array) ? r : [ r ] # ensure files are absolute r.map! do |file| file = file.include?(File::Separator) ? file : File.join(parent_dir, file) file = File.expand_path file File.exists?(file) ? file : nil end r.compact! # remove nils files = [] # now expand dirs r.each do |item| unless File.directory? item # save file files << item next # only look inside folders end Dir.glob(File.join(item, '**/*.rb')) do |file| # do not add folders to the file list files << File.expand_path(file) unless File.directory? file end end files end end
[ "def load_appium_txt opts\n raise 'opts must be a hash' unless opts.kind_of? Hash\n opts.each_pair { |k,v| opts[k.to_s.downcase.strip.intern] = v }\n opts = {} if opts.nil?\n file = opts.fetch :file, nil\n raise 'Must pass file' unless file\n verbose = opts.fetch :verbose, false\n # Check for env vars in .txt\n parent_dir = File.dirname file\n toml = File.expand_path File.join parent_dir, 'appium.txt'\n puts \"appium.txt path: #{toml}\" if verbose\n # @private\n def update data, *args\n args.each do |name|\n var = data[name]\n ENV[name] = var if var\n end\n end\n\n toml_exists = File.exists? toml\n puts \"Exists? #{toml_exists}\" if verbose\n data = nil\n\n if toml_exists\n require 'toml'\n require 'ap'\n puts \"Loading #{toml}\" if verbose\n\n # bash requires A=\"OK\"\n # toml requires A = \"OK\"\n #\n # A=\"OK\" => A = \"OK\"\n data = File.read toml\n\n data = data.split(\"\\n\").map do |line|\n line.sub /([^\\s])\\=/, \"\\\\1 = \"\n end.join \"\\n\"\n\n data = TOML::Parser.new(data).parsed\n ap data unless data.empty? if verbose\n\n update data, 'APP_PATH', 'APP_APK', 'APP_PACKAGE',\n 'APP_ACTIVITY', 'APP_WAIT_ACTIVITY',\n 'DEVICE'\n\n # Ensure app path is absolute\n ENV['APP_PATH'] = File.expand_path ENV['APP_PATH'] if ENV['APP_PATH'] &&\n !ENV['APP_PATH'].empty?\n\n if ! %w(ios android selendroid).include? ENV['DEVICE']\n raise 'DEVICE must be ios, android, or selendroid'\n end\n end\n\n # return list of require files as an array\n # nil if require doesn't exist\n if data && data['require']\n r = data['require']\n r = r.kind_of?(Array) ? r : [ r ]\n # ensure files are absolute\n r.map! do |file|\n file = file.include?(File::Separator) ? file :\n File.join(parent_dir, file)\n file = File.expand_path file\n File.exists?(file) ? file : nil\n end\n r.compact # remove nils\n end\nend", "def read_launchfile\n eval(File.read('Launchfile'))\nend", "def root_dir\n __FILE__.match(%r{.*(appium-test-runner)}).to_s\n end", "def setup_app_files\n cp PADRINO_TEMPLATES.join('config/padrino-vite.json'), config.config_path\n inject_line_after root.join('app/app.rb'), 'register', ' register VitePadrino'\n append root.join('Rakefile'), <<~RAKE\n require 'vite_padrino'\n ViteRuby.install_tasks\n RAKE\n end", "def make_app_file(project_name)\n file = File.open(\"#{project_name}/app.rb\", 'w')\n file.puts \"require 'bundler'\"\n file.puts \"Bundler.require\"\n file.close\n path = system(\"pwd\")\n end", "def user_data\n conf = Bowline.configuration\n case Bowline::Platform.type\n when :linux\n File.join(home, \".\" + conf.name)\n when :win32\n File.join(home, \"Application Data\", conf.name)\n when :osx\n File.join(home, \"Library\", \"Application Support\", conf.name)\n end\n end", "def make_app_file(project_name)\n file = File.open(\"#{project_name}/app.rb\", 'w')\n file.puts \"require 'bundler'\"\n file.puts \"Bundler.require\"\n file.close\n path = system(\"pwd\")\nend", "def load_app_config_file(filename)\n config_file = find_config_file(filename)\n config = YAML.load_file(config_file)\n app_context = config[\"http\"][\"rootPath\"].split(\"/\")[1]\n\n $config[app_context] = config\nend", "def load_custom_file(env_binding)\n if File.exists?(File.join(HOST_PROJECT_PATH,'Customfile')) then\n eval(IO.read(File.join(HOST_PROJECT_PATH,'Customfile')), env_binding)\n end\nend", "def load_monkfile\n file = find_in_project(\"Monkfile\")\n\n if file\n load file\n @project = File.dirname(file)\n Dir.chdir @project\n end\n end", "def env_file\n\t\tdir['env.rb']\n\tend", "def config_file\n @config_file ||= 
File.exists?('config/hirb.yml') ? 'config/hirb.yml' :\n File.expand_path(File.join(ENV[\"HOME\"] || \".\", \".hirb.yml\"))\n end", "def env_file\n dir['env.rb']\n end", "def setup_app_files\n cp HANAMI_TEMPLATES.join('config/hanami-vite.json'), config.config_path\n inject_line_after root.join('config/environment.rb'), 'environment :development do', ' middleware.use(ViteRuby::DevServerProxy, ssl_verify_none: true) if ViteRuby.run_proxy?'\n inject_line_after_last root.join('apps/web/application.rb'), 'include Web::Assets::Helpers', ' include ViteHanami::TagHelpers'\n inject_line_after root.join('apps/web/application.rb'), 'configure :development do', <<-CSP\n # Allow @vite/client to hot reload changes in development\n security.content_security_policy(\n security.content_security_policy\n .sub('script-src', \"script-src 'unsafe-eval'\")\n .sub('connect-src', \"connect-src ws://\\#{ ViteRuby.config.host_with_port }\")\n )\n CSP\n append root.join('Rakefile'), <<~RAKE\n require 'vite_hanami'\n ViteRuby.install_tasks\n RAKE\n end", "def load_launcher_data launcher_data_file=\"bin/LAUNCHER_TYPE\"\n launcher_data = nil\n\n begin\n File.open launcher_data_file do |f|\n launcher_data = YAML.load(f.read)\n end\n rescue Errno::ENOENT\n end\n\n return launcher_data\nend", "def configure_data\n [:bitcask, :eleveldb, :merge_index].each {|k| env[k] ||= {} }\n env[:bitcask][:data_root] ||= (data + 'bitcask').expand_path.to_s\n env[:eleveldb][:data_root] ||= (data + 'leveldb').expand_path.to_s\n env[:merge_index][:data_root] ||= (data + 'merge_index').expand_path.to_s\n env[:riak_core][:slide_private_dir] ||= (data + 'slide-data').expand_path.to_s\n env[:riak_core][:ring_state_dir] ||= (data + 'ring').expand_path.to_s\n\n TS_NODE_DIRECTORIES.each do |dir|\n env[:riak_core][:\"platform_#{dir}_dir\"] ||= send(dir).to_s\n end\n end", "def app_root= path\n self.init_from path\n end", "def load_launcher_data(launcher_data_file=\"bin/LAUNCHER_TYPE\")\n launcher_data = nil\n\n begin\n File.open launcher_data_file do |f|\n launcher_data = YAML.load(f.read)\n end\n rescue Errno::ENOENT\n end\n\n return launcher_data\nend", "def app_file_path\n File.join(tmp_dir, \"example_app_#{$example_app_counter}.rb\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the server url for sauce or local based on env vars.
def server_url return @custom_url if @custom_url if !@sauce_username.nil? && !@sauce_access_key.nil? "http://#{@sauce_username}:#{@sauce_access_key}@ondemand.saucelabs.com:80/wd/hub" else "http://127.0.0.1:#{@port}/wd/hub" end end
[ "def server_url\n port = config[:ports].sample\n \"#{config[:hostname]}:#{port}#{config[:pathname]}\"\n end", "def env_url\n ENV['SWAN_URL']\n end", "def env_url\n ENV['CHRONOS_URL']\n end", "def get_base_uri(server = Server::DEFAULT)\r\n ENVIRONMENTS[environment][server].clone\r\n end", "def get_base_url\n @environment_mode == \"production\" ? BASE_PRODUCTION_URL : BASE_DEVELOPMENT_URL\n end", "def installers_url\n if ENV['APP_URL']\n \"#{ENV['APP_URL']}/\"\n else\n ''\n end\n end", "def kryptonite_config_hostname\n if ENV['RAILS_ENV'] == 'production'\n 'http://www.kryptonitecms.com'\n else\n 'http://localhost:3000'\n end\n end", "def base_url\n get_data_from_yml_file(\"environment_urls.yml\")[\"#{application_environment}_BASE_URL\"]\n end", "def _host_from_env\n ENV['BEANSTALKD_URL'].respond_to?(:length) && ENV['BEANSTALKD_URL'].length > 0 && ENV['BEANSTALKD_URL'].strip\n end", "def _host_from_env\n ENV['ALLQ_CLIENT_URL'].respond_to?(:length) && ENV['ALLQ_CLIENT_URL'].length > 0 && ENV['ALLQ_CLIENT_URL'].strip\n end", "def server_url\n @uri\n end", "def auth_server(environment)\n auth_server_url = ENV['ADSAPI_AUTH_URL']\n if auth_server_url.nil?\n environment = environment.upcase.to_sym\n auth_server_url = auth_server_config[environment]\n end\n if auth_server_url.nil?\n # If we don't have an entry for this environment, we just return the\n # default server (the same one being used for the default environment)\n auth_server_url = auth_server_config[default_environment()]\n end\n return auth_server_url\n end", "def host_url_from_rack_env env\n port = ((env[\"SERVER_PORT\"] == 80) && \"\") || \":#{env['SERVER_PORT']}\"\n host = (env[\"HTTP_HOST\"]) || (env[\"SERVER_NAME\"] + port)\n \"#{scheme(env)}://#{host}\"\n end", "def myservices_environment_details_host\n ENV['ENV_DETAILS'].nil? ? 'esu2v871:9080' : ENV['ENV_DETAILS']\n end", "def chef_server_url\n Chef::Config[:chef_server_url]\n end", "def server_path\n guarded_config_env_value('server_path', 'P_SERVER_PATH')\n end", "def get_url\n url_option = ENV[CERBERUS_URL_ENV_KEY]\n\n if(url_option != nil)\n return url_option\n else\n raise Cerberus::Exception::NoValueError\n end\n end", "def main_site_host\n case Rails.env\n when 'development'\n # '192.168.1.140' # to test in ie\n # 'ngoaidmap.dev'\n 'iom.dev'\n when 'test'\n 'example.com'\n when 'staging'\n Settings.main_site_host\n when 'production'\n Settings.main_site_host\n end\n end", "def uri_from env\n \"#{ env['rack.url_scheme'] }://#{ env['SERVER_NAME'] }\"\\\n \":#{ env['SERVER_PORT'] }#{ env['PATH_INFO'] }\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a png screenshot and saves to the target path. Example: screenshot '/tmp/hi.png'
def screenshot png_save_path @driver.save_screenshot png_save_path nil end
[ "def screenshot png_save_path\n @driver.save_screenshot png_save_path\n end", "def screenshot(png_save_path)\n @driver.save_screenshot png_save_path\n end", "def screenshot!\n page.save_screenshot(Rails.root.join('tmp/rspec_screens/screenshot.png'))\n end", "def make_screenshot_here\n data_time = Time.now.strftime('%d-%m-%Y_%H:%M')\n puts \"Saving screenshot #{data_time}.png ...\"\n page.save_screenshot('tmp/screens/' + data_time + '.png')\n end", "def save_shot(screenshot)\n\t\t@browser.screenshot.save(screenshot)\n\tend", "def save_screenshot(url, path)\n load_url url\n @driver.save_screenshot path\n end", "def save_screenshot\n @suite.p \"-- CAPTURE SCREENSHOT ::\"\n begin\n screenshot_flag = true\n filename = (ENV['REPORTS_DIR'] + \"/\" + self.class.name + '.png')\n @suite.capture_screenshot(filename)\n @suite.p \"-- SCREENSHOT CAPTURED TO: {#{filename}}\"\n screenshot_flag = false\n rescue => e\n if screenshot_flag\n @suite.p \"FAILED TO CAPTURE SCREENSHOT: \"\n @suite.p e.inspect\n @suite.p e.backtrace\n end\n end\n end", "def screenshoot(url, filename)\n unless File.exists?(\"#{IMG_DIR}/#{filename}.png\")\n system \"python webkit2png.py -t #{IMG_TIMEOUT} -o #{IMG_DIR}/#{filename}.png #{url} \"\n else \n puts \"Already screenshoted: #{IMG_DIR}/#{filename}.png\"\n end\nend", "def saos\n save_and_open_screenshot\n end", "def save_step(step)\n FileUtils.mkdir_p final_path unless Dir.exist? final_path\n path_to_ss = \"#{final_path}/#{step.text}.png\"\n screenshot(path_to_ss.to_s)\n end", "def make_screenshot\n Logbook.step('Taking a screenshot of a result page')\n @browser.save_screenshot(\"screenshots/screenshot - #{Time.now.strftime('%Y-%m-%d %H-%M-%S')}.png\")\n end", "def store_screenshot(path)\n screenshot = screenshots.first\n if (screenshot)\n begin \n variant = screenshot.variant(resize_to_limit: [425, nil], resize_to_fill: [425, 250, { crop: :low }]).processed\n path = variant.blob.service.send(:path_for, variant.key)\n FileUtils.cp(path, \"/Users/jan.prill/Documents/workspace/msp/inviadorepo/web/js/gridsome/inviado/src/assets/images/inviado/#{id}.png\")\n rescue\n p \"There is a problem on #{variant}\"\n end\n end\n end", "def screenshot(filename=nil)\n fn = filename || @ticket || SecureRandom.uuid.to_s\n f = \"screenshots/#{fn}.png\"\n @session.save_screenshot(f)\n puts \"Saved #{f}:\"\n return f\n end", "def save_screenshot(file_name = nil)\n $focus_driver = self\n file_name = \"#{Pathname.pwd}/#{$conf['screenshot_location']}/#{Time.new.strftime(\"%Y-%m-%d-%H-%M-%S\")}.png\" if file_name.nil?\n @driver.save_screenshot(file_name)\n end", "def save(path)\n @driver.save_screenshot(path)\n end", "def screenshot\n @screen_local = true\n redraw\n export(TH::Map_Saver::Screenshot_Directory)\n $game_message.add(\"Screenshot taken\")\n end", "def screenshot\n @browser.save_screenshot(\"screenshot.png\")\n end", "def png\n @driver.screenshot_as(:png)\n end", "def screen_capture(fileName)\n return $marathon.saveScreenShot(fileName)\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set implicit wait and default_wait to zero.
def no_wait @last_waits = [@default_wait, 0] @default_wait = 0 @driver.manage.timeouts.implicit_wait = 0 end
[ "def no_wait\n @last_waits = [@default_wait, 0]\n @default_wait = 0\n @driver.manage.timeouts.implicit_wait = 0\n end", "def default_wait\n @default_wait\n end", "def set_wait timeout=@default_wait\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def set_wait timeout=nil\n if timeout.nil?\n # puts \"timeout = @default_wait = @last_wait\"\n # puts \"timeout = @default_wait = #{@last_waits}\"\n timeout = @default_wait = @last_waits.first\n else\n @default_wait = timeout\n # puts \"last waits before: #{@last_waits}\"\n @last_waits = [@last_waits.last, @default_wait]\n # puts \"last waits after: #{@last_waits}\"\n end\n\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def no_wait\n @driver.manage.timeouts.implicit_wait = 0\n end", "def set_wait(timeout = nil)\n if timeout.nil?\n # Appium::Logger.info \"timeout = @default_wait = @last_wait\"\n # Appium::Logger.info \"timeout = @default_wait = #{@last_waits}\"\n timeout = @default_wait = @last_waits.first\n else\n @default_wait = timeout\n # Appium::Logger.info \"last waits before: #{@last_waits}\"\n @last_waits = [@last_waits.last, @default_wait]\n # Appium::Logger.info \"last waits after: #{@last_waits}\"\n end\n\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def wait\n return if set?\n \n sleep(0.01) until set?\n end", "def skip_waiting\n @wait_policy = SkipWaitPolicy.new\n end", "def reset_wait\n @wait = @t + rand(@t_rand)\n end", "def update_waiting\n @wait -= 1\n @wait_handler.call if @wait_handler && @wait == 0\n end", "def no_wait(&block)\n Capybara.using_wait_time(0, &block)\n end", "def wait_timeout\n @wait_timeout ||= options[:wait_timeout] || DEFAULT_WAIT_TIMEOUT\n end", "def setLockWait(sWait)\n \n @_sLockWait = sWait\n end", "def set_max_wait timeout_seconds\n driver.set_max_wait timeout_seconds\n end", "def abs_wait_short\n wait(15)\n end", "def default_wait_for_time\n 5\n end", "def wait()\n go_to_termination(false)\n end", "def set_waitpid_blocking_sleep(period)\n @waitpid_blocking_sleep = period\n end", "def set_wait_flags(*flags)\n # This method is tester-specific and must be overridden by the child class\n fail 'The #{self.class} class has not defined a set_wait_flags method!'\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set implicit wait and default_wait to timeout, defaults to 30. if set_wait is called without a param then the second to last wait will be used. ```ruby set_wait 2 set_wait 3 set_wait 2 ```
def set_wait timeout=nil if timeout.nil? # puts "timeout = @default_wait = @last_wait" # puts "timeout = @default_wait = #{@last_waits}" timeout = @default_wait = @last_waits.first else @default_wait = timeout # puts "last waits before: #{@last_waits}" @last_waits = [@last_waits.last, @default_wait] # puts "last waits after: #{@last_waits}" end @driver.manage.timeouts.implicit_wait = timeout end
[ "def set_wait timeout=@default_wait\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def set_wait(timeout = nil)\n if timeout.nil?\n # Appium::Logger.info \"timeout = @default_wait = @last_wait\"\n # Appium::Logger.info \"timeout = @default_wait = #{@last_waits}\"\n timeout = @default_wait = @last_waits.first\n else\n @default_wait = timeout\n # Appium::Logger.info \"last waits before: #{@last_waits}\"\n @last_waits = [@last_waits.last, @default_wait]\n # Appium::Logger.info \"last waits after: #{@last_waits}\"\n end\n\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def set_max_wait timeout_seconds\n driver.set_max_wait timeout_seconds\n end", "def default_wait\n @default_wait\n end", "def wait_timeout\n @wait_timeout ||= options[:wait_timeout] || DEFAULT_WAIT_TIMEOUT\n end", "def set_max_wait_time\n q = 'Set the MAX WAIT TIME after executing the RESTART command ' \\\n '(>= 180 secs): '\n until @max_wait && @max_wait.to_i > 179\n @max_wait = Utils.qna(q.cyan, true)\n end\n end", "def setLockWait(sWait)\n \n @_sLockWait = sWait\n end", "def set_TimeToWait(value)\n set_input(\"TimeToWait\", value)\n end", "def wait\n return if set?\n \n sleep(0.01) until set?\n end", "def wait(milliseconds:)\n @response[:wait] = milliseconds\n self\n end", "def default_wait_for_time\n 5\n end", "def set_max_page_wait timeout_seconds\n driver.set_max_page_wait timeout_seconds\n end", "def wait_for(wait_max: 3, step: 0.001, &block)\n stop_at = wait_max.seconds.from_now\n\n sleep step while !block.call && (@time = Time.now) < stop_at\n\n fail \"Timeout of #{wait_max} seconds exceeded!\" unless @time < stop_at\nend", "def no_wait\n @last_waits = [@default_wait, 0]\n @default_wait = 0\n @driver.manage.timeouts.implicit_wait = 0\n end", "def do_wait(waited)\n wait = get_config(:docker_wait)\n return unless wait.is_a?(Integer) || wait.is_a?(Float)\n return if waited >= wait\n sleep(wait - waited)\n end", "def set_wait_flags(*flags)\n # This method is tester-specific and must be overridden by the child class\n fail 'The #{self.class} class has not defined a set_wait_flags method!'\n end", "def set_timeout(timeout)\n @log.info('Setting the selenium timeout to: ' + timeout.to_s)\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def state_wait(set, state, timeout=1200)\n # do a special wait, if waiting for operational (for dns)\n if state == \"operational\"\n set.each { |server| transaction { server.wait_for_operational_with_dns(timeout) } }\n else\n set.each { |server| transaction { server.wait_for_state(state, timeout) } }\n end\n end", "def abs_wait_short\n wait(15)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the default client side wait. This value is independent of what the server is using
def default_wait @default_wait end
[ "def wait_timeout\n @wait_timeout ||= options[:wait_timeout] || DEFAULT_WAIT_TIMEOUT\n end", "def default_wait_for_time\n 5\n end", "def wait_time\n @wait_time ||= (ENV[\"QC_LISTEN_TIME\"] || 5).to_i\n end", "def waiter_options\n @waiter_options || {}\n end", "def wait_for_status\n if Chewy.configuration[:wait_for_status].present?\n client.cluster.health wait_for_status: Chewy.configuration[:wait_for_status]\n end\n end", "def secs_to_wait\n 7\nend", "def io_wait\n aruba.config.io_wait_timeout\n end", "def balloon_wait\n return (@battler.nil? ? BALLOON_WAIT : @battler.balloon_wait)\n end", "def retry_max_wait\n @retry_max_wait ||= Vault::Defaults::RETRY_MAX_WAIT\n end", "def is_a_wait?\r\n return (@kind == 0) && (WAIT_USING.include?(@basic) || @basic == 3)\r\n end", "def wait(handler)\n handler.wait(\n max_wait_timeout: @config.max_wait_timeout,\n wait_timeout: @config.wait_timeout\n )\n end", "def abs_wait_short\n wait(15)\n end", "def max_wait_time\n @data[\"max_wait_time\"]\n end", "def set_wait timeout=@default_wait\n @driver.manage.timeouts.implicit_wait = timeout\n end", "def wait_for(options)\n if options[:wait_for] == :page\n wait_for_page options[:timeout_in_seconds]\n\t elsif options[:wait_for] == :ajax\n\t wait_for_ajax options\n\t elsif options[:wait_for] == :element\n\t wait_for_element options[:element], options\n\t elsif options[:wait_for] == :no_element\n\t wait_for_no_element options[:element], options\n\t elsif options[:wait_for] == :text\n\t wait_for_text options[:text], options\n\t elsif options[:wait_for] == :no_text\n wait_for_no_text options[:text], options\n\t elsif options[:wait_for] == :effects\n\t wait_for_effects options\n elsif options[:wait_for] == :popup\n wait_for_popup options[:window], options[:timeout_in_seconds]\n select_window options[:window] if options[:select]\n elsif options[:wait_for] == :value\n wait_for_field_value options[:element], options[:value], options\n elsif options[:wait_for] == :no_value\n wait_for_no_field_value options[:element], options[:value], options\n elsif options[:wait_for] == :visible\n wait_for_visible options[:element], options\n elsif options[:wait_for] == :not_visible\n wait_for_not_visible options[:element], options\n\t elsif options[:wait_for] == :condition\n\t wait_for_condition options[:javascript], options[:timeout_in_seconds]\n end\n end", "def wait_time_for(message)\n case Command.response_for(message)\n when :required\n response_wait\n when :error_only\n error_wait\n else\n 0\n end\n end", "def name_with_optional_wait name, options\r\n if options[:wait] then \"#{name}AndWait\" else name end\r\n end", "def wait_for(request, property = T.unsafe(nil)); end", "def waitpid_blocking_sleep\n @waitpid_blocking_sleep\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Quit the driver and Pry. quit and exit are reserved by Pry.
def x driver_quit exit # exit pry end
[ "def quit\n exit(1)\n end", "def command_quit\n\t exit(0) \n end", "def quit; @quit = 1 end", "def exit_program\n exit\n end", "def terminate\n @driver.quit\n end", "def quit\r\n Log.info(\"Closing browser..\")\r\n @driver.quit\r\n end", "def quit\r\n raise Shells::NotRunning unless running?\r\n raise Shells::ShellBase::QuitNow\r\n end", "def stop_driver\n @driver.quit rescue nil\n @driver = nil\n end", "def quit\n @session.terminate if @session\n @session = nil\n end", "def quit exit_code = 0, message = nil\n warn message if message\n exit exit_code\n end", "def quit\n @browser.quit\n @headless.destroy if @opts[:headless?] && !@opts[:sauce?]\n if @opts[:logging?]\n report(\"--END--\")\n end\n end", "def exit\n @main_loop = false\n end", "def exit\n Rushmate::Exit\n end", "def exit\n Call.libusb_exit(@ctx)\n end", "def exit\n clear\n ascii.exit_screen\n sleep(3)\n clear\n end", "def quitCmd\n\tputs \"Cient sent a quit command\"\nend", "def cmd_quit(param)\n send_response \"221 Bye\"\n close_datasocket\n close_connection_after_writing\n end", "def quit!\n @done = true\n @quit = true\n\n # If quit! is called from other than a command, we need to interrupt the\n # breakpoint listener thread\n unless @debug_thread\n @breakpoint_tracker.debug_channel.send nil\n @breakpoint_listener.join\n end\n end", "def force_quit; @quit = 2 end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Finds and returns the 1st node whose value is 'value'
def find(value) self.each {|node| return node if node.value == value} end
[ "def node_with_value(value)\n current_node = @head\n while current_node.value\n return current_node if current_node.value == value\n current_node = current_node.next_node\n end\n end", "def get(value)\n @children.each { |child| return child if child.value == value }\n return nil\n end", "def find_value(value, node = @root)\n position = @nodes[node].find_subtree(value)\n if position==false\n {:node => node, :find => true}\n else\n if @nodes[node].leaf? || @nodes[node].sub_trees[position].nil? then {:node => node, :find => false} else find_value(value, @nodes[node].sub_trees[position]) end\n end\n end", "def value_of_node(node_name)\n value_of_nodes(node_name).first\nend", "def find(value)\n idx = 0\n node = list\n while node != nil\n return idx if node.value == value\n idx += 1\n node = node.nextNode\n end\n return nil\n end", "def search(value)\r\n # If the head is nil the list is empty\r\n if self.head.nil?\r\n return nil\r\n end\r\n # Start with the head\r\n element = self.head\r\n loop do\r\n # Check if the element has the correct value\r\n if element.value == value\r\n return element\r\n end\r\n # Return nil if the tail has been reached\r\n if element == self.tail\r\n return nil\r\n end\r\n # Look at the next element in the list\r\n element = element.next_element\r\n end\r\n end", "def search(value)\n node = @root\n while(true)\n return nil if node == nil\n yield node if block_given?\n return node if value == node.value \n if value < node.value \n node = node.left\n else\n node = node.right\n end\n end\n end", "def find value, root_node=@root\n case value <=> root_node.data\n when -1\n find(value, root_node.left)\n when 1\n find(value, root_node.right)\n when 0\n return root_node\n else\n return\n end\n end", "def first_with_value(value)\n collection = with_value(value)\n collection.any? ? collection.first : nil\n end", "def depth_first_search(tree, value)\n tgt_node = nil\n \n stack = Array(tree)\n \n while !stack.empty?\n cur_node = stack.pop\n \n\tif cur_node.value == value\n\t tgt_node = cur_node\n\t break\n\tend\n\t\n\tcur_node.children.reverse_each { |child| stack.push(child) unless child.nil? }\n end\n \n tgt_node\nend", "def find(x)\n node = search_for(x)\n return (node.value != nil) ? node : nil\n end", "def find(value, current_node = @root)\n # Base case: We found the node or past a leaf node\n return current_node if current_node.nil? 
|| current_node.value == value\n\n return find(value, current_node.left) if value < current_node.value\n\n find(value, current_node.right)\n end", "def path_find_first(path)\n node = @root\n return node.value unless node.value.nil?\n path.each { |path_item|\n node = node.get_child(path_item)\n return nil unless node\n return node.value unless node.value.nil?\n }\n nil\n end", "def closest(value)\n closest = node = @root\n while node\n if value > node.value && node.right && node.right.value < value\n node = node.right\n elsif value < node.value && node.left && node.left.value > value\n node = node.left\n else\n break\n end\n closest = node\n end\n closest\n end", "def depth_first_search node= self.root, value\n\t\tstack =[node]\n\n\t\twhile stack.length > 0\n\t\t\tcurrent = stack.pop\n\t\t\treturn \"Value #{value} found in #{current.to_s}\" if current.value == value\n\t\t\tstack.push(current.left) if current.left\n\t\t\tstack.push(current.right) if current.right\n\t\tend\n\tend", "def first\n searchnode[0]\n end", "def find_at(pos)\n\t\tcounter = 0\n\t\tself.each_node do |node|\n\t\t\tif pos == counter\n\t\t\t\treturn node.value\n\t\t\tend\n\t\tend\n\t\tnil\n\tend", "def first_value\n tree = @nodes[@root]\n loop do\n break if tree.leaf?\n tree = @nodes[tree.sub_trees[0]]\n end\n if tree.size>0 then tree.left else false end\n end", "def find_item value\n self.find { |item| item.value == value }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
find_all(value) finds and returns (in an array) all the nodes whose value is 'value'
def find_all(value) nodes = [] self.each {|node| nodes << node if node.value == value} nodes end
[ "def elements_by_xpath(value)\n find_by_xpath(value)\n end", "def find_all_nodes(xpath, select_result_value=false)\n if self.feed_data_type != :xml\n raise \"The feed data type is not xml.\"\n end\n return FeedTools::XmlHelper.try_xpaths_all(self.root_node, [xpath],\n :select_result_value => select_result_value)\n end", "def find_all_nodes(xpath, select_result_value=false)\n if self.feed_data_type != :xml\n raise \"The feed data type is not xml.\"\n end\n return FeedTools::XmlHelper.try_xpaths_all(self.channel_node, [xpath],\n :select_result_value => select_result_value)\n end", "def find(value)\n self.each {|node| return node if node.value == value}\n end", "def find_nodes(label, value = nil, session = Neo4j::Session.current!)\n session.find_nodes(label, value)\n end", "def find_nodes(query)\n query(query).to_a\n end", "def xpath_query_for_multi_value(path)\n xpath_result = @doc.xpath path\n arr = []\n xpath_result.each { |i| arr << i.text.strip }\n arr.uniq\n end", "def xpath_query_for_multi_value(path)\n xpath_result = xpath_query path\n arr = []\n xpath_result.each { |i| arr << i.text.strip }\n arr.uniq\n end", "def xpathall(path,xml)\n r=[]\n XPath.each(xml,path){|x|r<<x}\n r\nend\n", "def search_children(node, attribute, search_term)\n matches = []\n end", "def find_all(conditions)\r\n @root.find_all(conditions)\r\n end", "def get_elements(xpath); end", "def get_nodes(path)\n\t nodes = @node.xpath(path)\n\t return unless nodes\n\t nodes = nodes.map{|node| NokogiriNode.new(nil,node)}\n\t end", "def get_all_vals(node, arr)\n # add the value of the node to the array\n arr << node.val\n \n # using a ternary operator, check if there is another node in the list\n # if so, recursively run the function again\n # if not, return the array\n return node.next ? get_all_vals(node.next, arr) : arr\n end", "def find_all(conditions)\n @root.find_all(conditions)\n end", "def find( key, value )\n descendent_nodes.select do |node|\n if node.attributes[key]\n node.attributes[key] =~ /#{value}/\n else\n false\n end\n end\n end", "def find_all_nodes(*args)\n nodes = @nodes.find_all_nodes(*args)\n nodes.find_all { |n| context?(n) }\n end", "def find_entities_for(value)\n @json.select do |entity|\n nodes[value]&.include? entity['_id']\n end\n end", "def value_of_nodes(node_name)\n ns = nodes(node_name)\n ns.map do |n|\n raise \"failed to find #{node_name.inspect} in #{subject.inspect}\" if n.nil?\n n.content\n end\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
push(value) adds a value 'value' to the end of the linked list
def push(value) last.next_node = Node.new(value, nil) end
[ "def push(data_value)\r\n @head_value = LinkedListNode.new(data_value, @head_value)\r\n end", "def push(value)\n insert(value)\n self\n end", "def append(value)\n node = Node.new\n node.value = value\n if head.nil?\n @head = node\n else\n curr = head\n curr = curr.next until curr.next.nil?\n curr.next = node\n end\n head\n end", "def push(value)\n new_back = QueueNode.new(:value => value)\n \n @back.successor = new_back if @back\n @front = new_back unless @front\n @back = new_back\n \n @size += 1\n end", "def append(value)\n new_node = Node.new(value)\n node = @node\n\n while node.next\n node = node.next\n end\n\n node.next = new_node\n end", "def add_last(value)\r\n \r\n # if list is empty, insert the new value at the head\r\n if @head.nil?\r\n @head = Node.new(value, nil)\r\n return @head\r\n end \r\n \r\n # otherwise, traverse the list from start to last node ...\r\n current = @head\r\n until current.next.nil?\r\n current = current.next\r\n end\r\n \r\n # ... and insert new node after last node\r\n current.next = Node.new(value, nil)\r\n \r\n end", "def add_last(value)\r\n if !@head\r\n\r\n @head = Node.new(value)\r\n else\r\n cur = @head\r\n\r\n while(cur.next)\r\n cur = cur.next\r\n end\r\n\r\n cur.next = Node.new(value)\r\n end\r\n end", "def push(value)\n @queue << value\n end", "def add_at_head(val)\n node = Node.new val\n if @list.nil?\n @list = node\n else\n @list.prev = node\n @list = node\n end\n end", "def add_last(value)\r\n new_node = Node.new(value)\r\n if !@head\r\n @head = new_node\r\n else\r\n cursor = @head\r\n while cursor.next\r\n cursor = cursor.next\r\n end\r\n cursor.next = new_node\r\n end\r\n end", "def append(val)\n\t\t# create a new node\n\t\tnew_node = Node.new(val, nil)\n\t\t# make old tail point to new node\n\t\t@tail.next_node = new_node\n\t\t# update tail\n\t\t@tail = new_node\n\tend", "def insert(value)\n new_node = Node.new(value)\n @head.prev = new_node if @head\n new_node.next = @head\n @tail = new_node unless @tail\n @head = new_node\n @count += 1\n end", "def add_at_head(val)\n new_node = Node.new(val)\n new_node.next = @head\n @head = new_node\n\n end", "def add(value)\n @queue.push(value)\n end", "def push_last(value)\n @driver_instance.push_list_last(@key, value)\n end", "def add value\n raise 'Cannot add nil values to the heap.' if value.nil?\n # push onto the end of the @heap array\n @heap.push(value)\n new_item_index = @heap.size - 1\n # add it to our map\n map_add(value, new_item_index)\n # swim it upward (\"bubble up\") to where it belongs\n swim(new_item_index)\n end", "def push(key, value=key)\n raise ArgumentError, \"Heap keys must not be nil.\" unless key\n node = Node.new(key, value)\n @stored << node\n bubble_up!(size)\n end", "def add(value)\n @add_at_next = 0 unless @add_at_next\n add_at @add_at_next, value\n end", "def add(key, value)\r\n \t\t\t# Create new node for key, value to be added and set next to head \r\n \t\t\t# and then set head to the new node\r\n \t\t\tn = Node.new(key, value, @head)\r\n \t\t\t@head = n\r\n \t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The "type" of dependencies this manager manages. This can be the language, tool, etc.
def type raise LicenseScout::Exceptions::Error.new("All DependencyManagers must have a `#type` method") end
[ "def dependency_type_name\n @values.fetch('dependencyTypeName') { \n @values['dependencyTypeName'] = nil\n }\n end", "def type\n @klass.is_a?(Rubinius::ToolSets::Runtime::ToolSet::AST::Class) ? \"class\" : \"module\"\n end", "def dependency_kind\n @values['dependencyKind']\n end", "def determine_dep_type(name)\n extension = Pathname(name).extname\n case\n when extension == '.a' then :a\n when extension == '.pc' then :pc\n when extension == '.x' then :x\n when name.start_with?('-I') then :incdirflag\n when name.start_with?('-L') then :libdirflag\n when name.start_with?('-l') then :ldflag\n when name.start_with?('-framework') then :ldflag\n end\nend", "def dependency name, version, type = :runtime\n raise \"Unknown dependency type: #{type}\" unless\n [:runtime, :dev, :development, :developer].include? type\n\n ary = if type == :runtime then\n extra_deps\n else\n extra_dev_deps\n end\n\n ary << [name, version]\n end", "def library_type\n str = self.class.to_s[/::(\\w+)Library$/, 1] || 'library'\n str.downcase.to_sym\n end", "def dependencies(type)\n type = type.to_sym\n deps = Dependencies.new\n deps.push(*superclass.dependencies(type)) if superclass.respond_to?(:dependencies)\n deps.push(*my_dependencies.select { |x| x.type == type })\n deps.uniq\n end", "def type\n @props[:type]\n end", "def dependency_type_name=(value)\n if value == @defaults['dependencyTypeName']\n @values.delete 'dependencyTypeName' if @values.key? 'dependencyTypeName'\n else\n @values['dependencyTypeName'] = value\n end\n end", "def package_type\n return @package_type\n end", "def type\n types.first\n end", "def type\n if @type.nil?\n @type = :association\n if [:has_one, :has_many].include?(@association.macro)\n autosaves = @association.options[:autosave]\n dependent = @association.options[:dependent]\n if autosaves || dependent == :nullify\n @type = :aggregation\n elsif dependent == :destroy || dependent == :delete\n @type = :composition\n end\n end\n end\n @type\n end", "def deploys_to_class(type)\n (\n \"Building::%s\" % CONFIG[\"units.#{type.underscore}.deploys_to\"]\n ).constantize\n end", "def algo_type\n @algo_type # instance-level variable\n end", "def external_type\n Doers::Config.external_types.first\n end", "def type\n @type ||= File.extname(path)[1..-1]\n end", "def type\n @config[:caching][:type]\n end", "def type\n @type ||= self.class.to_s.downcase.sub( /resource/, '' )\n end", "def resolve_dependency(dependency_type)\n dependency_class_name = dependency_type.to_s.camelize\n begin\n dependency_class = Class.class_eval(dependency_class_name)\n rescue NameError\n search_paths = (dependency_base_paths || []) + [File.dirname(__FILE__)]\n dependency_file_name = dependency_type + \".rb\"\n search_paths.each do |search_path|\n file_path = File.normalize_path(File.join(search_path, dependency_file_name))\n if File.file?(file_path)\n require File.normalize_path(File.join(search_path, dependency_type))\n break\n end\n end\n dependency_class = Class.class_eval(dependency_class_name)\n end\n dependency_class\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Implementations of this method in subclasses are the methods that are responsible for all the heavy lifting when it comes to determining the dependencies (and their licenses). They should return an array of `LicenseScout::Dependency`.
def dependencies [] end
[ "def licenses\n licenses = []\n uris = metadata[dataset_uri][dct.license.to_s]\n if uris.nil?\n []\n else\n uris.each do |uri|\n l = metadata[uri]\n licenses << License.new(:uri => uri, :name => l[dct.title.to_s])\n end\n return licenses\n end\n rescue\n []\n end", "def dependencies_for(specification)\n []\n end", "def licenses\n licenses = []\n uris = metadata[dataset_uri][RDF::DC.license.to_s]\n if uris.nil?\n []\n else\n uris.each do |license_uri|\n licenses << License.new(:uri => license_uri, :name => first_value( license_uri, RDF::DC.title ))\n end\n return licenses\n end\n rescue => e\n []\n end", "def depend_upon(match_name) #, constraint)\n list = []\n each do |name, libs|\n case libs\n when Library\n list << libs if libs.requirements.any?{ |r| match_name == r['name'] } \n else\n libs.each do |lib|\n list << lib if lib.requirements.any?{ |r| match_name == r['name'] } \n end\n end\n end\n list\n end", "def licenses\n @licenses ||= self.details['licenses'].collect{|l| License.new(self, l)}\n end", "def dependencies\n @dependencies.collect { |name, dependency| dependency }\n end", "def licenses\n @licenses ||= matched_files.map(&:license).uniq\n end", "def dependencies\n return @dependencies ||= []\n end", "def dependencies\n @dependencies.values\n end", "def licenses\n if @licenses.nil?\n @licenses = self.links.select do |link|\n link.rel == \"license\"\n end\n end\n return @licenses\n end", "def remaining_dependencies\n dependencies = []\n @current_packages.each do |_, package|\n package.spec.dependencies.each do |dep|\n next if satisfy? dep\n dependencies << dep\n end\n end\n dependencies\n end", "def dependencies\n version_req = if options[:version]\n ::Gem::Requirement.create(options[:version])\n else\n ::Gem::Requirement.default\n end\n if gem_dir\n ::Gem.clear_paths; ::Gem.path.unshift(gem_dir)\n ::Gem.source_index.refresh!\n end\n deps = []\n ::Gem.source_index.each do |fullname, gemspec| \n if version_req.satisfied_by?(gemspec.version)\n deps << ::Gem::Dependency.new(gemspec.name, \"= #{gemspec.version}\")\n end\n end\n ::Gem.clear_paths if gem_dir\n deps.sort\n end", "def verify_licenses!\n licenses = Array(Berkshelf.config.allowed_licenses)\n return if licenses.empty?\n\n dependencies.each do |dependency|\n next if dependency.location.is_a?(Berkshelf::PathLocation)\n cached = dependency.cached_cookbook\n\n begin\n unless licenses.include?(cached.metadata.license)\n raise Berkshelf::LicenseNotAllowed.new(cached)\n end\n rescue Berkshelf::LicenseNotAllowed => e\n if Berkshelf.config.raise_license_exception\n FileUtils.rm_rf(cached.path)\n raise\n end\n\n Berkshelf.ui.warn(e.to_s)\n end\n end\n end", "def dependent_gems\n out = []\n Gem.source_index.each do |name,gem|\n gem.dependencies.each do |dep|\n if self.satisfies_requirement?(dep) then\n sats = []\n find_all_satisfiers(dep) do |sat|\n sats << sat\n end\n out << [gem, dep, sats]\n end\n end\n end\n out\n end", "def return_selectable_licenses()\n LICENSES\n end", "def find_licenses_in_gem_source\n license_finder = UseCase::DependenciesIndex::FindLicenses.new(find_class: @find_class, gem_source: @gem_source)\n license_finder_result = license_finder.execute!\n\n license_finder_result.data[:licenses]\n end", "def dependencies\n EMPTY_SET\n end", "def find_licenses_in_source\n license_files = []\n\n @find_class.find(@gem_source) do |path|\n license_files << path if path.include?(\"LICENSE\")\n end\n\n license_files\n end", "def dependencies_check\n dependencies.each do |required|\n dependency = Dependency.find_by(name: 
required['name'])\n if dependency && (version = required['version'])\n dependency.do_download(version)\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A helper that allows you to quickly create a new Dependency (with the type)
def new_dependency(name, version, path) LicenseScout::Log.debug("[#{type}] Found #{name} #{version}#{" #{path}" unless path.nil?}") Dependency.new(name, version, path, type) end
[ "def d(*args)\n Dependency.new(*args)\n end", "def create_dependency_type(kind, category, dependency_type = nil)\n # support wildcard case for all dependency types in a category.\n kind = kind.to_sym\n category = category.to_sym\n if WILDCARD == dependency_type\n types = self.send(category)\n return types.map { |type| create_dependency_type(kind, category, type) }\n end\n\n # get specific type from category on cloud, if necessary.\n dependency_type = self.send(category) unless dependency_type\n raise NotImplementedError.new(\"The #{name.inspect} cloud has not declared a #{category} type.\") unless dependency_type\n dependency_type = dependency_type.to_s\n\n options = resolve_options(kind, category, dependency_type)\n dependency_class = resolve_dependency(dependency_type)\n return dependency_class.new(options)\n end", "def factory_dependency(name, *args)\n args.push({}) unless args.last.is_a?(Hash)\n args.last[:factory] = true\n constructor_dependency(name, *args)\n end", "def dep(name, *requirements)\n FoobarMod::Dependency.new name, *requirements\n end", "def add_dependency_with_type(dependency, type, requirements)\n requirements = if requirements.empty?\n Gem::Requirement.default\n else\n requirements.flatten\n end\n\n unless dependency.respond_to?(:name) &&\n dependency.respond_to?(:requirement)\n dependency = Gem::Dependency.new(dependency.to_s, requirements, type)\n end\n\n dependencies << dependency\n end", "def initialize(name, *requirements)\n case name\n when String then # ok\n when Regexp then\n msg = [\"NOTE: Dependency.new w/ a regexp is deprecated.\",\n \"Dependency.new called from #{Gem.location_of_caller.join(\":\")}\"]\n warn msg.join(\"\\n\") unless Gem::Deprecate.skip\n else\n raise ArgumentError,\n \"dependency name must be a String, was #{name.inspect}\"\n end\n\n type = Symbol === requirements.last ? requirements.pop : :runtime\n requirements = requirements.first if 1 == requirements.length # unpack\n\n unless TYPES.include? type\n raise ArgumentError, \"Valid types are #{TYPES.inspect}, \" +\n \"not #{type.inspect}\"\n end\n\n @name = name\n @requirement = Gem::Requirement.create requirements\n @type = type\n @prerelease = false\n\n # This is for Marshal backwards compatibility. See the comments in\n # +requirement+ for the dirty details.\n\n @version_requirements = @requirement\n end", "def factory(dependency, &block)\n define dependency, FactoryResolver.new(block)\n end", "def new_from_dependencies(dependencies={}, *other_args, &block_arg)\n raise NotImplementedError\n end", "def a klass, parameters = {}\n Object.factory.create_a klass, parameters\nend", "def artifact(type, params = nil, &blk)\n artifact_object = create_artifact_object(type, params, &blk)\n self[artifact_object.label] = artifact_object\n artifact_object.as_a_dependency.to_self\n end", "def create\n @dependency = Dependency.new(dependency_params)\n\n respond_to do |format|\n if @dependency.save\n format.html { redirect_to @dependency, notice: 'Dependency was successfully created.' }\n format.json { render json: @dependency, status: :created, location: @dependency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dependency.errors, status: :unprocessable_entity }\n end\n end\n end", "def Factory (name, attrs = {})\n Factory.create(name, attrs)\nend", "def dependency name, version, type = :runtime\n raise \"Unknown dependency type: #{type}\" unless\n [:runtime, :dev, :development, :developer].include? 
type\n\n ary = if type == :runtime then\n extra_deps\n else\n extra_dev_deps\n end\n\n ary << [name, version]\n end", "def factory(attrs={})\n factory = self.create! to_params.merge(attrs).values\n end", "def when_creating_a klass, options = {}\n Object.factory.when_creating_a klass, options\nend", "def create(clazz, *args)\n injector.create(clazz, *args)\n end", "def initialize(dependency, requester)\n @dependency = dependency\n @requester = requester\n end", "def parse_dependency name, op # :nodoc:\n return Gem::Dependency.new name unless peek[0] == :text\n\n _, version, = get :text\n\n requirements = [\"#{op} #{version}\"]\n\n while peek[0] == :comma do\n get :comma\n _, op, = get :requirement\n _, version, = get :text\n\n requirements << \"#{op} #{version}\"\n end\n\n Gem::Dependency.new name, requirements\n end", "def object_with_type(cl)\n o = cl.allocate.compile_time_init\n name = cl.name.split(\"::\").last.to_sym\n o.set_type @types[name]\n o\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates and returns a new RecBack MazeHelper object. Many options are supported: [:width] The number of columns in the maze. [:height] The number of rows in the maze. [:seed] The maze algorithm to use. This should be a class,
def initialize(options = {}) @width = (options[:width] || 10).to_i @height = (options[:height] || @width).to_i @seed = (options[:seed] || rand(0xFFFF_FFFF)).to_i @grid = Array.new(height) {Array.new(width, 0)} srand(@seed) # start carving the maze passage from the upper-left corner carve_passages_from(0, 0, @grid) end
[ "def create_maze\n\t\tgrid = initialize_empty_maze\n\t\treturn grid\n\tend", "def generate_maze\r\n puts \"Great! Let's get this started up...\"\r\n print \"How many rows should this maze have? \"\r\n x_coord = @helper.get_valid_numbers_only($stdin.gets.chomp, 2000) #To test, put in 5\r\n print \"How many columns should this maze have? \"\r\n y_coord = @helper.get_valid_numbers_only($stdin.gets.chomp, 2000) #Same here\r\n @driver = MazeDriver.new(x_coord, y_coord)\r\n # Sorry, but I'm not creating a randomizer. This map tests all the functionality of the maze-solver well enough to consider this complete.\r\n @driver.create(\"1111111111110000010101111110111111010101010111111010111100000001011111111011110101010101111111111111010101010111111111111\")\r\n puts \"Okay, I've got your maze right here...\"\r\n puts \"Note: coordinates are (0, 0) at the top-left corner.\"\r\n @driver.display\r\n work_with_maze(x_coord.to_i, y_coord.to_i) # The next part of this sequence, but in another method to make it cleaner.\r\n end", "def create_maze\n\t\tmaze_dimensions = nil\n\t\tbegin\n\t\t\tmaze_dimensions = parse_text_to_location(@input_data[0])\n\t\trescue Exception => e\n\t\t\tputs \"Looks like the input file contains non-integer data\"\n\t\tend\n\t\t\n\t\tmaze = Maze.new(maze_dimensions)\n\t\treturn block_custom_cells(maze)\n\tend", "def initialize r, c\n\t\t@maze_builder = MazeBuilder.new r, c\n\n\t\t@num_rows = @maze_builder.num_rows\n\t\t@num_cols = @maze_builder.num_cols\n\n\t\t@wall_matrix = @maze_builder.wall_matrix\n\t\t@cell_matrix = @maze_builder.cell_matrix\n\tend", "def initialize(maze, options={})\n mode = options[:mode] || :plain\n\n width, height = self.class.dimensions_for(maze, mode)\n super(width, height)\n\n maze.height.times do |y|\n length = maze.row_length(y)\n length.times do |x|\n case mode\n when :plain then draw_plain_cell(maze, x, y)\n when :unicode then draw_unicode_cell(maze, x, y)\n when :lines then draw_line_cell(maze, x, y)\n end\n end\n end\n end", "def generate_maze\n # Randomises generator start location\n x = rand(@width)\n y = rand(@height)\n # Put starting cell onto stack\n @stack << @cells[x][y]\n create_path_from(x, y)\n end", "def initialize(maze)\n @maze = maze\n end", "def initialize(maze, options)\n @options = DEFAULTS.merge(options)\n\n [:background, :wall_color, :cell_color, :solution_color].each do |c|\n @options[c] = ChunkyPNG::Color.from_hex(@options[c]) if String === @options[c]\n end\n\n @paths = @options[:paths] || []\n\n if @options[:solution]\n path = maze.new_solver(type: @options[:solution]).solve.to_path(color: @options[:solution_color])\n @paths = [path, *@paths]\n end\n end", "def generate_maze_cells_from_instructions\n @maze ||= algorithm\n .new(grid: instructions_grid)\n .tap { |generator| generator.generate }\n .maze\n end", "def initialize(row, col)\n #row_size and col_size take into account representing the walls in between\n @row_size = row * 2 + 1\n @col_size = col * 2 + 1\n @maze = Array.new\n end", "def gen_maze(file_path = nil)\n\t\tif file_path != nil\n\t\t\tgen_from_file(file_path)\n\t\telse\n\t\t\tgen_random_maze\n\t\tend\n\tend", "def create_maze(file)\n line = file.gets\n if line == nil then return end\n\n # read 1st line, must be maze header\n sz, sx, sy, ex, ey = line.split(/\\s/)\n #course is the maze course\n @course = Array.new(sz.to_i)\n @course.map!{Array.new(sz.to_i)}\n \n @course[sx.to_i][sy.to_i] = Cell.new(sx.to_i, sy.to_i)\n \n @start_x = sx.to_i\n @start_y = sy.to_i\n 
@course[sx.to_i][sy.to_i].change_mode(\"Start\")\n \n @course[ex.to_i][ey.to_i] = Cell.new(ex.to_i, ey.to_i)\n @end_x = ex.to_i\n @end_y = ey.to_i\n @course[ex.to_i][ey.to_i].change_mode(\"End\")\n \n @paths = Array.new\n # read additional lines\n while line = file.gets do\n\n # begins with \"path\", must be path specification\n if line[0...4] == \"path\"\n p, name, x, y, d = line.split(/\\s/)\n ds = d.split(//)\n temp = Path.new(nil, nil, nil, nil)\n temp.initialize(name, x.to_i, y.to_i, ds)\n @paths.push(temp)\n \n # otherwise must be cell specification (since maze spec must be valid)\n else\n x, y, d, w = line.split(/\\s/,4)\n if @course[x.to_i][y.to_i] == nil\n @course[x.to_i][y.to_i] = Cell.new(x.to_i,y.to_i)\n end\n \n ds = d.split(//)\n ws = w.split(/\\s/)\n (0...ds.size).each { |i| \n @course[x.to_i][y.to_i].new_direction(ds[i], ws[i])}\n end\n end\n end", "def initialize(maze, meta={})\n @maze = maze\n @paths = Hash.new(0)\n @cells = Hash.new(0)\n @meta = meta\n end", "def build_maze(x, y, wall, cell_size)\n terrain = []\n maze = Maze.new(x, y)\n\n wall_tiling = (cell_size / wall.width.to_f).ceil\n\n maze.grid.each.with_index do |row, i|\n row.each.with_index do |cell, j|\n wall_tiling.times do |k|\n if cell.walls[0]\n new_wall = wall.copy\n new_wall.pos = Vector.new(:x => i * cell_size + k * wall.width, :y => j * cell_size)\n terrain << new_wall\n end\n if cell.walls[1] && !(i == maze.grid.length-1 && j == maze.grid[0].length-1)\n new_wall = wall.copy\n new_wall.pos = Vector.new(:x => i * cell_size + cell_size - wall.width, :y => j * cell_size + k * wall.width)\n terrain << new_wall\n end\n if cell.walls[2]\n new_wall = wall.copy\n new_wall.pos = Vector.new(:x => i * cell_size + k * wall.width, :y => j * cell_size + cell_size - wall.width)\n terrain << new_wall\n end\n if cell.walls[3]\n new_wall = wall.copy\n new_wall.pos = Vector.new(:x => i * cell_size, :y => j * cell_size + k * wall.width)\n terrain << new_wall\n end\n end\n end\n end\n\n # returns the terrain that was generated\n terrain\nend", "def maze_generate\n @list = Array.new #our wall list\n @new_maze = Array.new(@row_count) { Array.new(@row_length, '1') }\n @new_maze[1][1] = '0' #our beginning spot\n @list.push([1,2, :w]); @list.push([2,1, :s]) #pushes the surrounding walls. \n until @list.empty? 
#while there are still walls to perform alogrithm on, continue.\n coordinates = @list.delete(@list.sample) #remove a random wall and check it.\n y = coordinates[0]; x = coordinates[1]; direc = coordinates[2] #pass wall's coordinates to wall_change\n wall_change(y, x, direc)\n end\n convert_maze_binary\n end", "def generate_maze\n cell_stack = []\n total_cells = @maze.length * @maze[0].length\n current_cell = { x: rand(@maze[0].length), y: rand(@maze.length) }\n visited_cells = 1\n \n while visited_cells < total_cells\n neighbour = find_neighbour current_cell[:x], current_cell[:y]\n unless neighbour.nil?\n knock_down_wall current_cell, neighbour\n cell_stack << current_cell\n current_cell = neighbour\n visited_cells += 1\n else\n current_cell = cell_stack.pop\n end\n end\n end", "def initialize r, c\n\t\t@row = r\n\t\t@col = c\n\n\t\t@neighbors = {top: nil, right: nil, bottom: nil, left: nil}\n\t\t@walls = {top: true, right: true, bottom: true, left: true}\n\n\t\t@parent = nil\n\t\t@distance = (r*c) + 10 # ensures the distance is greater than the number of cells in the maze\n\t\t@visited = false\n\tend", "def make_graph\n\t\t@graph = Graph.new(@width2, @height2, @maze)\n\tend", "def construct_default_maze_string\n\t\ttop = \"+-\" * @cell_width + \"+\"\n\t\tbody = \"|\" + (\" |\" * @cell_width)\n\t\t@maze_string = top + (body + top) * @cell_height\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns original window if defined, current window if not. See Windowuse
def original_window @original_window ||= window end
[ "def get_window\n manager.is_reparenting?() ? frame_window : window\n end", "def get_window; @window; end", "def window\r\n return $window\r\n end", "def local_window; end", "def get_active_window\n xdotool(\"getactivewindow\").chomp\n end", "def window\n nil\n end", "def usable_window\n window = @windows.last\n window if window.loaded?\n end", "def active_window\n window_pointer = FFI::MemoryPointer.new :ulong, 1\n XDo::FFILib.xdo_window_get_active @_pointer, window_pointer\n XDo::Window.new self, window_pointer.read_ulong\n end", "def current_window\n @driver.window_handle\n rescue Selenium::WebDriver::Error::NoSuchWindowError\n nil\n end", "def current_window_builder\n return UI::Window.window_builder(current_windowskin)\n end", "def active_window\n if File.exists?(wmii_namespace)\n 'wmiir cat /client/sel/ctl | sed 1q'\n else\n %q[xprop -root _NET_ACTIVE_WINDOW | awk '/#/ { print $(NF) ; exit } END { exit 1 }' || xdotool getwindowfocus]\n end\nend", "def active_window\n @terminal.current_terminal\n end", "def current_windowskin_settings\n if $game_variables != nil\n winvar = YE::SYSTEM::WINDOW_VARIABLE\n if $game_variables[winvar] == 0\n $game_variables[winvar] = YE::SYSTEM::DEFAULT_WINDOW\n elsif !MENU_CONFIG::WINDOW_HASH.include?($game_variables[winvar])\n $game_variables[winvar] = YE::SYSTEM::DEFAULT_WINDOW\n end\n mso_windowskin = MENU_CONFIG::WINDOW_HASH[$game_variables[winvar]]\n else\n mso_windowskin = MENU_CONFIG::WINDOW_HASH[YE::SYSTEM::DEFAULT_WINDOW]\n end\n return mso_windowskin\n end", "def active_window\n current_terminal.current_session\n end", "def focus_previous_window\r\n window = get_current_workspace_managed_window\r\n if window\r\n previous_window = windawesome.current_workspace.get_previous_window window\r\n windawesome.switch_to_application previous_window.hWnd if previous_window\r\n elsif windawesome.current_workspace.get_windows_count > 0\r\n windawesome.switch_to_application windawesome.current_workspace.get_windows.first.value.hWnd\r\n end\r\nend", "def active_window\n if has_prog?('x-active-id')\n 'x-active-id'\n elsif File.exists?(wmii_namespace)\n 'wmiir cat /client/sel/ctl | sed 1q'\n else\n %q[xprop -root _NET_ACTIVE_WINDOW | awk '/#/ { print $(NF) ; exit } END { exit 1 }' || xdotool getwindowfocus]\n end\nend", "def parent(window)\n Tk.execute(:winfo, :parent, window)\n end", "def focus_previous_window\n window = get_current_workspace_managed_window\n if window\n previous_window = windawesome.current_workspace.get_previous_window window\n windawesome.switch_to_application previous_window.hWnd if previous_window\n elsif windawesome.current_workspace.get_windows_count > 0\n windawesome.switch_to_application windawesome.current_workspace.get_windows.first.value.hWnd\n end\nend", "def focused_window\n window_pointer = FFI::MemoryPointer.new :ulong, 1\n XDo::FFILib.xdo_window_get_focus @_pointer, window_pointer\n XDo::Window.new self, window_pointer.read_ulong\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns estimated effort for a ProjectPhase by calculating the sum of the estimated effort for each StockDeliverableType and CustomDeliverableType
def estimated_effort total_estimated_effort = 0 self.project_phase_deliverables.each do |deliverable| total_estimated_effort += deliverable.estimated_effort.to_f unless deliverable.nil? end return total_estimated_effort end
[ "def total_estimated_effort\n sum = 0.0\n self.project_phase_deliverables.each do |deliverable|\n if deliverable.total_effort.nil?\n next\n end\n sum += deliverable.total_effort\n end\n return sum\n end", "def total_estimated_effort\n sum = 0.0;\n self.project_phase_deliverables.each do |deliverable|\n if deliverable.total_effort.nil?\n next\n end\n sum += deliverable.total_effort\n end\n return sum\n end", "def estimated_effort\n total_estimated_effort = 0\n project_phases.each do |phase|\n total_estimated_effort += phase.estimated_effort unless phase.nil?\n end\n\n total_estimated_effort\n end", "def total_actual_effort\n sum = 0.0\n self.project_phase_deliverables.each do |deliverable|\n sum += deliverable.total_logged_effort\n end\n return sum\n end", "def calculate_phase_actual_effort(project_id, phase)\n @hours = 0\n @dataset = Deliverable.find_all_by_phase_and_project_id(phase, project_id)\n puts @dataset\n if @dataset.nil?\n return 0\n else\n @dataset.each do |d|\n @hours += d.hours_logged\n end\n return @hours\n end\n end", "def logged_effort\n total_logged_effort = 0\n\n self.project_phase_deliverables.each do |deliverable|\n total_logged_effort += deliverable.logged_effort.to_f unless deliverable.nil?\n end\n\n return total_logged_effort\n end", "def build_project_efforts(item_assignments_efforts_in_period, projects_in_period)\n projects_in_period.each do |project_active|\n effort_value_sum = 0\n efforts_project_active = item_assignments_efforts_in_period.where(demand: { project: project_active })\n effort_value_sum = efforts_project_active.sum(&:effort_value) if efforts_project_active.present?\n\n project_with_effort = @projects_efforts[project_active.name]\n if project_with_effort.present?\n @projects_efforts[project_active.name] << effort_value_sum.to_f\n else\n @projects_efforts[project_active.name] = [effort_value_sum.to_f]\n end\n end\n end", "def total_pending_schedule_impact_hours\n delta = 0.0\n self.design_changes.each do |design_change|\n if !design_change.approved?\n delta += design_change.schedule_impact\n end\n end\n delta\n end", "def total_approved_schedule_impact_hours\n delta = 0.0\n self.design_changes.each do |design_change|\n if design_change.approved?\n delta += design_change.schedule_impact\n end\n end\n delta\n end", "def project_phase_deliverables\n project_phase_deliverables = []\n stock_deliverable_types.each do |stock|\n stock.deliverables.each do |d|\n project_phase_deliverables << d\n end\n end\n\n custom_deliverable_types.each do |custom|\n custom.deliverables.each do |d|\n project_phase_deliverables << d\n end\n end\n project_phase_deliverables\n end", "def goals_progress(type)\n exp = type == \"expense\" ? true : false\n sum_actual = 0\n sum_target = 0\n self.goals.each do |goal|\n if goal.is_expense? == exp && goal.end_date >= Date.today\n sum_target += goal.target_value\n sum_actual += goal.total_actual_value\n end\n end\n sum_target == 0 ? 
0 : (sum_actual/sum_target) #0% for when there is no goal matching the type.\n end", "def logged_effort\n total_logged_effort = 0\n project_phases.each do |phase|\n total_logged_effort += phase.logged_effort unless phase.nil?\n end\n\n total_logged_effort\n end", "def amount_missing_on_deliverables\n # Bisect the issues because NOT IN isn't reliable\n all_issues = self.project.issues.all\n return 0 if all_issues.empty?\n\n deliverable_issues = self.project.issues.find(:all, :conditions => [\"deliverable_id IN (?)\", self.deliverables.collect(&:id)])\n\n missing_issues = all_issues - deliverable_issues\n\n time_logs = missing_issues.collect(&:time_entries).flatten\n \n return time_logs.collect(&:cost).sum\n end", "def dynamic_estimates\n\n estimates = []\n deliverable_types = DeliverableType.find(:all)\n for deliverable_type in deliverable_types\n for complexity in Complexity.getValues\n estimates << {:type => deliverable_type.name, :complexity => complexity,\n :statistics => Deliverable.get_estimates(deliverable_type.name, complexity)}\n end\n end\n return estimates\n end", "def hours_spent\n hours_spent = 0\n project_tasks.each do |project_task|\n hours_spent += project_task.hours_spent\n end\n hours_spent\n end", "def increment_actual_effort effort_value\n\n self.actual_effort += effort_value\n self.actual_production_rate = ((self.actual_effort / self.actual_size)*100).round / 100\n if self.save! && self.project_phase\n self.project_phase.increment_actual_effort effort_value\n end\n end", "def calculate_payables\n ###\n #\n # \n #\n \n #\n raise RException.new(\"Invoice.calculate_payables: IS THIS OBSOLETE?\")\n #\n #\n payables.each do |p|\n case p.payable_type\n when \"InvoiceItem\" \n if p.cost != p.payable.cost\n p.cost = p.payable.cost\n p.save!\n end\n when \"CompanySalesAccount\"\n p.cost = p.payable.cost(items)\n p.save!\n end\n end\n end", "def total_project_cost_cents_invoiced\n profiles = self.payment_profiles.includes(:invoice_item).select { |p| p.invoice_item.present? }\n profiles.sum(&:expected_cost_cents)\n end", "def increment_total_estimated_effort estimated_effort\n\n self.total_estimated_effort += estimated_effort\n if self.save! && self.project\n self.project.increment_total_estimated_effort estimated_effort\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function to get the total logged effort for this deliverable
def logged_effort total_logged_effort = 0 self.project_phase_deliverables.each do |deliverable| total_logged_effort += deliverable.logged_effort.to_f unless deliverable.nil? end return total_logged_effort end
[ "def total_actual_effort\n sum = 0.0\n self.project_phase_deliverables.each do |deliverable|\n sum += deliverable.total_logged_effort\n end\n return sum\n end", "def total_logged_effort\n self.effort_logs.inject (0){|sum, ef| sum + ef.effort}\n end", "def estimated_effort\n total_estimated_effort = 0\n\n self.project_phase_deliverables.each do |deliverable|\n total_estimated_effort += deliverable.estimated_effort.to_f unless deliverable.nil?\n end\n\n return total_estimated_effort\n end", "def total_estimated_effort\n sum = 0.0\n self.project_phase_deliverables.each do |deliverable|\n if deliverable.total_effort.nil?\n next\n end\n sum += deliverable.total_effort\n end\n return sum\n end", "def total_estimated_effort\n sum = 0.0;\n self.project_phase_deliverables.each do |deliverable|\n if deliverable.total_effort.nil?\n next\n end\n sum += deliverable.total_effort\n end\n return sum\n end", "def logged_effort\n total_logged_effort = 0\n project_phases.each do |phase|\n total_logged_effort += phase.logged_effort unless phase.nil?\n end\n\n total_logged_effort\n end", "def effort_logs\n self.project_phase_deliverables.collect{|d| d.effort_logs }.flatten\n end", "def total_pending_schedule_impact_hours\n delta = 0.0\n self.design_changes.each do |design_change|\n if !design_change.approved?\n delta += design_change.schedule_impact\n end\n end\n delta\n end", "def total\n count = 0\n self.total_time_exercise_workouts.each do |ex|\n count += ex.duration\n end\n count\n end", "def amount_missing_on_deliverables\n # Bisect the issues because NOT IN isn't reliable\n all_issues = self.project.issues.all\n return 0 if all_issues.empty?\n\n deliverable_issues = self.project.issues.find(:all, :conditions => [\"deliverable_id IN (?)\", self.deliverables.collect(&:id)])\n\n missing_issues = all_issues - deliverable_issues\n\n time_logs = missing_issues.collect(&:time_entries).flatten\n \n return time_logs.collect(&:cost).sum\n end", "def avarage_calories_burned\n total_workout_calories / set_sport_by_user.count\n rescue\n 0\n end", "def calculate_phase_actual_effort(project_id, phase)\n @hours = 0\n @dataset = Deliverable.find_all_by_phase_and_project_id(phase, project_id)\n puts @dataset\n if @dataset.nil?\n return 0\n else\n @dataset.each do |d|\n @hours += d.hours_logged\n end\n return @hours\n end\n end", "def total_approved_schedule_impact_hours\n delta = 0.0\n self.design_changes.each do |design_change|\n if design_change.approved?\n delta += design_change.schedule_impact\n end\n end\n delta\n end", "def total_workout_calories\n set_sport_by_user.sum(:burned_calories) || 0\n end", "def delivery_drink_count\n self.account_deliveries.sum(:quantity)\n end", "def logged_effort\n unless self.start_date_time.nil? || self.stop_date_time.nil?\n # Calculate the logged time\n time_logged = (self.stop_date_time - self.start_date_time) / 60\n unless self.interrupt_time.nil? \n # return the number of hours that were logged minus the interrupt time\n time_logged = (time_logged - self.interrupt_time) / 60\n end\n # return the number of hours that were logged\n time_logged\n end\n end", "def total_tracked\n self.timings.submitted_timings.sum(:duration_minutes)\n end", "def hours_planned\n hours_planned = 0\n project_tasks.each do |project_task|\n hours_planned += project_task.hours_planned\n end\n hours_planned\n end", "def estimated_effort\n total_estimated_effort = 0\n project_phases.each do |phase|\n total_estimated_effort += phase.estimated_effort unless phase.nil?\n end\n\n total_estimated_effort\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function to return the aggregated project phase deliverables with both stock and custom
def project_phase_deliverables project_phase_deliverables = [] stock_deliverable_types.each do |stock| stock.deliverables.each do |d| project_phase_deliverables << d end end custom_deliverable_types.each do |custom| custom.deliverables.each do |d| project_phase_deliverables << d end end project_phase_deliverables end
[ "def gather_collections\n project_phase = ProjectPhase.find(@project_phase_id)\n\n # TODO: Add the custom deliverables into this list\n\n # Encode the id as stock_<id>\n sdt = project_phase.stock_deliverable_types unless project_phase.nil?\n @stock_deliverable_types = sdt.map {|s| [s.deliverable_type.name, \"stock_\" + s.id.to_s]}\n\n @complexities = Complexity.all\n @units_of_measure = UnitOfMeasure.all\n end", "def deliverables\n return Deliverable.where(project_id: @project.id)\n end", "def getDeliverableTypes\n @projectPhase = ProjectPhase.find(@project_phase_id)\n @deliverableTypes = DeliverableType.find_all_by_lifecycle_phase_id(@projectPhase.lifecycle_phase_id)\n @deliverableTypesArray = []\n @deliverableTypes.each do |type|\n @deliverableTypesArray.append([type.name,type.id])\n end\n end", "def global_project_breakdown(items)\n pf = [] # 4 items array\n a = [] # 5 items returning array\n # Only if items\n if items.count > 0\n # First: Sum net amounts\n # Store first project & initialize\n prev_pf_id = items.first.project_id\n prev_pf_net = 0\n project = Project.find(prev_pf_id) rescue nil\n # Loop thru items, previously ordered by project\n items.each do |i|\n # if project changed\n if i.project_id != prev_pf_id\n # Store previous project data\n pf = pf << [prev_pf_id, project.nil? ? 0 : project.max_order_total, project.nil? ? 0 : project.max_order_price, prev_pf_net]\n # Store current project & initialize\n prev_pf_id = i.project_id\n prev_pf_net = 0\n project = Project.find(prev_pf_id) rescue nil\n end\n # Add net amount while current project\n prev_pf_net += i.net\n end\n # Store last unsaved project data\n pf = pf << [prev_pf_id, project.nil? ? 0 : project.max_order_total, project.nil? ? 0 : project.max_order_price, prev_pf_net]\n\n # Second: Returning array with item prices\n items.each do |i|\n # Search project in pf\n d = pf.detect { |f| f[0] == i.project_id }\n # Add row to array\n a = a << [d[0], d[1], d[2], d[3], i.net_price]\n end\n end\n\n # Returns a\n a\n end", "def pending_refund_payments_projects\n pending_refund_payments.map(&:project)\n end", "def portfolio\n funding_rounds.map{|funding| funding.startup}.uniq\n end", "def show\n @project_phase = ProjectPhase.find(params[:id])\n @lifecycle_phase = @project_phase.lifecycle_phase\n @project_phase_deliverables = []\n @project_phase.stock_deliverable_types.each do |stock|\n stock.deliverables.each do |d|\n @project_phase_deliverables << d\n end\n end\n\n @project_phase.custom_deliverable_types.each do |custom|\n custom.deliverables.each do |d|\n @project_phase_deliverables << d\n end\n end\n\n respond_to do |format|\n format.json { render :json => { :lifecycle_phase_container => @lifecycle_phase,\n :deliverables_container => @project_phase_deliverables,\n :project_phase_estimated_effort => @project_phase.estimated_effort,\n :project_phase_logged_effort => @project_phase.logged_effort} }\n end\n end", "def capital_projects\n\n projects = []\n activity_line_items.each{|x| projects << x.capital_project}\n projects.uniq\n \n end", "def complete_projects\n completed = []\n self.projects.each do |project|\n completed << project if project.status == \"Complete\"\n end\n completed\n end", "def build_projects_in_efforts\n @projects_in_efforts = @item_assignments_efforts.map { |assignment| assignment.demand.project }.uniq\n end", "def get_available_portfolios\n Portfolio.order(:name) - self.portfolios\n end", "def portfolio\n arr = []\n FundingRound.all.select do |s|\n if s.venture_capitalist == self\n arr << s.startup.name\n 
end\n end\n arr.uniq\n end", "def order_pending_items_report\n detailed = params[:detailed]\n project = params[:project]\n @from = params[:from]\n @to = params[:to]\n supplier = params[:supplier]\n store = params[:store]\n order = params[:order]\n account = params[:account]\n status = params[:status]\n product = params[:product]\n petitioner = params[:petitioner]\n\n if project.blank?\n init_oco if !session[:organization]\n # Initialize select_tags\n @projects = projects_dropdown if @projects.nil?\n # Arrays for search\n current_projects = @projects.blank? ? [0] : current_projects_for_index(@projects)\n project = current_projects.to_a\n end\n\n # Dates are mandatory\n if @from.blank? || @to.blank?\n return\n end\n\n # Format dates\n from = Time.parse(@from).strftime(\"%Y-%m-%d\")\n to = Time.parse(@to).strftime(\"%Y-%m-%d\")\n\n if !project.blank? && !supplier.blank? && !store.blank? && !order.blank? && !account.blank? && !status.blank? && !petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.purchase_orders.store_id = ? AND purchase_orders.work_order_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_status_id = ? AND purchase_orders.created_by = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,store,order,account,status,petitioner,from,to).order(:order_date)\n elsif !project.blank? && !supplier.blank? && !store.blank? && !order.blank? && !account.blank? && !status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.purchase_orders.store_id = ? AND purchase_orders.work_order_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_status_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,store,order,account,status,from,to).order(:order_date)\n elsif !project.blank? && !supplier.blank? && !store.blank? && !order.blank? && !account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.purchase_orders.store_id = ? AND purchase_orders.work_order_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,store,order,account,from,to).order(:order_date)\n elsif !project.blank? && !supplier.blank? && !store.blank? && !order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.store_id = ? AND purchase_orders.work_order_id = ? AND purchase_orders.order_date >= ? 
AND purchase_orders.order_date <= ?\",project,supplier,store,order,from,to).order(:order_date)\n elsif !project.blank? && !supplier.blank? && !store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.store_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,store,from,to).order(:order_date)\n elsif !project.blank? && !supplier.blank? && store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && !store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.store_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,store,from,to).order(:order_date)\n\n elsif !project.blank? && supplier.blank? && !store.blank? && !order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.store_id = ? AND purchase_orders.work_order_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,store,order,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && !store.blank? && order.blank? && !account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.store_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,store,account,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && !store.blank? && order.blank? && account.blank? && !status.blank? 
&& petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.store_id = ? AND purchase_orders.order_status_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,store,status,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && !store.blank? && order.blank? && account.blank? && status.blank? && !petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.store_id = ? AND purchase_orders.created_by = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,store,petitioner,from,to).order(:order_date)\n\n elsif !project.blank? && !supplier.blank? && store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.supplier_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,supplier,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && !order.blank? && !account.blank? && !status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.work_order_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_status_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,order,account,status,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && !order.blank? && !account.blank? && !status.blank? && !petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.work_order_id = ? AND purchase_orders.charge_account_id = ? AND purchase_orders.order_status_id = ? AND purchase_orders.created_by = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,order,account,status,petitioner,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && !order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.work_order_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,order,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && order.blank? && !account.blank? && status.blank? 
&& petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.charge_account_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,account,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && order.blank? && account.blank? && !status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.order_status_id = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,status,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && order.blank? && account.blank? && status.blank? && !petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.created_by = ? AND purchase_orders.order_date >= ? AND purchase_orders.order_date <= ?\",project,petitioner,from,to).order(:order_date)\n elsif !project.blank? && supplier.blank? && store.blank? && order.blank? && account.blank? && status.blank? && petitioner.blank?\n @order_items_report = PurchaseOrderItem.joins(:purchase_order => :purchase_order_item_balances).group('purchase_order_items.id').having('sum(purchase_order_item_balances.balance) > ?', 0).where(\"purchase_orders.project_id in (?) AND purchase_orders.order_date >= ? 
AND purchase_orders.order_date <= ?\",project,from,to).order(:order_date)\n end\n\n # Setup filename\n title = t(\"activerecord.models.purchase_order.few\") + \"_#{from}_#{to}\"\n\n # @order_items_csv = []\n # @order_items_report.each do |pr|\n # @order_items_csv << pr\n # end\n\n respond_to do |format|\n # Render PDF\n if !@order_items_report.blank?\n format.pdf { send_data render_to_string,\n filename: \"#{title}.pdf\",\n type: 'application/pdf',\n disposition: 'inline' }\n format.csv { send_data PurchaseOrderItem.to_csv(@order_items_report),\n filename: \"#{title}.csv\",\n type: 'application/csv',\n disposition: 'inline' }\n # format.csv { render text: PurchaseOrderItem.to_csv(@order_items_csv) }\n else\n format.csv { redirect_to ag2_products_track_url, alert: I18n.t(\"ag2_purchase.ag2_purchase_track.index.error_report\") }\n format.pdf { redirect_to ag2_products_track_url, alert: I18n.t(\"ag2_purchase.ag2_purchase_track.index.error_report\") }\n end\n end\n end", "def all_production_items\n\titems = Array.new\n\tself.item.materials.each do |mat|\n\t\titems.push mat.item\n\tend\n\tself.manufacturing_requirements.each do |ram|\n\t\titems.push ram.item\n\tend\n\tself.invention_requirements.each do |ram|\n\t\titems.push ram.item\n\tend\n\titems.each do |item|\n\t\tif item.base_blueprint\n\t\t\titems.concat item.base_blueprint.all_production_items\n\t\telse\n\t\t\titems.concat item.materials.map{|mat| mat.item }\n\t\tend\n\tend\n\titems.uniq\nend", "def calculate_historical_data\n\n data = []\n collector = []\n\n target_projects = Project.find(:all, :conditions => ['status = ?', 'Archived'])\n target_projects.each do |p|\n \n # Note: do not use <<, as find:all returns an array, we would be appending\n # the whole array as a single entry\n d = Deliverable.find(:all, :conditions => ['complexity = ? AND deliverable_type = ? AND project_id = ?', session[:complexity], session[:deliverable_type], p.id])\n collector = collector | d\n end\n\n sizes = []\n efforts = []\n rates = []\n \n unless collector.empty? || collector.blank?\n collector.each do |c|\n sizes << c.estimated_size\n efforts << c.estimated_effort\n rates << c.production_rate\n end\n\n data[0] = sizes.min\n data[1] = (sizes.size == 0) ? \"-\" : sizes.sum / sizes.size\n data[2] = sizes.max\n\n data[3] = rates.min\n data[4] = (rates.size == 0) ? \"-\" : rates.sum / rates.size\n data[5] = rates.max\n\n data[6] = efforts.min\n data[7] = (efforts.size == 0) ? 
\"-\" : efforts.sum / efforts.size\n data[8] = efforts.max\n\n else\n flash.now[:warning] = \"No data available\"\n data.fill(\"?\", 0, 9)\n end\n\n return data\n end", "def project_all\n prj = { '_id' => 0 }\n prj.merge!(make_grp_prj_periods[1])\n prj.merge!(make_grp_prj_nodes[1])\n prj.merge!(project_bookingnet)\n prj.merge!(project_baselist) unless @sensitivity >= 2\n prj.merge!(project_standardcost) unless @sensitivity >= 1\n { '$project' => prj }\n end", "def products_billing_summary_by_stock(year)\n\n current_year = DateTime.now.year\n\n # Build the result holder\n total_cost = 0\n stock_items = if current_year == year\n ::Yito::Model::Booking::BookingItem.all(conditions: {active: true},\n fields: [:reference, :cost],\n order: [:category_code, :reference])\n else\n BookingDataSystem::Booking.historic_stock(year).map do |item|\n OpenStruct.new({reference: item.item_reference, cost: 0})\n end\n end\n\n summary = stock_items.inject({}) do |result, item|\n data_holder = {}\n (1..12).each { |item| data_holder.store(item, 0) }\n data_holder.store(:total, 0)\n data_holder.store(:cost, item.cost || 0)\n data_holder.store(:percentage, 0)\n total_cost = total_cost + item.cost unless item.cost.nil?\n result.store(item.reference, data_holder)\n result\n end\n data_holder = {}\n (1..12).each { |item| data_holder.store(item, 0) }\n data_holder.store(:total, 0)\n data_holder.store(:cost, 0)\n data_holder.store(:percentage, 0)\n summary.store(:TOTAL, data_holder)\n # Fill the data\n data = query_strategy.products_billing_summary_by_stock(year)\n data.each do |data_item|\n if summary.has_key?(data_item.reference)\n # stock\n summary[data_item.reference][data_item.period.to_i] = data_item.total_item_cost\n summary[data_item.reference][:total] += data_item.total_item_cost\n if summary[data_item.reference][:cost] and summary[data_item.reference][:cost] > 0\n summary[data_item.reference][:percentage] = summary[data_item.reference][:total] /\n summary[data_item.reference][:cost] * 100\n end\n # total\n summary[:TOTAL][data_item.period.to_i] += data_item.total_item_cost\n summary[:TOTAL][:total] += data_item.total_item_cost\n summary[:TOTAL][:cost] = total_cost\n if summary[:TOTAL][:cost] and summary[:TOTAL][:cost] > 0\n summary[:TOTAL][:percentage] = summary[:TOTAL][:total] /\n summary[:TOTAL][:cost] * 100\n end\n end\n end\n\n return summary\n end", "def global_company_breakdown(items)\n pf = [] # 4 items array\n a = [] # 5 items returning array\n # Only if items\n if items.count > 0\n # First: Sum net amounts\n # Store first office & initialize\n prev_pf_id = items.first.project.company_id\n prev_pf_net = 0\n company = Company.find(prev_pf_id) rescue nil\n # Loop thru items, previously ordered by office\n items.each do |i|\n # if company changed\n if i.project.company_id != prev_pf_id\n # Store previous office data\n pf = pf << [prev_pf_id, company.nil? ? 0 : company.max_order_total, company.nil? ? 0 : company.max_order_price, prev_pf_net]\n # Store current company & initialize\n prev_pf_id = i.project.company_id\n prev_pf_net = 0\n company = Company.find(prev_pf_id) rescue nil\n end\n # Add net amount while current company\n prev_pf_net += i.net\n end\n # Store last unsaved project data\n pf = pf << [prev_pf_id, company.nil? ? 0 : company.max_order_total, company.nil? ? 
0 : company.max_order_price, prev_pf_net]\n\n # Second: Returning array with item prices\n items.each do |i|\n # Search office in pf\n d = pf.detect { |f| f[0] == i.project.company_id }\n # Add row to array\n a = a << [d[0], d[1], d[2], d[3], i.net_price]\n end\n end\n\n # Returns a\n a\n end", "def project_costs(proj)\n Issue.cross_project_scope(proj, 'descendants')\n .select('MAX(spent_on) AS spent_on, SUM(hours) AS sum_hours')\n .where('start_date IS NOT NULL AND due_date IS NOT NULL')\n .joins(:time_entries)\n .group('spent_on').collect { |issue| [issue.spent_on, issue.sum_hours] }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
gets the current theme or returns default if it comes back blank
def get_theme use_theme = Preference.get_setting('CURRENT_THEME') (use_theme == '' ? 'default' : use_theme).downcase end
[ "def current_theme \n if @current_theme.nil? \n @current_theme = get_default_theme \n end \n @current_theme \n end", "def current_theme\n @_current_theme ||= current_site.get_theme.decorate\n end", "def current_theme\n @theme\n end", "def get_default_theme \n if @default_theme.nil?\n @default_theme = GuiTheme.new(COLOR_WHITE, # text color\n COLOR_HEADER_BRIGHT_BLUE, # graphic elements\n COLOR_BORDER_BLUE, # border color\n COLOR_BLACK, # background\n COLOR_LIGHT_GRAY, # selected item\n true, # use icons\n Gosu::Font.new(22), # regular font\n Gosu::Font.new(38)) # large font\n end \n @default_theme\n end", "def get_theme\n\t\tif @current_user and @current_user.theme\n\t\t\t@current_theme = @current_user.theme.css_class\n\t\telse\n\t\t\t@current_theme = \"pond\"\n\t\tend\n\tend", "def theme\n @theme || 'plastik'\n end", "def theme\n Design::Theme.array.find_by_name(self.theme_name || 'Default')\n end", "def current_theme_name\n @theme_name\n end", "def theme\n return @theme\n end", "def current_theme\n account_prefix\n end", "def current_theme_name\n if @theme\n @theme[:type].to_sym if @theme[:type]\n end\n end", "def theme\n options.fetch(:theme, nil)\n end", "def get_theme_name\n #grab the theme folder\n @theme = Settings.where(:setting=>\"theme\").first.value\n return @theme\n end", "def selected_theme\n @themes[@selected_theme]\n end", "def parent_theme\n ThemesForRails.config.parent_theme(current_theme)\n end", "def parse_theme(theme)\n theme == 'none' ? '' : theme\n end", "def theme_name(opts ={})\n ret = opts[:default] || 'sc-theme'\n if target.config[:theme_name]\n ret = target.config[:theme_name]\n elsif target.config[:theme]\n if theme_target = target.target_for(target.config[:theme])\n ret = theme_target.config[:theme_name] || ret\n end\n end\n return ret\n end", "def get_theme_name\n #grab the theme folder\n @theme = Settings.where(:setting=>\"theme\").first\n return 'layouts/themes/'+@theme.value+'/'\n end", "def load_theme\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The value which is used for sorting. Used on the preset scenario list
def sorting_value respond_to?(:ordering) ? ordering : 0 end
[ "def value\n requested_sort[:key]\n end", "def default_sort_value\n default_sort.fetch(:value, nil)\n end", "def selected_sort_value\n sort_param.present? ? sort_param : default_sort_value\n end", "def default_sort_attribute\n ::Mincer.config.sorting.sort_attribute\n end", "def sort_value_from_arg(arg) # :nodoc:\n case arg\n when /^asc/i\n arg = 1\n when /^desc/i\n arg = -1\n when Number\n arg.to_i >= 0 ? 1 : -1\n else\n arg ? 1 : -1\n end\n end", "def collection_rating_sort_values\n {\n 'FALSE' => '1',\n 'PARTIAL' => '2',\n 'TRUE' => '3',\n }\n end", "def sort context, value\n if value.respond_to?(:sort)\n value.sort\n else\n value\n end\n end", "def comparison_value\n return @comparison_value\n end", "def current_sort_field\n (blacklight_config.sort_fields.values.find { |f| f.sort == @response.sort } if @response && @response.sort.present?) || blacklight_config.sort_fields[params[:sort]] || default_sort_field\n end", "def validate_sort_field( value )\n if [ 'title', 'code', 'created_at' ].include?( value )\n return value\n else\n return 'title'\n end\n end", "def sort_by\n return @sort_by\n end", "def default_sort_option\n\t\t'name'\n\tend", "def sort_rank\n @sort_rank ||= (property.property_values.length - property.property_values.index(self))\n end", "def variant_column\n \"option_value_#{ order_in_good }\"\n end", "def sort_by_value\n @data = @data.sort_by { |k, v| v.value }.reverse\n end", "def date_component_sort_value(component, value)\n case component.to_sym\n when DateComponents::DAY, DateComponents::MONTH then value.iso8601\n else value.to_i\n end\n end", "def sort_key\n ''\n end", "def lbSortByValue _args\n \"lbSortByValue _args;\" \n end", "def current_sort_field_selected\n sort_field_from_response || # as in original\n sort_field_from_params || # sort param specified\n sort_field_from_list || # sort param not specified\n default_sort_field # falls back on 'relevance'\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates the four required columns that constitutes a single cascading namespace settings attribute. This helper is only appropriate if the setting is not already present as a noncascading attribute. Creates the `setting_name` column along with the `lock_setting_name` column in both `namespace_settings` and `application_settings`. This helper is not reversible and must be defined in conjunction with `remove_cascading_namespace_setting` in separate up and down directions. setting_name The name of the cascading attribute same as defined in `NamespaceSetting` with the `cascading_attr` method. type The column type for the setting itself (:boolean, :integer, etc.) options Standard Rails column options hash. Accepts keys such as `null` and `default`. `null` and `default` options will only be applied to the `application_settings` column. In most cases, a nonnull default value should be specified.
def add_cascading_namespace_setting(setting_name, type, **options) lock_column_name = "lock_#{setting_name}".to_sym check_cascading_namespace_setting_consistency(setting_name, lock_column_name) namespace_options = options.merge(null: true, default: nil) add_column(:namespace_settings, setting_name, type, **namespace_options) add_column(:namespace_settings, lock_column_name, :boolean, default: false, null: false) add_column(:application_settings, setting_name, type, **options) add_column(:application_settings, lock_column_name, :boolean, default: false, null: false) end
[ "def setting(name)\n settings.find_by(name: name) || begin\n t = setting_templates.find_by!(setting_name: name)\n\n setting_args = {\n name: t.setting_name,\n type: t.setting_klass,\n setting_template: t,\n value_type: t.value_type,\n owner: self\n }\n\n settings.create!(setting_args) do |setting|\n # due to some init/AR/meta issues in the original implementation :(\n setting.value = t.value\n end\n end\n end", "def has_settings(*args)\n options = args.extract_options!\n attribute_name = args.shift || HasSettings.config[:settings_attribute_name]\n association_name = \"_#{attribute_name}\"\n class_name = options[:class_name] || HasSettings.config[:settings_class_name]\n \n has_many association_name.to_sym,\n :class_name => class_name,\n :as => :configurable,\n :dependent => :destroy\n \n define_method attribute_name do\n instance_variable_get(:\"@#{attribute_name.to_s}\") || instance_variable_set(:\"@#{attribute_name.to_s}\", SettingsAccessor.new(self, association_name.to_sym, options[:inherit]))\n end\n end", "def config_setting\n @config_access_name = \"config_setting\"\n @setting ||= Setting.new(ContainerAdapter.new(self))\n end", "def settings=(setting_options = [])\n # for arrays, set in raw form \n @settings = if setting_options.is_a?(Array)\n setting_options\n # set optional shortcuts for settings\n # :keyword_match_setting => { :opt_in => false } # =>\n # { :xsi_type => 'KeywordMatchSetting', :opt_in => false }\n elsif setting_options.is_a?(Hash)\n setting_options.map do |key, values|\n { :xsi_type => key.to_s.camelcase }.merge(values).symbolize_keys\n end\n end\n end", "def wrap_at=(setting)\n @wrap_at = setting == :auto ? output_cols : setting\n end", "def setting_name\n return @setting_name\n end", "def default_setting_class\n PDK::Config::Setting\n end", "def set_standard_defaults( opts = self.opts )\n\n # We set NOT NULL on everything by default, but note the ?\n # syntax (like Text?) which declares the column as NULL.\n\n set_defaults :global, :null => false\n\n # We also like our keys unsigned, so we allow setting that, too.\n # Unfortunately, :unsigned currently works only with :integer,\n # not the default :Integer, and :integer can't be specified for compound keys,\n # so we have to use the callback to set the type only at correct times.\n # Furthermore, Postgres's autoincrementing serials only work with Integer,\n # so we set the type only as long as the unsigned keys are requested.\n\n unsigned_keys = !! opts[ :unsigned_keys ]\n\n set_defaults :Key, :integer, :unsigned => unsigned_keys\n set_defaults :primary_key, :unsigned => unsigned_keys do |opts,args,table|\n opts[ :type ] ||= :integer unless args.first.is_a? Array or not opts[ :unsigned ]\n end\n set_defaults :foreign_key, :key => :id, :unsigned => unsigned_keys do |opts,args,table|\n opts[ :type ] ||= :integer unless args.first.is_a? Array or not opts[ :unsigned ]\n end\n\n # Save some typing for unique and fulltext indexes.\n\n set_defaults :unique, :index, :unique => true\n set_defaults :fulltext, :index, :type => :full_text do |opts,args,table|\n opts[ :name ] ||= [ table, *args, :fulltext ].join( '_' ).to_sym\n end\n\n # Type shortcuts we use frequently.\n\n set_defaults :Bool, :TrueClass\n set_defaults :True, :TrueClass, :default => true\n set_defaults :False, :TrueClass, :default => false\n\n set_defaults :Signed, :integer, :unsigned => false\n set_defaults :Unsigned, :integer, :unsigned => ! 
opts[ :signed_unsigned ]\n\n set_defaults :String, :text => false\n set_defaults :Text, :String, :text => true\n\n # We want times to be stored as 4 byte timestamps, however\n # we have to be careful to turn off the MySQL autoupdate behavior.\n # That's why we have to set defaults explicitly.\n\n default_time = ( opts[ :zero_timestamps ] || ( opts[ :mysql_timestamps ] && opts[ :zero_timestamps ].nil? ) ) ? ZERO_TIME : DEFAULT_TIME\n set_defaults :Time, :timestamp, :default => default_time\n set_defaults :Time?, :timestamp, :default => nil\n\n self\n end", "def owner_class_attribute_default\n if owner_class.connected? && owner_class.table_exists?\n owner_class.column_defaults[attribute.to_s]\n end\n end", "def call\n setting_item = build_setting_item(name, type, default)\n register_setting setting_item\n define_setting setting_item\n setting_item\n end", "def create_setting\n if setting_options.is_a?(Hash)\n setting_options[:options][:validations] = setting_options[:validations]\n setting = Supports::Settingable::Models::Setting.new name: setting_options[:name]\n setting.options = setting_options[:options]\n setting.settingable= self\n setting.save\n end\n end", "def setting_name=(value)\n @setting_name = value\n end", "def metadata_only=(setting)\r\n end", "def settings\n Typesettings.new(self.typesettings)\n end", "def locking_column=(value)\n reload_schema_from_cache\n @locking_column = value.to_s\n end", "def locking_column=(value)\n reload_schema_from_cache\n @locking_column = value.to_s\n end", "def bit_wise_columns_config\n @b_w_c_c ||= {\n admin_action_types: admin_action_types_config,\n qualify_types: qualify_types_config\n }\n end", "def setting_params(params)\n params.tap do\n params[:type] = ApplicationSetting.types[params[:type]] || \"ApplicationSetting::StringSetting\"\n end\n end", "def settings_for_node\n cluster_name = self.parent.name.to_sym\n cluster_role = self.name.to_sym\n node_settings = {\n :user_data => { :attributes => { :run_list => [] } },\n :cluster_name => cluster_name,\n :cluster_role => cluster_role,\n }.deep_merge(Settings)\n node_settings.delete :pools\n raise \"Please define the '#{cluster_name}' cluster and the '#{cluster_role}' role in your ~/.chef/cluster_chef.yaml\" if (Settings[:pools][cluster_name].blank? || Settings[:pools][cluster_name][cluster_role].blank?)\n node_settings = node_settings.deep_merge(\n Settings[:pools][cluster_name][:common] ||{ }).deep_merge(\n Settings[:pools][cluster_name][cluster_role] ||{ })\n configure_aws_region node_settings\n node_settings\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The pagy gem's pagy_next_url method builds the next-page URL from the request URL, but sometimes we want to specify the base path ourselves. That is what this method does.
def next_url_for_path(path, pagy) return unless pagy.next url = URI.parse(path); url_query = Rack::Utils.parse_query url.query url.query = Rack::Utils.build_query url_query.merge(pagy.vars[:page_param].to_s => pagy.next) url.to_s end
[ "def next_url\n next_page ? url(next_page) : nil\n end", "def next_page_url\n \"#{request.path}?page=#{@page + 1}\"\n end", "def next_page_path; end", "def link_next_page; target_uri(next_page); end", "def paginable_base_url(page = 1)\n return url_for(@paginable_path_params.merge({ controller: @paginable_params[:controller],\n action: @paginable_params[:action], page: page }))\n end", "def paginable_base_url(page = 1)\n url_params = @paginable_path_params.merge(\n controller: @paginable_params[:controller],\n action: @paginable_params[:action],\n page: page\n )\n url_for(url_params)\n end", "def paginable_base_url(page = 1)\n @args = @args.with_indifferent_access\n url_params = @paginable_path_params.merge(\n controller: @args[:controller],\n action: @args[:action],\n page: page\n )\n url_for(url_params)\n end", "def get_next_url\n @index = @index + 1\n link = @url.to_s + \"?PageNumber=\"\n link = link + @index.to_s\n \n return link\nend", "def find_next_url(page)\r\n raise 'Define me!'\r\n end", "def next_page_link\n paging['next'] if paging\n end", "def next\n perform_request(next_page_uri) if next?\n end", "def paginable_base_url(page = 1)\n options = { controller: @paginable_controller || params[:controller],\n action: @paginable_action || params[:action], page: page }\n if @paginable_path_params.present?\n options = @paginable_path_params.merge(options)\n end\n return url_for(options)\n end", "def next_page_url(paginated_array)\r\n url_for controller: 'products' ,action: 'index',page: paginated_array.next_page\r\n end", "def get_next_page(xml)\n #次のURLをGET\n doc = REXML::Document.new xml\n next_url=''\n doc.root.each_element_with_attribute('rel','next'){|link|\n next_url= link.attributes['href']\n }\n uri = URI.parse(next_url)\n #次がなければnil返して終了\n if nil == uri.path || nil == uri.query then\n return nil\n end\n return uri.path+\"?\"+uri.query\n end", "def next_page\n api.send(self.method, options.merge(:page => self.page.to_i + 1)) if self.page.to_i < self.pages.to_i\n end", "def next_page\n anchors = get_url.css(\"a.paginatorActive\")\n if anchors.last.children[0].text == \"volgende\"\n @url = anchors.last.attributes[\"href\"].value\n else\n @url = \"\"\n end\n end", "def url_for_pagination(page, path = request.path, q = request.query_string)\n # Remove any current reference to page in the query string\n q = q.to_s.gsub(/page=(-?[\\d]+)(&?)/, '')\n # Assemble new link\n link = \"#{path}?page=#{page}&#{q}\"\n link = link[0..-2] if link[-1..-1] == '&' # Strip trailing ampersand\n link\n end", "def parse\n super\n if next_page_url\n @doc = get_document(URI(next_page_url))\n self.parse\n end\n self\n end", "def set_init_next_page_path\n ['shared', get_next_path_name].join('/')\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /magic_item_names GET /magic_item_names.json
def index @magic_item_names = MagicItemName.all end
[ "def get_item_names\n items = []\n Static.get_item_list.values.each do |f|\n items << f[\"name\"]\n end\n items\n end", "def name_list\n begin\n @products = Product.pluck(:name)\n render json: { names: @products }, status: 200\n rescue => exception\n render json: { errors: exception }\n end\n end", "def index\n @item_names = ItemName.all\n end", "def names()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Worksheets::Item::Names::NamesRequestBuilder.new(@path_parameters, @request_adapter)\n end", "def index\n @material_item_names = MaterialItemName.all\n end", "def item_name\n name\n end", "def autocomplete_menu_item_name\n menu_items = MenuItem.select('DISTINCT name').where('name LIKE ?', params[:term] + '%').order(:name)\n render :json => menu_items.collect {|item| {\"id\" => item.id, \"label\" => item.name, \"value\" => item.name}}\n end", "def names()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Names::NamesRequestBuilder.new(@path_parameters, @request_adapter)\n end", "def item_names()\n return items.map { |item| item.name }.join(\", \")\n end", "def index\n @item_selected_names = ItemSelectedName.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @item_selected_names }\n end\n end", "def name\n return \"#{@item[:name]}\"\n end", "def name\n Item.find(item_id).name\n end", "def list_items(url, username, password)\n uri = URI(url)\n req = Net::HTTP::Get.new(uri)\n req.basic_auth username, password\n response = Net::HTTP.start(uri.hostname, uri.port) {|http|\n http.request(req)\n }\n item_list = JSON.parse response\n items = []\n item_list.each { |item|\n item_list << item[:metadata][:name]\n }\n return item_list\n\nend", "def products_by_name\n render json: Product.where(\"name LIKE ? OR name LIKE ?\", \"#{params[:name]}%\", \"%#{params[:name]}%\").offset(params[:offset]).limit(20).map(&:simple_info), status: :ok\n end", "def item\n # Url generated from Js script function => getitem() of _form.html.erb file under Views of different controllers\n @item = Report.where(\"user_id = ?\" , current_user.id).pluck(:item_name )\n # send item_names' in form of json\n render json: @item\n end", "def repository_name (item)\n @response[item][\"name\"]\n end", "def loop_item_names\n @section = Section.find(params[:id])\n authorize! :questions, @section\n @item_names = @section.loop_item_names if @section && @section.looping? && @section.loop_item_type.present?\n respond_to do |format|\n format.js { render :json => (@item_names ? @item_names.to_json : nil), :callback => params[:callback] }\n end\n end", "def get_names(spicy_foods)\n spicy_foods.map { |spicy_food_hash| spicy_food_hash[:name] }\nend", "def list_names # :nologin:\n query = create_query(:Name, :all, :by => :name)\n show_selected_names(query)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /magic_item_names POST /magic_item_names.json
def create @magic_item_name = MagicItemName.new(magic_item_name_params) respond_to do |format| if @magic_item_name.save format.html { redirect_to @magic_item_name, notice: 'Magic item name was successfully created.' } format.json { render :show, status: :created, location: @magic_item_name } else format.html { render :new } format.json { render json: @magic_item_name.errors, status: :unprocessable_entity } end end end
[ "def index\n @magic_item_names = MagicItemName.all\n end", "def do_name( items )\n @callback[ OK, items ]\n end", "def item_name\n name\n end", "def update_item_names\n #puts \"Display number of #{self.name} changed. Updating names of '#{items.count}' Items....\"\n items.each { | i | i.populate_name.save }\n end", "def create\n @item_name = ItemName.new(item_name_params)\n\n respond_to do |format|\n if @item_name.save\n format.html { redirect_to @item_name, notice: 'Item name was successfully created.' }\n format.json { render :show, status: :created, location: @item_name }\n else\n format.html { render :new }\n format.json { render json: @item_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def get_item_names\n items = []\n Static.get_item_list.values.each do |f|\n items << f[\"name\"]\n end\n items\n end", "def ids_names(items)\n\t\titems.reject(&:new_record?).collect {|item| {id: item.id, name: item.name.html_escape}}\n\tend", "def names()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Worksheets::Item::Names::NamesRequestBuilder.new(@path_parameters, @request_adapter)\n end", "def autocomplete_menu_item_name\n menu_items = MenuItem.select('DISTINCT name').where('name LIKE ?', params[:term] + '%').order(:name)\n render :json => menu_items.collect {|item| {\"id\" => item.id, \"label\" => item.name, \"value\" => item.name}}\n end", "def create\n item = list.items.create!(item_params)\n render json: item, status: 201\n end", "def create\n @request_item = RequestItem.new(request_item_params)\n @request_item.item = Item.new(name: params[:request_item][:item][:name])\n\n if @request_item.save\n render json: @request_item \n else\n render json: @request_item.errors, status: :bad_request\n end\n end", "def index\n @item_names = ItemName.all\n end", "def names()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Names::NamesRequestBuilder.new(@path_parameters, @request_adapter)\n end", "def name_list\n begin\n @products = Product.pluck(:name)\n render json: { names: @products }, status: 200\n rescue => exception\n render json: { errors: exception }\n end\n end", "def name\n return \"#{@item[:name]}\"\n end", "def update\n respond_to do |format|\n if @magic_item_name.update(magic_item_name_params)\n format.html { redirect_to @magic_item_name, notice: 'Magic item name was successfully updated.' }\n format.json { render :show, status: :ok, location: @magic_item_name }\n else\n format.html { render :edit }\n format.json { render json: @magic_item_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def item\n # Url generated from Js script function => getitem() of _form.html.erb file under Views of different controllers\n @item = Report.where(\"user_id = ?\" , current_user.id).pluck(:item_name )\n # send item_names' in form of json\n render json: @item\n end", "def create\n @material_item_name = MaterialItemName.new(material_item_name_params)\n\n respond_to do |format|\n if @material_item_name.save\n format.html { redirect_to @material_item_name, notice: 'Material item name was successfully created.' }\n format.json { render :show, status: :created, location: @material_item_name }\n else\n format.html { render :new }\n format.json { render json: @material_item_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @magic_item = MagicItem.new(magic_item_params)\n\n respond_to do |format|\n if @magic_item.save\n format.html { redirect_to @magic_item, notice: 'Magic item was successfully created.' 
}\n format.json { render :show, status: :created, location: @magic_item }\n else\n format.html { render :new }\n format.json { render json: @magic_item.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /magic_item_names/1 PATCH/PUT /magic_item_names/1.json
def update respond_to do |format| if @magic_item_name.update(magic_item_name_params) format.html { redirect_to @magic_item_name, notice: 'Magic item name was successfully updated.' } format.json { render :show, status: :ok, location: @magic_item_name } else format.html { render :edit } format.json { render json: @magic_item_name.errors, status: :unprocessable_entity } end end end
[ "def _update_item(http, headers, path, body, name)\n resp = retry_request(http, \"PATCH\", path, body, headers)\n if resp.is_a?(Net::HTTPOK)\n Chef::Log.info(\"Updated keystone item '#{name}'\")\n else\n _raise_error(resp, \"Unable to update item '#{name}'\", \"_update_item\")\n end\nend", "def update_item token, item_id, name, description\n uri = URI.parse \"https://#{get_hostname(token)}/sf/v3/Items(#{item_id})\"\n puts uri\n \n http = Net::HTTP.new uri.host, uri.port\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_PEER\n \n item = {\"Name\"=>name, \"Description\"=>description}\n \n request = Net::HTTP::Patch.new uri.request_uri \n request[\"Content-Type\"] = \"application/json\"\n request[\"Authorization\"] = get_authorization_header(token)\n request.body = item.to_json\n \n response = http.request request\n puts \"#{response.code} #{response.message}\"\n \n if response.kind_of? Net::HTTPSuccess\n updated_item = JSON.parse response.body\n puts \"Updated Item: #{updated_item['Id']}\"\n end \nend", "def update\n\n #update the item of request_item\n if (params[:request_item].present?)\n @request_item.item = params[:request_item][:item].present? ? Item.new(name: params[:request_item][:item][:name]) : @request_item.item\n end\n #update all other parameters\n if @request_item.update(request_item_params)\n render json: @request_item\n else\n render json: @request_item.errors, status: :bad_request\n end\n\n end", "def update\n @itemname = Itemname.find(params[:id])\n\n respond_to do |format|\n if @itemname.update_attributes(params[:itemname])\n format.html { redirect_to @itemname, notice: 'Item actualizado.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @itemname.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @magic_item.update(magic_item_params)\n format.html { redirect_to @magic_item, notice: 'Magic item was successfully updated.' }\n format.json { render :show, status: :ok, location: @magic_item }\n else\n format.html { render :edit }\n format.json { render json: @magic_item.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_item_names\n #puts \"Display number of #{self.name} changed. Updating names of '#{items.count}' Items....\"\n items.each { | i | i.populate_name.save }\n end", "def update\n @item_alt_name = ItemAltName.find(params[:id])\n\n respond_to do |format|\n if @item_alt_name.update_attributes(item_alt_name_params)\n format.html { redirect_to item_alt_names_path, notice: 'Alt name was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_alt_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @material_item_name.update(material_item_name_params)\n format.html { redirect_to @material_item_name, notice: 'Material item name was successfully updated.' }\n format.json { render :show, status: :ok, location: @material_item_name }\n else\n format.html { render :edit }\n format.json { render json: @material_item_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n json_response(@food_item.update!(food_item_params))\n end", "def rename(id, name = \"\")\n item = Item.find(id)\n item.class == Item ? 
item.update(:name => name) : item\n end", "def update_name(new_name)\n ensure_uri\n response = @client.rest_put(@data['uri'], 'body' => { 'name' => new_name, 'type' => 'ArtifactsBundle' })\n @client.response_handler(response)\n @data['name'] = new_name\n true\n end", "def update_qty(list_items, item_name, new_qty)\n raise ArguementError.new(\"This item does not exist\") unless list_items.include?(item_name)\n list_items[item_name] = item_qty\nend", "def update\n\n if @api_v1_item.update(api_v1_item_params)\n render json: @api_v1_item\n else\n render json: @api_v1_item.errors\n end\n end", "def update\n @item_selected_name = ItemSelectedName.find(params[:id])\n\n respond_to do |format|\n if @item_selected_name.update_attributes(item_selected_name_params)\n format.html { redirect_to @item_selected_name, notice: 'Item selected name was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_selected_name.errors, status: :unprocessable_entity }\n end\n end\n end", "def rename_note\n note = Note.find(params[:note_id])\n note.update(name: params[:new_name])\n render json: note_to_json(note)\n end", "def update_rest\n @item = Item.find(params[:id])\n\n respond_to do |format|\n if @item.update_attributes(params[:item])\n flash[:notice] = 'Item was successfully updated.'\n format.html { redirect_to(@item) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @item.errors, :status => :unprocessable_entity }\n end\n end\n end", "def change_item_name\n puts \"Which item do you want to rename?\"\n item = get_item\n puts\"\\n\\n\"\n\n puts \"What's the new name for this item (#{item})?\"\n new_name = gets.chomp.to_sym\n @inv[new_name] = @inv.delete(item)\n puts \"\\n\\n\"\n\n puts \"Okay, the name's been changed!\"\n end", "def update\n respond_to do |format|\n if @custom_item.update(custom_item_params)\n format.html { redirect_to order_bill_path(@custom_item.bill.order, @custom_item.bill), notice: 'Custom item was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { redirect_to order_bill_path(@custom_item.bill.order, @custom_item.bill) }\n format.json { render json: @custom_item.errors, status: :unprocessable_entity }\n end\n \n end\n end", "def update\n item = params[:item_alias].delete(:item)\n @item_alias = ItemAlias.find(params[:id])\n @item_alias.item = Item.find_by_name(item)\n respond_to do |format|\n if @item_alias.update_attributes(params[:item_alias])\n format.html { redirect_to(@item_alias, :notice => 'Item alias was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @item_alias.errors, :status => :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /magic_item_names/1 DELETE /magic_item_names/1.json
def destroy @magic_item_name.destroy respond_to do |format| format.html { redirect_to magic_item_names_url, notice: 'Magic item name was successfully destroyed.' } format.json { head :no_content } end end
[ "def destroy\n @itemname = Itemname.find(params[:id])\n @itemname.destroy\n\n respond_to do |format|\n format.html { redirect_to itemnames_url }\n format.json { head :no_content }\n end\n end", "def destroy\n # :id here represents the name so we don't have to change the routes\n @item = Item.find_by_name(params[:id])\n p params\n logger.debug @item.inspect\n @item.destroy\n\n respond_to do |format|\n format.html { redirect_to items_url }\n format.json { head :no_content, status: 200 }\n end\n end", "def delete_item(item_name)\n if item_name =~ /\\[(\\d+)\\]/\n self.items.delete_at($1.to_i)\n else\n self.items.reject { |item| item.name == item_name }\n end\n end", "def destroy\n @item_name.destroy\n respond_to do |format|\n format.html { redirect_to item_names_url, notice: 'Item name was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def item_delete(list_name, item_name)\n storage.find_list(list_name).delete_item(item_name)\n output \"Boom! \\\"#{item_name}\\\" is gone forever.\"\n save!\n end", "def destroy\n @magic_item.destroy\n respond_to do |format|\n format.html { redirect_to magic_items_url, notice: 'Magic item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete(items)\n item_ids = items.collect { |item| item.id }\n args = {ids: item_ids.to_json}\n return @client.api_helper.command(args, \"item_delete\")\n end", "def delete_item(id)\n delete_request configure_payload(\"/items/#{id}\")\n end", "def destroy\n the_name = @equipment_item.name\n @equipment_item.destroy\n respond_to do |format|\n format.html { redirect_to equipment_items_url, status: 303, notice: t('.delete_ok', item: the_name) }\n format.json { head :no_content }\n end\n end", "def test_trying_to_delete_non_item\n r = delete \"/groceries\", name: \"not a thing\"\n assert_equal 404, r.status\n end", "def destroy\n @item_alt_name = ItemAltName.find(params[:id])\n @item_alt_name.destroy\n\n respond_to do |format|\n format.html { redirect_to item_alt_names_path, notice: 'Alt name was successfully deleted.' }\n format.json { head :no_content }\n end\n end", "def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend", "def delete\n api_xml(category(target),:delete) if options.data or options.category\n if options.itemdef\n parse_itemdef\n uid=find_definition_uid_by_name(itemdef.name)\n response=admin_xml(\"/itemDefinitions/#{uid}\")\n verbose \"About to delete: #{REXML::XPath.first(response,'//Name/text()').value} item definition.\\n\"\n admin_xml(\"/itemDefinitions/#{uid}\",\n :delete) if itemdef.name\n end\n end", "def remove_item(list, item_name)\n list.delete(item_name)\nend", "def delete_item\n item_id = params[\"item_id\"]\n\n item = TextItem.find_by_id(item_id)\n item = Image.find_by_id(item_id) if item.nil?\n item = Collection.find_by_id(item_id) if item.nil?\n render_json :status => :not_found, :messages => \"Could not find the item with id #{item_id}.\" and return if item.nil?\n\n if item.class == Collection\n if params[\"id\"].nil?\n render_json :status => :bad_request, :messages => \"Can't delete a collection reference without providing the parent collection id. 
Please use the longer url for item deletion.\" and return\n end\n collection = Collection.find_by_id(params[\"id\"])\n else\n collection = Ownership.find_by_item_id(item_id).parent\n end\n;\n render_json :status => :not_found, :messages => \"Could not find parent collection for the item.\" and return if (collection.nil?)\n render_json :status => :forbidden, :messages => \"The user is not allowed to delete from this collection.\" and return if (!collection.delete?(@user, @client))\n\n collection.delete_item(item_id)\n render_json :entry => {} and return\n end", "def destroy\n @material_item_name.destroy\n respond_to do |format|\n format.html { redirect_to material_item_names_url, notice: 'Material item name was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n generic_item = @specific_item.generic_item\n @specific_item.destroy\n\n respond_to do |format|\n format.html { redirect_to :back, :notice => t('notice.successfully_deleted')}\n format.json { head :no_content }\n end\n end", "def destroy\n @apiv1_item.destroy\n respond_to do |format|\n format.html { redirect_to apiv1_items_url, notice: 'Item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def test_send_delete_body_with_multiliner_name()\n # Parameters for the API call\n body = DeleteBody.from_hash(APIHelper.json_deserialize(\n '{\"name\":\"farhan\\\\nnouman\",\"field\":\"QA\"}'\n ))\n\n # Perform the API call through the SDK function\n result = @controller.send_delete_body(body)\n\n # Test response code\n assert_equal(200, @response_catcher.response.status_code)\n\n # Test whether the captured response is as we expected\n refute_nil(result)\n expected_body = JSON.parse(\n '{\"passed\":true}'\n )\n received_body = JSON.parse(@response_catcher.response.raw_body)\n assert(TestHelper.match_body(expected_body, received_body, check_values: true))\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
3 ways to register a filter

1. builtin filter
   filter.add_filter(:mtime, :passed, 30, :days)
2. custom filter
   filter.add_filter(my_filter)
   (my_filter must implement match?(path) method)
3. block filter
   filter.add_filter do |path|
     filter operations
   end
def add_filter(*args, &block)
  # 3. block filter
  if block_given?
    filter = File::Visitor::Filter::Proc.new(block)
    @filters.push(filter)
    return true
  end

  # 2. custom filter
  if (1 == args.size)
    custom_filter = args.shift
    unless (custom_filter.respond_to?(:match?))
      raise ArgumentError, "custom_filter must implement match?()"
    end
    @filters.push(custom_filter)
    return true
  end

  # 1. built-in filter
  filter_class = File::Visitor::FilterDispatcher.dispatch(args.shift)
  @filters.push(filter_class.new(*args))
  true
end
[ "def register_filter(mod); end", "def addFilter( &block )\n\t\t@filters << block\n\tend", "def add_filter(name, &block)\n raise ArgumentError, \"Expected block to be given\" if block.nil?\n\n @filters[name] = block\n end", "def define_filter(name, &block)\n filters[name.to_sym] = block\n nil\n end", "def define_filter(name, &block)\n @filters[name.to_sym] = block\n nil\n end", "def filter(name, &block)\n @filters[name.to_s] = block\n end", "def filter_helper(filter)\n filter = FileFilter.new(filter)\n assert filter.match(\"file\")\n assert !filter.match(\"other\")\n end", "def register_argument_filter(name, &block)\n @argument_filters[name.to_sym] = block\n end", "def filter(&block)\n actions << {\n 'type' => 'filter',\n 'block' => block\n }\n self\n end", "def apply_filter\n end", "def add_filter(filter)\n @filters << filter\n end", "def named_filter; end", "def instance_filter(*args, &block)\n instance_filters << [args, block]\n end", "def add_filter(filter)\n @filters << filter\n self\n end", "def filter filter\n @befores << filter\n @afters << filter\n end", "def filter(filter_name, &block)\n raise BlockRequired unless (block_given?)\n\n @filters.synchronize do\n @filters[filter_name] = block\n end\n \n assign_next_task(filter_name)\n end", "def global_filter; end", "def make_filter(parts); end", "def before method_or_filter, options={}, &block\n _add_filter(:before, method_or_filter, options, block)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
dir: target directory
mode:
  file - visit all files
  dir  - visit directory only
handler: proc to call
def visit_with_mode(dir, mode, &handler)
  assert_directory(dir)

  entries = Dir.entries(dir)
               .sort_by { |filename| filename }
  if @direction == :desc
    entries.reverse!
  end

  entries.each do |entry|
    next if (dot_dir?(entry) && !@visit_dot_dir)

    abs_path = File.join(dir, entry)
    if File.directory?(abs_path)
      mode == :dir && handler.call(abs_path)
      visit_with_mode(abs_path, mode, &handler)
    else
      if mode == :file && target?(abs_path)
        handler.call(abs_path)
      end
    end
  end
end
[ "def process_directory(dir, files, rec)\n dir.children(true).each do |f|\n # ignore sub-directories\n if f.directory?\n if rec == false\n next\n else\n process_directory(f.expand_path, files, rec)\n end\n end\n process_file(f.expand_path, files)\n end\n end", "def process_dir( dir )\n #puts \"Scanning #{dir}\"\n Dir.foreach( dir ) do |entry|\n if entry.start_with?('.')\n next\n end\n path = \"#{dir}/#{entry}\"\n if Dir.exist?(path)\n process_dir(path)\n elsif entry.end_with?( '.rb' )\n process_file( path )\n end\n end\n end", "def stat_directory(dir); end", "def recursive_dir_descend(dir,regexp,action)\n # print \"dir : #{dir}\\n\"\n\n olddir = Dir.pwd\n dirp = Dir.open(dir)\n Dir.chdir(dir)\n pwd = Dir.pwd\n @dirN += 1\n\n for file in dirp\n file.chomp\n next if file =~ /^\\.\\.?$/\n filename = \"#{pwd}/#{file}\"\n\n if File::directory?(filename)\n recursive_dir_descend(filename,regexp,action)\n else\n @fileN += 1\n if file =~ regexp\n # evaluate action\n eval action\n end\n end\n end\n Dir.chdir(olddir)\n end", "def recursiveDirectoryDescend(dir,regexp,action)\n # print \"dir : #{dir}\\n\"\n\n olddir = Dir.pwd\n dirp = Dir.open(dir)\n Dir.chdir(dir)\n pwd = Dir.pwd\n @@dirN += 1\n\n for file in dirp\n file.chomp\n next if file =~ /^\\.\\.?$/\n\n fullname = \"#{pwd}/#{file}\"\n\n if File::directory?(fullname)\n recursiveDirectoryDescend(fullname,regexp,action)\n else\n @@fileN += 1\n if file =~ regexp\n # evaluate action\n eval action\n end\n end\n end\n Dir.chdir(olddir)\nend", "def files(rootDir)\n Dir.foreach(rootDir) do |dir|\n if dir != \".\" && dir != \"..\"\n puts \"Processing \" + dir\n Dir.foreach(rootDir + \"/\" + dir) do |file|\n if file != \".\" && file != \"..\"\n open(rootDir + \"/\" + dir + \"/\" + file) do |f|\n yield(f)\n end\n end\n end\n end\n end\nend", "def for(file_or_dir); end", "def traverse(dir, base=dir, &block)\n return unless File.directory?(dir)\n Dir.new(dir).each do |file|\n next if file == '.' or file == '..'\n path = File.join(dir, file)\n if File.directory?(path)\n traverse(path, base, &block)\n else\n block.call(path.sub(base+'/',''))\n end\n end\n end", "def traverse_dir path, ext_names = [], black_list = [], &block\n\tif File.directory? path\n\t\tDir.foreach path do |file|\n\t\t\tunless /^\\./i.match file or black_list.include?(file)\n\t\t\t\tfile = path + '/' + file\n\t\t\t\tif File.directory? file\n traverse_dir(file, ext_names, black_list, &block)\n\t\t\t\telsif ext_names.empty? or ext_names.include? File.extname(file)\n\t\t\t\t\tblock.call file if block_given?\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\nend", "def traverse (directory)\n Dir.chdir(directory) do\n Dir.glob(\"**/*\") do |f|\n file = File.stat(f)\n check_file f if file.file? 
&& File.extname(f) == '.rb'\n end\n end\nend", "def in_each_dir\n dirs.each do |dir|\n Dir.chdir(dir) do\n yield(dir)\n end\n end\n end", "def walk(dir, reporter)\n dir.each_child do |child|\n next if black_listed?(child)\n\n walk_child(child, reporter)\n end\n end", "def process_directory(path)\n Dir[path].each do |file_name|\n process_file(file_name)\n end\n end", "def foreach(dir, &block)\n VirtFS.fs_lookup_call(dir) { |p| dir_foreach(p, &block) }\n end", "def dir(*) end", "def scanDir(dirname, matchfn, callfn, recurse)\n\tDir.foreach(dirname) do |filename|\n\t\tfullfilename = dirname + '/' + filename;\n\t\tif File.directory?(fullfilename)\n\t\t\tif recurse && filename != \".\" && filename != \"..\"\t\t# don't infinite loop kthx\n\t\t\t\tscanDir(fullfilename, matchfn, callfn, recurse)\n\t\t\tend\n\t\telsif matchfn.call(filename)\n\t\t\tcallfn.call(fullfilename)\n\t\tend\n\tend\nend", "def dir_foreach( *args, &block )\n warn \"Path#dir_foreach is obsoleted. Use Path#each_entry.\"\n each_entry( *args, &block )\n end", "def scanDir(dirname, matchfn, callfn, recurse)\n\tDir.foreach(dirname) do |filename|\n\t\tfullfilename = dirname + \"/\" + filename;\n\t\tif File.directory?(fullfilename)\n\t\t\tif recurse && filename != \".\" && filename != \"..\"\t\t# don't infinite loop kthx\n\t\t\t\tscanDir(fullfilename, matchfn, callfn, recurse)\n\t\t\tend\n\t\telsif matchfn.call(filename)\n\t\t\tcallfn.call(fullfilename)\n\t\tend\n\tend\nend", "def scan_directory(dir)\n p \"scanning path: #{dir}\" if @verbose\n\n dir.children.each do |child|\n if child.file? && @formats.include?(File.extname(child).downcase)\n begin\n @output_file.insert(Image.new(child.to_path).attrs_array)\n rescue EXIFR::MalformedJPEG => e\n handle_warning('malformed_jpeg', e, child.to_path) if @verbose\n rescue SystemCallError => e\n p \"An error occurred while processing image: #{child.to_path}\"\n raise SystemCallError, \"The error received was: #{e}\"\n end\n\n elsif child.directory?\n begin\n scan_directory(child)\n rescue SystemCallError => e\n handle_warning('unscannable_directory', e, child.to_path) if @verbose\n end\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns only the children with +results+
def children_with_results
  children.select(&:any_results_including_children?)
end
[ "def children_with_results(reload = false)\n children(reload).select(&:has_results_including_children?)\n end", "def children_and_child_competitions_with_results(reload = false)\n children_and_child_competitions(reload).select(&:has_results_including_children?)\n end", "def children\n if !@children\n child_result_set = @results.map { |r| r.children }\n @children = AggregateResult.process_children(child_result_set.flatten, world, target, self.inherited_tags, filter_tags)\n end\n @children\n end", "def results_list_items()\n results_list.all(\".result\")\n end", "def search_children(node, attribute, search_term)\n matches = []\n end", "def getTestNames(results)\n path = getCollectionPath(results)\n while !(String(path).end_with? Jekyll::RESULTS_URL_PATTERN || path.root?) do\n path = path.parent\n end\n return path.children\n end", "def results_list()\n results.find_element(:id, \"results\")\n end", "def children(all = false)\n sql = at_or_below_genus? ?\n \"text_name LIKE '#{text_name} %'\" :\n \"classification LIKE '%#{rank}: _#{text_name}_%'\"\n sql += \" AND correct_spelling_id IS NULL\"\n return Name.where(sql).to_a if all\n Name.all_ranks.reverse.each do |rank2|\n next if rank_index(rank2) >= rank_index(rank)\n matches = Name.where(\"rank = #{Name.ranks[rank2]} AND #{sql}\")\n return matches.to_a if matches.any?\n end\n []\n end", "def multifind_filter_results(options, results)\n results = multifind_collapse_results(options, results)\n\n # If we're only checking for existence, we're done here.\n if options[:check_element] == :exists?\n return results\n end\n\n # Filter all results according to the :check_element option\n filtered = results.map do |result|\n if result.is_a? Array\n next result.map do |inner|\n next apply_filter(inner, options)\n end\n end\n next apply_filter(result, options)\n end\n\n return filtered\n end", "def get_childs\n childs = Category.any_in(parent_ids: [parent.id])\n\n results = Array.new\n childs.each do |child|\n results << child\n end\n\n results\n end", "def results\n fetch unless @results\n @results\n end", "def children\n self.class.cis_from_query(\"&parent=#{id}\")\n end", "def results_list()\n results.find_element(:id, \"resultsList\")\n rescue\n return nil\n end", "def parse_results\n cur_results = Array.new\n\n @page.search('li.g').each do |item|\n cur_results << node_from_item(item)\n end\n \n update_next_page unless next_page_missing\n @results += cur_results\n \n return cur_results\n end", "def fetch_search_results(context)\n\n params = @params\n site_id = context['__site_id']\n\n matching_ids = Node.search_ids do\n\n # Site ID\n with :site_id, site_id\n\n # Node classification\n if params['classification']\n with :classification, params['classification']\n end\n\n # Parent\n if params['scope_to']\n parent_scope = context[params['scope_to']]\n with :parent_uri, parent_scope['uri']\n elsif params['parent_uri']\n with :parent_uri, params['parent_uri']\n end\n\n # Ordering\n order_by_fields = params['order_by'].blank? ? 
[] : params['order_by'].split(',')\n order_by_fields.each do |order_by_field|\n\n field_name, direction = order_by_field.gsub(/[\"']/, '').strip.split(' ', 2)\n direction = 'asc' if direction.blank?\n order_by field_name.to_sym, direction.to_sym\n\n end\n\n # Limit\n if params['limit']\n paginate :page => 1, :per_page => params['limit']\n end\n\n end\n\n results = []\n matching_ids.each do |id|\n\n node = Rails.cache.fetch \"node_id:#{site_id}:#{id}\" do\n Node.where(:site_id => site_id).find(id).to_liquid\n end\n results << node\n\n end\n\n results\n\n end", "def direct_children(extras = {})\n return [] unless might_have_children? # optimize!\n self.class.scoped(scope_hash_for_direct_children).find(:all, extras)\n end", "def intend_children\n children.select { |child| !child.has_class :hidden }\n end", "def children\n dataset.nested.filter(self.class.qualified_parent_column => self.id)\n end", "def children(options={})\n @global_page.children.all options\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns only the Races with +results+
def races_with_results
  races.select(&:any_results?)
end
[ "def races_with_results\n races_copy = races.select {|race|\n !race.results.empty?\n }\n #races_copy.sort! alphere this causes races on the public results page to sort in an unexpected order.\n races_copy\n end", "def races_with_results\n races.select { |race| !race.results.empty? }.sort\n end", "def source_results(race)\n Result.all(\n :include => [:race, {:person => :team}, :team, {:race => [:event, :category]}],\n :conditions => [%Q{\n members_only_place between 1 AND #{point_schedule.size - 1}\n and results.person_id is not null\n and events.type = 'SingleDayEvent' \n and events.sanctioned_by = \"#{RacingAssociation.current.default_sanctioned_by}\"\n and categories.id in (#{category_ids_for(race).join(\", \")})\n and (races.bar_points > 0 or (races.bar_points is null and events.bar_points > 0))\n and events.date between '#{date.beginning_of_year}' and '#{date.end_of_year}'\n }],\n :order => 'person_id'\n )\n end", "def source_results(race)\n Result.find(:all,\n :include => [:race, {:racer => :team}, :team, {:race => [{:standings => :event}, :category]}],\n :conditions => [%Q{members_only_place between 1 AND #{point_schedule.size - 1}\n and events.type = 'SingleDayEvent' \n and events.sanctioned_by = \"#{ASSOCIATION.short_name}\"\n and categories.id in (#{category_ids_for(race)})\n and (races.bar_points > 0 or (races.bar_points is null and standings.bar_points > 0))\n and events.date >= '#{date.year}-01-01' \n and events.date <= '#{date.year}-12-31'}],\n :order => 'racer_id'\n )\n end", "def results\n sql = []\n conditions = [\"\"]\n\n # event_id\n if (params[:event_id])\n sql << \"races.event_id = ?\"\n conditions << params[:event_id]\n end\n\n # person_id\n if (params[:person_id])\n sql << \"results.person_id = ?\"\n conditions << params[:person_id]\n end\n\n if (sql)\n conditions[0] = sql.join(\" AND \");\n Race.paginate(\n :page => params[:page],\n :per_page => 10,\n :conditions => conditions,\n :include => { :results => [ :person, :category ] }\n )\n else\n []\n end\n end", "def source_results(race)\n Result.find_by_sql(\n %Q{SELECT results.*\n FROM results \n LEFT OUTER JOIN races ON races.id = results.race_id\n LEFT OUTER JOIN events ON races.event_id = events.id\n LEFT OUTER JOIN categories ON races.category_id = categories.id\n where results.id in (select source_result_id\n from scores\n LEFT OUTER JOIN results as competition_results\n ON competition_results.id = scores.competition_result_id\n LEFT OUTER JOIN races as competition_races\n ON competition_races.id = competition_results.race_id\n LEFT OUTER JOIN events as competition_events\n ON competition_races.event_id = competition_events.id\n where competition_events.type = 'Bar'\n and competition_events.date between'#{date.beginning_of_year}' and '#{date.end_of_year}')\n order by results.team_id}\n )\n end", "def source_results(race)\n Result.find_by_sql(\n %Q{SELECT results.points, results.id as id, race_id, racer_id, team_id, place \n FROM results \n LEFT OUTER JOIN races ON races.id = results.race_id \n LEFT OUTER JOIN standings ON races.standings_id = standings.id \n LEFT OUTER JOIN events ON standings.event_id = events.id \n LEFT OUTER JOIN categories ON races.category_id = categories.id \n where results.id in (select source_result_id \n from scores \n LEFT OUTER JOIN results as competition_results \n ON competition_results.id = scores.competition_result_id\n LEFT OUTER JOIN races as competition_races \n ON competition_races.id = competition_results.race_id\n LEFT OUTER JOIN standings as competition_standings \n ON 
competition_races.standings_id = competition_standings.id \n LEFT OUTER JOIN events as competition_events \n ON competition_standings.event_id = competition_events.id \n where competition_events.type = 'Bar' \n and competition_events.date >= '#{date.year}-01-01' \n and competition_events.date <= '#{date.year}-12-31')\n order by team_id}\n )\n end", "def index\n @race_results = RaceResult.all\n end", "def get_results\n\t\trace_id = params[:id]\n\t\t\n\t\trace = Race.find_by_id(race_id)\n\t\tresults = Result.get_race_results(race_id)\n\t\t\n\t\trender :json=>{:results=>results}\n\tend", "def create_competition_results_for(results, race)\n competition_result = nil\n for source_result in results\n logger.debug(\"#{self.class.name} scoring result: #{source_result.event.name} | #{source_result.race.name} | #{source_result.place} | #{source_result.name} | #{source_result.team_name}\") if logger.debug?\n # e.g., MbraTeamBar scoring result: Belt Creek Omnium - Highwood TT | Master A Men | 4 | Steve Zellmer | Northern Rockies Cycling\n\n # I don't need this now: no allowance for TTT or tandem teams with members of multiple teams\n # teams = extract_teams_from(source_result)\n # logger.debug(\"#{self.class.name} teams for result: #{teams}\") if logger.debug?\n # for team in teams\n\n if member?(source_result.team, source_result.date)\n\n if first_result_for_team?(source_result, competition_result)\n # Bit of a hack here, because we split tandem team results into two results,\n # we can't guarantee that results are in team-order.\n # So 'first result' really means 'not the same as last result'\n # race here is the category in the competition\n competition_result = race.results.detect {|result| result.team == source_result.team}\n competition_result = race.results.create(:team => source_result.team) if competition_result.nil?\n end\n\n # limit to top two results for team for each source race by \n # removing the lowest score for the event after every result.\n # I do it this way because results do not arrive in postion order.\n # SQL sorting does not work as position is a varchar field.\n score = competition_result.scores.create(\n :source_result => source_result,\n :competition_result => competition_result,\n :points => points_for(source_result).to_f #/ teams.size\n )\n this_points = score.points\n logger.debug(\"#{self.class.name} competition result: #{competition_result.event.name} | #{competition_result.race.name} | #{competition_result.place} | #{competition_result.team_name}\") if logger.debug?\n # e.g., MbraTeamBar competition result: 2009 MBRA BAT | Master A Men | | Gallatin Alpine Sports/Intrinsik Architecture\n scores_for_event = competition_result.scores.select{ |s| s.source_result.event.name == source_result.event.name}\n if scores_for_event.size > 2\n competition_result.scores.delete(scores_for_event.min { |a,b| a.points <=> b.points })\n end\n end\n end\n end", "def event_results(reload = true)\n if reload\n return Result.\n includes(:team, :person, :scores, :category, {race: [:event, :category]}).\n where(\"people.id\" => id).\n reject {|r| r.competition_result?}\n end\n results.reject do |result|\n result.competition_result?\n end\n end", "def return_run_workouts\n\t\tjoins(:run_type).where.not(\"run_types.name=?\", \"Race\")\n\tend", "def event_results(reload = true)\n if reload\n return Result\n .includes(:team, :person, :scores, :category, race: %i[event category])\n .where(\"people.id\" => id)\n .reject(&:competition_result?)\n end\n results.reject(&:competition_result?)\n end", 
"def races\n entries = Contestant.all.select do |contestant|\n # puts \"#{contestant.corgi.name} == #{self.name}\"\n contestant.corgi == self\n end.map do |contestant|\n contestant.race\n end\n end", "def fetch_results(type)\n results.select { |r| r.type == type }\n end", "def exclude_results(results, options)\n exclude_oob = options.fetch(:excluding_out_of_bounds, false)\n exclude_occupied = options.fetch(:excluding_occupied_spaces, false)\n\n results = excluding_out_of_bounds(results) if exclude_oob\n results = excluding_occupied_spaces(results) if exclude_occupied\n results\n end", "def search_results(*args)\n ranks_and_ids = search_result_ranks_and_ids(*args)\n search_results_from_ids(ranks_and_ids.map(&:last))\n end", "def filter_by_result(scoped, result = nil)\n result ? scoped.where(activities: { result: result }) : scoped\n end", "def results\n \t\t@teams = Team.all\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns an array with the relative path of the selected songs (Overrides def on MusicBaseController)
def get_selected_paths
  return get_songs_column( :path )
end
[ "def get_selected_paths\n return get_songs_relation.pluck( :path )\n end", "def get_selected_paths\n return get_songs_search_from_params.songs_found.map { |s| s[SongsSearch::IDX_SONG_PATH] }\n end", "def path\n File.expand_path File.join(songs.first.path, '..').gsub(' ','\\ ')\n end", "def audio_paths\n Hash[@media[:music].map { |entry| [audio_path(entry[:ext]), entry[:path]] }]\n end", "def get_selected_play_list_song_ids\n get_selected_ids( :play_list_song_ids )\n end", "def songs\n\t\t@musician = User.find_by_id(current_user.id)\n\tend", "def index\n @music_sheets = @song.music_sheets\n end", "def full_path\n if path[0] == '/'\n path\n else\n File.join(Play.music_path,path)\n end\n end", "def generate_filepath_music\n data = get_fat32_compliant_data\n\n path = []\n path << root_dir\n path << \"music\"\n path << artist_first_letter(data['artist'])\n path << data['artist']\n path << \"#{data['year']} - #{data['album']}\"\n # Adding cd subdir if cd specified\n path << data['cd'] if data['cd'] != \"\"\n path << \"#{data['index']} - #{data['title']}#{data['ext']}\"\n\n return path.join(\"/\")\n end", "def play_songs\n MusicImporter.new(path).play_song\n end", "def files\n list = Dir[path + \"/*.mp3\"].map {|full_path| full_path.split(\"/\").last}\n end", "def index\n @song_parts = SongPart.all_parts\n end", "def all\n Pathname.glob( @path.join('*.{wav,wave}') ).collect(&:basename).collect(&:to_s)\n end", "def files # loads all the mp3 files in the path directory / normalizes the filename to just the mp3 filename with no path\n @files ||= Dir.glob(\"#{path}/*.mp3\").collect{ |f| f.gsub(\"#{path}/\", \"\") } # then using the .collect method and gsub to just return the filename with out path\n end", "def songs_found\n get_songs\n return @songs\n end", "def path\n new_path = parent ? parent.path : []\n new_path << [kind, (id || name)]\n end", "def songs\n @songs ||= Song.where(:artist => info(:name))\n end", "def full_path(settings)\n return File.join( settings.music_dir_path , self.path )\n end", "def get_all_songs\n @songs.values\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
NOTE do not pattern your production application after this (refer to test_should_create_customer_profile_transaction_auth_only_and_then_prior_auth_capture_requests instead as the correct way to do an auth then capture). capture_only "is used to complete a previously authorized transaction that was not originally submitted through the payment gateway or that required voice authorization" and can in some situations perform an auth_capture leaking the original authorization.
def test_should_create_customer_profile_transaction_auth_only_and_then_capture_only_requests
  @gateway.expects(:ssl_post).returns(successful_create_customer_profile_transaction_response(:auth_only))

  assert response = @gateway.create_customer_profile_transaction(
    transaction: {
      customer_profile_id: @customer_profile_id,
      customer_payment_profile_id: @customer_payment_profile_id,
      type: :auth_only,
      amount: @amount
    }
  )
  assert_instance_of Response, response
  assert_success response
  assert_equal response.authorization, response.params['direct_response']['transaction_id']
  assert_equal 'This transaction has been approved.', response.params['direct_response']['message']
  assert_equal 'auth_only', response.params['direct_response']['transaction_type']
  assert_equal 'Gw4NGI', approval_code = response.params['direct_response']['approval_code']

  @gateway.expects(:ssl_post).returns(successful_create_customer_profile_transaction_response(:capture_only))

  assert response = @gateway.create_customer_profile_transaction(
    transaction: {
      customer_profile_id: @customer_profile_id,
      customer_payment_profile_id: @customer_payment_profile_id,
      type: :capture_only,
      amount: @amount,
      approval_code: approval_code
    }
  )
  assert_instance_of Response, response
  assert_success response
  assert_equal 'This transaction has been approved.', response.params['direct_response']['message']
end
[ "def capture(options)\n hash = { transaction: { type: :prior_auth_capture,\n amount: options.fetch(:amount),\n customer_profile_id: options.fetch(:customer_profile_id),\n customer_payment_profile_id: options.fetch(:customer_payment_profile_id),\n trans_id: options.fetch(:transaction_id)}}\n response = gateway.create_customer_profile_transaction(hash)\n transaction_id = response.success? ? response.params['direct_response']['transaction_id'] : nil\n [transaction_id, response]\n end", "def authorize_and_capture\n \n resp = StdClass.new \n if self.financial_status == Invoice::FINANCIAL_STATUS_CAPTURED\n resp.error = \"Funds for this invoice have already been captured.\"\n else\n \n sc = self.site.store_config \n case sc.pp_name \n when StoreConfig::PAYMENT_PROCESSOR_STRIPE\n \n Stripe.api_key = sc.stripe_secret_key.strip\n bt = nil\n begin\n c = Stripe::Charge.create(\n :amount => (self.total * 100).to_i,\n :currency => 'usd',\n :customer => self.customer.stripe_customer_id,\n :capture => true,\n :metadata => { :invoice_id => self.id },\n :statement_descriptor => \"#{self.site.description.truncate(22)}\"\n ) \n rescue Exception => ex\n resp.error = \"Error during capture process\\n#{ex.message}\" \n end \n if resp.error.nil?\n InvoiceTransaction.create(\n :invoice_id => self.id,\n :transaction_id => c.id,\n :transaction_type => InvoiceTransaction::TYPE_AUTHCAP,\n :payment_processor => sc.pp_name,\n :amount => c.amount / 100.0,\n :captured => true,\n :date_processed => DateTime.now.utc,\n :success => c.status == 'succeeded'\n )\n if c.status == 'succeeded'\n self.financial_status = Invoice::FINANCIAL_STATUS_CAPTURED\n self.save\n resp.success = true\n else\n resp.error = \"Error capturing funds.\"\n end\n end\n \n end \n end \n return resp\n end", "def authorized_with_capture(options = {})\n options = { date_authorized: Date.current, date_captured: Date.current }.merge(options)\n request_params = {\n 'customerID' => customer_id,\n 'cartID' => cart_id,\n 'orderID' => order_id,\n 'dateAuthorized' => xml_date(options[:date_authorized]),\n 'dateCaptured' => xml_date(options[:date_captured])\n }\n\n response = TaxCloud.client.request :authorized_with_capture, request_params\n TaxCloud::Responses::AuthorizedWithCapture.parse response\n end", "def capture_transaction\n data = full_params.merge(\n 'x_unique_id' => unique_id,\n 'x_invoice_num' => invoice_num,\n 'x_auth_code' => approval_code,\n 'x_type' => \"CAPTURE_ONLY\"\n )\n\n astro_curl(@validator_url, data)\n end", "def create_transaction_auth_capture(amount, profile_id, payment_profile_id, order = nil, options = {})\n create_transaction(:auth_capture, amount, profile_id, payment_profile_id, order, options)\n end", "def authorized_with_capture(options = {})\n options = { :date_authorized => Date.today, :date_captured => Date.today }.merge(options)\n request_params = {\n 'customerID' => customer_id,\n 'cartID' => cart_id,\n 'orderID' => order_id,\n 'dateAuthorized' => options[:date_authorized],\n 'dateCaptured' => options[:date_captured]\n }.merge(TaxCloud.auth_params)\n\n response = TaxCloud.client.request :authorized_with_capture, :body => request_params\n end", "def capture(money, authorization, options = {})\n post = { trans_id: authorization }\n add_customer_data(post, options)\n commit('PRIOR_AUTH_CAPTURE', money, post)\n end", "def capture(money, authorization, options = {})\n post = {:trans_id => authorization}\n add_customer_data(post, options)\n commit('PRIOR_AUTH_CAPTURE', money, post)\n end", "def capture_payment(options = {})\n # 
transaction do\n capture = OrderTransaction.capture(amount, authorization_reference, options)\n self.save!\n self.order_transactions << capture\n\n if capture.success?\n self.payment_captured!\n else\n self.transaction_declined!\n end\n\n capture\n # end\n end", "def test_credit_card_authorize_and_capture_amount_low\n assert auth = @gateway.authorize(@amount, @credit_card, @options)\n assert_success auth\n assert_equal 'Approved', auth.message\n assert capture = @gateway.capture(@amount-100, auth.authorization, @credit_card, @options)\n assert_success capture\n assert_equal 'Approved', capture.message\n end", "def capture(money, identification, options = {})\n post = {}\n add_address(post, options)\n add_customer_data(post, options)\n post[:MASTER_ID] = identification\n post[:TRANS_TYPE] = 'CAPTURE'\n commit('PRIOR_AUTH_CAPTURE', money, post, options)\n end", "def create_transaction_prior_auth_capture(transaction_id, amount, order = nil, options = {})\n handle_transaction_id(transaction_id)\n create_transaction(:prior_auth_capture, amount, nil, nil, order, options)\n end", "def capture_payment(attributes = {})\n request = capture_payment_request(attributes)\n execute_request(request)\n end", "def capture(money, authorization, options = {})\n requires!(options, :pasref)\n requires!(options, :order_id)\n \n request = build_capture_request(authorization, options) \n commit(request, :default, options)\n end", "def capture!(verification_value = nil)\n gateway.capture(amount, card.to_active_merchant(verification_value), :ip => card.ip_address)\n end", "def can_capture?(payment)\n payment.pending? || payment.checkout?\n end", "def capture(money, authorization, options = {})\n requires!(options, :credit_card)\n\n form = {}\n add_salestax(form, options)\n add_approval_code(form, authorization)\n add_invoice(form, options)\n add_creditcard(form, options[:credit_card])\n add_customer_data(form, options)\n add_test_mode(form, options)\n commit(:capture, money, form)\n end", "def can_capture?(payment)\n payment.pending? || payment.checkout?\n end", "def capture(amount = nil)\n \n return { :error => \"This invoice doesn't seem to be authorized.\" } if !self.success\n \n ct = InvoiceTransaction.where(:parent_id => self.id, :transaction_type => InvoiceTransaction::TYPE_CAPTURE, :success => true).first \n return { :error => \"Funds for this invoice have already been captured.\" } if ct\n \n # Make sure the amount given isn't greater than the invoice total \n return { :error => \"Amount given to capture is greater than the current invoice total.\" } if amount && amount.to_f > self.invoice.total.to_f \n amount = self.invoice.total if amount.nil? \n \n resp = Caboose::StdClass.new\n sc = self.invoice.site.store_config \n case sc.pp_name\n \n when StoreConfig::PAYMENT_PROCESSOR_STRIPE\n \n Stripe.api_key = sc.stripe_secret_key.strip\n bt = nil\n begin \n c = Stripe::Charge.retrieve(self.transaction_id) \n return { :error => \"Amount given to capture is greater than the amount authorized. 
amount = #{amount}, c.amount = #{c.amount}\" } if (amount*100).to_i > c.amount \n amount = (amount.to_f * 100.0).to_i\n if amount == c.amount \n c = c.capture\n else\n c = c.capture({ :amount => amount })\n end\n bt = Stripe::BalanceTransaction.retrieve(c.balance_transaction)\n rescue Exception => ex\n resp.error = \"Error during capture process\\n#{ex.message}\" \n end\n \n if resp.error.nil?\n InvoiceTransaction.create(\n :invoice_id => self.invoice_id,\n :parent_id => self.id,\n :transaction_id => bt.id,\n :transaction_type => InvoiceTransaction::TYPE_CAPTURE,\n :payment_processor => sc.pp_name,\n :amount => bt.amount / 100.0, \n :date_processed => DateTime.strptime(bt.created.to_s, '%s'),\n :success => bt.status == 'succeeded' || bt.status == 'pending'\n )\n if bt.status == 'succeeded' || bt.status == 'pending'\n self.captured = true\n self.save \n self.invoice.financial_status = Invoice::FINANCIAL_STATUS_CAPTURED\n self.invoice.save\n resp.success = true\n else\n resp.error = \"Error capturing funds.\"\n end\n end\n \n end\n return resp\n \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get a strategy by its id
def get(id)
  self.class.strategies.select { |strategy| strategy.id == id }
end
[ "def find(id)\n id_to_adapter[id]\n end", "def strategy\n @strategy\n end", "def provider_by_id(id)\n providers.select { |provider| provider.id == id }.first\n end", "def find_by_id(id)\n service_registry[id]\n end", "def find(name)\n @strategies[name] || @aliases[name]\n end", "def sso_strategy_id\n @attributes[:sso_strategy_id]\n end", "def find(id)\n @spies[id]\n end", "def strategy\n @strategy ||= @strategy_klass.new(self)\n end", "def select_strategy(input)\n strategies.values.find { |s| s.(input.to_s) }\n end", "def get(id)\n klass.find(wrap_key(id))\n rescue ActiveResource::ResourceNotFound\n nil\n end", "def find_strategy(type)\n \"Simple::OAuth2::Strategies::#{type.to_s.camelize}\".constantize\n end", "def get_by_id(id)\n fetch(@database, id)\n end", "def get_service_by_id(id)\n require_relative 'service'\n Service.new(@api, @api.do_request(\"GET\", get_base_api_path() + \"/services/#{id}\"))\n end", "def [](label)\n _strategies[label]\n end", "def get_service_by_id(id)\n if params[:id]\n Service.find(params[:id])\n else\n nil #todo error handling.\n end\n end", "def find_by_id(client, id, options: {})\n\n self.new(parse(client.get(\"/portfolios/#{id}\", options: options)).first, client: client)\n end", "def [](label)\n strategies[label]\n end", "def find_by_id(id)\n @features.find { |feature| feature.id == id }\n end", "def find_driver(id)\n return find_by_id(@drivers, id)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /connectors
POST /connectors.json
def create
  @connector = Connector.new(connector_params)

  respond_to do |format|
    if @connector.save
      format.html { redirect_to @connector, notice: 'Connector was successfully created.' }
      format.json { render :show, status: :created, location: @connector }
    else
      format.html { render :new }
      format.json { render json: @connector.errors, status: :unprocessable_entity }
    end
  end
end
[ "def create_connector(connector_id, request)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .post()\n .go()\n end", "def create\n @url_connector = UrlConnector.new(params[:url_connector])\n\n respond_to do |format|\n if @url_connector.save\n format.html { redirect_to @url_connector, notice: 'Url connector was successfully created.' }\n format.json { render json: @url_connector, status: :created, location: @url_connector }\n else\n format.html { render action: \"new\" }\n format.json { render json: @url_connector.errors, status: :unprocessable_entity }\n end\n end\n end", "def retrieve_connectors()\n start.uri('/api/connector')\n .get()\n .go()\n end", "def create_connector!\n uuid, data = @request.params.select {|k,v| v[\"function\"] == \"connector\"}.first\n raise SetupError, \"No Connector data given.\" unless data\n connector = service.new(data)\n unless connector.save\n raise SetupError, \"Setting up the Connector failed: #{connector.errors.full_messages}\"\n end\n connector\n end", "def create\n @connectorize = Connectorize.new(params[:connectorize])\n\n respond_to do |format|\n if @connectorize.save\n format.html { redirect_to @connectorize, notice: 'Connectorize was successfully created.' }\n format.json { render json: @connectorize, status: :created, location: @connectorize }\n else\n format.html { render action: \"new\" }\n format.json { render json: @connectorize.errors, status: :unprocessable_entity }\n end\n end\n end", "def connectors=(value)\n @connectors = value\n end", "def api_connectors=(value)\n @api_connectors = value\n end", "def create\n @connector_type = ConnectorType.new(connector_type_params)\n\n respond_to do |format|\n if @connector_type.save\n format.html { redirect_to @connector_type, notice: 'Connector type was successfully created.' }\n format.json { render :show, status: :created, location: @connector_type }\n else\n format.html { render :new }\n format.json { render json: @connector_type.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @url_connector = UrlConnector.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @url_connector }\n end\n end", "def new\n @connectorize = Connectorize.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @connectorize }\n end\n end", "def index\n @url_connectors = UrlConnector.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @url_connectors }\n end\n end", "def update_connector(connector_id, request)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end", "def exchange_connectors=(value)\n @exchange_connectors = value\n end", "def create\n @lifestyle_cue_inference_clarification_connector = LifestyleCueInferenceClarificationConnector.new(params[:lifestyle_cue_inference_clarification_connector])\n\n respond_to do |format|\n if @lifestyle_cue_inference_clarification_connector.save\n format.html { redirect_to @lifestyle_cue_inference_clarification_connector, notice: 'Lifestyle cue inference clarification connector was successfully created.' 
}\n format.json { render json: @lifestyle_cue_inference_clarification_connector, status: :created, location: @lifestyle_cue_inference_clarification_connector }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lifestyle_cue_inference_clarification_connector.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_connector(connector)\n @connectors[connector.name] = connector\n self\n end", "def add_connector(connector)\n @connectors[connector.name.to_sym] = connector\n self\n end", "def associate_connector_dependencies!(connectors)\n connectors.each do |connector|\n source_names = [*connector.raw_config[\"source_connectors\"]].compact\n next if source_names.blank?\n \n source_connectors = source_names.map do |source_name|\n c = connectors.select{|connector2| connector2.name == source_name}.first\n raise InvalidConfig.new(\"Connector '#{connector.name}' references a source connector '#{source_name}' but no such connector name is defined.\") unless c\n raise InvalidConfig.new(\"Connector '#{connector.name}' cannot have itself as a source connector.\") if c == connector\n c\n end\n \n connector.send :source_connectors=, source_connectors\n end\n end", "def create\n isError=unique_mapping_fields\n binding.pry\n if !isError\n @connector_detail = ConnectorDetail.new(connector_detail_params)\n respond_to do |format|\n if @connector_detail.save\n format.html { redirect_to @connector_detail, notice: 'Connector detail was successfully created.' }\n format.json { render :show, status: :created, location: @connector_detail }\n else\n format.html { render :new }\n format.json { render json: @connector_detail.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def patch_connector(connector_id, request)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .patch()\n .go()\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /connectors/1
PATCH/PUT /connectors/1.json
def update
  respond_to do |format|
    if @connector.update(connector_params)
      format.html { redirect_to @connector, notice: 'Connector was successfully updated.' }
      format.json { render :show, status: :ok, location: @connector }
    else
      format.html { render :edit }
      format.json { render json: @connector.errors, status: :unprocessable_entity }
    end
  end
end
[ "def patch_connector(connector_id, request)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .patch()\n .go()\n end", "def update_connector(connector_id, request)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end", "def update\n @url_connector = UrlConnector.find(params[:id])\n\n respond_to do |format|\n if @url_connector.update_attributes(params[:url_connector])\n format.html { redirect_to @url_connector, notice: 'Url connector was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @url_connector.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @connectorize = Connectorize.find(params[:id])\n\n respond_to do |format|\n if @connectorize.update_attributes(params[:connectorize])\n format.html { redirect_to @connectorize, notice: 'Connectorize was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @connectorize.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @connector_detail.update(connector_detail_params)\n format.html { redirect_to @connector_detail, notice: 'Connector detail was successfully updated.' }\n format.json { render :show, status: :ok, location: @connector_detail }\n else\n format.html { render :edit }\n format.json { render json: @connector_detail.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n update_object(@scheme, schemes_url, scheme_params)\n end", "def update\n respond_to do |format|\n if @connector_type.update(connector_type_params)\n format.html { redirect_to @connector_type, notice: 'Connector type was successfully updated.' }\n format.json { render :show, status: :ok, location: @connector_type }\n else\n format.html { render :edit }\n format.json { render json: @connector_type.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @link_schema = LinkSchema.find(params[:id])\n\n respond_to do |format|\n if @link_schema.update_attributes(params[:link_schema])\n format.html { redirect_to @link_schema, notice: 'Link schema was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @link_schema.errors, status: :unprocessable_entity }\n end\n end\n end", "def api_connectors=(value)\n @api_connectors = value\n end", "def update\n respond_to do |format|\n if @connection.update(connection_params)\n build_schema(@connection)\n format.html { redirect_to @connection, notice: 'Connection was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @connection.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @lifestyle_cue_inference_clarification_connector = LifestyleCueInferenceClarificationConnector.find(params[:id])\n\n respond_to do |format|\n if @lifestyle_cue_inference_clarification_connector.update_attributes(params[:lifestyle_cue_inference_clarification_connector])\n format.html { redirect_to @lifestyle_cue_inference_clarification_connector, notice: 'Lifestyle cue inference clarification connector was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @lifestyle_cue_inference_clarification_connector.errors, status: :unprocessable_entity }\n end\n end\n end", "def put(*a) route 'PUT', *a end", "def patch *args\n make_request :patch, *args\n end", "def put\n request_method('PUT')\n end", "def update\n if actions = params[:http_path_rule].try(:[], :actions)\n actions = JSON.load(actions) if String === actions\n params[:http_path_rule][:actions] = actions\n end\n\n @http_path_rule = collection.find(params[:id])\n\n respond_to do |format|\n if @http_path_rule.update_attributes(params[:http_path_rule])\n @core_application.send_to_redis\n format.html do\n flash[:success] = 'Path rule was successfully updated.'\n redirect_to @core_application\n end\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @http_path_rule.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def update\n json_update(factType,factType_params, FactType)\n end", "def update\n @jetty = Jetty.find(params[:id])\n\n respond_to do |format|\n if @jetty.update_attributes(params[:jetty])\n format.html { redirect_to @jetty, notice: 'Jetty was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @jetty.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @schema = Schema.find(params[:id])\n\n respond_to do |format|\n if @schema.update_attributes(params[:schema])\n format.html { redirect_to @schema, :notice => 'Schema was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @schema.errors, :status => :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }