Columns
  query      string    (7 to 9.5k chars)
  document   string    (10 to 1.07M chars)
  negatives  sequence  (19 items per row)
  metadata   dict
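The column summary above uses the Hugging Face dataset viewer's stringlengths / sequencelengths notation, so the rows below can presumably be loaded with the `datasets` library. A minimal sketch in Python follows; the repository id and split name are placeholders, not taken from this page.

```python
# Minimal sketch, assuming this dump follows the Hugging Face `datasets` layout.
from datasets import load_dataset

# Hypothetical repository id and split; substitute the real ones.
ds = load_dataset("your-org/ruby-code-search-triplets", split="train")

row = ds[0]
print(row["query"])           # docstring/comment used as the search query (7 to 9.5k chars)
print(row["document"])        # matching Ruby method body (10 to 1.07M chars)
print(len(row["negatives"]))  # 19 hard-negative Ruby snippets
print(row["metadata"])        # e.g. {"objective": {"triplet": [["query", "document", "negatives"]]}}
```

Each row below appears in that query / document / negatives / metadata order.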
DELETE /connectors/1 DELETE /connectors/1.json
def destroy @connector.destroy respond_to do |format| format.html { redirect_to connectors_url, notice: 'Connector was successfully destroyed.' } format.json { head :no_content } end end
[ "def destroy\n @url_connector = UrlConnector.find(params[:id])\n @url_connector.destroy\n\n respond_to do |format|\n format.html { redirect_to url_connectors_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @connector = Connector.find(params[:id])\n @connector.destroy\n\n respond_to do |format|\n format.html { redirect_to(connectors_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @connectorize = Connectorize.find(params[:id])\n @connectorize.destroy\n\n respond_to do |format|\n format.html { redirect_to connectorizes_url }\n format.json { head :no_content }\n end\n end", "def delete_connector(connector_id)\n start.uri('/api/connector')\n .url_segment(connector_id)\n .delete()\n .go()\n end", "def destroy\n @socket_connector = SocketConnector.find(params[:id])\n @socket_connector.destroy\n\n respond_to do |format|\n format.html { redirect_to socket_connectors_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @connector_detail.destroy\n respond_to do |format|\n format.html { redirect_to connector_details_url, notice: 'Connector detail was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @connector_type.destroy\n respond_to do |format|\n format.html { redirect_to connector_types_url, notice: 'Connector type was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @connection.destroy\n respond_to do |format|\n format.html { redirect_to connections_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @link_schema = LinkSchema.find(params[:id])\n @link_schema.destroy\n\n respond_to do |format|\n format.html { redirect_to link_schemas_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @connection = Connection.find(params[:id])\n @connection.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @scene_connector.destroy\n respond_to do |format|\n format.html { redirect_to scene_connectors_url, notice: 'Scene connector was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete_endpoint\n end", "def delete endpoint\n do_request :delete, endpoint\n end", "def destroy\n @db_connect.destroy\n respond_to do |format|\n format.html { redirect_to db_connects_url, notice: 'Подключение было уничтожено.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @connection = current_user.connections.find(params[:id])\n @connection.destroy\n\n respond_to do |format|\n format.html { redirect_to(connections_url) }\n format.json { head :ok }\n end\n end", "def destroy\n @connection = Connection.find(params[:id])\n @connection.destroy\n\n respond_to do |format|\n format.html { redirect_to connections_url }\n format.json { head :ok }\n end\n end", "def destroy\n @core_connection.destroy\n respond_to do |format|\n format.html { redirect_to core_connections_url, notice: 'Connection was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @complex_graph = ComplexGraph.find(params[:id])\n @complex_graph.destroy\n\n respond_to do |format|\n format.html { redirect_to url_for(:controller=>:service, :action => :index) }\n format.json { head :no_content }\n end\n end", "def destroy\n @lifestyle_cue_inference_clarification_connector = LifestyleCueInferenceClarificationConnector.find(params[:id])\n @lifestyle_cue_inference_clarification_connector.destroy\n\n respond_to do |format|\n format.html { redirect_to lifestyle_cue_inference_clarification_connectors_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
this could be optimized a tiny bit by only calling superclass.build_kiln but i am le tired
def build_kiln @build_kiln ||= begin retval = Hash.new { |hash,key| hash[key] = {} } klasses = [] klass = self while klass && klass <= UIView klasses.unshift(klass) klass = klass.superclass end klasses.each do |klass| kiln_props = klass.kiln kiln_props && kiln_props.each do |key,values| values.keys.each do |check_unique| retval.each do |section, editors| editors.delete(check_unique) end end retval[key].merge!(values) end end # clean out nil-editors and empty sections retval.each do |section, editors| editors.each do |property, editor| editors.delete(property) unless editor end retval.delete(section) if editors.length == 0 end retval end end
[ "def build_up\n end", "def building_construct_abstract(bld)\n case bld.getType\n when :Stockpile\n max = df.world.buildings.other[:STOCKPILE].map { |s| s.stockpile_number }.max\n bld.stockpile_number = max.to_i + 1\n when :Civzone\n max = df.world.buildings.other[:ANY_ZONE].map { |z| z.zone_num }.max\n bld.zone_num = max.to_i + 1\n end\n building_link bld\n if !bld.flags.exists\n bld.flags.exists = true\n bld.initFarmSeasons\n end\n end", "def kid_generator; end", "def ancestor_builder; end", "def lagrangian_kx_graphkit\nend", "def build_xml(builder)\n super\n builder.Network{|b| network.build_xml(b) } if network\n end", "def class_iseq; end", "def object_nl()\n #This is a stub, used for indexing\n end", "def base_n_k(suffix, eye=@k, is_image=false)\n folder = is_image ? @base+\"/images\" : @base\n extension = is_image ? \"png\" : \"txt\"\n if block_given?\n @k.upto(@k2) do |i|\n filename = sprintf(\"%s/%03d_%03d_%s.%s\", folder, @n, i, suffix, extension)\n yield i, filename\n end\n else\n eye = eye.join(\"_\") if eye.respond_to?('join')\n sprintf(\"%s/%03d_%03d_%s.%s\", folder, @n, eye, suffix, extension)\n end\n end", "def init(model, rec, pckg, baseName); @model = model; @rec = rec; @pckg = pckg; @baseName = baseName; self end", "def klone\n Kopy.new(self.attributes.slice(:title, :body, :tags).merge({ :knote => self }))\n end", "def build_requested\n end", "def build\n private_methods.select { |x| x =~ /^build_/ }.each do |method|\n @info.push self.send(method)\n end\n private_methods.select { |x| x =~ /^warning_/ }.each do |method|\n self.send(method)\n end\n end", "def super_frog_base_cases(k)\n return { 1 => [[1]] } if k == 1\n add_next_base_case(super_frog_base_cases(k-1), k)\n end", "def build_walk_itineraries\n build_fixed_itineraries :walk\n end", "def initialize(eff, k)\n @eff = eff\n @k = k\n end", "def initialize(n: ,k: nil , p: )\r\n @n = n\r\n @k = k\r\n @p = p\r\n end", "def build_ingest_form\n super\n @form.association_class = OregonDigital::Metadata::CloneableAssociation if @cloneable\n end", "def build_key(options = {})\n options[:base] = options[:base]||self.class.to_s.downcase\n if (options[:key_pluralize_instances] == true ) || (options[:key_pluralize_instances] != false && self.class.key_pluralize_instances.present?)\n options[:base] = options[:key_plural]||self.class.key_plural||options[:base].pluralize\n end\n unique = self.send \"#{options[:key_unique]||self.class.key_unique}\".to_sym\n options[:unique] = unique unless unique == object_id\n self.class.build_key(options)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Represents the pick of some user and chosen coordinates String player Player picking a coordinate Integer x X coordinate Integer y Y coordinate Exceptions Throws InvalidPickException if player is not valid or if coordinate is out of boundaries
def pick(player, x, y) fail InvalidPickException.new('Invalid pick') unless valid_pick?(x, y) && valid_player?(player) @game_array[x][y] = PLAYER_MARKS[player.to_sym] @current_player = if @current_player == :player1 :player2 else :player1 end end
[ "def acquire_player_choice\n output = [-1, -1]\n until valid_input?(output)\n puts \"#{@player_ordered_list[0].name} input x coordinate:\"\n output[0] = gets.chomp.to_i\n output[1] = find_y_coordinate(output[0])\n end\n output\n end", "def promptAction()\n hasCoordinates = nil\n pieceToPickup = nil\n while hasCoordinates == nil\n @pieces.each {|pie|\n x = pie.spacePosition[\"x\"]\n y = pie.spacePosition[\"y\"]\n str = x.to_s + y.to_s\n print(str)\n\n print(\"\\n\")\n\n }\n print(\"\\n\" + @name + \" please enter the coordinates of piece you wish to pick up (X then Y no space): \")\n coordinates = gets.chomp\n\n @pieces.each {|pie|\n x = pie.spacePosition[\"x\"]\n y = pie.spacePosition[\"y\"]\n str = x.to_s + y.to_s\n\n if str == coordinates\n hasCoordinates = true\n pieceToPickup = pie\n break\n end\n }\n end\n\n select(pieceToPickup)\n end", "def get_choice( x_coord, y_coord )\n suggested_number = nil\n\n @available_position_choices[x_coord][y_coord].each do |number, is_available|\n next unless is_available\n\n if valid_choice?( x_coord, y_coord, number )\n suggested_number = number\n @available_position_choices[x_coord][y_coord][number] = false\n break\n end\n end\n\n suggested_number.nil? ? false : suggested_number\n end", "def get_coord\n puts \"#{@name}, where would you like to play? Enter your coordinates.\n For example, if you want enter your piece on the first row, second column,\n You should type: 1,2\n and then hit enter\"\n player_coord = gets.strip.split(\",\").map { |coord| coord.to_i-1}\n end", "def get_user_input\r\n puts \"Enter coord of piece to move:\"\r\n start_coord = read_keyboard_input\r\n puts \"Enter destination coord:\"\r\n end_coord = read_keyboard_input\r\n\r\n return start_coord, end_coord\r\n end", "def collect_coords\n if $new_game.lives == 0\n self.game_over\n elsif $new_game.targets == 0\n self.win\n else\n $new_game.print_grid\n self.info\n row = \"z\"\n while $abc.index(row) == nil\n puts \"Enter row coordinate (A - J):\"\n row = gets.chomp.downcase.to_s\n row_num = $abc.index(row)\n end\n col = \"\"\n while (\"0\"..\"9\").to_a.index(col) == nil\n puts \"Enter column coordinate (0 - 9):\"\n col = gets.chomp\n end\n self.check_coords([row_num,col.to_i])\n end\n end", "def get_new_coordinates(choice)\n selected_move = moveset[possible_moves[choice]]\n return x + selected_move[0], y + selected_move[1]\n end", "def o_player_turn\n print \"Player O:\"\n @o_player_choice = gets.chomp\n #Make sure that the player inputs a correct input\n if check_coord_marked?(@o_player_choice)\n print \"Already marked please try again\\n\"\n self.o_player_turn\n elsif @coordinates.any?{|i| i==@o_player_choice}\n self.place_mark(@o_player_choice, \"o\")\n else\n print \"Not a valid Coordinate try again \\n\"\n self.o_player_turn\n end\n end", "def player_select_position(player)\n\t\t\tdisplay_board\n\t\t\tselection = 0\n\t\t\tloop do\n\t\t\t\tputs \"#{player.name} select your position (1-9): \"\n\t\t\t\tselection = gets.to_i\n\t\t\t\tbreak if free_positions.include?(selection)\n\t\t\t\tputs \"Position #{selection} is not available. Please try again.\"\n\t\t\tend\n\n\t\t\t@board[selection] = player.marker\n\t\tend", "def player_choose_piece\n puts \"What piece would you like to move?\"\n print \">> \"\n from_position = convert_location(gets.chomp)\n until @board.valid_player_piece?(@turn.color, from_position)\n puts \"Not a valid piece! 
Try again.\"\n print \">> \"\n from_position = convert_location(gets.chomp)\n end\n from_position\n end", "def get_input\n print 'Please enter a value from 0 to 9: '\n value = valid_value\n puts\n print 'Please enter coordinates in y, x format: '\n coords = valid_coords\n puts\n [value, coords]\n end", "def pick_point_inferenced(view, x, y)\n @ip.pick(view, x, y)\n return unless @ip.valid?\n return @ip.position.transform(@model.edit_transform.inverse)\n end", "def user_shot_selection_text\n \"Enter the coordinate for your shot:\\n> \"\n end", "def test_can_start_game\n battleship = Battleship.new\n battleship.input_player_coordinates(\"A1 A2\", 1)\n assert_equal ({ \"A1\" => \"H\", \"A2\"=>\"H\", \"A3\"=>\"-\", \"A4\"=> \"-\",\n \"B1\" => \"-\", \"B2\"=>\"-\", \"B3\"=>\"-\", \"B4\"=> \"-\",\n \"C1\" => \"-\", \"C2\"=>\"-\", \"C3\"=>\"-\", \"C4\"=> \"-\",\n \"D1\" => \"-\", \"D2\"=>\"-\", \"D3\"=>\"-\", \"D4\"=> \"-\"}),\n battleship.player_grid\n\n battleship.input_player_coordinates(\"D2 D3 D4\", 2)\n assert_equal ({ \"A1\" => \"H\", \"A2\"=>\"H\", \"A3\"=>\"-\", \"A4\"=> \"-\",\n \"B1\" => \"-\", \"B2\"=>\"-\", \"B3\"=>\"-\", \"B4\"=> \"-\",\n \"C1\" => \"-\", \"C2\"=>\"-\", \"C3\"=>\"-\", \"C4\"=> \"-\",\n \"D1\" => \"-\", \"D2\"=>\"H\", \"D3\"=>\"H\", \"D4\"=> \"H\"}),\n battleship.player_grid\n end", "def set_location(user_input, gameboard)\n chk_input = true\n while chk_input\n location = gets.chomp\n if location.include? \",\"\n location = location.split(\",\")\n if location.length == 2\n location_x = location[0].to_i\n location_y = location[1].to_i\n #check that location is in range\n if location_x >= 0 && location_x <= 2 && location_y >= 0 && location_y <= 2\n #check if space is available\n check = chk_board(location_x, location_y, user_input, gameboard)\n #apply changes to game board\n gameboard = check[:gameboard]\n #assign the return value of check space\n chk_input = check[:space]\n else\n #check range, and redo\n puts \"Try again! Enter coordinates\n of desired position:\"\n redo\n end\n else\n #check for input split, and redo\n puts \"Whoops! That's not a coordinate! Enter coordinates of position, (i.e. [1, 2]):\"\n redo\n end\n else\n #check for comma, and redo\n puts \"Do you know what coordinates look like? Try again! (i.e. [1, 2]): \"\n redo\n end\n end\n gameboard\nend", "def valid_coordinate?(x,y)\n true\n end", "def player_select_move_piece(player)\n puts \"#{player.name}, select which piece you would like to move, in the form of a1, b3 etc\"\n response_coords = player_select_coordinates\n move_piece = board.get_board_coord(response_coords[0], response_coords[1])\n if !move_piece.respond_to?(:color) || move_piece.color != player.color\n puts \"#{player.name}, that's not a piece of yours. 
Please select another\"\n response_coords = player_select_move_piece(player)\n end\n response_coords\n end", "def player_shot\n player_shot_message\n input = gets.chomp.upcase\n until @computer_player.board.is_valid_coordinate?(input)\n if @computer_player.board.has_been_fired_upon(input)\n puts \"You have already fired on that coordinate, please try again\"\n print \">\"\n input = gets.chomp.upcase\n else\n puts \"Please enter a valid coordinate:\"\n print \">\"\n input = gets.chomp.upcase\n end\n end\n @computer_player.board.cells[input].fire_upon\n puts \"Your shot on #{input} #{@computer_player.board.cells[input].text_render}\"\n end", "def choose_position(player)\n\t\tcheck_answer do\n\t\t\tputs \"Hey #{player.name}, where you wanna play?\"\n\t\t\tgets.chomp.to_i\n\t\tend\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a hash containing the current state of the game, the current user able to do the next pick. If the last pick resulted in a winner the player is returned.
def current_state result = { current_state: @game_array, current_player: @current_player } mark = winner_mark if mark result.merge!(winner: player_by_mark(mark).name) end result end
[ "def winner\n return @winner\n end", "def winner\n if no_pieces_for_player?(1)\n 2\n elsif no_pieces_for_player?(2)\n 1\n else\n nil\n end\n end", "def find_winner\n\t\tset_win_hash\n\n\t\tif @win_hash[@p1].include?(@p2)\n\t\t\treturn @player2\n\t\telsif @win_hash[@p2].include?(@p1)\n\t\t\treturn @player1\n\t\telse\n\t\t\treturn nil\n\t\tend\n\tend", "def winner\n furthest_player if finished?\n end", "def winning_player\n return :player_1 if who_won == 'x'\n :player_2\n end", "def current_player_to_recieve_damage\n unless winner.blank?\n return nil\n end\n\n last_damage_player_id = self.damages.where.not(player_id: nil).last.try(:player_id)\n if last_damage_player_id.blank?\n player_2\n elsif last_damage_player_id == player_1_id\n player_2\n else\n player_1\n end\n end", "def winner\n if won? #winning_token\n return @winning_token\n end\n return nil\n end", "def choose_winner; end", "def winning_player\n return human if human.score == GAMES_PER_MATCH\n return computer if computer.score == GAMES_PER_MATCH\n nil\n end", "def curr_waiting_user\n self.players.find_by_player_number(self.turn % 2).user\n end", "def winner\n if won?\n win_combination = won?\n token = @board[win_combination[0]]\n return token\n end\n end", "def get_current_winner\r\n if self.rank_one == nil\r\n return nil\r\n else\r\n return rank_one.owner\r\n end\r\n end", "def winner\n @first_hand_rank = PokerEngine.evaluate(@first_hand)\n @second_hand_rank = PokerEngine.evaluate(@second_hand)\n\n return compare_cards if @first_hand_rank == @second_hand_rank\n\n highest_ranking_hand\n end", "def winner\n\t\tbest_for_1 = best_hand(@hand1)\n\t\tbest_for_2 = best_hand(@hand2)\n\t\tcase best_for_1[:rank] <=> best_for_2[:rank]\n\t\t\twhen -1 then 2\n\t\t\twhen 1 then 1\n\t\t\twhen 0 then check_kicker(best_for_1, best_for_2)\n\t\tend\n\tend", "def find_last_player_chosen\n p = Pick.find(:first, :include => [:player, :team], :order => \"pick_number DESC\")\n #return \"#{p.pick_number}. #{p.player.player} (#{p.player.pos} - #{p.team.short_name})\"\n end", "def get_winner\n id_to_wins = calculate_outcomes\n #Finally, return the team with the most wins.\n winning_team_id = nil;\n id_to_wins.each { |id, wins| \n if(winning_team_id == nil) || id_to_wins[winning_team_id] < wins\n winning_team_id = id\n end \n }\n \n return winning_team_id;\n end", "def current_pick(picks)\n picks.each { |pick|\n if !pick.chef_id.nil?\n next\n end\n return pick\n }\n return nil\n end", "def determine_winner\n\t\tyour_strength = @your_hand.determine_strength\n\t\topp_strength = @opp_hand.determine_strength\n\t\tif opp_strength > your_strength\n\t\t\tputs \"You lost the round.\"\n\t\t\t@your_hand.adjust_money(-1)\n\t\t\t@opp_hand.adjust_money(1)\n\t\telsif your_strength > opp_strength\n\t\t\tputs \"You won the round.\"\n\t\t\t@your_hand.adjust_money(1)\n\t\t\t@opp_hand.adjust_money(-1)\n\t\telse\n\t\t\tputs \"The round ends in a tie.\"\n\t\tend\t\n\tend", "def winner\n\t\tbest_for_1 = best_hand(@hand1)\n\t\tbest_for_2 = best_hand(@hand2)\n#\t\tputs \"Best hand for #{@hand1.inspect}:\"\n#\t\tpp best_for_1\n#\t\tputs \"Best hand for #{@hand2.inspect}:\"\n#\t\tpp best_for_2\n#\t\tputs \"\\n\\n\\n\\n\"\n\t\tcase best_for_1[:rank] <=> best_for_2[:rank]\n\t\t\twhen -1 then 2\n\t\t\twhen 1 then 1\n\t\t\twhen 0 then check_kicker(best_for_1, best_for_2)\n\t\tend\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets player based on his mark
def player_by_mark(mark) player = if PLAYER_MARKS.key(mark) == :player1 @player1 else @player2 end player end
[ "def get_player(name)\n @players[name]\n end", "def get_mark(player)\n puts \"#{player}, where would you place your mark ?\".center(50, ' ').center(59, '*')\n gets.chomp\n\n end", "def find_player_by_student_id(student_id)\n self.players.each do |player|\n if player.user\n return player if player.user.id == student_id\n end\n end\n nil\n end", "def player(player_name)\n players.find{|player| player.name == player_name}\n end", "def find_player(player)\n players.find { |p| p == player }\n end", "def player(user)\n players.select { |player| player.user == user }.first\n end", "def player_from_strategy(strategy)\n stratp = strategy_profiles.detect{|x| x[:strategy][:name] == strategy[:name]}\n stratp[:player] \n end", "def player(user)\n players.detect { |p| p.user_id == user.id } if user != nil\n end", "def current_player\n @players.each do |name, letter|\n if letter == whose_turn\n return name\n end\n end\n end", "def player(id)\n @players[id]\n end", "def first_player\n singles_player_of_team first_team\n end", "def find_player_at_position(x, y)\n\t\t\t\t# Note: we shift by 11 and not 12 because the X value is multiplied by 2\n\t\t\t\t@player_lookup_by_position[(x << 11) | y]\n\t\t\tend", "def player_lookup(player, stat = nil)\n player_loc = roster.find_index {|n| n[:player_name] == player}\n all_stat = roster[player_loc]\n all_stat.delete(:player_name)\n !stat ? all_stat : all_stat[stat]\nend", "def current_player\n current_user.player\n end", "def get_player(team, position)\n if team.eql?(:red)\n if position == 1\n Player.find(r1_id)\n elsif position == 2\n Player.find(r2_id)\n end\n elsif team.eql?(:blue)\n if position == 1\n Player.find(b1_id)\n elsif position == 2\n Player.find(b2_id)\n end\n end\n end", "def find_player_by_name(name)\n\t\t\t\t@player_lookup_by_shortname[name.to_shortname]\n\t\t\tend", "def get_player_from_symbol(symbol)\n @players.select{|p| p.symbol == symbol}.first\n end", "def player_info(player)\n @players[player]\n end", "def second_player\n singles_player_of_team second_team\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run will validate all inputs, returning on input failures, resolving declared dependencies, then delegating to the handler's call method with its valid inputs and resolved dependencies. Finally it ensures every response is a Response object.
def run(inputs = {}, container = Dry::Container.new) response = resolve_inputs(inputs) return response if response.failure? valid_inputs = response.ok resolve_dependencies(container) handler = self.new(container) result = handler.call(valid_inputs) result = create_response(result) unless response?(result) result end
[ "def run\n @response ||= build_response catch_halt{ @handler.run }\n end", "def run\n handlers.each do |info, handler|\n matches = matches_options?(info[:type], info[:regexp])\n break if matches && handler.call(options)\n end\n end", "def try(handlers:, tape:, **args)\n\n handlers.each do |handler|\n warn \"-- #{self.name}: Trying #{handler}\" if DEBUG_TRACING\n\n result = handler.process(tape: tape, **args)\n\n return result if result.succeeded\n end\n\n fail(\"No handler in #{handlers} succeeded in parsing\", tape: tape)\n end", "def process(payload, request_type)\n rules = RULES[request_type]\n result = rules.map { |rule| send(rule, payload) }.flatten.compact\n\n # Return a Failure result if any of the validation rules fail\n if result.any?(Failure)\n errors = result.select { |r| r.is_a?(Failure) }.map(&:failure)\n Failure(errors)\n else\n Success()\n end\n end", "def validate_schemas!\n validate_request_schema!\n yield\n validate_response_schema! if Respect::Rails::Engine.validate_response\n end", "def validate_handlers\n handlers.each { |handler| validate(:handler, handler, children_for(handler)) }\n end", "def execute\n set_environment_vars\n\n @response = opts.dup\n hooks = locate_hooks\n @response[:matching_hooks] = matching_hooks\n\n if hooks.empty?\n @response[:error] =\n \"Could not find any hook to process this request. Please implement one of the 'matching_hooks'.\"\n\n false\n else\n execute_all hooks\n @response[:executed_hooks] = hooks\n\n true\n end\n end", "def call(path)\n log(path)\n\n catch(:respond) {\n body = Controller.handle(path)\n Response.current.build(body)\n }\n\n FILTER.each{|f| f.call(response)}\n response\n rescue Ramaze::Error => ex\n ex\n end", "def execute(merge_requests: [])\n validate_merge_requests(merge_requests)\n\n [@valid, @invalid]\n end", "def dispatch(method, retype, args)\n result = self.send(\"handle_#{method}\".to_sym, args)\n result_key = Chassis.exit_after_current_dispatch ? 
:last_result : :result\n case retype\n when :json\n [result_key, [:raw, result.to_json]]\n when :pure\n [result_key, result]\n else\n raise \"Unknown response type: #{retype}\"\n end\n rescue Exception => e\n if e.instance_of?(SystemExit)\n exit\n elsif Chassis.exception_handler\n begin\n Chassis.exception_handler.call(e)\n rescue Exception => e2\n [:error, e2.message + \"\\n\\n\" + e2.backtrace.join(\"\\n\")]\n end\n else\n [:error, e.message + \"\\n\\n\" + e.backtrace.join(\"\\n\")]\n end\n end", "def run\n runner = self\n @test_cases.each do |path|\n next if ENV['TEST_CASE'] && !File.basename(path).match(ENV['TEST_CASE'])\n\n Aws::ModelValidators.load_json(path).tap do |test_case|\n\n models = test_case.inject({}) { |h,(k,v)| h[k.to_sym] = v; h }\n errors = models.delete(:errors)\n\n @group.it(File.basename(path[0..-6])) do\n pending unless errors\n results = described_class.new.validate(models, apply_schema: false)\n unless runner.results_match?(results, errors)\n expect(results).to eq(errors)\n end\n end\n\n end\n end\n end", "def run\n @run_mutex.synchronize do\n fail 'cannot run without registering services' if rpc_descs.size.zero?\n @server.start\n transition_running_state(:running)\n @run_cond.broadcast\n end\n loop_handle_server_calls\n end", "def validate_handlers\n handlers.each do |handler|\n validate(:handler, handler.namespace, handler, children_for(handler))\n end\n end", "def concert!\n execute_steps\n @steps.map do |step|\n step.errors\n end.flatten.each do |error|\n raise error\n end\n end", "def run(messages=nil, &exception_handler)\n loop do\n handle_request(exception_handler)\n\n if messages\n messages -= 1\n break if messages <= 0\n end\n end\n end", "def run_error_checks\r\n @error_checkers.each { |e| e.call(self) }\r\n end", "def execute_callbacks\n callbacks = Typhoeus.on_complete + Typhoeus.on_progress + on_complete + on_progress\n\n if response && response.success?\n callbacks += Typhoeus.on_success + on_success\n elsif response\n callbacks += Typhoeus.on_failure + on_failure\n end\n\n callbacks.each do |callback|\n self.response.handled_response = callback.call(self.response)\n end\n end", "def call!\n # Get all adapters, inclusive of the ones listed above, and iterate\n # over them.\n configured_adapters.each do |adapter|\n # Instantiate a new instance of the adapter given the current data.\n a = adapter.new(@data, options)\n\n # If this particular adapter has a +finalize+ method (see below),\n # keep a record of its existence so we can call it later.\n @_finalize_adapters << adapter if a.respond_to?(:finalize)\n\n # Call the +call+ method in order to manipulate the response.\n @data = a.call\n end\n\n # Return the final response.\n @data\n end", "def dispatch!\n full_response! if !response_required? || Responder.dispatch_for(self)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Getting the description of the character from the awoiaf Wiki page.
def char_description(character) html = open("https://awoiaf.westeros.org/index.php/#{character}") doc = Nokogiri::HTML(html) if doc.css("div.hatnote:first-child").empty? #|| doc.css(".mw-parser-output .hatnote:nth-child(2)").empty? description = doc.css(".mw-parser-output > p:nth-child(2)").text.gsub!(/[^A-Za-z ,.]/,'') else description = doc.css(".mw-parser-output > p:nth-child(3)").text.gsub!(/[^A-Za-z ,.]/,'') end if character == "Walder_Frey" description = doc.css(".mw-parser-output > p:nth-child(3)").text.gsub!(/[^A-Za-z ,.]/,'') end if character == "Viserys_Targaryen" description = doc.css(".mw-parser-output > p:nth-child(3)").text.gsub!(/[^A-Za-z ,.]/,'') end if character == "Tywin_Lannister" description = doc.css(".mw-parser-output > p:nth-child(2)").text.gsub!(/[^A-Za-z ,.]/,'') end description end
[ "def description\n fetch('phobia.descriptions')\n end", "def description\n page.render_part('description') rescue ''\n end", "def description\n\t # if the description exists\n\t # return it \n\t # else \n\t # scrape to get the description\n\t # return it\n\t # end\n\tend", "def char_info(character, char_with_underscore)\n\tapi_id = Character.characters[character.split.first]\n\n\turi = URI.parse(\"https://anapioficeandfire.com/api/characters/#{api_id}\")\n\tresponse = Net::HTTP.get_response(uri)\n\tcharacter_hash = JSON.parse(response.body)\n\n\tputs \"Name: #{character_hash[\"name\"]}\"\n\tputs \"Title: #{character_hash[\"titles\"][0]}\"\n\thouse_page_url = character_hash[\"allegiances\"][0]\n\n\turi = URI.parse(house_page_url)\n\tresponse = Net::HTTP.get_response(uri)\n\thouse_response = JSON.parse(response.body)\n\n\tputs \"House: #{house_response[\"name\"]}\"\n\tputs \"Coat of Arms: #{house_response[\"coatOfArms\"]}\"\n\n\thtml = open(\"https://gameofthrones.fandom.com/wiki/#{char_with_underscore}\")\n\tdoc = Nokogiri::HTML(html)\n\tputs \"Season(s): #{doc.css(\"div[data-source='Season'] a\").text.split(//).join(\", \")}\"\n\tputs \"Description: #{char_description(char_with_underscore)}\"\nend", "def description\n @description = PageDescription[self.description_name.to_sym] if self.description_name\n @description\n end", "def description\n noko.css('#description')[0].content\n end", "def description\n @ole.Description\n end", "def description\n @description = @description || BeerMe::Scraper.scrape_description(self.url)\n end", "def description\n site = ASF::Site.find(name)\n site[:text] if site\n end", "def description\n return text_get(7, @id || 0) # GameData::Skill.descr(@id)\n end", "def description\n parse_yardoc\n @description\n end", "def description\r\n return @wf_desc \r\n end", "def character\n fetch('lebowski.characters')\n end", "def description\n @description ||= (\n ld = LessonDescription.find_by_schoolday_and_lesson(@schoolday, @lesson)\n ld && ld.description\n )\n end", "def description\n documentation_lines[1]\n end", "def description\n self.span(:id=>\"contentmetadata_description_display\").text\n end", "def description\n @description ||= begin\n readme = File.read( path( 'README.txt' ) )\n md = readme.match( /== DESCRIPTION:(.+?)\\n== /m ) or\n fail( \"can't find a description section in README.txt\" )\n md[1].strip\n end\n end", "def get_description(n)\n description = Nokogiri::HTML(super(n)).text\n if description.include?(\"IF YOU GO\")\n description = description.split(\"IF YOU GO\")[0]\n description = description.split(\" \")[3..-1].join(\" \") # remove \"by 'author name'\"\n description.slice!(\"[ Subscribe to the comments on this story ] \")\n description\n else\n nil\n end\n end", "def description()\n return Vocab::battle_formations_strings[@v_index][0]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checking to see if the character is still alive in the series by scraping the gameofthrones fandom site.
def char_alive?(user, result, character) html = open("https://gameofthrones.fandom.com/wiki/#{character}") doc = Nokogiri::HTML(html) words = doc.css(".pi-item .pi-font a").text.split /(?=[A-Z])/ if words.include?("Alive") puts "You are still alive!" else death = doc.css("div[data-source='Death'] .pi-font").text.split /(?=[A-Z])/ puts "You died in #{death.join(' ').gsub(/ +/, ' ')}" end end_menu(user, result) end
[ "def checks_passed?\n if blacklisted_wifi?\n puts \"Blacklisted Network; Won't go online\"\n return false\n end\n\n # we don't want to spam himawari's site more than once every 10 minutes while running on a schedule\n return false if by_schedule && now.min % 10 != 1\n\n if `find #{data_path} -name \\\"t_*.png\\\"`.length.positive?\n puts 'Another himawari process is still downloading/processing files.\\n' \\\n '(There are tiles (t_*.png) in the `data` folder.) Quitting w/o overlapping.'\n return false\n end\n\n unless internet_connection?\n puts \"Not online? Can't reach #{HIMAWARI_URL}\"\n return false\n end\n true\n end", "def alive?\n begin\n self.get\n return true\n rescue Wref::Recycled\n return false\n end\n end", "def have_double_wild_battle?\r\n return false if $PokemonTemp.forceSingleBattle\r\n return false if pbInSafari?\r\n return true if $PokemonGlobal.partner\r\n return false if $Trainer.able_pokemon_count <= 1\r\n return true if $game_player.pbTerrainTag.double_wild_encounters && rand(100) < 30\r\n return false\r\n end", "def isAlive()\n if @hp > 0\n return true\n else\n return false\n end\n end", "def is_alive?\n @alive = false if Dates.is_greatter(@last_wash, @limit_wash)\n @alive = false if Dates.is_greatter(@last_food, @limit_food)\n return @alive\n end", "def alive?() end", "def alive?\n return true if ws && ts\n return false\n end", "def available?\n return false if $scene.is_a?(Scene_Battle)\n return true if @fish_battle\n # Check roaming pokemon\n @roaming_pokemons.each do |roaming_info|\n if roaming_info.appearing?\n PFM::Wild_RoamingInfo.unlock # Allow Roaming pokemon update at the end of the battle\n roaming_info.spotted = true\n init_battle(roaming_info.pokemon)\n return true\n end\n end\n # Check remaining Pokemon\n @forced_wild_battle = false\n var = @remaining_pokemons[$env.get_zone_type]\n return false unless var\n return false unless $actors[0]\n if var[$game_player.terrain_tag].class == Wild_Info\n var = var[$game_player.terrain_tag]\n level = nil\n if $pokemon_party.repel_count > 0\n levels = var.levels.map { |i| i.is_a?(Integer) ? i : i[:level] }\n return false unless levels.any? { |i| i >= $actors[0].level }\n end\n if WEAK_POKEMON_ABILITY.include?($actors[0].ability_db_symbol)\n var.levels.each do |i|\n level = (i.is_a?(Integer) ? i : i[:level])\n return true if (level + 5) >= $actors[0].level\n end\n return rand(100) < 50\n end\n return true\n end\n return false\n end", "def online?\n Browser.get(url).code != 0\n end", "def retreated_from_battle?\n mobs.each do |mob|\n return false if not mob.retreated_from_battle? and not mob.dead?\n end\n return true\n end", "def check_game_over\n\t\t@game_over = true if @limit && Time.now >= @limit\n\t\t@game_over = true if @players_list.any? { |player| player.letters.empty? }\n\t\tif @players == 2 && @pass == 6\n\t\t\t@game_over = true\n\t\telsif @players == 3 && @pass == 9\n\t\t\t@game_over = true\n\t\telsif @players == 4 && @pass == 12\n\t\t\t@game_over = true\n\t\tend\n\tend", "def test_non_online_access\n visit '/catalog/bib_305929'\n sleep(6)\n within('div.links') do\n assert page.has_content?(\"Not Available\")\n end\n end", "def is_it_snowing(response)\n geocoded = geo_lookup response.user, response.match_data[1]\n forecast = get_forecast_io_results response.user, geocoded\n\n response.reply get_eightball_response get_chance_of('snow', forecast['currently'])\n end", "def check_ghost\n return if !@current_player.eliminated? 
\n puts \"\\n||#{@current_player.name} has become a GHOST and has been eliminated!||\\n\"\n @player_arr.delete(@current_player)\n #decrement the number of active players\n @players -= 1\n if @player_arr.size > 1\n print \"\\nRemaining players \"\n print_player_status\n return false\n end\n true\n end", "def fangraphs_odds\n\t\tmatchupgame=Game.new\n\t\tgamedate=matchupgame.parsegamestring(self.gid)[:year].to_s+\"-\"+matchupgame.parsegamestring(self.gid)[:month].to_s+\"-\"+matchupgame.parsegamestring(self.gid)[:day].to_s\n\t\turl=\"https://www.fangraphs.com/livescoreboard.aspx?date=\"+gamedate\n\t\tdoc=Nokogiri::HTML(open(url))\n\t\t\n\t\tawayt=matchupgame.parsegamestring(self.gid)[:awayt]\n\t\thomet=Matchup.fangraphTeamList[matchupgame.parsegamestring(self.gid)[:homet]]\n\t\tputs awayt.to_s\n\tend", "def appearing?\n return false if @pokemon.hp <= 0\n if @map_id == $game_map.map_id &&\n @zone_type == $env.get_zone_type(true) &&\n @tag == $game_player.terrain_tag\n return rand(@chance) == 0\n end\n return false\n end", "def alive_robots\n @robots.select {|r| not r.dead? }\n end", "def verify_page_contents\n page = nil\n\n begin\n page = Nokogiri::HTML(open(\"http://#{@endpoint_url}\"))\n rescue\n puts 'Could not open page.'\n return false\n end\n\n begin\n page_text = page.css('body h1')[0].text\n if page_text == 'Automation for the People'\n return true\n else\n return false\n end\n rescue\n puts 'Required element not found.'\n return false\n end\n end", "def isLiveNikto(packet)\n\treturn (packet =~ (/\\x4E\\x69\\x6b\\x74\\x6F/))\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /dependences GET /dependences.json
def index @dependences = Dependence.all end
[ "def index\n @service_of_dependences = ServiceOfDependence.all\n end", "def index\n @dependables = Dependable.all\n end", "def index\n @dependents = Dependent.all\n end", "def index\n @dependants = Dependant.all\n end", "def index\n @depends = Depend.all\n end", "def index\n @dependencies = Locomotive::Dependency.all\n display @dependencies\n end", "def show\n @dependency = Dependency.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dependency }\n end\n end", "def dependents(options: {})\n\n Collection.new(parse(client.get(\"/tasks/#{gid}/dependents\", options: options)), type: self.class, client: client)\n end", "def find_dependents(file_number)\n response = request(:find_dependents, \"fileNumber\": file_number)\n response.body[:find_dependents_response][:return]\n end", "def index\n @predepends = Predepend.all\n end", "def index\n find_dependencias\n respond_to do |format|\n format.html\n format.json { render :json => @dependencias.to_json(:methods => :alias_or_fullname, :only => [:id, :codigo, :nombre])}\n\n end\n end", "def depends\n return @depends if @depends\n\n deps = survey.dependencies.includes({:dependency_conditions => {:question => :answers}})\n\n resps = self.responses.includes(:answer)\n\n # gather if the dependencies are met in a hash\n @depends = deps.all.reduce({}) do |mem, v|\n mem[v.id] = v.is_met? self, resps\n mem\n end\n end", "def show\n\tadd_breadcrumb \"Datos de la dependencia\", :dependencia_path\n @dependencia = Dependencia.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @dependencia }\n end\n end", "def go_list_deps\n args = [\"-deps\"]\n args << \"-mod=vendor\" if config.dig(\"go\", \"mod\") == \"vendor\"\n\n # the CLI command returns packages in a pretty-printed JSON format but\n # not separated by commas. this gsub adds commas after all non-indented\n # \"}\" that close root level objects.\n # (?!\\z) uses negative lookahead to not match the final \"}\"\n deps = package_info_command(*args).gsub(/^}(?!\\z)$/m, \"},\")\n JSON.parse(\"[#{deps}]\")\n end", "def dependencies\n []\n end", "def dependents(resource)\n tree_from_vertex(resource).keys\n end", "def dependencies\n @dependencies.values\n end", "def get_actual_cargo_dependencia\n @entidades = Entidad.where(:cargo_id => params[:cargo_id],:dependencia_id => params[:dependencia_id],:es_actual => true)\n render :json => @entidades.to_json({:methods => :cargoname, :only => [:id ]})\n end", "def dependencies_for(specification)\n []\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /dependences/1 DELETE /dependences/1.json
def destroy @dependence.destroy respond_to do |format| format.html { redirect_to dependences_url, notice: 'Dependence was successfully destroyed.' } format.json { head :no_content } end end
[ "def destroy\n @depend.destroy\n respond_to do |format|\n format.html { redirect_to depends_url, notice: 'Depend was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @dependable.destroy\n respond_to do |format|\n format.html { redirect_to dependables_url, notice: 'Dependable was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @dependency = Dependency.find(params[:id])\n @dependency.destroy\n\n respond_to do |format|\n format.html { redirect_to dependencies_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @service_dependance = ServiceDependance.find(params[:id])\n @service_dependance.destroy\n\n respond_to do |format|\n format.html { redirect_to service_dependances_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @predepend.destroy\n respond_to do |format|\n format.html { redirect_to predepends_url, notice: 'Predepend was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @independant = Independant.find(params[:id])\n @independant.destroy\n\n respond_to do |format|\n format.html { redirect_to independants_url }\n format.json { head :ok }\n end\n end", "def destroy\n @dependency.destroy\n respond_to do |format|\n format.html { redirect_to dependencies_url, notice: 'Dependencia fue eliminada satisfactoriamente.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @dependency.destroy\n respond_to do |format|\n format.html { redirect_to dependencies_url, notice: 'Dependency was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @dependencia = Dependencia.find(params[:id])\n begin\n @dependencia.destroy\n rescue ActiveRecord::DeleteRestrictionError => e\n flash[:error]=\"No se pudo eliminar porque otros dependen de el\\n(#{e})\"\n end\n respond_to do |format|\n format.html { redirect_to dependencias_url }\n format.json { head :no_content }\n end\n end", "def destroy\n authorize! :destroy, @profile\n @depend = @profile.depends.find(params[:id])\n @depend.destroy\n respond_to do |format|\n format.html { redirect_to edit_profile_path(@profile), notice: 'Dependency was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @taskdepend.destroy\n respond_to do |format|\n format.html { redirect_to taskdepends_url, notice: 'Taskdepend was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @dependencia = Dependencia.find(params[:id])\n @dependencia.destroy\n\n respond_to do |format|\n format.html { redirect_to(dependencias_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @service_of_dependence.destroy\n respond_to do |format|\n format.html { redirect_to service_of_dependences_url, notice: 'Service of dependence was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @depot = Depot.find(params[:id])\n @depot.destroy\n\n respond_to do |format|\n format.html { redirect_to depots_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @javascript_dependency = JavascriptDependency.find(params[:id])\n @javascript_dependency.destroy\n\n respond_to do |format|\n format.html { redirect_to javascript_dependencies_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @st_has_dep.destroy\n respond_to do |format|\n format.html { redirect_to st_has_deps_url, notice: 'St has dep was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def destroy\n @dependant_task.destroy\n respond_to do |format|\n format.html { redirect_to dependant_tasks_url, notice: 'Dependant task was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete(path, params)\n headers = {:Authorization => \"token #{token}\", :content_type => :json, :accept => :json}\n res = RestClient.delete(\"#{github_api_uri}/#{path}\", params.to_json, headers)\n Yajl.load(res)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check active state condition id : skill ID
def skill_state_on(id) if Skill_State_On[id]['include'] != nil for i in Skill_State[id]['include'] return true if @state.include?(i) end end if Skill_State[id]['set'] != nil for i in Skill_State[id]['set'] return false unless @state.include?(i) end return true end return false end
[ "def guard_skill_id\r\n return 2\r\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def won?\n @state.id == 14\n end", "def change_itskill_status_to_active\n @itskill = Itskill.find(params[:id])\n if @itskill\n @itskill.update_attributes(:status => IT_SKILL_STATUS_ACTIVE)\n @log_msg = \"IT skill \"+ (params[:id].present? ? @itskill.name : \"\") +\" status is updated to 'active' from 'unconfirmed'\" #log message\n \n end\n @itskills = Itskill.all\n @unconfirmed_itskills = Itskill.where(:status => IT_SKILL_STATUS_UNCONFIRMED)\n render :partial => 'itskills', :locals => {:itskills => @itskills , :unconfirmed_itskills => @unconfirmed_itskills, :itskills_flag => nil }\n \n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def skill_sw_on(id)\n if Skill_Sw_On[id]['include'] != nil\n for i in Skill_Sw_On[id]['include']\n return true if $game_switches[i]\n end\n end\n if Skill_Sw_On[id]['set'] != nil\n for i in Skill_Sw_On[id]['set']\n return false unless $game_switches[i]\n end\n return true\n end\n return false \n end", "def attack_skill_id\r\n return 1\r\n end", "def skill?\n return true\n end", "def skill_state_off(id)\n if Skill_State_On[id]['include'] != nil\n for i in Skill_State[id]['include']\n return false if @state.include?(i)\n end\n end\n if Skill_State[id]['set'] != nil\n for i in Skill_State[id]['set']\n return true unless @state.include?(i)\n end\n return false\n end\n return true\n end", "def has_state? id\n @ids[id] == Types::STATE\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end", "def skill_learn?(skill)\n return @skills.include?(skill.id)\n end", "def skill_aid_for skill\n \n end", "def active(c, ci, cv)\n state(VAR, c, ci, cv) and\n not terminal(c, ci, cv) and\n not state(PENDING, c, ci, cv) and\n not satisfied(c, ci, cv)\nend", "def active?\n state == \"ACTIVE\"\n end", "def skill_use?\n return @current_action.kind == 1\n end", "def skill(skill_id)\n skills.select { |skill| skill.real_id == skill_id.to_i}.first\n end", "def skill?; current_action._?(:item)._?(:is_a?, ::RPG::Skill); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check inactive state condition id : skill ID
def skill_state_off(id) if Skill_State_On[id]['include'] != nil for i in Skill_State[id]['include'] return false if @state.include?(i) end end if Skill_State[id]['set'] != nil for i in Skill_State[id]['set'] return true unless @state.include?(i) end return false end return true end
[ "def skill_state_on(id)\n if Skill_State_On[id]['include'] != nil\n for i in Skill_State[id]['include']\n return true if @state.include?(i)\n end\n end\n if Skill_State[id]['set'] != nil\n for i in Skill_State[id]['set']\n return false unless @state.include?(i)\n end\n return true\n end\n return false\n end", "def change_itskill_status_to_active\n @itskill = Itskill.find(params[:id])\n if @itskill\n @itskill.update_attributes(:status => IT_SKILL_STATUS_ACTIVE)\n @log_msg = \"IT skill \"+ (params[:id].present? ? @itskill.name : \"\") +\" status is updated to 'active' from 'unconfirmed'\" #log message\n \n end\n @itskills = Itskill.all\n @unconfirmed_itskills = Itskill.where(:status => IT_SKILL_STATUS_UNCONFIRMED)\n render :partial => 'itskills', :locals => {:itskills => @itskills , :unconfirmed_itskills => @unconfirmed_itskills, :itskills_flag => nil }\n \n end", "def guard_skill_id\r\n return 2\r\n end", "def inactive?\n state == \"INACTIVE\"\n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def inactive?\n status == 'inactive'\n end", "def inactive?\n\t\t\treturn account_life_cycle_status == INACTIVE \n\t\tend", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def inactive_user(user_id)\n\tUserMission.find_by(user_id: user_id) ? false : true\nend", "def skill_sw_off(id)\n if Skill_Sw_Off[id]['include'] != nil\n for i in Skill_Sw_Off[id]['include']\n return true if $game_switches[i] == false\n end\n end\n if Skill_Sw_Off[id]['set'] != nil\n for i in Skill_Sw_Off[id]['set']\n return false unless $game_switches[i] == false\n end\n return true\n end\n return false \n end", "def won?\n @state.id == 14\n end", "def attack_skill_id\r\n return 1\r\n end", "def change_itskill_status_to_deleted\n @active_itskill = Itskill.find_by_name(params[:itskill_selected])\n if Itskill.find_by_name(params[:itskill_selected])\n @unconfirmed_itskill = Itskill.find(params[:itskill_selected_id])\n \n if @unconfirmed_itskill\n @unconfirmed_itskill.update_attributes(:status => \"deleted\")\n @log_msg = \"It skill \"+ (params[:itskill_selected_id].present? ? @unconfirmed_itskill.name : \"\") +\" status is updated to 'deleted' from 'unconfirmed'\" #log message \n end\n \n @unconfirmed_itskill_id = @unconfirmed_itskill.id\n @user=UserItskill.where(:itskill_id => @unconfirmed_itskill_id).first\n \n if @user\n @user.update_attributes(:itskill_id => @active_itskill.id)\n @log_msg = \"It skill \"+ (params[:itskill_selected_id].present? ? @unconfirmed_itskill.name : \"\")+ \" of user is replace with IT skill \"+ (params[:itskill_selected].present? ? 
@active_itskill.name : \"\") +\" whose status is active\" #log message \n end\n \n flag = false\n else\n flag = true\n @log_msg = \"IT Skill is not present in database\" #log message \n \n end\n\n @itskills = Itskill.all\n @unconfirmed_itskills = Itskill.where(:status => IT_SKILL_STATUS_UNCONFIRMED)\n render :partial => 'itskills', :locals => {:itskills => @itskills , :unconfirmed_itskills => @unconfirmed_itskills, :itskills_flag => flag }\n \n end", "def inactive?\n !active?\n end", "def skill_sw_on(id)\n if Skill_Sw_On[id]['include'] != nil\n for i in Skill_Sw_On[id]['include']\n return true if $game_switches[i]\n end\n end\n if Skill_Sw_On[id]['set'] != nil\n for i in Skill_Sw_On[id]['set']\n return false unless $game_switches[i]\n end\n return true\n end\n return false \n end", "def inactive?\n !active?\n end", "def skill_can_use?(skill_id)\n if not skill_learn?(skill_id)\n return false\n end\n return super\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end", "def make_inactive\n self.status = \"I\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check active switch condition id : skill ID
def skill_sw_on(id) if Skill_Sw_On[id]['include'] != nil for i in Skill_Sw_On[id]['include'] return true if $game_switches[i] end end if Skill_Sw_On[id]['set'] != nil for i in Skill_Sw_On[id]['set'] return false unless $game_switches[i] end return true end return false end
[ "def guard_skill_id\r\n return 2\r\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def attack_skill_id\r\n return 1\r\n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def skill(skill_id)\n skills.select { |skill| skill.real_id == skill_id.to_i}.first\n end", "def skill?\n return true\n end", "def skill_learn?(skill)\n return @skills.include?(skill.id)\n end", "def skill_aid_for skill\n \n end", "def skill?\n return (@kind == ACTSkill)\n end", "def skill_sp(id)\n if Skill_Sp[id]['integer'] != nil\n return true if eval(\"self.sp #{Skill_Sp[id]['integer']}\")\n end\n if Skill_Sp[id]['rate'] != nil\n return true if eval(\"(self.sp * 100 / [self.maxsp, 1].max) #{Skill_Sp[id]['rate']}\")\n end\n return false \n end", "def skill_use?\n return @current_action.kind == 1\n end", "def skill_sw_off(id)\n if Skill_Sw_Off[id]['include'] != nil\n for i in Skill_Sw_Off[id]['include']\n return true if $game_switches[i] == false\n end\n end\n if Skill_Sw_Off[id]['set'] != nil\n for i in Skill_Sw_Off[id]['set']\n return false unless $game_switches[i] == false\n end\n return true\n end\n return false \n end", "def check_skill_condition?\n # disallow usage if skill button disabled\n return false if !$game_system.skill_button\n # disallow usage\n skill_condition = false\n # if using direct hotkeys\n if BlizzABS::Config::DIRECT_HOTKEYS\n # check direct hotkeys\n skill_condition = self.skill_hotkeys?\n # if skill button pressed\n elsif Input.trigger?(Input::Skill)\n # allow usage\n skill_condition = true\n end\n # return result\n return skill_condition\n end", "def skill?; current_action._?(:item)._?(:is_a?, ::RPG::Skill); end", "def skill_effect_scope(skill)\r\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\r\n # or skill scope is for ally with 0, and your own HP = 1 or more\r\n return (((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\r\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1))\r\n end", "def skill_state_on(id)\n if Skill_State_On[id]['include'] != nil\n for i in Skill_State[id]['include']\n return true if @state.include?(i)\n end\n end\n if Skill_State[id]['set'] != nil\n for i in Skill_State[id]['set']\n return false unless @state.include?(i)\n end\n return true\n end\n return false\n end", "def isSkillOverLimit()\n skill.i\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check inactive switch condition id : skill ID
def skill_sw_off(id) if Skill_Sw_Off[id]['include'] != nil for i in Skill_Sw_Off[id]['include'] return true if $game_switches[i] == false end end if Skill_Sw_Off[id]['set'] != nil for i in Skill_Sw_Off[id]['set'] return false unless $game_switches[i] == false end return true end return false end
[ "def skill_sw_on(id)\n if Skill_Sw_On[id]['include'] != nil\n for i in Skill_Sw_On[id]['include']\n return true if $game_switches[i]\n end\n end\n if Skill_Sw_On[id]['set'] != nil\n for i in Skill_Sw_On[id]['set']\n return false unless $game_switches[i]\n end\n return true\n end\n return false \n end", "def guard_skill_id\r\n return 2\r\n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def change_itskill_status_to_active\n @itskill = Itskill.find(params[:id])\n if @itskill\n @itskill.update_attributes(:status => IT_SKILL_STATUS_ACTIVE)\n @log_msg = \"IT skill \"+ (params[:id].present? ? @itskill.name : \"\") +\" status is updated to 'active' from 'unconfirmed'\" #log message\n \n end\n @itskills = Itskill.all\n @unconfirmed_itskills = Itskill.where(:status => IT_SKILL_STATUS_UNCONFIRMED)\n render :partial => 'itskills', :locals => {:itskills => @itskills , :unconfirmed_itskills => @unconfirmed_itskills, :itskills_flag => nil }\n \n end", "def attack_skill_id\r\n return 1\r\n end", "def skill_on_cooldown?(skill)\n if $game_party.in_battle\n return true if current_cooldown(skill) > 0\n end\n return false\n end", "def skill?\n return true\n end", "def isSkillOverLimit()\n skill.i\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def can_execute_bh_skill_name?\r\r\n return false if @subject.current_action == nil\r\r\n return false if @subject.current_action.item == nil\r\r\n item_id = @subject.current_action.item.id rescue nil\r\r\n return false if item_id == nil\r\r\n if @subject.current_action.item.is_a?(RPG::Skill)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_SKILL_NAME.include?(item_id)\r\r\n return false if item_id == @subject.attack_skill_id\r\r\n return false if item_id == @subject.guard_skill_id\r\r\n elsif @subject.current_action.item.is_a?(RPG::Item)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_ITEM_NAME.include?(item_id)\r\r\n end\r\r\n return true\r\r\n end", "def skill_can_use?(skill_id)\n if not skill_learn?(skill_id)\n return false\n end\n return super\n end", "def skill_state_off(id)\n if Skill_State_On[id]['include'] != nil\n for i in Skill_State[id]['include']\n return false if @state.include?(i)\n end\n end\n if Skill_State[id]['set'] != nil\n for i in Skill_State[id]['set']\n return true unless @state.include?(i)\n end\n return false\n end\n return true\n end", "def check_skill_condition?\n # disallow usage if skill button disabled\n return false if !$game_system.skill_button\n # disallow usage\n skill_condition = false\n # if using direct hotkeys\n if BlizzABS::Config::DIRECT_HOTKEYS\n # check direct hotkeys\n skill_condition = self.skill_hotkeys?\n # if skill button pressed\n elsif Input.trigger?(Input::Skill)\n # allow usage\n skill_condition = true\n end\n # return result\n return skill_condition\n end", "def skill_busy?\n @battler && (BusyPhases - [:collapse]).any? 
do |phase|\n phase == @battler.battle_phase\n end && !@battler.finish || (@battler && @battler.moving?)\n end", "def skill_state_on(id)\n if Skill_State_On[id]['include'] != nil\n for i in Skill_State[id]['include']\n return true if @state.include?(i)\n end\n end\n if Skill_State[id]['set'] != nil\n for i in Skill_State[id]['set']\n return false unless @state.include?(i)\n end\n return true\n end\n return false\n end", "def skill_effect_scope(skill)\r\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\r\n # or skill scope is for ally with 0, and your own HP = 1 or more\r\n return (((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\r\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1))\r\n end", "def change_itskill_status_to_deleted\n @active_itskill = Itskill.find_by_name(params[:itskill_selected])\n if Itskill.find_by_name(params[:itskill_selected])\n @unconfirmed_itskill = Itskill.find(params[:itskill_selected_id])\n \n if @unconfirmed_itskill\n @unconfirmed_itskill.update_attributes(:status => \"deleted\")\n @log_msg = \"It skill \"+ (params[:itskill_selected_id].present? ? @unconfirmed_itskill.name : \"\") +\" status is updated to 'deleted' from 'unconfirmed'\" #log message \n end\n \n @unconfirmed_itskill_id = @unconfirmed_itskill.id\n @user=UserItskill.where(:itskill_id => @unconfirmed_itskill_id).first\n \n if @user\n @user.update_attributes(:itskill_id => @active_itskill.id)\n @log_msg = \"It skill \"+ (params[:itskill_selected_id].present? ? @unconfirmed_itskill.name : \"\")+ \" of user is replace with IT skill \"+ (params[:itskill_selected].present? ? @active_itskill.name : \"\") +\" whose status is active\" #log message \n end\n \n flag = false\n else\n flag = true\n @log_msg = \"IT Skill is not present in database\" #log message \n \n end\n\n @itskills = Itskill.all\n @unconfirmed_itskills = Itskill.where(:status => IT_SKILL_STATUS_UNCONFIRMED)\n render :partial => 'itskills', :locals => {:itskills => @itskills , :unconfirmed_itskills => @unconfirmed_itskills, :itskills_flag => flag }\n \n end", "def skill_learn?(skill)\n return @skills.include?(skill.id)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check variable condition id : skill ID
def skill_var(id)
  for var in Skill_Var[id]
    return true if eval("$game_variables[#{var[0]}] #{var[1]}")
  end
  return false
end
[ "def guard_skill_id\r\n return 2\r\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def attack_skill_id\r\n return 1\r\n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end", "def skill(skill_id)\n skills.select { |skill| skill.real_id == skill_id.to_i}.first\n end", "def skill_learn?(skill)\n return @skills.include?(skill.id)\n end", "def skill_sp(id)\n if Skill_Sp[id]['integer'] != nil\n return true if eval(\"self.sp #{Skill_Sp[id]['integer']}\")\n end\n if Skill_Sp[id]['rate'] != nil\n return true if eval(\"(self.sp * 100 / [self.maxsp, 1].max) #{Skill_Sp[id]['rate']}\")\n end\n return false \n end", "def skill_level_must_exist\n begin\n level = SkillLevel.find(skill_level_id) \n rescue ActiveRecord::RecordNotFound\n errors.add(:skill_level_id, 'Skill Level provided does not exist!')\n end\n end", "def isSkillOverLimit()\n skill.i\n end", "def skill_sw_on(id)\n if Skill_Sw_On[id]['include'] != nil\n for i in Skill_Sw_On[id]['include']\n return true if $game_switches[i]\n end\n end\n if Skill_Sw_On[id]['set'] != nil\n for i in Skill_Sw_On[id]['set']\n return false unless $game_switches[i]\n end\n return true\n end\n return false \n end", "def variable_exists?(id) #method\n @variables.key?(id)\n end", "def skill?\n return true\n end", "def skill_effect_scope(skill)\r\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\r\n # or skill scope is for ally with 0, and your own HP = 1 or more\r\n return (((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\r\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1))\r\n end", "def skill_learn_persist?(class_id, skill)\n skill.is_a?(RPG::Skill) && !$game_persistent_skills[class_id].nil? && $game_persistent_skills[class_id].include?(skill.id)\n end", "def skill_level(skill_id)\n @skill_levels[skill_id] || 0\n end", "def can_execute_bh_skill_name?\r\r\n return false if @subject.current_action == nil\r\r\n return false if @subject.current_action.item == nil\r\r\n item_id = @subject.current_action.item.id rescue nil\r\r\n return false if item_id == nil\r\r\n if @subject.current_action.item.is_a?(RPG::Skill)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_SKILL_NAME.include?(item_id)\r\r\n return false if item_id == @subject.attack_skill_id\r\r\n return false if item_id == @subject.guard_skill_id\r\r\n elsif @subject.current_action.item.is_a?(RPG::Item)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_ITEM_NAME.include?(item_id)\r\r\n end\r\r\n return true\r\r\n end", "def skill?; current_action._?(:item)._?(:is_a?, ::RPG::Skill); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check HP condition id : skill ID
def skill_hp(id)
  if Skill_Hp[id]['integer'] != nil
    return true if eval("self.hp #{Skill_Hp[id]['integer']}")
  end
  if Skill_Hp[id]['rate'] != nil
    return true if eval("(self.hp * 100 / self.maxhp) #{Skill_Hp[id]['rate']}")
  end
  return false
end
[ "def guard_skill_id\r\n return 2\r\n end", "def attack_skill_id\r\n return 1\r\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def skill_effect_scope(skill)\r\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\r\n # or skill scope is for ally with 0, and your own HP = 1 or more\r\n return (((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\r\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1))\r\n end", "def skill(skill_id)\n skills.select { |skill| skill.real_id == skill_id.to_i}.first\n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def skill_var(id)\n for var in Skill_Var[id]\n return true if eval(\"$game_variables[#{var[0]}] #{var[1]}\")\n end\n return false \n end", "def skill_level(skill_id)\n @skill_levels[skill_id] || 0\n end", "def isSkillOverLimit()\n skill.i\n end", "def skill_effect(user, skill)\n # Clear critical flag\n self.critical = false\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\n # or skill scope is for ally with 0, and your own HP = 1 or more\n if ((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1)\n # End Method\n return false\n end\n # Clear effective flag\n effective = false\n # Set effective flag if common ID is effective\n effective |= skill.common_event_id > 0\n # First hit detection\n hit = skill.hit\n if skill.atk_f > 0\n hit *= user.hit / 100\n end\n hit_result = (rand(100) < hit)\n # Set effective flag if skill is uncertain\n effective |= hit < 100\n # If hit occurs\n if hit_result == true\n # Calculate power\n power = skill.power + user.atk * skill.atk_f / 100\n if power > 0\n power -= self.pdef * skill.pdef_f / 200\n power -= self.mdef * skill.mdef_f / 200\n power = [power, 0].max\n end\n # Calculate rate\n rate = 20\n rate += (user.str * skill.str_f / 100)\n rate += (user.dex * skill.dex_f / 100)\n rate += (user.agi * skill.agi_f / 100)\n rate += (user.int * skill.int_f / 100)\n # Calculate basic damage\n self.damage = power * rate / 20\n # Element correction\n self.damage *= elements_correct(skill.element_set)\n self.damage /= 100\n # If damage value is strictly positive\n if self.damage > 0\n # Guard correction\n if self.guarding?\n self.damage /= 2\n end\n end\n # Dispersion\n if skill.variance > 0 and self.damage.abs > 0\n amp = [self.damage.abs * skill.variance / 100, 1].max\n self.damage += rand(amp+1) + rand(amp+1) - amp\n end\n # Second hit detection\n eva = 8 * self.agi / user.dex + self.eva\n hit = self.damage < 0 ? 100 : 100 - eva * skill.eva_f / 100\n hit = self.cant_evade? ? 
100 : hit\n hit_result = (rand(100) < hit)\n # Set effective flag if skill is uncertain\n effective |= hit < 100\n end\n # If hit occurs\n if hit_result == true\n # If physical attack has power other than 0\n if skill.power != 0 and skill.atk_f > 0\n # State Removed by Shock\n remove_states_shock\n # Set to effective flag\n effective = true\n end\n # Substract damage from HP\n last_hp = self.hp\n self.hp -= self.damage\n effective |= self.hp != last_hp\n # State change\n @state_changed = false\n effective |= states_plus(skill.plus_state_set)\n effective |= states_minus(skill.minus_state_set)\n # If power is 0\n if skill.power == 0\n # Set damage to an empty string\n self.damage = \"\"\n # If state is unchanged\n unless @state_changed\n # Set damage to \"Miss\"\n self.damage = \"Miss\"\n end\n end\n # If miss occurs\n else\n # Set damage to \"Miss\"\n self.damage = \"Miss\"\n end\n # If not in battle\n unless $game_temp.in_battle\n # Set damage to nil\n self.damage = nil\n end\n # End Method\n return effective\n end", "def skill_effect(user, skill)\n # Clear critical flag\n self.critical = false\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\n # or skill scope is for ally with 0, and your own HP = 1 or more\n if ((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1)\n # End Method\n return false\n end\n # Clear effective flag\n effective = false\n # Set effective flag if common ID is effective\n effective |= skill.common_event_id > 0\n # First hit detection\n hit = skill.hit\n if skill.atk_f > 0\n hit *= user.hit / 100\n end\n hit_result = (rand(100) < hit)\n # Set effective flag if skill is uncertain\n effective |= hit < 100\n # Si Golpeas\n if hit_result == true\n if Wep::Atribute_mod_skills[skill.id] != nil\n # Extract and calculate effect\n # Calculate power\n ef = Wep::Atribute_mod_skills[skill.id][0] + user.atk * skill.atk_f / 100\n ef -= self.pdef * skill.pdef_f / 200\n ef -= self.mdef * skill.mdef_f / 200\n # Calculate rate\n ra = 20\n ra += (user.str * skill.str_f / 100)\n ra += (user.dex * skill.dex_f / 100)\n ra += (user.agi * skill.agi_f / 100)\n ra += (user.int * skill.int_f / 100)\n # Calculate total effect\n total_ef = ef * ra / 20\n # Apply dispersion\n if skill.variance > 0\n amp = [total_ef * skill.variance / 100, 1].max\n total_ef += rand(amp+1) + rand(amp+1) - amp\n end\n \n # Apply if exist\n case Wep::Atribute_mod_skills[skill.id][1]\n \n when 'maxhp':\n self.atr_mod_list.maxhp += total_ef\n when 'maxsp':\n self.atr_mod_list.maxsp += total_ef\n \n when 'str':\n self.atr_mod_list.str += total_ef\n when 'dex':\n self.atr_mod_list.dex += total_ef\n when 'int':\n self.atr_mod_list.int += total_ef\n when 'agi':\n self.atr_mod_list.agi += total_ef\n \n when 'atk':\n self.atr_mod_list.atk += total_ef\n when 'pdef':\n self.atr_mod_list.pdef += total_ef\n when 'mdef':\n self.atr_mod_list.mdef += total_ef\n when 'eva':\n self.atr_mod_list.eva += total_ef\n end\n end\n \n # Calculate power\n power = skill.power + user.atk * skill.atk_f / 100\n if power > 0\n power -= self.pdef * skill.pdef_f / 200\n power -= self.mdef * skill.mdef_f / 200\n power = [power, 0].max\n end\n # Calculate rate\n rate = 20\n rate += (user.str * skill.str_f / 100)\n rate += (user.dex * skill.dex_f / 100)\n rate += (user.agi * skill.agi_f / 100)\n rate += (user.int * skill.int_f / 100)\n # Calculate basic damage\n self.damage = power * rate / 20\n # Element correction\n self.damage *= 
elements_correct(skill.element_set)\n self.damage /= 100\n # If damage value is strictly positive\n if self.damage > 0\n # Guard correction\n if self.guarding?\n self.damage /= 2\n end\n end\n # Dispersion\n if skill.variance > 0 and self.damage.abs > 0\n amp = [self.damage.abs * skill.variance / 100, 1].max\n self.damage += rand(amp+1) + rand(amp+1) - amp\n end\n # Second hit detection\n eva = 8 * self.agi / user.dex + self.eva\n hit = self.damage < 0 ? 100 : 100 - eva * skill.eva_f / 100\n hit = self.cant_evade? ? 100 : hit\n hit_result = (rand(100) < hit)\n # Set effective flag if skill is uncertain\n effective |= hit < 100\n end\n # If hit occurs\n if hit_result == true\n # If physical attack has power other than 0\n if skill.power != 0 and skill.atk_f > 0\n # State Removed by Shock\n remove_states_shock\n # Set to effective flag\n effective = true\n end\n # Substract damage from HP\n last_hp = self.hp\n self.hp -= self.damage\n effective |= self.hp != last_hp\n # State change\n @state_changed = false\n if Wep::Skill_state_rates[skill.id] != nil\n state_add = []\n state_remove = []\n # Loop over state rates and check the posibiltys. Create a state list.\n for state_rate in Wep::Skill_state_rates[skill.id]\n if rand(100) < state_rate[1]\n state_add.push(state_rate[0])\n for s in state_rate[2]\n state_remove.push(s)\n end\n end\n end\n states_plus(state_add)\n states_minus(state_remove)\n #effective |= states_plus(state_add)\n #effective |= states_minus(state_remove)\n else\n states_plus(skill.plus_state_set)\n states_minus(skill.minus_state_set)\n #effective |= states_plus(skill.plus_state_set)\n #effective |= states_minus(skill.minus_state_set)\n end\n # If power is 0\n if skill.power == 0\n # No damage\n self.damage = \"\"\n # If state does not change\n unless @state_changed\n # Miss\n self.damage = \"Miss\"\n end\n end\n else\n # Miss\n self.damage = \"Miss\"\n end\n unless $game_temp.in_battle\n self.damage = nil\n end\n return effective\n end", "def skill_level_must_exist\n begin\n level = SkillLevel.find(skill_level_id) \n rescue ActiveRecord::RecordNotFound\n errors.add(:skill_level_id, 'Skill Level provided does not exist!')\n end\n end", "def skill_sp(id)\n if Skill_Sp[id]['integer'] != nil\n return true if eval(\"self.sp #{Skill_Sp[id]['integer']}\")\n end\n if Skill_Sp[id]['rate'] != nil\n return true if eval(\"(self.sp * 100 / [self.maxsp, 1].max) #{Skill_Sp[id]['rate']}\")\n end\n return false \n end", "def check_special_skills(ch, targets, skill)\n # if Tons of Add-ons is being used\n if $tons_version != nil && $tons_version >= 6.4\n # if using absorbing skills\n if $game_system.ABSORB_HP_SP\n # set damage accumulation to 0\n damages = 0\n # if skill absorbs HP\n if SKILL_IDS_HP.include?(skill.id)\n # for each target\n targets.each {|target|\n # if damage was done\n if target.battler.damage.is_a?(Numeric)\n # accumulate damage\n damages += target.battler.damage\n end}\n # change battler HP\n ch.battler.hp += damages\n # request damage sprite\n $BlizzABS.util.request_damage_sprite(ch)\n # if skill absorbs SP\n elsif SKILL_IDS_SP.include?(skill.id)\n # for each target\n targets.each {|target|\n # if damage was done\n if target.battler.damage.is_a?(Numeric)\n # accumulate damage\n damages += target.battler.spdamage\n # remove damage\n target.battler.damage = nil\n # make SP damage text\n target.check_spdamage\n end}\n # change battler SP\n ch.battler.sp += damages\n # request damage sprite\n $BlizzABS.util.request_damage_sprite(ch)\n end\n end\n # if using Destructor Skill 
and battler should die\n if $game_system.DESTRUCTOR_SKILL && ch.battler.set_to_die\n # kill\n ch.battler.hp = 0\n end\n # if using Blus Magic Skills\n if $game_system.BLUE_MAGIC_SKILL && BLUE_MAGIC_IDS.include?(skill.id)\n # remove damage for all targets\n targets.each {|target| target.battler.damage = nil}\n # get a random target\n target = targets[rand(targets.size)]\n # try to learn\n if rand(100) < skill.hit\n # if enemy\n if target.battler.is_a?(Game_Enemy)\n # initialize array\n ids = []\n # get all skill IDs of the target\n target.battler.actions.each {|act|\n ids.push(act.skill_id) if act.kind == 1}\n # if actor\n elsif target.battler.is_a?(Game_Actor)\n # get all skill IDs of the target\n ids = target.battler.skills.clone\n end\n # if any ID exists\n if ids.size > 0\n # get skill\n newskill = $data_skills[ids[rand(ids.size)]]\n # if already knowing that skill\n if ch.battler.skills.include?(newskill.id)\n # make damage text\n target.battler.damage = \"#{newskill.name} known\"\n else\n # learn skill\n target.battler.learn_skill(newskill.id)\n # make damage text\n target.battler.damage = \"#{newskill.name} learned\"\n end\n else\n # no skills available\n target.battler.damage = 'None available'\n end\n else\n # not successful\n target.battler.damage = 'Miss'\n end\n end\n end\n end", "def can_execute_bh_skill_name?\r\r\n return false if @subject.current_action == nil\r\r\n return false if @subject.current_action.item == nil\r\r\n item_id = @subject.current_action.item.id rescue nil\r\r\n return false if item_id == nil\r\r\n if @subject.current_action.item.is_a?(RPG::Skill)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_SKILL_NAME.include?(item_id)\r\r\n return false if item_id == @subject.attack_skill_id\r\r\n return false if item_id == @subject.guard_skill_id\r\r\n elsif @subject.current_action.item.is_a?(RPG::Item)\r\r\n return false if MOG_ATB_SKILL_NAME::DISABLE_ITEM_NAME.include?(item_id)\r\r\n end\r\r\n return true\r\r\n end", "def special_skill(launcher, target, skill)\n case skill.symbol\n when :s_counter #Riposte & co\n if skill.id == 64 and (count = target.skill_category_amount(1)) > 0\n @IA_Info[:other_factor] = rand * 0.6 + count * 0.1\n elsif skill.id == 243 and (count = target.skill_category_amount(2)) > 0\n @IA_Info[:other_factor] = rand * 0.6 + count * 0.1\n else\n @IA_Info[:other_factor] = rand * 0.7\n end\n else\n return false\n end\n return true\n end", "def skill_effect(user, skill,w=0)\n # Clear critical flag\n self.critical = false\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\n # or skill scope is for ally with 0, and your own HP = 1 or more\n if ((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1)\n # End Method\n return false\n end\n # Clear effective flag\n effective = false\n # Set effective flag if common ID is effective\n effective |= skill.common_event_id > 0\n # First hit detection\n hit = skill.hit\n if skill.atk_f > 0\n hit = (user.hit*hit)/100\n end\n hit_result = (rand(60) < (hit*3 -self.eva/2))\n # Set effective flag if skill is uncertain\n effective |= hit < 100\n # If hit occurs\n if hit_result == true\n # Calculate power\n #STEP 1.Calculate power\n power = (skill.power + user.atk(w) * skill.atk_f / 100) #*user.level\n power = (power*(32+user.level) / 64.0).floor if skill.atk_f > 0\n power *= (1+(user.level/33.0)).floor if skill.atk_f > 0 \n \n # Calculate rate\n rate = 20\n rate += (user.str * skill.str_f / 100)\n rate += (user.dex * skill.dex_f / 
100)\n rate += (user.agi * skill.agi_f / 100)\n # Calculate basic damage\n skill_power = power * rate / 20\n magic_power = (user.int * skill.int_f / 100)\n total_damage= 0\n if user.is_a?(Game_Enemy)\n total_damage = skill_power*4 + (user.level*(magic_power*3/2)*skill_power/32)\n else\n #STEP \"0\".Weapon level\n weapon_type = $data_weapons[attacker.weapon_id(-1)[w]].element_set\n t_level = user.w_level[weapon_type[0]]\n t_level *= (2+t_level)/3\n \n total_damage = skill_power*4 + (t_level*(magic_power)*skill_power/32)\n #STEP 2. Earring / Hero Ring\n extra = 0\n extra += total_damage/4 if user.armor4_id(0) != nil && ($data_armors[user.armor4_id(0)].guard_element_set.include?(RPG::MAGICAL_BOOST))\n extra += total_damage/4 if user.armor4_id(1) != nil && ($data_armors[user.armor4_id(1)].guard_element_set.include?(RPG::MAGICAL_BOOST))\n total_damage += extra\n end\n \n total_damage *= 1.5 if skill.atk_f == 0\n total_damage *= 1 + (100-user.level)/100.0 if skill.atk_f > 0\n \n # Element correction\n #if power > 0\n # power -= self.pdef * skill.pdef_f / 200\n # power -= self.mdef * skill.mdef_f / 200\n # power = [power, 0].max\n #end\n #STEP 3. Multiple targets\n self.damage = total_damage\n self.damage /= 2 if [2,4,6].include?(skill.scope)\n \n self.damage *= elements_correct(skill.element_set)\n self.damage /= 100\n # If damage value is strictly positive\n if self.damage > 0\n # Guard correction\n if self.guarding?\n self.damage /= 2\n end\n #STEP 4. Attacker's row\n # Positions corrections\n if self.is_a? Game_Actor\n self.damage *= Wep::Positions_types[self.position][:defender_skill_damage_mod]\n end\n \n # Positions corrections\n if user.is_a? Game_Actor\n self.damage *= Wep::Positions_types[self.position][:attacker_skill_damage_mod]\n end\n \n #STEP 5. Damage Mult#1\n mult=0\n mult+=2 if user.states.include?(RPG::SPEC_STATES[4])\n #mult+=1 if user.states.include?(20)\n n = rand(32)\n mult+=2 if n==1\n self.damage *= (2+mult)/2\n \n self.damage /=2 if (self.is_a?(Game_Actor) and user.is_a?(Game_Actor))\n end\n # Dispersion\n if skill.variance > 0 and self.damage.abs > 0\n var= (255 * (skill.variance/100.0)).floor\n n = rand(var) + (256-var)\n self.damage = (self.damage*n / 256) + 1\n self.damage = (self.damage*(255-self.mdef)/256)+1\n #15: Shell\n self.damage = (self.damage*170/256)+1 if self.states.include?(RPG::SPEC_STATES[1])\n self.damage /= 2 if self.states.include?(RPG::SPEC_STATES[4])\n end\n #Step 7.\n \n #self.damage *= 3/2 if self.states.include?(xx) #Attacked from behind <-it needs pincers and those stuff to work!!\n #STEP 8. Petrify damage\n self.damage = 0 if self.states.include?(RPG::SPEC_STATES[5]) #Petrify status\n \n # Second hit detection\n \n #eva = 8 * self.agi / user.dex + self.eva\n #hit = user.hit*1.5 - eva * skill.eva_f / 100\n #hit = self.cant_evade? ? 100 : hit\n #hit_result = (rand(100) < hit)\n eva_val = skill.atk_f > 0 ? 
self.eva : self.m_eva\n blockValue = [[(255 - eva_val*2)+1,1].max,255].min\n r = rand(99)\n hit_result = (user.hit * blockValue / 256 ) > r\n \n hit_result = true if self.restriction == 4\n hit_result = true if user.hit==255\n \n hit_result = true if self.states.include?(RPG::SPEC_STATES[6])\n \n hit_result = true if skill.element_set.include?(RPG::SKILL_TAGS[1])\n hit_result = false if skill.element_set.include?(RPG::SKILL_TAGS[0]) and self.state_ranks[1]==6\n \n # Set effective flag if skill is uncertain\n effective |= hit < 100\n end\n # If hit occurs\n if hit_result == true\n # If physical attack has power other than 0\n if skill.power != 0 and skill.atk_f > 0\n # State Removed by Shock\n remove_states_shock\n # Set to effective flag\n effective = true\n end\n # Substract damage from HP\n last_hp = self.hp\n self.damage = self.damage.floor\n self.damage = [self.damage, 9999].min\n self.hp -= self.damage\n effective |= self.hp != last_hp\n # State change\n @state_changed = false\n effective |= states_plus(skill.plus_state_set)\n effective |= states_minus(skill.minus_state_set)\n # If power is 0\n if skill.power == 0\n # Set damage to an empty string\n self.damage = \"\"\n # If state is unchanged\n unless @state_changed\n # Set damage to \"Miss\"\n self.damage = \"Miss\"\n end\n end\n # If miss occurs\n else\n # Set damage to \"Miss\"\n self.damage = \"Miss\"\n end\n # If not in battle\n unless $game_temp.in_battle\n # Set damage to nil\n self.damage = nil\n end\n # End Method\n return effective\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check SP condition id : skill ID
def skill_sp(id)
  if Skill_Sp[id]['integer'] != nil
    return true if eval("self.sp #{Skill_Sp[id]['integer']}")
  end
  if Skill_Sp[id]['rate'] != nil
    return true if eval("(self.sp * 100 / [self.maxsp, 1].max) #{Skill_Sp[id]['rate']}")
  end
  return false
end
[ "def guard_skill_id\r\n return 2\r\n end", "def skill; $data_skills[(skill? ? current_action.item : last_skill).id]; end", "def attack_skill_id\r\n return 1\r\n end", "def skill(skill_id)\n skills.select { |skill| skill.real_id == skill_id.to_i}.first\n end", "def skill_learn?(skill_id)\n return @skills.include?(skill_id)\n end", "def pokemon_skill_index(id)\n has_skill?(id, true)\n end", "def skill_learn?(skill)\n return @skills.include?(skill.id)\n end", "def skill_level_must_exist\n begin\n level = SkillLevel.find(skill_level_id) \n rescue ActiveRecord::RecordNotFound\n errors.add(:skill_level_id, 'Skill Level provided does not exist!')\n end\n end", "def skill?\n return true\n end", "def skill_sw_on(id)\n if Skill_Sw_On[id]['include'] != nil\n for i in Skill_Sw_On[id]['include']\n return true if $game_switches[i]\n end\n end\n if Skill_Sw_On[id]['set'] != nil\n for i in Skill_Sw_On[id]['set']\n return false unless $game_switches[i]\n end\n return true\n end\n return false \n end", "def skill_var(id)\n for var in Skill_Var[id]\n return true if eval(\"$game_variables[#{var[0]}] #{var[1]}\")\n end\n return false \n end", "def skill_can_use?(id)\n return @battler.skill_can_use?(id)\n end", "def isSkillOverLimit()\n skill.i\n end", "def skill?; current_action._?(:item)._?(:is_a?, ::RPG::Skill); end", "def check_condition(condition, item)\n i = item.send(condition.attribute)\n @l.info \"#{i} to be checked if #{condition.query} #{condition.attribute}\"\n test_condition(condition, item, i)\n end", "def skill_effect_scope(skill)\r\n # If skill scope is for ally with 1 or more HP, and your own HP = 0,\r\n # or skill scope is for ally with 0, and your own HP = 1 or more\r\n return (((skill.scope == 3 or skill.scope == 4) and self.hp == 0) or\r\n ((skill.scope == 5 or skill.scope == 6) and self.hp >= 1))\r\n end", "def skill_aid_for skill\n \n end", "def skill_level(skill_id)\n @skill_levels[skill_id] || 0\n end", "def id_condition(condition_hash)\r\n value = r_string_to_c_string(condition_hash[:id].to_s)\r\n System::Windows::Automation::PropertyCondition.new(System::Windows::Automation::AutomationElement.automation_id_property, value)\r\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function that takes the name of a file and loads in the stop words from the file. You could return a list from this function, but a hash might be easier and more efficient. (Why? Hint: think about how you'll use the stop words.)
def load_stopwords_file(file)
  stop_words = {}
  # Looping through the file and adding each word to a hash table after chomping them
  File.open(file, "r").each_line do |line|
    stop_words[line.chomp] = 1
  end
  return stop_words
end
[ "def load_stopwords_file(file)\r\n stop_words = Hash.new(0)\r\n file = File.open(file, \"r\")\r\n file.readlines.each do |word|\r\n stop_words[word.chomp] = 1\r\n end\r\n\r\n file.close\r\n return stop_words\r\nend", "def load_stopwords_file(file) \r\n\r\n\r\n stop_words = Hash.new(0)\r\n\r\n\r\n file = File.open(file, \"r\")\r\n\r\n\r\n file.readlines.each do |word|\r\n\r\n\r\n stop_words[word.chomp] = 1\r\n\r\n\r\n end\r\n\r\n\r\n\r\n\r\n\r\n file.close\r\n\r\n\r\n return stop_words\r\n\r\n\r\nend", "def stopwords_list locale=\"en\", filename=\"\"\n\t\tload_file['stop_list'].fetch(locale.to_s).split(\" \")\n\tend", "def set_stop_words(file_name=nil)\r\n unless file_name\r\n @stop_words ||= DEFAULT_STOP_WORDS\r\n else\r\n @stop_words = IO.read(file_name).collect { |l| l.chomp! }.delete_if{ |w| w.length <= 2 || w =~ /[^[:alpha:]]/}\r\n end\r\n end", "def set_stop_words(file_name=nil)\n unless file_name\n @stop_words ||= DEFAULT_STOP_WORDS\n else\n @stop_words = IO.read(file_name).collect { |l| l.chomp! }.delete_if{ |w| w.nil? }.delete_if{ |w| w.length <= 2 || w =~ /[^[:alpha:]]/}\n end\n end", "def get_stopword_list\n list = []\n \n begin\n File.open(\"stopwords.txt\", \"r\") do |file|\n file.each_line { |line| list.push( line.chomp ) }\n end\n rescue\n puts \"The file 'stopwords.txt' was not found.\"\n exit\n end\n\n return list\nend", "def stopwords\n @stopwords ||= IO.readlines(@stopwords_file).map { |l| l.strip }\n end", "def cargar_stopwords\n \n ruta=DIR_RECURSOS+\"stopwords.txt\"\n if !File.zero?(ruta) then # si el archivo no es vacío\n stopwords = IO.readlines(ruta)\n stopwords.each {|elemento|elemento.strip!} #con la función strip se quitan los saltos de línea y espacios en blanco\n end \n return stopwords \n \n end", "def load_stop_words( source )\r\n File.open( source ) { |f| add_stop_words( YAML.load( f ) ) }\r\n end", "def custom_stopwords(stopwords)\n unless stopwords.is_a?(Enumerable)\n if stopwords.strip.empty?\n stopwords = []\n elsif File.exist?(stopwords)\n stopwords = File.read(stopwords).force_encoding(\"utf-8\").split\n else\n return # Do not overwrite the default\n end\n end\n Hasher::STOPWORDS[@language] = Set.new stopwords\n end", "def common_words #the, of, or, and etc\n File.read(\"stop_words.txt\").split(' ')\nend", "def load_words()\n fileName = \"Words.txt\"\n subject = 'DEFAULT'\n File.open(fileName,\"r\") do |f|\n f.each_line do |line|\n if line[0,2] == \"//\"\n subject = line.gsub(\"\\n\",'').split('//')[-1].upcase\n else\n if !line.to_s.empty?\n @@words[line.gsub(\"\\n\",'')] = subject\n end\n end\n end\n end\n end", "def load_words(file_name)\n words_loaded = []\n File.open(file_name).readlines.each do |line|\n words_loaded << line if line.length.between?(5, 12)\n end\n words_loaded\n end", "def get_words(file)\n\twords=[]\n\tunless file.nil?\n\t\tFile.readlines(file).each do |line|\n\t\t\tline=line.chomp\n\t\t\t#Replaces non-ASCII characters with an ASCII approximation\n\t\t\twords << ActiveSupport::Inflector.transliterate(line) if line.length == $length\n\t\tend\n\tend\n\treturn words.uniq!||[]\nend", "def load_words\n File.readlines(\"#{WORD_DIR}/#{language}.txt\").map(&:strip)\n end", "def prepare_words(filename)\n @words = []\n File.readlines(filename).each do |line|\n line.split.each {|word| @words << word}\n end\n end", "def get_words_from(file)\n if File.exists?(file)\n words = []\n File.open(file).each { |word| words << word.chomp }\n return words\n end\n end", "def load_dict\n words = []\n File.open(\"dict.txt\").each do |line| # Hard code for 
now\n if line.length.between?(5, 12)\n words << line.rstrip.downcase\n end\n end\n words\n end", "def create_dict(file)\n # Since wordlist is constant\n if File.file?(file)\n IO.readlines(file, chomp: true)\n else\n puts 'File not found!'\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function that takes the name of a directory, and returns a list of all the filenames in that directory.
def list_files(dir)
  # Getting all the file names in the directory
  file_names = Dir[dir + "*"]
  return file_names
end
[ "def get_filenames(dir)\n Dir.entries(dir).select {|f| !File.directory? f}\n end", "def list_files_in_directory dir\n files = Dir.glob File.join(dir, \"*\")\n\n normalized_file_list files, false, @options.exclude\n end", "def get_full_filenames(directory=PROJECTS_PATH)\n files = Dir.chdir(directory) do\n Dir['**/*.rb'].collect { |item| item = File.expand_path(item, directory) }\n end\n end", "def files\n entries = []\n Dir.entries(@dir_path).each do |e|\n entries.push e if File.file? \"#{@dir_path}/#{e}\"\n end\n entries\n end", "def recursive_file_list( root_dir)\n\t\treturn nil unless File.directory?(root_dir)\n\t\tlist = []\n\t\tDir.entries( root_dir).reject{|e| e=~/^\\./}.each { |e| \n\t\t\tpath = File.join( root_dir, e)\n\t\t\tif File.directory?( path)\n\t\t\t\t# puts \"Dir: #{path}\"\n\t\t\t\t list += recursive_file_list(path)\n\t\t\telsif File.file?(path)\n\t\t\t\t# puts \"File: #{path}\"\n\t\t\t\t list << path\n\t\t\tend\t\n\t\t}\n\t\tlist\n\tend", "def get_files directory\n files = Dir.glob( directory + \"/\" + File.join(\"**\",\"*.rb\")).sort()\n return files\n end", "def dir_entries(dir)\n return [] unless dir and File.directory? dir\n Dir.entries dir\n end", "def directory_contents(directory)\n return Dir.glob(File.join(directory, '*'))\n end", "def fakedir_get_all_names(root, basename = '')\n result = (['.', '..'] + root[:files] + root[:dirs].keys).map{|e| basename + e}\n root[:dirs].each do |name, content|\n result += fakedir_get_all_names(content, \"#{basename}#{name}/\")\n end\n result\n end", "def get_files(directory)\n Dir.entries(directory).select { |entry| not is_dir?(\"#{directory}/#{entry}\") }\n end", "def get_directory_files(directory, verbose_flag=false)\n exists = File.directory?(directory)\n if exists\n files = Dir[\"#{directory}/*\"] # grab all the files inside that directory\n return files\n else\n puts \"Unable to find a directory at #{directory}\" if verbose_flag\n return nil\n end\nend", "def enumerate_files(directory)\n return directory.\n children.\n reject(&:directory?).\n map(&:expand_path)\n end", "def get_file_list(filepath)\n Dir.entries(filepath)\nend", "def get_files(dir, regex)\n absdir = File.expand_path(dir)\n\n files = `cd '#{absdir}' 2>/dev/null && ls 2>/dev/null`.split()\n\n return \"\" if (files == \"\")\n\n files_string = \"\"\n files.each do |file|\n if Regexp.new(regex) =~ file\n files_string += File.basename(file) + \" \"\n end\n end\n\n return files_string.strip\nend", "def get_files_recursively_from directory\n dir = Pathname.new directory\n return nil unless (dir.directory?)\n return dir.each_child.map do |file|\n next file.realpath.to_path if (file.file?)\n next get_files_recursively_from file if (file.directory?)\n end .flatten\n end", "def files_in_directory name, glob = '**' / '*'\n Dir[path / name / glob]\n end", "def listFiles(dir = nil)\n req = DaemonListFilesRequest.new(dir)\n sendAndRecv(req){ |resp|\n if resp.successful\n DirContents.new(resp.dir, resp.files)\n else\n nil\n end\n }\n end", "def list_files(path)\n Dir.glob(\"#{path}/**\").reject do |entry|\n next if %w[. ..].include?(entry)\n File.directory?(entry)\n end.sort\n end", "def list_as_files(dir_name)\n Dir.entries(dir_name).grep(/\\.as$/).map { |name| name.sub(/\\.as$/, \"\") }.join(\", \")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
CUSTOM FUNCTIONS parse_html takes the HTML code of a document and removes all the junk from it in order to return the text content on the page
def parse_html(html)
  doc = Nokogiri::HTML(html)
  # Removing style and script tag content such as Javascript tags in order to get rid of JUNK text
  doc.xpath("//script").remove
  doc.xpath("//style").remove
  begin
    text = doc.at('body').inner_text
  rescue NoMethodError
    puts "NoMethodError"
    # puts file_name
    #title = nil
  end
  return text
end
[ "def parse\n #use regex to split\n arr = @html_string.scan(TAGS_AND_TEXT).flatten\n\n #remove nil values and return\n arr.compact!\n\n #remove white spaces\n arr.map! { |s| s.strip}\n end", "def strip_tags(html) \n\t\t\t\t\treturn html if html.blank?\n\t\t\t\t\tif html.index(\"<\")\n\t\t\t\t\t\ttext = \"\"\n\t\t\t\t\t\ttokenizer = HTML::Tokenizer.new(html)\n\t\t\t\n\t\t\t\t\t\twhile token = tokenizer.next\n\t\t\t\t\t\t\tnode = HTML::Node.parse(nil, 0, 0, token, false)\n\t\t\t\t\t\t\t# result is only the content of any Text nodes\n\t\t\t\t\t\t\ttext << node.to_s if node.class == HTML::Text \n\t\t\t\t\t\tend\n\t\t\t\t\t\t# strip any comments, and if they have a newline at the end (ie. line with\n\t\t\t\t\t\t# only a comment) strip that too\n\t\t\t\t\t\ttext.gsub(/<!--(.*?)-->[\\n]?/m, \"\") \n\t\t\t\t\telse\n\t\t\t\t\t\thtml # already plain text\n\t\t\t\t\tend \n\t\t\t\tend", "def strip_tags(html) \n return html if html.blank? || !html.index(\"<\")\n tokenizer = HTML::Tokenizer.new(html)\n\n text = returning [] do |text|\n while token = tokenizer.next\n node = HTML::Node.parse(nil, 0, 0, token, false)\n # result is only the content of any Text nodes\n text << node.to_s if node.class == HTML::Text \n end\n end\n \n # strip any comments, and if they have a newline at the end (ie. line with\n # only a comment) strip that too\n result = text.join.gsub(/<!--(.*?)-->[\\n]?/m, \"\")\n \n # Recurse - handle all dirty nested tags\n result == html ? result : strip_tags(result)\n end", "def get_text_contents(html_string)\n\t# Remove HTML and scripts\n\thtml_regex = /<head>.*?<\\/head>|<script>.*?<\\/script>|<noscript>.*?<\\/noscript>/m\n\ttext_string = html_string.gsub(html_regex,\"\")\n\n\t# Remove tags\n\ttag_regex = /<[^<>]*?>/m\n\ttext_string.gsub!(tag_regex,\"\")\n\n\t# Replace multiple spaces with one\n\ttext_string.gsub!(/\\s{2,}/m,\" \")\n\n\t# Remove STX\n\ttext_string.gsub!(/\\^B/,\"\")\n\n\treturn text_string\nend", "def text_wikimedia_html page\n html = @client.text_wikimedia_html page\n # normalize html by removing <!-- html comments -->\n doc = Nokogiri.HTML html\n (doc.xpath '//comment()').remove\n doc.inner_html\n end", "def sanitize_text(text)\n doc = Nokogiri::HTML.fragment(text)\n UNSUPPORTED_HTML_TAGS.each do |tag|\n doc.search(tag).each(&:remove)\n end\n doc.inner_html\n end", "def strip_html(html)\n html.gsub(/\\n|\\t/, '')\nend", "def html2text(html)\n\n result = ''\n begin\n web_doc = Hpricot(html)\n web_doc.search(\"//comment()\").remove\n web_doc.search(\"script\").remove\n web_doc.search(\"style\").remove\n web_doc.search(\"noscript\").remove\n web_doc.search(\"object\").remove\n web_doc.search(\"embed\").remove\n web_doc.search(\"head\").remove\n\n web_doc.traverse_text do |e| \n\n begin\n if e.content\n result += e.content+\"\\n\"\n end\n rescue\n # ignore errors\n end\n end\n rescue Exception => e\n # ignore errors\n warn \"html2text() - Exception '#{e.message}' trying to parse '#{html}'\"\n end\n\n if result == ''\n # Use a simple regular-expression approach to remove all tags\n result = html.gsub(/<[^>]*>/, '')\n end\n\n coder = HTMLEntities.new\n result = coder.decode(result)\n\n result.gsub!(/\\n[\\r\\n \\t]*/, \"\\n\")\n\n result\nend", "def clean_document(html)\n unless html.nil?\n clean_document!(html.dup) || html\n end\n end", "def strip_tags(html)\n return html if html.blank?\n if html.index(\"<\")\n text = \"\"\n tokenizer = ::HTML::Tokenizer.new(html)\n while token = tokenizer.next\n node = ::HTML::Node.parse(nil, 0, 0, token, false)\n # result is only the 
content of any Text nodes\n text << node.to_s if node.class == ::HTML::Text\n end\n # strip any comments, and if they have a newline at the end (ie. line with\n # only a comment) strip that too\n text.gsub(/<!--(.*?)-->[\\n]?/m, \"\")\n else\n html # already plain text\n end\n end", "def strip_links(html); end", "def extract_content(doc)\n if ce = content_element(doc)\n return strip_tags(strip_comments(ce.inner_html))\n end\n # return (ce.inner_text || '').gsub(Regexp.new('\\s+', Regexp::MULTILINE, 'u'), ' ').strip\n return ''\n end", "def strip_html(text)\n unless text.nil?\n strip_tags(text)\n end\n end", "def reduceHtml(text)\n return nil if not text\n # un-fix spaces\n new_text = text.gsub(/\\s*\\&nbsp;/, ' ')\n # remove images\n new_text.gsub!(/\\s*<img\\s+[^>]*>/m, '')\n # remove hyperlinks\n # note: this will drop waypoint URLs!\n new_text.gsub!(/\\s*<a\\s+[^>]*>/m, '')\n new_text.gsub!(/\\s*<\\/a>/m, '')\n # remove form elements\n new_text.gsub!(/\\s*<input\\s+[^>]*>/m, '')\n # do not remove table head: detect table format change\n #$#new_text.gsub!(/\\s*<thead>.*<\\/thead>/m, '')\n # remove spans\n new_text.gsub!(/\\s*<\\/?span[^>]*>/m, '')\n # remove leading and trailing blanks\n new_text.gsub!(/^\\s+/, '')\n new_text.gsub!(/\\s+$/, '')\n # combine table header entries\n new_text.gsub!(/<th( [^>]*)?>\\n+/m, '<th>')\n new_text.gsub!(/\\n+<\\/th( [^>]*)?>/m, '</th>')\n # combine table entries\n new_text.gsub!(/<td[^>]*>\\n+/m, '<td>')\n new_text.gsub!(/\\n+<\\/td[^>]*>/m, '</td>')\n # fuse continuation lines together between <td> .. </td>\n new_text.gsub!(/<td>.*?<\\/td>/m){ |td|\n td.gsub(/\\n/, ' ')\n }\n # remove \"class\" string from <tr>\n new_text.gsub!(/\\s*class=\\\"[^\\\"]*\\\"/m, '')\n # we have to keep the \"ishidden\" information for later\n if text != new_text\n debug3 \"reduced HTML to #{new_text}\"\n end\n debug2 \"reduceHTML old: #{text.length} new: #{new_text.length}\"\n return new_text\n end", "def extract_text body, options = {}\n # spans will have no effect on the way things look, we'll just get rid of them\n body.gsub!(/<\\/?span[^>]*>/, '')\n\n # there shall be only one white space between things.\n body.gsub!(/\\s+/o, ' ')\n\n # block-level tag don't need surrounding white space\n ['p', 'li', 'ul', 'ol', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'div'].each do |tagname|\n body.gsub!(/\\s*(<#{tagname}[^>]*>)\\s*/i, \"\\\\1\")\n body.gsub!(/\\s*(<\\/#{tagname}>)\\s*/i, \"\\\\1\")\n end\n\n document = Hpricot(body)\n\n # replace a lot of common elements, outputs something like markdown\n document.search('head').remove();\n document.search('script').remove();\n document.search('h1').prepend(\"\\n\\n# \").append(\" #\\n\")\n document.search('h2').prepend(\"\\n\\n## \").append(\" ##\\n\")\n document.search('h3').prepend(\"\\n\\n### \").append(\" ###\\n\")\n document.search('h4').prepend(\"\\n\\n#### \").append(\" ####\\n\")\n document.search('h5').prepend(\"\\n\\n##### \").append(\" #####\\n\")\n document.search('h6').prepend(\"\\n\\n###### \").append(\" ######\\n\")\n document.search('p, li').append(\"\\n\")\n document.search('ul li').prepend(\"* \")\n document.search('ol li').prepend(\"1. \") # markdown doesn't care.\n document.search('br').each {|br| br.swap(\"\\n\")}\n document.search('ul, ol').prepend(\"\\n\").append(\"\\n\")\n document.search('a').each {|a| a.swap(\"[#{a.inner_text}](#{a.attributes['href']})\") }\n\n content = if options.has_key? :css\n document.search(options[:css]).inner_text.strip\n elsif options.has_key? 
:xpath\n (document/options[:xpath]).inner_text.strip\n else\n document.inner_text.strip\n end\n\n if content.length < 100 # 100 characters is an abritrary value. Basically \"small\"\n raise \"Very short content (#{content.length} bytes) after text-extraction. Double-check the selector \\\"#{options[:css] || options[:xpath]}\\\"\"\n end\n content\nend", "def lstrip_html\n return if self.blank?\n\n m = self.match(/\\A(\\s*?[^<]|(.*?)>\\s*[^<])/) #Find first printing character\n return self unless m\n \n ldr = m[0]\n ldr_last = ldr.slice(ldr.size-1, ldr.size)\n ldr = ldr.slice(0,ldr.size-1) # portion up to the first printing character\n bdy = ldr_last + m.post_match # portion following the first printing character\n \n cln_ldr = ldr.gsub(/<p/mi, \"<span\")\n cln_ldr = cln_ldr.gsub(/<\\/p/mi, \"</span\")\n cln_ldr = cln_ldr.gsub(/<br(.*?)>/mi, \"\")\n \n m = bdy.match(/(\\A.*?)<p/mi)\n if !m\n bdy = bdy.sub(/<\\/p/mi, \"</span\") # change first closing </p> from an open <p> remaining from ldr\n else\n l = m.post_match\n f_cln = m[0].gsub(/<\\/p/mi, \"</span\") # change any closing </p> from and open <p> remaining from ldr\n bdy = f_cln + l \n end\n return cln_ldr + bdy \n end", "def get_html_contents(url)\n\turl_string=\"\"\n\t# Open page and store contents\n\topen(url) { |data| \n\t\turl_string = data.read\n\t}\n\n\t# Remove \"not to be confused with\" links\n\thatnote_regex = /<div class=\"hatnote\">.*?<\\/div>/m\n\thtml_string = url_string.gsub(hatnote_regex,\"\")\n\n\treturn html_string\nend", "def remove_all_html_tags!() gsub!(%r{</?[^>]+?>}, \"\"); strip! end", "def extract_content(doc)\n content = ''\n ce = content_element(doc)\n ce = ce.inner_html if ce.respond_to? :inner_html\n content = strip_tags(strip_comments(ce)) if ce\n # (ce/'h1, h2, h3, h4, h5, h6, p, li, dt, dd, td, address, option, ').each do |child|\n # extract_text child, content\n return content.strip\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
remove_punc takes a string containing text and removes all the punctuation from it in order to finally return a list of words/tokens in the text
def remove_punc(text)
  word_list = []
  # Checking for correct encoding and re-encoding the string if necessary
  if ! text.valid_encoding?
    text = text.encode("UTF-16be", :invalid=>:replace, :replace=>"?").encode('UTF-8')
  end
  # Removing punctuation
  words = text.split(/[ ,;{}`~!@#$%^&*<>.:"'|?\\()_+=\/\[\]\-]/)
  # Looping through the list, checking for valid words, and changing their case
  for word in words
    word = word[/\w*/]
    word.downcase!
    word_list.push(word)
  end
  # Deleting blanks
  word_list.delete("")
  return word_list
end
[ "def remove_punctuation\n gsub /[[:punct:]]/, ''\n end", "def removeQuotesAndPunc sentence\n\t\tquotes = [\"\\\"\",\"'\",\":\",\",\",\".\",\"(\",\")\",\";\",\"!\",\"&\",\"<\",\">\",\"?\",\"-\",\"_\"]\n\t\twords = sentence.split(' ')\n\t\twords.map! do |w|\n\t\t\tw.slice!(1) if quotes.include?(w[0])\n\t\t\tw.slice(-1) if quotes.include?(w[-1])\n\t\t\tw\n\t\tend\n\t\treturn words.join(' ')\n\tend", "def strip_puncuation(words)\n words.delete!(\"!\" + \"#\" + \"$\")\n return words\nend", "def trim_punctuation\n lambda do |rec, accumulator|\n accumulator.collect! {|s| Marc21.trim_punctuation(s)}\n end\n end", "def clean_string_list(list)\n clean_list = []\n\n list.each do |word|\n if (word =~ /^.+$/)\n word.gsub!(/[[:punct:]]/,\"\")\n clean_list.push(word.downcase.chomp) if word =~ /^[a-zA-Z]+$/\n end\n end\n return clean_list\nend", "def remove_punctuation(str)\n str.gsub(/[^0-9A-Za-z]/, '')\n end", "def call\n text\n .split\n .map { |token| convert_sym_to_punct(token) }\n .flat_map { |token| \n token = should_downcase(token)\n remove_symbols(token)\n }\n .flat_map { |token| token.split(Regex::COMMAS_OR_PUNCTUATION) }\n .flat_map { |token| token.split(Regex::VARIOUS) }\n .flat_map { |token| token.split(Regex::ENDS_WITH_PUNCTUATION2) }\n .flat_map { |token| split_dotted_email_or_digit(token) }\n .flat_map { |token| split_abbreviations(token) }\n .flat_map { |token| split_period_after_last_word(token) }\n .flat_map { |token| remove_slash_start_and_end(token) }\n end", "def remove_punctuation(name)\n name.gsub(/\\W/,\"\")\nend", "def removePunctuation(line)\n line.to_s.gsub(/(^|\\s+)[[:punct:]]+|[[:punct:]]{2,}|[[:punct:]]+(\\s+|$)/,' ').strip \n end", "def punctuation(s)\n i = 0\n j = 0\n ret = ''\n s.each_char do |c|\n if !\"abcdefghijklmnopqrstuvwxyz'\".include?(c.downcase)\n ret += word(s[i,j-i]) + c\n i = j + 1\n end\n j += 1\n end\n\n if j > i\n ret += word(s[i..j])\n end\n\n return ret\n end", "def normalize_punctuation(text)\n return unless text.is_a?(String)\n\n text = text.tr(',', ' ').strip.split(' ').join(', ')\n text.empty? ? ', ' : text\n end", "def word_sorting_with_another_remove_punctuation (sentence)\n\twords_array = sentence.split(\" \")\n\n\tclean_words_array = []\n\twords_array.each do |word|\n\t\tclean_words_array << word.gsub(/[^a-zA-Z ]/,\"\")\n\tend\n\tprint clean_words_array.sort\nend", "def prepare_words_and_emoticons\n @text\n .split\n .map { |word| WordHelper.strip_punctuation(word) }\n end", "def remove_words(text, removes)\n\twords = text.split(\" \")\n\n\twords_to_remove = []\n\n\tremoves.split(\" \").each do |item|\n\t\twords_to_remove << item\n\tend\n\n\treturn_text = \"\"\n\n\twords.each do |word|\n\t\treturn_text += \"#{word} \" unless words_to_remove.include?(word)\n\tend\n\n\treturn return_text\nend", "def phrase_without_punctuation\n @phrase.gsub(/[^A-Za-z0-9 ]/, \"\")\n end", "def all_words\n result = []\n tagged_words.each do |word|\n result << word[0] unless is_punctuation([ word[0], word[1] ])\n end\n result\n end", "def tokenize\n return [] if unencoded_text.blank?\n # kill off all punctuation except [stuff]'s or [stuff]'t\n # this includes hyphens (words are split)\n str = unencoded_text.\n downcase.\n gsub(/[^a-zA-Z0-9\\']+/, ' ').\n gsub(/(\\w)\\'([st])\\b/, '\\1!\\2').gsub(/\\'/, ' ').gsub(/!/, \"'\")\n # Busticate at whitespace\n words = str.strip.split(/\\s+/)\n words.reject!{|w| w.length < 3 }\n words.map!{|w| Wukong::encode_str(w) }\n end", "def remove_punctuation(string)\n punctuation?(string) ? string.chop! 
: string\nend", "def clean(text)\n words = text.split(/\\r?\\n/)\n words.select! do |w|\n w.strip!\n w =~ /^[a-zA-Z]/\n end\n words.uniq unless words.empty?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function that takes the name of an html file stored on disk, and returns a list of tokens (words) in that file.
def find_tokens(filename)
  html = File.read(filename)
  # Parsing the HTML content of the file
  parsed_html = parse_html(html)
  # Converting the text into a list of tokens after removing punctuation
  tokens = remove_punc(parsed_html)
  return tokens
end
[ "def find_tokens(filename)\n return clean_string_list(convert_html_to_list(filename))\nend", "def find_tokens(filename)\n word_list = []\n rr = Nokogiri::HTML(open(filename))\n rr.css('script, style').each { |node| node.remove }\n words = rr.css('body').text.encode!('UTF-8', :invalid=>:replace).split(/[ ,\\-,\\n,\\r,\\t,&]/)\n\n #words = rr.css('body').text.split(/[ ,\\-,\\n,\\r,\\t,&]/)\n words.each {|w| word_list.push(w)}\n word_list.reject!{|c| c.empty?}\n return word_list\nend", "def tokenize(file)\n doc = Nokogiri::HTML(open(file))\n doc.content.split(/[^'\\w]/).map{|token| token.downcase}.reject{|token| token == \"\" || STOPLIST.include?(token)}\nend", "def content_tokens\n filename\n end", "def read_tokens\n @tokens = []\n\n File.open(@filename) do |input_file|\n input_file.each do |line|\n line.split.each do |word|\n word = normalize(word)\n @tokens << word unless word.empty?\n end\n end\n end\n\n @tokens\n end", "def getWordlist(file_path)\n return File.open(file_path).readlines\n end", "def read_tokens(filename)\n\n # Since we're returning the tokens as an array, we need to create that array\n # here.\n tokens = []\n\n File.open(filename) do |input_file|\n input_file.each do |line|\n line.split.each do |word|\n word = normalize(word)\n tokens << word unless word.empty?\n end\n end\n end\n\n # Since it's the last line in the file, this function will return the array.\n tokens\nend", "def get_words_from(file)\n if File.exists?(file)\n words = []\n File.open(file).each { |word| words << word.chomp }\n return words\n end\n end", "def read_tokens()\n\n # By adding @ to tokens, we're saving it also in the instance. We're not\n # going to use that, but it might be useful later.\n @tokens = []\n\n File.open(@filename) do |input_file|\n input_file.each do |line|\n line.split.each do |word|\n word = normalize(word)\n @tokens << word unless word.empty?\n end\n end\n end\n\n @tokens\n end", "def get_allwords_from_html(url = @url)\n JSON.parse(get_html_string(url).scan(%r{.+({.+name.+mult.+})})[0][0][0...-1])[\"json\"][1...-1]\n end", "def load_words(file_name)\n words_loaded = []\n File.open(file_name).readlines.each do |line|\n words_loaded << line if line.length.between?(5, 12)\n end\n words_loaded\n end", "def get_word_list\n words = \"\"\n File.open('mots.txt', 'r') do |file|\n while line = file.gets\n words += line\n end\n end\n list = words.split\n end", "def get_words(file)\n\twords=[]\n\tunless file.nil?\n\t\tFile.readlines(file).each do |line|\n\t\t\tline=line.chomp\n\t\t\t#Replaces non-ASCII characters with an ASCII approximation\n\t\t\twords << ActiveSupport::Inflector.transliterate(line) if line.length == $length\n\t\tend\n\tend\n\treturn words.uniq!||[]\nend", "def scanner\n @sentences ||= File.open(@path) do |file|\n file.each_line.each_with_object([]) do |line, acc|\n stripped_line = line.strip\n\n unless stripped_line.nil? || stripped_line.empty?\n acc << line.split(' ').map do |word|\n word.split('/').first\n end.join(' ')\n end\n end\n end\n\n end", "def html_to_words_and_tags\n tokens = []\n n = 0\n html_split(@html.strip).each do |word, offset|\n if word == \"<\" \n tokens[n] = Token.new(offset, word)\n elsif word.match(/^[>.;\\s]$/)\n tokens[n-1].merge Token.new(offset, word)\n else\n token = Token.new(offset, word)\n tokens[n] ? 
tokens[n].merge(token) : tokens[n] = token\n n += 1\n end\n end\n tokens.map { |t| [t.value.strip, t.offset] }\n end", "def read_words_from_file(filename)\n words = []\n \n fp = File.new(filename)\n for line in fp.readlines\n words.push(line.strip)\n end\n fp.close\n \n words\nend", "def tokensFromFile(filename)\n ts = []\n File.readlines(filename).each do |line|\n \n # Allow for both token type and value by storing as array\n arr = line.split\n \n # Ignore empty lines\n if (arr.size > 0)\n ts << arr\n end\n end\n \n # Add EOF token\n ts << [\"$\"]\n \n return ts\nend", "def read_markup(file)\n File.read(file)\n end", "def list\n files = Dir.glob(\"#{coverage_dir}/*.html\")\n files.map { |file| File.basename(file) }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function that takes a list of tokens, and a list (or hash) of stop words, and returns a new list with all of the stop words removed
def remove_stop_tokens(tokens, stop_words)
  # Looping through the list of tokens and removing all the stop words from the list
  for i in tokens
    if stop_words.member?(i)
      tokens.delete(i)
    end
  end
  return tokens
end
[ "def remove_stop_tokens(tokens, stop_words)\n \n stop_words.each do |stop|\n tokens.delete(stop)\n end\n\n return tokens\nend", "def remove_stop_tokens(tokens, stop_words)\n stop_words.each do |word|\n if tokens.include?(word)\n tokens.delete(word)\n end\n end\n return tokens\nend", "def remove_stop_tokens(tokens, stop_words)\n for token in tokens\n token.downcase!\n if stop_words[token]\n tokens.delete(token)\n end\n end\n return tokens\nend", "def remove_stopwords(stopword_list=\"ENGLISH_US\")\n tokens_sans_stopwords = []\n stopword_list = Stopwords.const_get(stopword_list)\n domain_tokens.each do |token| \n tokens_sans_stopwords << token unless stopword_list.include?(token)\n end\n domain_tokens(tokens_sans_stopwords)\n end", "def remove_stop_words(word_list)\n stop_words = File.open('/Users/ravil/experimental/exips/stop_words.txt').read.split(',')\n stop_words += 'abcdefghijklmnopqrstuvwxyz'.chars\n word_list.filter { |w| !stop_words.include? w }\nend", "def removeBlackList words\n\t\tblacklist = ['a','an','the','then','but','therefore','because','I','he',\n\t\t\t\t\t 'she','it','him','her','his','her','its','they','them','their']\n\t\tblacklist.map!{|w| w.upcase}\n\t\tmodified = words.clone\n\t\tmodified.delete_if{|w| blacklist.include?(w.upcase)}\n\t\treturn modified\n\tend", "def remove_stopwords(ary)\n @filter.filter(ary)\n end", "def remove_stopwords(lines, stopwords)\n\twords = lines.join.split\n\tkeywords = words.select {|word| !stopwords.include?(word)}\n\treturn keywords.join(' ')\nend", "def stop_words\n @text = text.scan(/(\\w+)(\\W+)/).reject do |(word, other)|\n stop_words_array.include? word\n end.flatten.join\n end", "def xfrm_remove_stop_words(str)\n stop_words = ['Variant','variant', 'Erhua', 'Counter', 'Has', 'I', 'me', 'a', 'an', 'am', 'are', 'as', 'at', 'be', 'by','how', 'in', 'is', 'it', 'of', 'on', 'or', 'that', 'than', 'the', 'this', 'to', 'was', 'what', 'when', 'where', 'who', 'will', 'with', 'the']\n results = []\n str.gsub!($regexes[:inlined_tags], \"\") ## remove tag blocks\n str.split(' ').each do |sstr|\n # remove non word characters from string\n results << sstr unless stop_words.index(sstr.gsub(/[^a-zA-Z|\\s]/, '').strip)\n end\n return results.flatten.compact.join(' ')\n end", "def remove_stop_words(question, vectorStopWords)\n vectorStopWords.each do |stopWord|\n if question.match(/\\b#{stopWord}\\b/)\n question.gsub! (/\\b#{stopWord}\\b/), ''\n end\n end\n question\n end", "def tokenize(s)\nterms = s.gsub(/(\\s|\\d|\\W)+/u,' ').rstrip.strip.downcase.split(' ')\nterms.reject!{|term| @@stop_words.include?(term) || term.length < 3}\nterms\nend", "def get_words_to_remove existing_words, new_words\n existing_words.map do |existing_word|\n existing_word unless new_words.include?(existing_word.text)\n end.compact\n end", "def filter_term_list(term_list)\n (term_list.map(&:downcase) - IGNORED_WORDS).reject { |t| t.size < 3 }\n end", "def remove_stop_words\n f = File.open('/Users/ravil/experimental/exips/stop_words.txt')\n $stack.push(f.read.split(','))\n f.close\n # add single letter words\n $stack[-1] += 'abcdefghijklmnopqrstuvwxyz'.chars # Python's list(string.ascii_lowercase)\n $heap[:stop_words] = $stack.pop\n $heap[:words] = []\n while $stack.length > 0\n if $heap[:stop_words].include? 
$stack.last\n $stack.pop\n else\n $heap[:words].append $stack.pop # pop it, store it\n end\n end\n $stack += $heap[:words] # Load the words onto the stack\n $heap[:stop_words] = nil; $heap[:words] = nil # Not needed\nend", "def process_stopwords(txt = self.search_text)\n #Needs to be set so highlighting will work properly (can't match quotes)\n self.highlight_token_array(txt)\n #Now put humpty dumpty back together without the nasty stopwords, sort the tokens by length\n self.search_token_array(txt).join(\" \")\n end", "def without_stopwords\n gsub(STOPWORD_RE, '')\n end", "def clear_stopwords(freq_hash)\n @stopwords.each { |word| freq_hash.delete(word) }\n end", "def prune_tokens\n tokens.select(&:stale?).map(&:delete)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
function that takes a list of tokens, runs a stemmer on each token, and then returns a new list with the stems
def stem_tokens(tokens)
  stem_list = []
  # Looping through the list and finding the stem word for each word
  for word in tokens
    word = word[/\w*/]
    s = word.stem
    stem_list.push(s)
  end
  return stem_list
end
[ "def stem_tokens(tokens)\n stem_tokens = Array.new\n tokens.each do |token|\n stem_tokens.push(Stemmer::stem_word(token))\n end\n return stem_tokens\nend", "def stem_tokens(tokens)\r\n stem_tokens = Array.new\r\n\r\n tokens.each do |token|\r\n stem_tokens.push(Stemmer.stem_word(token))\r\n end\r\n\r\n return stem_tokens\r\nend", "def stem_each(ary)\n ary.map { |term| @stemmer.stem(term) }\n end", "def stem_words(words)\n new_words = []\n words.each do |word|\n new_words << word.stem\n end\n new_words\nend", "def get_word_stems words\n\t\t\toutput = []\n\t\t\twords.each do |word|\n\t\t\t\toutput << @stemmer.stem(word)\n\t\t\tend\n\t\t\toutput\n\t\tend", "def stemmed_text_terms\n if stemmer\n each_attribute(:text) do |name, value, options|\n value = value.to_s.split(/\\s+/u).map { |w| w.gsub(/[^\\w]/u, \"\") } unless value.kind_of? Array\n value.map(&:to_s).map(&:downcase).map do |term|\n [\"Z#{stemmer.call(term)}\", options[:weight] || 1] unless term.empty?\n end\n end.flatten(1).compact\n else\n []\n end\n end", "def to_stems(sentence)\n return [] if sentence.blank?\n text = sentence.gsub /\\s[A-Z][0-9][0-9]\\..+\\s/, '' # remove codes\n text.gsub!(/[\\(\\)\\:\\[\\],\\.\\?]/, '') # remove special characters\n\n text.downcase!\n words = text.split(/[-\\/\\s]/) # split at whitespace/slash/line\n #words.reject! { |w| w.match(/[A-Z][0-9][0-9]\\..+/) }\n words.reject! { |w| w.match(/[0-9]+/) } # reject digits\n words.map { |w| @stemmer.stem(w) }\n end", "def analyze content\n unless content.respond_to? :split\n raise ArgumentError, \"#{content.class} has no #split\"\n end\n content.split(/\\s/).map {|w| @stemmer.stem w }\n end", "def stems(all: true)\n self.class.sorted_stem_list(raw_stems, all: all)\n end", "def test_stemming\n data = []\n File.foreach(File.expand_path(File.dirname(__FILE__))+\"/porter2_testdata.txt\") do |line| \n data << line.chomp.split(/ /).reject { |e| e.empty? }\n end\n\n data.each do |input|\n word, stem = input\n assert_equal stem, PorterStemmer::Porter2.stem(word)\n end\n end", "def match_stem(result, from_word)\n words = result.string.split(/\\b/)\n result_words = words.dup\n stems = words.map { |word| self.class.stem word.downcase }\n from_stem = from_word.split(/\\b/).map { |w| self.class.stem w }\n stems.each_index do |index|\n next unless stems[index, from_stem.size] == from_stem\n yield words, result_words, index..(index+from_stem.size - 1)\n end\n\n return result_words\n end", "def stemming(type,words,date)\n # selecting feeds to be clustered\n @feeds= Feedentry.where(pubon: /#{date}/,summary: /#{words}/,type: /#{type}/)\n c = 0\n # process each feed data \n @feeds.each do |feed| \n \t summ=\" \"+feed.title.to_s()+\" \"+feed.summary.to_s()\n\t summ = summ.downcase\n summ=summ.gsub /nbsp/,''\n\t words = summ.scan(/\\w+/)\n summ =''\n # remove too short words and numbers\n words.each do |key|\n if key.length > 3 && !key.match(/\\d/)\n summ = summ + \" \" + Lingua.stemmer(key, :language=>\"en\")\n end\n end\n\t words = summ.scan(/\\w+/)\n #considering only articles whose title and summary have more than 20 words\n if (words.length > 20)\n #path to store the text files for clustering in mahout.\n filename = \"#{PROJECT_PATH}/files/\" + feed.name\n f=File.new(filename,\"w\")\n \t f.write(summ)\n\t f.close\n c = c + 1\n #puts \"No of files created :\" + c.to_s()\n end\n end \n\nend", "def index\n @stemmings = Stemming.all\n end", "def normalize_list tokens\n tokens.map! 
{ |t| normalize t }\n tokens.compact!\n return tokens\n end", "def test_stemming\n data = []\n current_dir = File.expand_path(File.dirname(__FILE__))\n voc_file = File.new(current_dir+\"/porter1_vocabulary.txt\", \"r\")\n out_file = File.new(current_dir+\"/porter1_output.txt\", \"r\")\n while ((word = voc_file.gets) && (stem = out_file.gets)) \n data << [word.chop, stem.chop]\n end\n voc_file.close\n out_file.close\n\n data.each do |input|\n word, stem = input\n assert_equal stem, PorterStemmer::Porter1.stem(word)\n end\n end", "def tokenize_documents\r\n\t\tdocuments = []\r\n\t\t@collection.each do |document|\r\n\t\t\tdocument = TfIdf::Document.new(document)\r\n\t\t\tdocuments.push(document.tokens)\r\n\t\tend\r\n\t\treturn documents\r\n\tend", "def stem\n\t\treturn Linguistics::EN::Stemmer.stemmer.stem( self.obj.to_s )\n\tend", "def each_term\n\t\t\tself.to_terms.each do |term|\n\t\t\t\tyield term.stem\n\t\t\tend\n\t\tend", "def find_stem_freq(stem_tokens)\n freq_words = {}\n stem_tokens.each do |token|\n if freq_words.has_key?(token)\n freq_words[token] += 1\n else\n freq_words[token] = 1\n end\n end\n return freq_words\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get_file_details takes a file name containing the index and returns the data of the file, i.e. its name and url, in a hash table
def get_file_details(file_name)
  fd = {}
  # Looping through the file and updating the name and url variable with the new data
  # and then finally adding them to the hash table
  File.readlines(file_name).each do |line|
    data = line.split(" ")
    puts data[2]
    name = data[0]
    url = data[2]
    fd[name] = url
  end
  puts fd
  return fd
end
[ "def file_location(index)\n r = {}\n files = @info_hash['files']\n piece_bytes = @info_hash['piece length']\n\n prev_bytes = calc_file_lengths(files.take(index))\n file_bytes = files[index]['length']\n post_bytes = calc_file_lengths(files.drop(index + 1)) \n\n # Not adding one to starting piece and byte to account for \n # 0 index\n r[:starting_piece] = prev_bytes / piece_bytes \n starting_offset = prev_bytes % piece_bytes\n r[:starting_byte] = starting_offset \n\n if starting_offset + file_bytes > piece_bytes \n oflow_bytes = starting_offset + file_bytes - piece_bytes \n oflow_pieces, oflow_remainder = oflow_bytes.divmod piece_bytes \n r[:ending_piece] = starting_piece + oflow_pieces\n else\n r[:ending_piece] = r[:starting_piece]\n end \n r[:ending_byte] = starting_offset + file_bytes\n r\n end", "def download_file(index)\n prev_pieces = 0\n\n if @info_hash.has_key? 'files'\n files = @info_hash['files']\n puts files\n filename = files[index]['path'].join('/') \n end \n p = Pieces.new @info_hash\n # All you need are here are the starting piece, starting byte,\n # ending piece, and ending byte\n\n if @info_hash.has_key? 'files'\n file_loc = file_location index\n s_piece, s_byte, e_piece, e_byte = file_loc.values\n end \n\n f = File.open(\"./#{filename}\", 'w')\n downloaded = p.download_range(s_piece || 0, e_piece, @socket) \n puts downloaded.values.join[s_byte, e_byte].size\n f.write(downloaded.values.join[s_byte..e_byte])\n f.close_write\n end", "def get_file(name)\n data = []\n target_path = @prefix.nil? ? name : \"#{@prefix}/#{name}\"\n iterate file: proc { |path, entry| data << entry.read if path == target_path }, other: proc {}\n fail \"Multiple entries for #{target_path} found in #{@file_name}\" if data.size > 1\n data.first\n end", "def get_file index\n return @files[index]\n end", "def getFile(aFileIdx, aDirectory)\n dsURL='https://data.ct.gov/resource/'+aFileIdx+'.json'\n altDSURL='https://data.ct.gov/api/views/'+aFileIdx\n # puts dsURL\n retryCnt = 0\n currentURL = dsURL\n begin\n open(aDirectory+aFileIdx+'.json', 'wb') do |file|\n file << open(currentURL).read\n end\n rescue\n # puts \"error reading index:\"+aFileIdx\n if retryCnt < 1\n # puts \"retry\"\n currentURL = altDSURL\n retryCnt += 1\n retry\n end\n end\n return 0\n end", "def retrieve_file_data(file_name)\n gist['files'][file_name]\n end", "def getFileAt(position)\n require 'rubygems/package'\n require 'zlib'\n\n @files = []\n f = File.new(@filename)\n tar_extract = Gem::Package::TarReader.new(f)\n tar_extract.rewind # The extract has to be rewinded after every iteration\n \n i = 0\n tar_extract.each do |entry|\n COURSE_LOGGER.log(entry)\n COURSE_LOGGER.log(i)\n\n if i == position then\n return nil, nil unless entry\n return entry.read, entry.full_name\n end\n\n i += 1\n end\n\n return nil, nil unless header\n\n rescue\n return nil, nil\n end", "def file_find(name)\n @file_hash[name]\n end", "def info(filename, nohash = nil)\n\tf = filename\n\tif test_file(f)\n\t\th = nohash ? 
(nil) : (Digest::SHA1.hexdigest(File.read(f)))\n\t\treturn [File.mtime(f), File.stat(f).mode.to_s(8).to_i, h]\n\tend\n\treturn []\nend", "def file_list(ref)\n if sha = @access.ref_to_sha(ref)\n commit = @access.commit(sha)\n tree_map_for(sha).inject([]) do |list, entry|\n next list if @page_class.valid_page_name?(entry.name)\n list << entry.file(self, commit)\n end\n else\n []\n end\n end", "def file_by_url(url)\n return file_by_id(url_to_id(url))\n end", "def file_entries\n file_info = []\n\n ss = StringScanner.new(central_directory)\n\n until ss.scan_until(/\\x50\\x4b\\x01\\x02/).nil? # central directory signature\n\n # compressed size\n ss.pos += 16\n compressed_size = ss.peek(4).unpack1('L<')\n\n # uncompressed size\n ss.pos += 4\n uncompressed_size = ss.peek(4).unpack1('L<')\n\n # file name length\n ss.pos += 4\n file_name_length = ss.peek(2).unpack1('S<')\n\n # filename\n ss.pos += 18\n file_name = ss.peek(file_name_length)\n enc = file_name.detect_encoding[:ruby_encoding] || 'UTF-8'\n file_name.force_encoding(enc)\n\n # try to make UTF-8 and in the rare case it fails then make bad characters into question marks\n file_name = file_name.encode('UTF-8', invalid: :replace, undef: :replace, replace: '?')\n\n # forward past the file name\n ss.pos += file_name_length\n\n # if compressed or uncompressed equal 4294967295 then it's a zip64 file and they need recalculation\n if compressed_size == 4_294_967_295 || uncompressed_size == 4_294_967_295\n\n unless ss.peek(2) == \"\\x01\\x00\"\n raise Stash::Compressed::ZipError, \"Something is wrong with the zip64 file signature for #{file_name} for #{@presigned_url}\"\n end\n\n # uncompressed size\n ss.pos += 4\n uncompressed_size = ss.peek(8).unpack1('Q<')\n\n # compressed size\n ss.pos += 8\n compressed_size = ss.peek(8).unpack1('Q<')\n\n ss.pos += 8\n end\n\n file_info << { file_name: file_name, compressed_size: compressed_size, uncompressed_size: uncompressed_size }\n end\n\n file_info\n end", "def get_file_list()\n file_hash = {}\n\n # Choreo to retrieve document list.\n get_doc_list = Google::Documents::GetAllDocuments.new(@test_session)\n # Inputs for the list-fetching choreo.\n inputs = get_doc_list.new_input_set()\n # Configure inputs\n inputs.set_Username(GOOGLEDOCS_USERNAME)\n inputs.set_Password(GOOGLEDOCS_PASSWORD)\n inputs.set_Deleted(\"false\")\n\n # Get Temboo result object.\n results = get_doc_list.execute(inputs)\n # Convert the XML response to REXML document object.\n result_tree = REXML::Document.new(results.get_Response())\n # Get the information that we will need to download the document.\n result_tree.root.each_element('//entry') {|entry|\n title = entry.elements['title'].text\n content = entry.elements['content']\n src = content.attributes['src']\n file_hash[title] = src\n }\n\n return file_hash\n end", "def filepath(url)\n hsh = hash url\n path_h(hsh) + filename_h(hsh)\n end", "def file_names\n names = []\n frm.table(:class=>/listHier lines/, :text=>/Title/).rows.each do |row|\n next if row.td(:class=>\"specialLink\").exist? 
== false\n next if row.td(:class=>\"specialLink\").link(:title=>\"Folder\").exist?\n names << row.td(:class=>\"specialLink\").link(:href=>/access.content/, :index=>1).text\n end\n return names\n end", "def retrieve(aIndexFile, aOutputDirectory)\n jsonIdx = loadFileIdx(aIndexFile)\n# puts \"length of index:\"+jsonIdx.length.to_s\n for jfIdx in 0..jsonIdx.length\n getFile(jsonIdx[jfIdx][\"dataId\"], aOutputDirectory)\n end\n end", "def file_names\n names = []\n frm.table(:class=>/listHier lines/).rows.each do |row|\n next if row.td(:class=>\"specialLink\").exist? == false\n next if row.td(:class=>\"specialLink\").link(:title=>\"Folder\").exist?\n names << row.td(:class=>\"specialLink\").link(:href=>/access.content/, :index=>1).text\n end\n return names\n end", "def find_nth_file(basename, index = T.unsafe(nil)); end", "def files\n array = []\n @list.each do |k,v|\n array += v.filename\n end\n array\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterate over each bigram as an instance of Bigram.
def each
  @table.each do |pair, bigram|
    yield(bigram)
  end
end
[ "def each\n scanner.each_bigram do |bigram|\n yield(bigram)\n end\n end", "def printBigram\n\t$bigrams.each do |i|\n\t\tputs \"#{i}\"\n\tend\nend", "def bigram\n\n\t\tend", "def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end", "def each_trigram\n table.each do |unigram, bigrams|\n bigrams.each do |bigram, trigrams|\n next unless bigram\n\n trigrams.each do |trigram, count|\n next unless trigram\n\n yield [[unigram, bigram, trigram], count]\n end\n end\n end\n end", "def n_gramas\n for i in 2...8\n busca_n_gramas(i)\n end\n end", "def bigramate(title)\n stop_words = [\"a\", \"an\", \"and\", \"by\", \"for\", \"from\", \"in\", \"of\", \"on\", \"or\", \"out\", \"the\", \"to\", \"with\"] # set of stopwords to remove\n\twords = title.split(\" \") # title represented as an array of strings\n words -= stop_words # remove any stopwords from array of words\n previous = \"\"\n skip = true #iterator control flag\n\twords.each do |next_gram|\n #Skip the first iteration over title since we are looking for word PAIRS\n\t\tif skip == false\n #If no entry in $bigrams[word_n] exists\n\t\t\tif $bigrams[previous] == nil\n\t\t\t\t$bigrams[previous]= {\"#{next_gram}\" => 1} # Create entry $bigrams[word_n][word_n+1] == 1\n #If an entry was erroneously created with no value\n elsif $bigrams[previous][next_gram] == nil #initialize to 1\n $bigrams[previous][next_gram]=1\n #Else increment the value $bigrams[word_n][word_n+1] += 1\n else\n num = $bigrams[previous][next_gram]\n num = num+1\n $bigrams[previous][next_gram] = num\n\t\t\tend\n end\n\t\tskip = false\n\t\tprevious = \"#{next_gram}\"\n end\nend", "def each_ngram\n @prefixes.each do |prefix,table|\n table.each_gram do |postfix_gram|\n yield(prefix + postfix_gram) if block_given?\n end\n end\n\n return self\n end", "def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT word1, word2, probability FROM bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t w1, w2, b = *row\n\t\t bigrams << [w1, w2, b]\n\t\t end\n ensure\n stm.close\n end\n\n bigrams\n end", "def load_bigrams(bigram_files)\n bigram_rates = Hash.new{ |h,k| h[k] = {} }\n bigram_files.each do |file|\n puts \"Loading #{file}...\"\n File.readlines(file).each do |line|\n rate, word1, word2 = line.strip.split(/\\s+/)\n bigram_rates[word1][word2] ||= []\n bigram_rates[word1][word2] << BigDecimal.new(rate)\n end\n end\n\n bigrams = Hash.new{ |h,k| h[k] = {} }\n bigram_rates.each do |word1, bw|\n bw.each do |word2, rates|\n bigrams[word1][word2] = rates.inject(0){ |s, r| s = s + r; s } / rates.size\n end\n end\n\n @bigrams = bigrams\n\n #bglist.each do |word1, scores|\n # bigrams[word1] = scores.sort_by{ |w, r| r }.map{ |x| x[0] }.reverse\n #end\n end", "def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend", "def construct_bigram(title)\n\t#eliminate stop words before creating bigram 
counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend", "def each_gram(&block)\n @frequencies.each_key(&block)\n end", "def load_bigrams(filename)\n #puts filename\n CSV.foreach(filename, :headers=>true) do |row|\n bigram = row['bigram']\n bigram.gsub!(' ','_')\n @bigrams << bigram\n end\n end", "def Gramos\n\t\t@gramos\n\tend", "def process\n @letters.flatten.each { |l| yield l }\n end", "def yield_ngrams(str, ngram_size=@ngram_size)\n ngram_list = ngram_list(str, ngram_size)\n ngram_list.each { |ngram| yield ngram }\n end", "def find_bigrams(str, bigrams)\n letters = str.split(\"\")\n pairs = []\n (0...letters.length-1).each do |idx|\n pairs << letters[idx] + letters[idx+1]\n end\n bigrams.select { |bigram| pairs.any?(bigram) }\nend", "def parse_bigrams\n bigrams = Hash.new\n IO.readlines('bigram.txt').each {|l|\n values = l.chomp.split(' ')\n bigrams[ [values[0].to_i, values[1].to_i] ] = values.last.to_i\n }\n\n return bigrams\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get a list of second words of bigrams matching the given first word.
def matching_bigrams(word1)
  list = @index[word1]
  list.map{ |word2| @table[[word1,word2]] }
end
[ "def matching_bigrams(word1)\n list = scanner.bigrams[word1]\n list.values\n end", "def find_bigrams(str, bigrams)\n letters = str.split(\"\")\n pairs = []\n (0...letters.length-1).each do |idx|\n pairs << letters[idx] + letters[idx+1]\n end\n bigrams.select { |bigram| pairs.any?(bigram) }\nend", "def find_bigrams(str, bigrams)\n new_arr = [] # empty array\n bigrams.each_with_index do |ele, i|\n new_arr << ele if str.include?(ele)\n end\n new_arr # return the new array\nend", "def find_bigrams(str, bigrams)\n # (0...str.length - 1).inject([]) do |acc, i|\n # if bigrams.include?(str[i] + str[i + 1])\n # acc << (str[i..i + 1])\n # else\n # acc\n # end\n # end\n bigrams.select { |ele| str.include?(ele) }\nend", "def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend", "def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend", "def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT word1, word2, probability FROM bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t w1, w2, b = *row\n\t\t bigrams << [w1, w2, b]\n\t\t end\n ensure\n stm.close\n end\n\n bigrams\n end", "def find_ocurrences(text, first, second)\n text = text.split(' ')\n \n word_output = []\n \n text.each_with_index do |word, index|\n next if index == 0 || index == 1\n \n word_output << word if text[index - 1] == second && text[index - 2] == first\n end\n \n word_output\nend", "def pick_first_two_words_fast\n @word_pairs_and_probabilities.keys.sample.split(' ')\n end", "def search_for (word)\n\n chars = word.split(\"\")\n all_words = chars.permutation(chars.size).map{|_chars|\n _chars.join \"\"\n }\n\n anagrams = []\n all_words.each do |w|\n anagrams.push w if @word_list[w]\n end\n\n return anagrams\n end", "def two_word_name\n two_word_names = fetch_all('funny_name.name').select do |name|\n name.count(' ') == 1\n end\n\n sample(two_word_names)\n end", "def compute_bigram_for(a_word, followed)\n a_word_index = find_index_for(a_word)\n followed_index = find_index_for(followed)\n (@bigrams[[followed_index, a_word_index]]).to_f / @vocab[followed].to_f\nend", "def friends_of(word)\n ret = []\n for candidate in bounding_words_of(word)\n ret << candidate if friends?(word, candidate)\n end\n ret\nend", "def best_match(given_word)\n words = (@word_list.is_a? Array) ? @word_list : @word_list.keys\n\n word_bigrams = bigramate(given_word)\n word_hash = words.map do |key|\n [key, bigram_compare(word_bigrams, bigramate(key))]\n end\n word_hash = Hash[word_hash]\n\n # Weight by word usage, if logical\n word_hash = apply_usage_weights(word_hash) if @word_list.is_a? 
Hash\n\n word_hash.max_by { |_key, value| value }.first\n end", "def get_most_common_words\n\t$bigrams.each do |key, word| # iterate over bigram hash\n\t\tmax_word = word.max_by{|k, count| count} # get max element of bigram word hash\n\t\t$most_common[key] = max_word[0]\n\tend\nend", "def grams_preceding(gram)\n gram_set = Set.new\n\n ngrams_ending_with(gram).each do |ngram|\n gram_set << ngram[-2]\n end\n\n return gram_set\n end", "def table_with_bigrams(opts={})\n max_words = options[:max] || self.max_words\n\n word_scores = scoresheet(max_words)\n\n case opts[:sort].to_s\n when 'rank'\n word_scores = word_scores.sort_by{ |_,s| s }.reverse\n else\n word_scores = word_scores.sort_by{ |w,_| w.to_s }\n end\n\n tbl = []\n\n word_scores.each_with_index do |(w, s), i|\n bigram_list = bigrams.matching_bigrams(w)\n bigram_list = bigram_list.sort_by{ |b| bigrams.score(b) }.reverse\n bigram_list = bigram_list.map{ |b| b.word2 }.uniq\n bigram_list = bigram_list[0, self.max_bigrams]\n\n #output.puts \"%-6s. %s %s %s\" % [i, s.to_s(' F'), w, bigram_list.join(' ')]\n tbl << [s, w, bigram_list]\n end\n\n return tbl\n end", "def gets_second_word(second_word)\n index = 0\n second_word.split(\"\").each do |char| # optional enhancement: using another method besides .each_char\n if is_vowel?(char)\n break\n end\n index += 1\n end\n # get the second part of portmanteau\n second_part_portmanteau = second_word[index, second_word.length - index]\n return second_part_portmanteau\nend", "def search_top_2(words,trie)\n compound_words = []\n largest_word = \"\"\n second_largest_word = \"\"\n word_length = 0\n\n words.each do |word|\n if word.length >= word_length\n if compound_word?(word,trie)\n compound_words << word\n\n if word.length > second_largest_word.length\n if word.length > largest_word.length\n second_largest_word = largest_word\n largest_word = word\n word_length = second_largest_word.length\n else\n second_largest_word = word\n word_length = second_largest_word.length\n end\n end\n end\n end\n end\n\n # Output first and second largest word found\n puts \"Largest compound words (letter length):\"\n puts \" 1. #{largest_word} (#{largest_word.length})\"\n puts \" 2. #{second_largest_word} (#{second_largest_word.length})\"\n puts\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Probability of bigram's occurrence in the corpus.
def probability(word1, word2=nil)
  bigram = (Bigram === word1 ? word1 : get(word1, word2))
  BigDecimal.new(bigram.count) / total #size
end
[ "def probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.count) / size\n end", "def compute_probabilities\n @tag_trigrams.each do |tritags, count|\n @trigram_probs [tritags] = count.to_f / @tag_bigrams[t2b(tritags)].to_f\n end\n end", "def probability_of(gram)\n @probabilities.fetch(gram,0.0)\n end", "def probability(word)\n (dictionary[word] || 0) / total_count\n end", "def compute_bigram_for(a_word, followed)\n a_word_index = find_index_for(a_word)\n followed_index = find_index_for(followed)\n (@bigrams[[followed_index, a_word_index]]).to_f / @vocab[followed].to_f\nend", "def probability_of_keyboard_mashing(string, apriori_probability_of_mashing: 0.1)\n bigrams = bigrams(string)\n\n return 0 unless bigrams.present?\n\n prob_bigrams_given_mashing = bigrams.\n map { |bigram| BigDecimal(mashing_probability(bigram).to_s) }.\n inject(&:*)\n\n prob_bigrams_given_corpus = bigrams.\n map { |bigram| BigDecimal(corpus_probability(bigram).to_s) }.\n inject(&:*)\n\n numerator = prob_bigrams_given_mashing * apriori_probability_of_mashing\n\n numerator / (numerator + prob_bigrams_given_corpus * (1 - apriori_probability_of_mashing))\n end", "def text_probability(text)\n probability_of_ngrams(ngrams_from_text(text))\n end", "def calculate_probability(word)\n ham_word_frequency = 2 * words_hash[[word,:good]]\n spam_word_frequency = words_hash[[word, :bad]]\n return if ham_word_frequency + spam_word_frequency < 5\n word_probability = min(1.0, spam_word_frequency.to_f / spam_count)\n total_probability = word_probability + min(1.0, ham_word_frequency.to_f / ham_count)\n max(0.1, min(0.99, word_probability/total_probability))\n end", "def probability(word)\n word = (Word === word ? word : get(word))\n BigDecimal.new(word.count) / total\n end", "def sentence_probability(sentence)\n probability_of_ngrams(ngrams_from_sentence(sentence))\n end", "def bigram_similarity_to_corpus(string)\n bigrams = bigrams(string)\n\n freqs = bigrams.\n each_with_object(Hash.new(0)) { |bigram, counts| counts[bigram] += 1 }.\n each_with_object({}) do |(bigram,count), freqs|\n freqs[bigram] = count.to_f / bigrams.length\n end\n\n numerator = freqs.\n map{ |bigram, freq| corpus_bigram_frequencies[bigram].to_f * freq }.inject(&:+)\n denominator = corpus_bigram_magnitude * ((freqs.values.map{ |v| v**2 }.inject(&:+)) ** 0.5)\n\n numerator / denominator\n end", "def word_prob(word)\n # Return the number of ocurrances of a word in the dictionary or 0\n count = self[word] || 0\n # Divide by the total number of words.\n count / total_word_count\n end", "def conditional_probability_for(word, klass)\n (count_word_occurrences_in(klass, word) + 1) / (training_words_by_class[klass].size + total_uniq_training_words.size).to_f\n end", "def get_chi_square_expected_value bigram\n starting = get_starting_of_bigram bigram\n ending = get_ending_of_bigram bigram\n starting_count = get_starting_count starting\n ending_count = get_ending_count ending\n return (starting_count * ending_count)/Float(@total_number_of_bigrams)\n end", "def compute_unigram_for(word)\n @vocab[word].to_f / @total\nend", "def tweet_probability(category, tweet)\n tweet_prob = 1\n word_count(tweet).each do |word|\n tweet_prob *= word_probability(category, word[0])\n end\n return tweet_prob\n end", "def calculate_probabilities\n @words_hash.keys.each do |word, _|\n @probability_hash[word] = calculate_probability(word)\n end\n end", "def get_prob_spam(token)\n if @frequency_table.has_key?(token)\n return 
@frequency_table[token][2]\n else # token not found\"\n return ( 1.0 / @uniq_s_toks ) / ( @total_s_toks + 1 )\n end\n end", "def word_prob(word)\n letters = word.split(//)\n count = letters.count\n\n n = numerator(letters)\n count.times do |i|\n subbed_once = sub_nth_letter_with_blank(letters, i)\n (i...count).each do |j|\n subbed_once_or_twice = sub_nth_letter_with_blank(subbed_once, j)\n n += numerator(subbed_once_or_twice)\n end\n end\n n * factorial(count) / denominator(count)\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check if a given word should be considered an acceptable bigram.
def good_bigram?(word)
  return false if REJECT_BIGRAMS.include?(word)
  return false if word.size < 2
  true
end
[ "def good_bigram?(word)\n return false if REJECT_BIGRAMS.include?(word)\n return false if word.size < 2\n true\n end", "def validate_word(word)\n return false if word.nil?\n\n word = normalize_word(word)\n word.length <= MAX_WORD_LENGTH && word =~ /^[a-z]{2,}$/\n end", "def valid_word?(word)\n return true if ACCEPT_WORDS.include?(word)\n return false if REJECT_WORDS.include?(word)\n return false if word.size < 3\n return false if word.size > 20\n return false if word.start_with?(\"'\")\n return false if word.end_with?(\"'\")\n true\n end", "def is_word?(word)\r\n word = word.downcase\r\n word.each_char { |c| return false if not is_letter?(c) }\r\n !word.empty?\r\n end", "def phrase_is_boring?(phrase)\n words = phrase.words\n boring_words = %w{a and also are be been for get has in is just me of on only see than this the there was january february march april may june july august september october november december}\n number_non_boring_words = 0\n words.each do |word|\n number_non_boring_words += 1 unless boring_words.include?(word.downcase) #Not unicode safe?\n #number_non_boring_words += 1 unless boring_words.include?(word.chars.downcase) #Unicode safe\n end\n return true unless number_non_boring_words > 1\n end", "def valid_ngram?(word)\n return false unless valid_word?(word)\n return false if word.size < 1 # 2\n return false if REJECT_NGRAMS.include?(word)\n true\n end", "def isAntigram(comparedWord)\n String.chars.each(comparedWord)\n end", "def verb?(word)\n word.size >= MINIMUM_VERB_SIZE && # at least 8 chars\n !FOO_LETTERS.include?(word[word.size - 1]) # the last letter must be included in BAR LETTER group\n end", "def is_word?\n !(self =~ /^[a-zA-Z]+$/).nil?\n end", "def check_word(word)\n valid_words.include?(word)\n end", "def block_word?(str)\n return false unless str.chars.uniq.size == str.size\n char_arr = str.upcase.chars\n char_arr.none? { |char| char_arr.include?(BLOCK_PAIRS[char]) }\nend", "def block_word?(string)\n string_cap = string.upcase\n spelling_blocks = \"BO GT VI XK RE LY DQ FS ZM CP JW NA HU\"\n spelling_blocks.split.none? { |block| string_cap.count(block) >=2}\nend", "def has_bad_word(str)\n #Turn string (url or body) into UTF-8 and lower case\n new_str = str.force_encoding(\"UTF-8\").downcase\n bad_words = [\"spongebob\",\n \"britney spears\",\n \"paris hilton\",\n \"norrköping\"]\n return bad_words.any? { |word| new_str.include?(word) }\nend", "def letter_in_word?(letter)\n end", "def can_make_word(word)\n blocks = @blocks.dup\n word.upcase.chars.all? do |letter|\n index = blocks.flatten.index(letter)\n blocks.delete_at index/2 if index\n end\nend", "def block_word?(word)\n blocks_arr = [['B', 'O'], ['X', 'K'], ['D', 'Q'], ['C', 'P'], ['N', 'A'],\n ['G', 'T'], ['R', 'E'], ['F', 'S'], ['J', 'W'], ['H', 'U'],\n ['V', 'I'], ['L', 'Y'], ['Z', 'M']]\n word.upcase!\n word_arr = word.split(//)\n blocks_arr.each do |arr|\n if word_arr.include?(arr[0]) && word_arr.include?(arr[1])\n return false\n end\n end\n true\nend", "def word_match?(other_word)\n word == other_word\n end", "def pangram?(string)\n alphabet = ('a'..'z').to_a\n clean_string = string.downcase.scan(/[a-z]/).uniq.sort\n\n return alphabet == clean_string\nend", "def valid_Word(_word)\n\tif _word.strip.empty? # check for empty string\n\t\tputs \"Bye!\"\n\t\treturn false\n\telse\t\t\t# string is not empty, score may be calculated\n return true\n end\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether a single unique match was found.
def found_unique?
  @flags.size == 1
end
[ "def match?\n @match\n end", "def unique?\n check = nil\n is_unique { |pipe| check = pipe }.each do\n return false unless check.isUnique\n end\n true\n end", "def include?(match)\n any? { |m| m.eql?(match) }\n end", "def unique?\n @unique\n end", "def company_convictions_match_found?\n conviction_search_result.first &&\n (conviction_search_result.first.match_result != 'NO')\n end", "def matches?(record)\n record_value(record) == expected\n end", "def matchError?\r\n return ! @matchError.nil?\r\n end", "def is_matched(ind)\n\t\treturn @tile_Array[ind].return_match\n\tend", "def found?(name)\n result.member?(name)\n end", "def unique?\n true\n end", "def ensure_unique_query\n search_list.search_items.each do |item|\n if item.query == self.query\n return false\n end\n end\n return true\n end", "def unique?\n if @data.fetch(:Index_type) == \"UNIQUE\" || @data.fetch(:Non_unique).to_i == 0\n return true\n else\n return false\n end\n end", "def is_result_dupe? result\n\n if !@results_dedupe_map\n @results_dedupe_map = []\n end\n\n if result.size > 100\n result = result[0..99]\n end\n\n result.downcase!\n result = result.gsub(/\\s+/, \"\")\n\n if @results_dedupe_map.include? result\n return true\n end\n\n @results_dedupe_map.push result\n\n false\n\n end", "def terminal?\n matches.length == 0\n end", "def includedOnce(haystack, needle)\n \n counter = haystack.count { |x| x == needle}\n if counter == 1\n return true\n elsif counter > 1\n return false\n else\n return false\n end\n \nend", "def match?(text)\n !!match(text).value_or(false)\n end", "def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end", "def match?(subject)\n self.matches(subject) > 0\n end", "def select?(record)\n return true unless match\n match =~ record.to_s\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether multiple matches were found (i.e. ambiguous input).
def found_multiple?
  @flags.size > 1
end
[ "def match_all(args)\n args.all?{|arg| match(arg)}\n end", "def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end", "def all_matches_accepted?(matches)\n matches_accepted = true\n\n matches.each do |match|\n if match.p1_accepted == false || match.p2_accepted == false\n matches_accepted = false\n break\n end\n end\n\n matches_accepted\n end", "def is_a_match(total_in_common)\n total_in_common >= 2 ? true : false\n end", "def match_any?(elements)\n queries = elements.map{|e| hash_to_range(e) }.sort\n match_internal?(queries, queries.size)\n end", "def include?(match)\n any? { |m| m.eql?(match) }\n end", "def probable_matching(ingredient_long_name,item)\n return (item.downcase.split(\" \") & ingredient_long_name.split(\" \")).size >= 2\nend", "def intersect?(set)\n return @keyword_set.intersect?(set)\n end", "def is_multimatch(s)\n return s.match(/[*?]/)\n end", "def any_of(*matchers); end", "def multi_selection?(key)\n %w($and $or $nor).include?(key)\n end", "def is_wanted?(match_all = false)\n # puts \"Checking if item is wanted.\"\n wants_count = 0\n match_count = 0\n\n self.attrs.each do |attr|\n if (!attr[:want].nil?)\n # puts \"Want value(s) specified on #{attr[:title]} (#{attr[:value]}): #{attr[:want]}\"\n wants_count += 1\n\n attr[:want].each do |want_val|\n # If the value is quoted, match it exactly.\n if ((want_val[0] == '\"') ||\n (want_val[0] == \"'\"))\n quote = want_val[0]\n if (want_val[(want_val.length - 1)] == quote)\n # puts \"Value #{want_val} is quoted.\"\n if (attr[:value].downcase == want_val[1..(want_val.length - 2)].downcase)\n # puts \"Matches.\"\n match_count += 1\n else\n # puts \"Doesn't match.\"\n end\n elsif (attr[:value].downcase.include?(want_val.downcase))\n # puts \"Matches (loose / 1).\"\n match_count += 1\n else\n # puts \"Doesn't match.\"\n end\n elsif (attr[:value].downcase.include?(want_val.downcase))\n # puts \"Matches (loose / 2).\"\n match_count += 1\n end\n end\n end\n end\n\n # puts \"Wanted? #{wants_count} & #{match_count}\"\n if match_all\n return (wants_count == match_count)\n else\n return ((wants_count == 0) || (match_count > 0))\n end\n end", "def any_of_match(*args)\n qs = []\n args.each do |hash|\n qs << any_match_(hash)\n end\n self.where(qs.reduce(:or))\n end", "def find_q_matches(q_values, available, &matcher); end", "def valid?\n \tpairings.empty? || (pairings.length == 1 || !multiple.nil?) &&\n \tpairings.all? { |v| self.matchs?(v) && self.wordclass != v.wordclass}\n end", "def matches_have_score(matches)\n return false if matches.empty?\n matches.each do |match|\n return false if match.score.nil?\n end\n return true\n end", "def check_if_common_matched_unmatched_records\n return error_with_data(\n 'ka_ad_vm_1',\n 'Invalid Input.Record can not be matched and unmatched together',\n 'Invalid Input.Record can not be matched and unmatched together',\n GlobalConstant::ErrorAction.default,\n {}\n ) if (@matched_ids & @unmatched_ids).present?\n success\n end", "def exact_match?(arguments, keyword_arguments)\n definition.exact_match?(arguments, keyword_arguments)\n end", "def match?(regex)\n return @vals.grep(regex).size > 0\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return whether the unique match was a hit on the negative (`no`) case, or `nil` if not found or not unique.
def unique_flag_negative?
  found_unique? ? @flags.first[2] : nil
end
[ "def found_unique?\n @flags.size == 1\n end", "def select?(record)\n return true unless match\n not match =~ record.to_s\n end", "def negative_match?(rule, search_criteria)\n rule.criteria.any? do |criterium, values|\n values.is_a?(Hash) && values[:not]&.any?(search_criteria[criterium])\n end\n end", "def unique?\n unique = case\n when previous_visit.present?\n false\n when site.visitor_ids.include?(visitor_id)\n false\n #TODO: More advanced unique testing\n #when site.visitor_ips.include?(ip_address)\n # Check for other matching data\n else\n true\n end \n end", "def dissallow? goal, index\r\n return NO_MATCH if index == NO_MATCH # allow users to not check results of a sequence\r\n found = match? goal, index\r\n found == NO_MATCH ? index : NO_MATCH\r\n end", "def unique?\n if @data.fetch(:Index_type) == \"UNIQUE\" || @data.fetch(:Non_unique).to_i == 0\n return true\n else\n return false\n end\n end", "def unique?\n @unique\n end", "def unique?\n check = nil\n is_unique { |pipe| check = pipe }.each do\n return false unless check.isUnique\n end\n true\n end", "def secondary_key_unique?(value, fail_if_non_existent: nil)\n raise 'No secondary_key field defined' unless secondary_key\n\n l = where(secondary_key => value).length\n return false if l > 1\n return true if l == 1\n\n # the length is 0\n # handle the result based on the option\n (fail_if_non_existent ? nil : true)\n end", "def match?(text)\n !!match(text).value_or(false)\n end", "def check_for_matches\n values = @grid.flatten\n values.each_with_index do |value, i|\n ((i+1)...values.length).each do |j|\n return value if values[j] == value && !@matches.include?(value)\n end\n end\n\n nil\n end", "def slug_unique?(test_slug, existing = existing_slugs)\n !existing.include?(test_slug)\n end", "def is_duplicate \n (flag & 0x400) != 0\n end", "def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end", "def unique?\n if @args[:data][:Index_type] == \"UNIQUE\"\n return true\n else\n return false\n end\n end", "def symbolIsNotDupe(symbol)\n return !StockDatum.where(:symbol => symbol).present?\n end", "def unique_by_field?(arr, field, uniqueVal)\n result = true\n fieldVal = safe_access_hash_value(uniqueVal, field)\n arr.each do |value|\n # the best variable name\n valueFieldVal = safe_access_hash_value(value, field)\n if(fieldVal == valueFieldVal)\n result = false\n end\n end\n return result\n end", "def slug_unique?(slug)\n self.class.find_by(slug: slug).nil?\n end", "def match?\n @match\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns an array of the matching full flag strings.
def matching_flag_strings
  @flags.map do |_flag, flag_syntax, negative|
    negative ? flag_syntax.negative_flag : flag_syntax.positive_flag
  end
end
[ "def flags_names\n flags_list(true)\n end", "def get_flags(*files)\n matches = []\n begin\n files.each do |f|\n file = File.new(f, 'r')\n while (line = file.gets)\n m = line.match(/(^.*=)?/)\n matches << m[0] if m\n end\n file.close\n end\n rescue => err\n puts 'Exception: #{err}'\n err\n end\n matches.uniq.sort!\nend", "def complete_flags cmd\n cmd.flags.values.map do |flag|\n candidates = [flag.name]\n candidates += flag.aliases if flag.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}#{'=' if c.length > 1}\"\n end\n end + cmd.switches.values.map do |switch|\n candidates = [switch.name]\n candidates += switch.aliases if switch.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}\"\n end\n end\n end", "def get_all_flags\n Flag.all\n end", "def get_feature_flags\n array = []\n array.push(:dual_wield) if dual_wield?\n array.push(:auto_battle) if auto_battle?\n array.push(:guard) if guard?\n array.push(:substitute) if substitute?\n array.push(:preserve_tp) if preserve_tp?\n array.push(:encounter_half) if encounter_half?\n array.push(:encounter_none) if encounter_none?\n array.push(:cancel_surprise) if cancel_surprise?\n array.push(:raise_preemptive) if raise_preemptive?\n array.push(:gold_double) if gold_double?\n array.push(:drop_item_double) if drop_item_double?\n return array\n end", "def matching_flags(line, flags)\n words = line.split(/\\W/).map(&:upcase)\n words & flags.map(&:upcase)\n end", "def flag\n flags.join(\", \")\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end", "def atoms_after_flags(flat_args)\n strs = flat_args.reject(&Range.method(:===))\n rngs = flat_args.select(&Range.method(:===))\n\n if @flags.include? :i\n strs = strs.map(&:downcase)\n rngs = rngs.map { |r| Range.new(*range_bounds(r).map(&:downcase)) }\n end\n\n (strs + rngs).uniq\n end", "def flags_list(names = false)\n raise LibraryError, 'Magic library is not open' if closed?\n return [names ? 'NONE' : 0] if @flags.zero?\n\n n, i = 0, @flags\n flags = []\n\n @@flags_map ||= flags_as_map if names\n\n while i > 0\n n = 2 ** (Math.log(i) / Math.log(2)).to_i\n i = i - n\n flags.insert(0, names ? @@flags_map[n] : n)\n end\n\n flags\n end", "def flags\n return [] unless options['flags']\n\n options['flags'].map do |options|\n Flag.new options\n end\n end", "def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end", "def mask_array\n @mask.split('')\n end", "def all_flags\n my_flags = @flag_objects || []\n my_flags += self.superclass.all_flags if self.superclass < HasFlags::InstanceMethods\n return my_flags\n end", "def flags\n [long, short].compact\n end", "def flags_str\n FLAGS.map { |f| (flags.include? f) ? f : '-' }.join('')\n end", "def process_flags flags\n flags.reject { |flag| # all hits are truthy, so this works out well\n case flag\n when /^-I(.*)/ then\n rb_flags << flag\n when /^-d/ then\n rb_flags << flag\n when /^-w/ then\n rb_flags << flag\n else\n false\n end\n }\n end", "def flag_value\n @flags.reduce(0) do |val, flag|\n case flag.to_sym\n when :m then val | Regexp::MULTILINE\n when :i then val | Regexp::IGNORECASE\n when :x then val | Regexp::EXTENDED\n end\n end\n end", "def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether to include short flags.
def include_short?
  @include_short
end
[ "def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end", "def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end", "def extract_short_flag(objects, config); end", "def short_flag_exist?(flag)\n flags.any? { |f| f.short == flag }\n end", "def short?\n @option_config.short?\n end", "def flags\n [long, short].compact\n end", "def short?\n @straw_size == SHORT_STRAW\n end", "def short\n @short.present? ? \"-#{@short}\" : nil\n end", "def simplifying?\n @precision == :short\n end", "def flags\n [long, negative_long, short].compact\n end", "def short?\n @length.short?\n end", "def shortinclude?(short)\n short = short.intern if name.is_a? String\n @shortnames.include?(short)\n end", "def flag(short, long, options={})\n check_args(short, long)\n\n short = short.to_sym\n long = long.gsub('-', '_').to_sym\n self.class.class_eval do\n define_method(\"flag_#{long}\") do\n instance_variable_set(\"@#{long}\", true)\n end\n\n define_method(\"#{long}?\") do\n instance_variable_get(\"@#{long}\")\n end\n end\n\n self.options[long] = Flag.new(short, long, options)\n self.options[short] = self.options[long]\n self.order << self.options[long]\n check_longest(long)\n end", "def short_straw?\n straw.short?\n end", "def short(value)\n push(:@short_options, value.sub(/^-+/, \"\"))\n end", "def verbosity_flags(short: false)\n verbosity = self[Context::Key::VERBOSITY]\n if verbosity.positive?\n if short\n flag = \"v\" * verbosity\n [\"-#{flag}\"]\n else\n ::Array.new(verbosity, \"--verbose\")\n end\n elsif verbosity.negative?\n if short\n flag = \"q\" * -verbosity\n [\"-#{flag}\"]\n else\n ::Array.new(-verbosity, \"--quiet\")\n end\n else\n []\n end\n end", "def flag(short, long, options={})\n check_args(short, long)\n\n short = short.to_sym\n long = long.gsub('-', '_').to_sym\n eval <<-EOF\n def flag_#{long}\n @#{long} = true\n end\n\n def #{long}?\n return @#{long} || false\n end\n EOF\n\n self.options[long] = Flag.new(short, long, options)\n self.options[short] = self.options[long]\n self.order << self.options[long]\n end", "def include_long?\n @include_long\n end", "def short(*shorts)\n format = ( little_endian? ? 'v*' : 'n*') \n return string(shorts.pack(format)) \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether to include long flags.
def include_long?
  @include_long
end
[ "def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end", "def extract_long_flag(objects, config); end", "def long\n @long.present? ? \"--#{@long}\" : nil\n end", "def flags\n [long, negative_long, short].compact\n end", "def flags\n [long, short].compact\n end", "def flag(short, long, options={})\n check_args(short, long)\n\n short = short.to_sym\n long = long.gsub('-', '_').to_sym\n self.class.class_eval do\n define_method(\"flag_#{long}\") do\n instance_variable_set(\"@#{long}\", true)\n end\n\n define_method(\"#{long}?\") do\n instance_variable_get(\"@#{long}\")\n end\n end\n\n self.options[long] = Flag.new(short, long, options)\n self.options[short] = self.options[long]\n self.order << self.options[long]\n check_longest(long)\n end", "def long?\n @length.long?\n end", "def longify(str)\n return nil if str.nil?\n\n str = str.to_s\n str = \"--#{str}\" unless option?(str)\n\n unless str =~ LONG_FLAG\n raise ArgumentError, \"invalid long flag: #{str}\"\n end\n\n str\n end", "def flag(short, long, options={})\n check_args(short, long)\n\n short = short.to_sym\n long = long.gsub('-', '_').to_sym\n eval <<-EOF\n def flag_#{long}\n @#{long} = true\n end\n\n def #{long}?\n return @#{long} || false\n end\n EOF\n\n self.options[long] = Flag.new(short, long, options)\n self.options[short] = self.options[long]\n self.order << self.options[long]\n end", "def extract_short_flag(objects, config); end", "def long?\n words_count > 1\n end", "def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end", "def long_opt_symbol(args); end", "def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend", "def include_short?\n @include_short\n end", "def build_32_bit?\n args.include?('--32-bit') && has_option?('32-bit')\n end", "def test_conflicting_longs_detected\n @p.opt \"goodarg\", \"desc\", :long => \"--goodarg\"\n assert_raises(ArgumentError) { @p.opt \"badarg\", \"desc\", :long => \"--goodarg\" }\n end", "def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end", "def pack_flags(flags)\n FLAGS.each_with_index.inject(0) do |memo,(key,i)|\n memo |= (1 << i) if flags[key]\n memo\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
An array of Flag::Syntax including only short (single dash) flags.
def short_flag_syntax
  @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }
end
[ "def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end", "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def flags\n [long, short].compact\n end", "def flags\n [long, negative_long, short].compact\n end", "def flags\n return [] unless options['flags']\n\n options['flags'].map do |options|\n Flag.new options\n end\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end", "def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end", "def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end", "def flags_names\n flags_list(true)\n end", "def complete_flags cmd\n cmd.flags.values.map do |flag|\n candidates = [flag.name]\n candidates += flag.aliases if flag.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}#{'=' if c.length > 1}\"\n end\n end + cmd.switches.values.map do |switch|\n candidates = [switch.name]\n candidates += switch.aliases if switch.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}\"\n end\n end\n end", "def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end", "def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end", "def scm_flags\n @flags.join(\" \")\n end", "def flag\n flags.join(\", \")\n end", "def whitelisted_flags\n flags.select(&:allowed)\n end", "def flags_str\n FLAGS.map { |f| (flags.include? f) ? f : '-' }.join('')\n end", "def whitelisted_flags\n flags.select &:allowed\n end", "def parse_flags()\n flags = ARGV[0]\n flags.split(\"\").each do |flag|\n case flag\n when \"n\"\n @options[:nocolor] = true\n when \"h\"\n usage\n when \"f\"\n @options[:bit4] = false\n when \"s\"\n @options[:bit64] = false\n when \"g\"\n @options[:grouped] = true\n end\n end\n end", "def verbosity_flags(short: false)\n verbosity = self[Context::Key::VERBOSITY]\n if verbosity.positive?\n if short\n flag = \"v\" * verbosity\n [\"-#{flag}\"]\n else\n ::Array.new(verbosity, \"--verbose\")\n end\n elsif verbosity.negative?\n if short\n flag = \"q\" * -verbosity\n [\"-#{flag}\"]\n else\n ::Array.new(-verbosity, \"--quiet\")\n end\n else\n []\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
An array of Flag::Syntax including only long (double dash) flags.
def long_flag_syntax
  @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }
end
[ "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end", "def flags\n return [] unless options['flags']\n\n options['flags'].map do |options|\n Flag.new options\n end\n end", "def flags\n [long, negative_long, short].compact\n end", "def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end", "def flags\n [long, short].compact\n end", "def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end", "def complete_flags cmd\n cmd.flags.values.map do |flag|\n candidates = [flag.name]\n candidates += flag.aliases if flag.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}#{'=' if c.length > 1}\"\n end\n end + cmd.switches.values.map do |switch|\n candidates = [switch.name]\n candidates += switch.aliases if switch.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}\"\n end\n end\n end", "def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end", "def flags_names\n flags_list(true)\n end", "def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end", "def scm_flags\n @flags.join(\" \")\n end", "def flag\n flags.join(\", \")\n end", "def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end", "def _correct_flags(argv)\n new_argv = []\n on_flag = 0\n created = false\n argv.each do |arg|\n if arg.start_with?('-')\n # We have an Array flag\n on_flag = COMMA_FLAGS.include? arg\n created = false\n elsif on_flag\n # All elements following an Array flag that aren't flags\n # should be concatenated with commas\n if created\n new_argv[-1] = \"#{new_argv[-1]},#{arg}\"\n next\n end\n created = true\n end\n new_argv << arg\n end\n new_argv\nend", "def flags(*args)\n Boxen::Flags.new *args\n end", "def le_flags=(flags)\n new = flags.map{|x| x.split(\", \").reject{|x| x =~ /^0x/}}.flatten.sort.uniq\n current = JSON.parse(self.le_flags || '[]')\n self[:le_flags] = JSON.generate((new + current).uniq)\n end", "def handle_flags(args)\n unknown = []\n args.each_with_index do |item, index|\n case item\n when /^-I(.*)/\n path = $1\n path = path != \"\" ? path : args[index+1]\n $LOAD_PATH << path\n when /^-r(.*)/\n path = $1\n path = path != \"\" ? path : args[index+1]\n require path\n when /^-e(.*)/\n command = $1\n command = command != \"\" ? command : args[index+1]\n eval command\n when /^-{1,2}\\w+/\n option = args[index+1]\n unknown << option unless option =~ /^-/\n unknown << item << option\n end\n end\n unknown.compact\n end", "def whitelisted_flags\n flags.select(&:allowed)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The list of all effective flags used.
def effective_flags
  @effective_flags ||= flag_syntax.flat_map(&:flags)
end
[ "def all_flags\n my_flags = @flag_objects || []\n my_flags += self.superclass.all_flags if self.superclass < HasFlags::InstanceMethods\n return my_flags\n end", "def get_all_flags\n Flag.all\n end", "def flags_names\n flags_list(true)\n end", "def flags\n @flags\n end", "def whitelisted_flags\n flags.select &:allowed\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end", "def all_keys\n all_flags.keys\n end", "def flags\n @flags\n end", "def flags\n return [] unless options['flags']\n\n options['flags'].map do |options|\n Flag.new options\n end\n end", "def whitelisted_flags\n flags.select(&:allowed)\n end", "def flags\n [long, negative_long, short].compact\n end", "def list_flags(options={})\n connection.get(\"/flags\").body.flags\n end", "def flags\n [long, short].compact\n end", "def list_bit_flags\n if (!@cached)\n @cached = lists.uniq.inject(0) { |memo,cur| memo |= (1 << cur) }\n end\n @cached\n end", "def flags_list(names = false)\n raise LibraryError, 'Magic library is not open' if closed?\n return [names ? 'NONE' : 0] if @flags.zero?\n\n n, i = 0, @flags\n flags = []\n\n @@flags_map ||= flags_as_map if names\n\n while i > 0\n n = 2 ** (Math.log(i) / Math.log(2)).to_i\n i = i - n\n flags.insert(0, names ? @@flags_map[n] : n)\n end\n\n flags\n end", "def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end", "def get_feature_flags\n array = []\n array.push(:dual_wield) if dual_wield?\n array.push(:auto_battle) if auto_battle?\n array.push(:guard) if guard?\n array.push(:substitute) if substitute?\n array.push(:preserve_tp) if preserve_tp?\n array.push(:encounter_half) if encounter_half?\n array.push(:encounter_none) if encounter_none?\n array.push(:cancel_surprise) if cancel_surprise?\n array.push(:raise_preemptive) if raise_preemptive?\n array.push(:gold_double) if gold_double?\n array.push(:drop_item_double) if drop_item_double?\n return array\n end", "def flagged_reasons\n return @flagged_reasons\n end", "def all_flags\n res = get_request 'features'\n if res.status == 200\n JSON.parse(res.body, symbolize_names: true)\n else\n @config.logger.error(\"[LDClient] Unexpected status code #{res.status}\")\n {}\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Look up the flag by string. Returns an object that indicates whether the given string matched this flag, whether the match was unique, and other pertinent information.
def resolve(str)
  resolution = Resolution.new(str)
  flag_syntax.each do |fs|
    if fs.positive_flag == str
      resolution.add!(self, fs, false, true)
    elsif fs.negative_flag == str
      resolution.add!(self, fs, true, true)
    elsif fs.positive_flag.start_with?(str)
      resolution.add!(self, fs, false, false)
    elsif fs.negative_flag.to_s.start_with?(str)
      resolution.add!(self, fs, true, false)
    end
  end
  resolution
end
[ "def resolve_flag(str)\n result = Flag::Resolution.new(str)\n flags.each do |flag_def|\n result.merge!(flag_def.resolve(str))\n end\n result\n end", "def find_flaggable(flaggable_str, flaggable_id)\n model = flaggable_str.constantize\n model.respond_to?(:find_flags_for) ? model.find(flaggable_id) : nil\n end", "def find_flag(flag_name)\n flags_list.find_flag(flag_name)\n end", "def find_flag(flag_name, flag_state=nil)\n detect do |flag|\n flag.name == flag_name && (flag_state.nil? || flag.state == flag_state)\n end\n end", "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def match(string)\n result = @trie[string]\n return nil unless result\n result.each do |pattern, block|\n match = pattern.match(string)\n block.call(match) if match\n end\n end", "def search_in(label, string)\n if !LABELS.include? label.to_sym\n raise ArgumentError, \"Unknown key: #{label}\"\n end\n\n find_all do |entry|\n text = entry.send(label).str\n text.match(/#{string}/i)\n end\n end", "def search_in(label, string)\n raise(ArgumentError, \"Unknown key: #{label}\") unless LABELS.include?(label.to_sym)\n\n find_all do |entry|\n text = entry.send(label).str\n text.match(/#{string}/i)\n end\n end", "def find_flag(id)\n\n\t\tflags.each do |flag|\n\n\t\t\treturn flag if flag == id\n\t\tend\n\n\t\tnil\n\tend", "def flag_value_for_flag_name(flag_name)\n # finding flag\n flag = self.flags.where(:name => flag_name).first # assuming only single flag can be associated with any given name with a word instance\n\n # finding flag value\n if flag\n flag_value = flag.value # assuming there can't exist multiple associated flags with same name\n else\n flag_value = nil\n end\n\n return flag_value\n end", "def find_param_for_flag(flag)\n params_with(:flag).each do |param_name, param_flag|\n return param_name if flag.to_s == param_flag.to_s\n end\n nil\n end", "def match(string)\n md = regexp.match(string)\n md ? Result.new(md, self) : nil\n end", "def match(string)\n `var c=$u,result=c$MatchData.m$new()`\n `if(!$T(c=string.__value__.match(this.__value__))){return nil;}`\n `for(var i=0,l=c.length;i<l;++i){result.__captures__[i]=$q(c[i])}`\n `result.__string__=string.__value__`\n `result.__pre__=RegExp.leftContext`\n `result.__post__=RegExp.rightContext`\n return `result`\n end", "def find_flaggable\n\t\tparams.each do |name, value|\n\t\t\tif name =~ /(.+)_id$/\n\t\t\t\treturn $1.classify.constantize.find(value)\n\t\t\tend\n\t\tend\n\t\tnil\n\tend", "def matching_flags(line, flags)\n words = line.split(/\\W/).map(&:upcase)\n words & flags.map(&:upcase)\n end", "def get_keyword_value(string)\n return_string = String.new()\n found = false\n @keywordPairs.each do |pair|\n if pair[0] == string\n found = true\n return_string = pair[1]\n end\n end\n if found == false\n raise \"Error: In the command #{@utype}:#{@command_name} Attempted to get a Keyword pair #{string} present in the command\\n Is this keyword missing? \\n#{output}\"\n end\n return return_string\n end", "def get_keyword_value(string)\n return_string = String.new()\n found = false\n @keywordPairs.each do |pair|\n if pair[0] == string\n found = true\n return_string = pair[1]\n end\n end\n if found == false\n raise \"Error: In the command #{@utype}:#{@command_name} Attempted to get a Keyword pair #{string} present in the command\\n Is this keyword missing? 
\\n#{output}\"\n end\n return return_string\n end", "def match_and_parse(string)\n meme = match(string)\n if meme.nil?\n return meme\n end\n bits = parse(meme, string)\n [meme, bits]\n end", "def flag_state(flag_name)\n find_flag(flag_name).try(:state)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A list of canonical flag syntax strings.
def canonical_syntax_strings
  @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)
end
[ "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def flags_names\n flags_list(true)\n end", "def scm_flags\n @flags.join(\" \")\n end", "def complete_flags cmd\n cmd.flags.values.map do |flag|\n candidates = [flag.name]\n candidates += flag.aliases if flag.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}#{'=' if c.length > 1}\"\n end\n end + cmd.switches.values.map do |switch|\n candidates = [switch.name]\n candidates += switch.aliases if switch.aliases\n candidates.map do |c|\n \"-#{'-' if c.length > 1}#{c}\"\n end\n end\n end", "def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end", "def flags_str\n FLAGS.map { |f| (flags.include? f) ? f : '-' }.join('')\n end", "def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end", "def flag\n flags.join(\", \")\n end", "def flag_string(flags)\n case flags\n when String\n return flags\n else\n return flags.squash(\"-\").map {|m|\n case m\n when /^--/\n m\n when /^-[^\\-]/\n m\n else\n if m =~ /^.[ =]/\n \"-#{m}\"\n else\n \"--#{m}\"\n end\n end\n }.join(\" \")\n end\n end", "def prefixed_flag_list(flag_list, prefix)\n if prefix.present?\n flag_list.map do |flag|\n \"#{prefix}_#{flag}\"\n end\n else\n flag_list\n end\n end", "def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end", "def flags\n [long, negative_long, short].compact\n end", "def flags\n [long, short].compact\n end", "def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend", "def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end", "def flags_list(names = false)\n raise LibraryError, 'Magic library is not open' if closed?\n return [names ? 'NONE' : 0] if @flags.zero?\n\n n, i = 0, @flags\n flags = []\n\n @@flags_map ||= flags_as_map if names\n\n while i > 0\n n = 2 ** (Math.log(i) / Math.log(2)).to_i\n i = i - n\n flags.insert(0, names ? @@flags_map[n] : n)\n end\n\n flags\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end", "def flags\n return [] unless options['flags']\n\n options['flags'].map do |options|\n Flag.new options\n end\n end", "def make_flags(options={})\n return nil if options.empty?\n options.collect{|k,v| \"-#{k} #{v}\"}.join(' ')\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether this flag is active, that is, it has a nonempty flags list.
def active? !effective_flags.empty? end
[ "def flags?\n !@flags.empty?\n end", "def complete_flags?\n @complete_flags\n end", "def flags_allowed?\n @flags_allowed\n end", "def global_flags?\n flags.any? and commands.any?\n end", "def flag?(flags)\n (all_flags & flags) != 0\n end", "def is_flagged?\n return self.flags.unresolved.count > 0\n end", "def active?\n @_active_status ||= ( attributes.include?('active') ? !!self.active : true )\n end", "def complete_flag_values?\n @complete_flag_values\n end", "def active?\n metadata[:inactive].nil? or !metadata[:inactive]\n end", "def selectable?\n (%w(noselect) & self.flags).empty?\n end", "def is_active?\n metadata[:inactive].nil? or !metadata[:inactive]\n end", "def empty?\n bitmask.zero?\n end", "def active?\r\n self.state == :active\r\n end", "def active?\n return false if state.nil?\n \"ACTIVE\".casecmp(state).zero?\n end", "def enemy_flag_exists?\n !(enemy_flags.empty?)\n end", "def active?\n state == \"ACTIVE\"\n end", "def flagged\n return true if Pending.flagged and Pending.flagged.include? @attach\n return false unless @flagged_by\n return false if @flagged_by.length == 1 and\n @flagged_by.first == User.initials and\n Pending.unflagged.include?(@attach)\n return ! @flagged_by.empty?\n end", "def aDesActivites?\n self.activites.each do | a |\n if !a.gratuite\n return true\n end\n end\n false\n end", "def enabled?\n any?(&:enabled?)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /agencyfeed.json Get all the agency feeds available
def index @agencyfeeds = AgencyFeed.all render :index, status: :ok end
[ "def index\n @hosted_feeds = HostedFeed.all\n end", "def show_all_posts\n feed_urls = Feed.all.collect(&:feed_url)\n @posts = fetch_all_feeds_posts(feed_urls)\n\n respond_to do |format|\n format.html\n format.json { render json: @feeds }\n end\n end", "def index\n @feed_sources = FeedSource.all\n end", "def index\n @affiliate_agencies = Affiliate::Agency.all\n end", "def index\n @feedings = Feeding.all\n end", "def feed\n get '/users/self/feed', auth_params\n end", "def index\n @feed_infos = FeedInfo.all\n end", "def create\n @agencyfeed = AgencyFeed.create! agencyfeed_params\n render :show, status: :created\n end", "def feeds\n @all_feeds ||= begin\n @doc.match(FEED_MATCHES).map do |url|\n case url\n when String\n if url.start_with? \"http\"\n url\n elsif @url\n URI.join(@url , url).to_s\n end\n when Array\n url.map do |u|\n if u.start_with? \"http\"\n u\n elsif @url\n URI.join(@url, u).to_s\n end\n end.uniq\n end\n end\n end\n end", "def index\n @api_v1_news_feed_links = Api::V1::NewsFeedLink.all\n end", "def index\n @agencies = Agency.all\n end", "def advertisers\n get_collection(\n 'oauth2/advertiser/get/',\n access_token: access_token,\n app_id: Panda.config.app_id,\n secret: Panda.config.app_secret\n )\n end", "def get_data_feeds(merchant_id)\n uri = build_uri_for(@provider, {id: merchant_id})\n http = initialize_http(uri)\n\n request = Net::HTTP::Get.new(uri.request_uri)\n JSON( http.request(request).body )['Items'] \n end", "def index\n @product_feeds = ProductFeed.all\n end", "def index\n @breast_feedings = BreastFeeding.all\n end", "def index\n @feed_items = @user.feed_items\n render json: @feed_items\n end", "def feed\n @bookings = Booking.find_waiting_pickup\n respond_to do |format|\n format.rss\n end\n end", "def get_all_feeds\n self.long_feed=[]\n feedpage=@koala_client.get_connections(self.uid, \"feed\", {\"limit\"=>\"100\"})\n self.long_feed+=feedpage\n (1..50).map do |page|\n puts \"pulling first #{page}00 feeds, new\"\n begin\n feedpage=feedpage.next_page\n rescue\n puts \"==== Koala facebook error occurred\"\n bread\n end\n\n break if feedpage.empty?\n self.long_feed+=feedpage\n end\n puts \"======pulled #{self.long_feed.length} feeds\"\n save!\n end", "def index\n @admin_agencies = Admin::Agency.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_agencies }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /agencyfeed.json Create agency with params
def create @agencyfeed = AgencyFeed.create! agencyfeed_params render :show, status: :created end
[ "def create\n @agency = Agency.new(agency_params)\n\n if @agency.save\n render json: @agency, status: :created, location: @agency\n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end", "def create\n @agency = Agency.new(agency_params)\n\n if @agency.save\n render json: @agency, status: :created, location: @agency\n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end", "def create\n \n @agency = Agency.new(agency_params)\n\n respond_to do |format|\n if @agency.save\n format.html { redirect_to agencies_path(@agency), notice: 'Agency was successfully created.' }\n format.json { render :show, status: :created, location: @agency }\n else\n format.html { render :new }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @agency = Agency.new(params[:agency])\n list\n \n respond_to do |format|\n if @agency.save\n format.html { redirect_to(@agency, :notice => 'Agency was successfully created.') }\n format.xml { render :xml => @agency, :status => :created, :location => @agency }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @agency.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @news_agency = NewsAgency.new(params[:news_agency])\n\n respond_to do |format|\n if @news_agency.save\n format.html { redirect_to @news_agency, notice: 'News agency was successfully created.' }\n format.json { render json: @news_agency, status: :created, location: @news_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @news_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @referring_agency = ReferringAgency.new(params[:referring_agency])\n\n respond_to do |format|\n if @referring_agency.save\n format.html { redirect_to @referring_agency, notice: 'Referring agency was successfully created.' }\n format.json { render json: @referring_agency, status: :created, location: @referring_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @referring_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @affiliate_agency = Affiliate::Agency.new(affiliate_agency_params)\n\n respond_to do |format|\n if @affiliate_agency.save\n format.html { redirect_to @affiliate_agency, notice: 'Agency was successfully created.' }\n format.json { render :show, status: :created, location: @affiliate_agency }\n else\n format.html { render :new }\n format.json { render json: @affiliate_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @gtfs_agency = GtfsAgency.new(params[:gtfs_agency])\n\n respond_to do |format|\n if @gtfs_agency.save\n format.html { redirect_to(@gtfs_agency, :notice => 'Gtfs agency was successfully created.') }\n format.xml { render :xml => @gtfs_agency, :status => :created, :location => @gtfs_agency }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @gtfs_agency.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @rating_agency = RatingAgency.new(params[:rating_agency])\n\n respond_to do |format|\n if @rating_agency.save\n format.html { redirect_to @rating_agency, :notice => 'Rating agency was successfully created.' 
}\n format.json { render :json => @rating_agency, :status => :created, :location => @rating_agency }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @rating_agency.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @agency_type = AgencyType.new(params[:agency_type])\n\n respond_to do |format|\n if @agency_type.save\n format.html { redirect_to @agency_type, notice: 'Agency type was successfully created.' }\n format.json { render json: @agency_type, status: :created, location: @agency_type }\n else\n format.html { render action: \"new\" }\n format.json { render json: @agency_type.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_ad(params={})\n request(:post, '/api/ad-create/', params).data\n end", "def create\n @agency_relationship = AgencyRelationship.new(params[:agency_relationship])\n\n respond_to do |format|\n if @agency_relationship.save\n flash[:notice] = 'AgencyRelationship was successfully created.'\n format.html { redirect_to(\n agency_relationship_url(@agency_relationship)) }\n else\n format.html { render :action => \"new\" }\n end\n end\n end", "def create\n @refagency = Refagency.new(params[:refagency])\n\n respond_to do |format|\n if @refagency.save\n format.html { redirect_to @refagency, notice: 'Refagency was successfully created.' }\n format.json { render json: @refagency, status: :created, location: @refagency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @refagency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @registering_agency = RegisteringAgency.new(registering_agency_params)\n\n respond_to do |format|\n if @registering_agency.save\n format.html { redirect_to @registering_agency, notice: 'Registering agency was successfully created.' }\n format.json { render action: 'show', status: :created, location: @registering_agency }\n else\n format.html { render action: 'new' }\n format.json { render json: @registering_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @subsidiary_agency = SubsidiaryAgency.new(params[:subsidiary_agency])\n\n respond_to do |format|\n if @subsidiary_agency.save\n format.html { redirect_to @subsidiary_agency, notice: 'Subsidiary agency was successfully created.' }\n format.json { render json: @subsidiary_agency, status: :created, location: @subsidiary_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @subsidiary_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @agency_client = AgencyClient.new(params[:agency_client])\n\n respond_to do |format|\n if @agency_client.save\n format.html { redirect_to @agency_client, notice: 'Agency client was successfully created.' }\n format.json { render json: @agency_client, status: :created, location: @agency_client }\n else\n format.html { render action: \"new\" }\n format.json { render json: @agency_client.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @notifying_agency = NotifyingAgency.new(params[:notifying_agency])\n\n respond_to do |format|\n if @notifying_agency.save\n format.html { redirect_to @notifying_agency, notice: 'Notifying agency was successfully created.' 
}\n format.json { render json: @notifying_agency, status: :created, location: @notifying_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @notifying_agency.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @agency = Agency.new\n list\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @agency }\n end\n end", "def create\n @agence = Agence.new(agence_params)\n\n respond_to do |format|\n if @agence.save\n format.html { redirect_to @agence, notice: 'Agence was successfully created.' }\n format.json { render :show, status: :created, location: @agence }\n else\n format.html { render :new }\n format.json { render json: @agence.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT/PATCH agencyfeed.json update the configuration of agency and category
def update @agencyfeed.update! agencyfeed_params render :show, status: :ok end
[ "def update\n @refagencycategory = Refagencycategory.find(params[:id])\n\n respond_to do |format|\n if @refagencycategory.update_attributes(params[:refagencycategory])\n format.html { redirect_to @refagencycategory, notice: 'Refagencycategory was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @refagencycategory.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n Category.update(params[:category].keys, params[:category].values)\n\n redirect_to admin_ads_path\n end", "def update\n @cat.update(cat_params)\n render json: @cat\n end", "def update_category_attribute_definition(kapp_slug, name, body, headers=default_headers)\n @logger.info(\"Updating the \\\"#{name}\\\" Category attribute definition in the \\\"#{kapp_slug}\\\" kapp.\")\n put(\"#{@api_url}/kapps/#{kapp_slug}/categoryAttributeDefinitions/#{encode(name)}\",body, headers)\n end", "def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @api_v1_expense_category.update(api_v1_expense_category_params)\n format.html { redirect_to @api_v1_expense_category, notice: 'Expense category was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_expense_category }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_expense_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_category_on_kapp(kapp_slug, category_slug, body, headers=default_headers)\n raise StandardError.new \"Category properties is not valid, must be a Hash.\" unless body.is_a? Hash\n @logger.info(\"Updating Category \\\"#{body['name']}\\\" for \\\"#{kapp_slug}\\\" kapp\")\n put(\"#{@api_url}/kapps/#{kapp_slug}/categories/#{category_slug}\", body, headers)\n end", "def update\n @category = Category.find(params[:id])\n\n respond_to do |format|\n if @category.update_attributes(params[:category])\n format.html { redirect_to @category, notice: 'Award category was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @category_badge = CategoryBadge.find(params[:id])\n\n respond_to do |format|\n if @category_badge.update_attributes(params[:category_badge])\n format.html { redirect_to admin_category_badges_path, notice: 'Category badge was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @category_badge.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @badge_category.update(badge_category_params)\n format.html { redirect_to @badge_category, notice: 'Badge category was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @badge_category }\n else\n format.html { render :edit }\n format.json { render json: @badge_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @acategory = Acategory.find(params[:id])\n\n respond_to do |format|\n if @acategory.update_attributes(params[:acategory])\n format.html { redirect_to [:admin,@acategory], :notice => 'Category was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @acategory.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @incidentcategory.update(incidentcategory_params)\n json_response(@incidentcategory)\n else\n render json: @incidentcategory.errors, status: :unprocessable_entity\n end\n end\n end", "def update\n BlogCategory.update(params[:category].keys, params[:category].values)\n\n redirect_to admin_blog_categories_path\n end", "def update\n respond_to do |format|\n if @acticle_category.update(acticle_category_params)\n format.html { redirect_to @acticle_category, notice: 'Acticle category was successfully updated.' }\n format.json { render :show, status: :ok, location: @acticle_category }\n else\n format.html { render :edit }\n format.json { render json: @acticle_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @home_indices_category = Home::Indices::Category.find(params[:id])\n\n respond_to do |format|\n if @home_indices_category.update_attributes(params[:home_indices_category])\n format.html { redirect_to @home_indices_category, notice: 'Category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @home_indices_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @allowance_category = AllowanceCategory.find(params[:id])\n\n respond_to do |format|\n if @allowance_category.update_attributes(params[:allowance_category])\n format.html { redirect_to @allowance_category, notice: 'Allowance category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @allowance_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @advertisers_category.update(advertisers_category_params)\n format.html { redirect_to advertisers_categories_url, notice: 'Categoria atualizada.' }\n format.json { render :show, status: :ok, location: @advertisers_category }\n else\n format.html { render :edit }\n format.json { render json: @advertisers_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @test_category = TestCategory.find(params[:id])\n\n respond_to do |format|\n if @test_category.update_attributes(params[:test_category].permit(:name,\n :organization_id))\n format.html { redirect_to test_categories_path }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @test_category.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /agencyfeed/:id/fetch_news.json fetch all the news and update db for a specific agencyfeed.
def fetch_news @news = News.fetch_and_store_news! @agencyfeed render template: 'news/list', status: :ok end
[ "def fetch\n ##\n # an array of { category_id: number, news: array }\n @fetched = News.fetch_and_store_news_from_all_agency_feed!\n render :fetch, status: :ok\n end", "def fetch!\n parsed_feed = FeedNormalizer::FeedNormalizer.parse open(self.feed_url)\n \n self.update_attributes( :title => parsed_feed.title,\n :url => parsed_feed.url\n #:etag => parsed_feed.etag\n #:last_modified => parsed_feed.last_modified\n )\n \n parsed_feed.entries.each do |entry|\n self.entries.create(:url => entry.url,\n :title => entry.title,\n :author => entry.author,\n #:summary => entry.summary,\n :content => entry.content\n #:published => entry.published\n #:categories => entry.categories\n ) if !Entry.find_by_url(entry.url)\n end\n end", "def news_for_feed(feed_id)\n news = []\n\n where(:user_id => User.current_user_id, :feed_id => feed_id).each do |news_item|\n news.push news_item.attributes\n end\n\n news\n end", "def news_feed\n @news_feed ||= parse_feed\n end", "def fetch\n doc = get_feed\n return nil if doc.nil?\n parse_feed(doc)\n end", "def update\n @agencyfeed.update! agencyfeed_params\n render :show, status: :ok\n end", "def for_user\n news_feed = NewsFeed.unread.where( user_id: params[:id] )\n render json: news_feed\n end", "def get_new_stories\n base_url = \"https://www.informagm.com/\"\n time = Time.now.utc-2.minutes\n path = \"/api/v1/feeds/stories?updated_at=#{time.strftime(\"%FT%T\")+\"%2B00%3A00\"}&per=50\"\n\n conn = Faraday.new(url: base_url) do |faraday|\n faraday.headers[:Accept] = 'application/json'\n faraday.headers[:Authorization] = \"Token token=903890bfe6f5dcbb231e472c0ee33ed7, company_key=61047dce-b69a-4de1-986e-e6db9d46ef97\"\n faraday.request :url_encoded\n faraday.response :logger\n faraday.adapter :net_http\n end\n\n response = conn.get(path)\n JSON.parse(response.body)\n end", "def fetch_articles\n\t\trequire 'open-uri'\n\t\tnewest_article = Article.limit(1).order(\"date DESC\")\n\t\tnews_trigger = false\n\t\ttitle_trigger = false\n\t\tarticles = Array.new\n\n\t\tfile = open(URL)\n\t\tcontents = file.readlines\n\t\tarticles = Array.new\n\t\tcache_article = nil\n\t\t\n\n\t\tcontents.each do |line|\n\t\t\tif title_trigger\n\t\t\t\tcache_article.title = replace_uml line.strip\n\t\t\t\tcache_article.text = fetch_text cache_article.url\n\t\t\t\tcache_article.text = replace_uml cache_article.text\n\t\t\t\tarticles.push(cache_article)\n\n\t\t\t\ttitle_trigger = false\n\t\t\t\tnews_trigger = false\n\t\t\telsif news_trigger\n\t\t\t\t#Date\n\t\t\t\tif line =~ /[1-3]?[0-9]\\.[0-1]?[0-9]\\.201[0-9]{1}/\n\t\t\t\t\tcache_article.date = line.slice(/[1-3]?[0-9]\\.[0-1]?[0-9]\\.201[0-9]{1}/)\n\t\t\t\t\tif (newest_article[0] != nil && cache_article.date < newest_article[0].date)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tend\n\t\t\t\t#Title\n\t\t\t\telsif line.include? \"box__title\"\n\t\t\t\t\ttitle_trigger = true\n\t\t\t\t#Text\n\t\t\t\telsif line.include? \".jpg\"\n\t\t\t\t\timage_url = \"http://www.ehco.ch\" + line.downcase.slice(/\\/upload\\/.+\\.jp[e]?g/)\n\t\t\t\t\t\n\t\t\t\t\tif(image_url != nil)\n\t\t\t\t\t\tcache_article.news_image = URI.parse(image_url)\n\t\t\t\t\tend\n\t\t\t\t#url\n\t\t\t\telsif line =~ /\\/de\\/.+.html/\n\t\t\t\t\tcache_article.url = \"http://www.ehco.ch\" + line.slice(/\\/de\\/.+.html/)\n\t\t\t\tend\n\t\t\telsif line.include? 
\"small-12 columns\"\n\t\t\t\tnews_trigger = true\t\t\t\t\t\n\t\t\t\tcache_article = Article.new\n\t\t\tend\n\t\tend\n\n\t\tarticles.reverse_each do |a|\n\t\t\tif(!Article.exists?(:url => a.url))\n\t\t\t\ta.save\n\t\t\t\tif !a.errors.empty?\n\t\t\t\t\tputs a.errors.inspect\n\t\t\t\tend\n\t\t\tend\n\t\tend \n\tend", "def read\n news_feed = NewsFeed.find_by_id( params[:id] )\n if news_feed\n news_feed.is_read = true\n if news_feed.save\n render( status: :ok, json: { success: true } ) and return\n end\n end\n render( status: 422, json: { success: false } )\n end", "def fetch\n @feed_entries = nil\n @atom_feed = Atom::Feed.load_feed(URI.parse(@feed_url))\n end", "def index\n @api_v1_news_feed_links = Api::V1::NewsFeedLink.all\n end", "def feed_fetcher_post\n ## Protect against accidental posts from unverified sources\n render_404 and return unless params[:api_key] == FeedParser::FF_KEY\n\n add_af_entry = false\n feed_id = params[:feed_id].to_i # IMPORTANT: Convert to integer!\n feed_cats = params[:feed_cats]\n [:api_key, :feed_id, :feed_cats, :action, :controller].each { |k| params.delete(k) }\n s = Story.check_for_duplicates(params[:url])\n if s.nil?\n params[:status] = Story::PENDING # set story in pending status\n params[:content_type] = 'article' # default content type\n params[:submitted_by_id] = Member.nt_bot.id # ascribe submission to the bot!\n s = Story.new(params)\n s.dont_process = true # No processable processing please!\n status = (s.save(false)) ? :created : \"404\" # No validations please!\n else\n # ignore title & description from the new feed!\n status = \"200\"\n end\n\n # No dupe feed entries for a story\n if status != \"404\" && feed_id && !s.feed_ids.include?(feed_id) # This will be a dupe check for feed parser (but just being cautious!) \n s.add_feed_tags(feed_cats.split(\"|\"))\n s.feeds << Feed.find(feed_id) # s.feed_ids << feed_id doesn't work!\n afs = AutoFetchedStory.find_by_story_id(s.id)\n AutoFetchedStory.create(:story_id => s.id, :fresh_story => (status == :created)) if afs.nil?\n end\n\n render :inline => \"#{s.id}\", :status => status\n end", "def fetch_articles\n return unless any_new?\n (how_many? - 1).downto(0) do |index|\n next if added?(index)\n Article.create(title: feed.entries[index].title,\n description: description(index),\n published: feed.entries[index].published,\n link: feed.entries[index].url,\n site_id: id,\n readingtime: reading_time(index))\n end\n end", "def update\n @feed = current_user.feeds.find params[:id]\n current_user.refresh_feed @feed\n\n head :ok\n rescue => e\n handle_error e\n end", "def fetch_feed(feed, last_fetched)\n\t\tbot.debug(\"Querying #{feed['url']}...\")\n\n\t\tbegin\n\t\t\trss = ::RSS::Parser.parse(open(feed['url']).read(), false)\n\t\trescue SocketError\n\t\t\tbot.debug(\"We encountered an error fetching #{feed['url']}. Aborting.\")\n\t\t\treturn\n\t\trescue ::RSS::NotWellFormedError\n\t\t\tbot.debug(\"We encountered an error parsing #{feed['url']}. Aborting.\")\n\t\t\treturn\n\t\tend\n\t\n\t\tfeed_title = (\n\t\t\tfeed['name'] or\n\t\t\t(rss.channel and rss.channel.title) or\n\t\t\trss.title\n\t\t)\n\n\t\tunless feed_title\n\t\t\tbot.debug(\"#{feed['url']} has no title. Ignoring.\")\n\t\t\treturn\n\t\tend\n\n\t\trss.items.each do |article|\n\t\t\tif not article.date\n\t\t\t\tbot.debug(\"Article #{article.title} has no date. Ignoring.\")\n\t\t\t\tnext\n\n\t\t\telsif not article.title\n\t\t\t\tbot.debug(\"Article in #{feed['url']} has no title. 
Ignoring.\")\n\t\t\t\tnext\n\n\t\t\telsif article.date > last_fetched\n\t\t\t\tbot.debug(\"Article #{article.title} in #{feed['url']} is new!\")\n\n\t\t\t\tbegin\n\t\t\t\t\tshort = shorten(article.link)\n\t\t\t\t\tbot.debug(\"Shortened URL #{article.link} to #{short}.\")\n\t\t\t\trescue\n\t\t\t\t\tbot.debug(\"Error shortening URL #{article.link}. Continuing.\")\n\t\t\t\t\tshort = article.link\n\t\t\t\tend\n\n\t\t\t\tfeed['channels'].each do |c|\n\t\t\t\t\tc.safe_msg(\"#{article.title} - #{short} (via #{feed_title})\")\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\tbot.debug(\"Updated #{feed['url']}.\")\n\tend", "def pull_hacker_news\n stories[\"Hacker News\"] = HackerNews.new.top_post\n end", "def fetchFeed(feed_url)\n\n $g.report('myg.fetch', 1)\n $statsd.increment('fetch', 1)\n\n feed = nil\n fetch_options = {\n :timeout => 10\n }\n\n begin \n start_time = Time.now\n feed = Feedjira::Feed.fetch_and_parse(feed_url, fetch_options)\n duration = Time.now - start_time\n duration_ms = duration * 1000\n $logger.info(\"\\tFetch took #{duration}\") \n $statsd.timing('fetching', \"#{duration_ms}\")\n rescue\n $logger.error(\"fetchFeed !!!!!! FAILED !!!!!!! on #{feed_url}\")\n $g.report('myg.fetch_error', 1)\n return nil\n end\n\n begin\n original_feed = feed # keep an unsanitized copy in case we need it later\n feed.sanitize_entries!\n # $logger.info(\"Fetched Feed: #{feed.title}\")\n # $logger.info(\"counted #{feed.entries.length} articles\")\n rescue\n $logger.error(\"Exception sanitizing a fetched feed: #{original_feed.inspect}\")\n return nil\n end\n\n feed\n end", "def index\n \n @news = News.all\n @latest_news = News.latest_news\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @news }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns "STANDARD_CLAIM_PROCESS", "BDD_PROGRAM", or "FDC_PROGRAM" based off of a few attributes in the evss data
def evss_claims_process_type(form526) if form526['bddQualified'] return 'BDD_PROGRAM' elsif form526['standardClaim'] return 'STANDARD_CLAIM_PROCESS' end 'FDC_PROGRAM' end
[ "def get_system(code)\n return code[2][1][0][1][1][1]\n end", "def proquest_processing_code\n case submitting_type.first\n when \"Dissertation\"\n \"F\"\n when \"Master's Thesis\"\n \"O\"\n else\n \"F\"\n end\n end", "def proc_name\n data = read_cpuinfo.match(/model name\\s*:\\s*(.+)/)[1]\n\n return data.strip\n end", "def getcardprogramgroupname\r\n return getvalue(SVTags::CARD_PROGRAM_GROUP_NAME)\r\n end", "def composite_med_proc_id\n qualifier = facility.sitecode =~ /^0*00S66$/ ? 'AD' : 'HC'\n elem = []\n proc_code = (service.service_procedure_code.blank? ? 'ZZ' + @delimiter.to_s +\n 'E01' : qualifier + @delimiter.to_s + service.service_procedure_code)\n proc_code = 'ZZ' + @delimiter.to_s + 'E01' if service.service_procedure_code.to_s == 'ZZE01'\n modifier_condition = (@facility_config.details['svc_segment'] && (@facility_config.details['svc_segment']['1'].to_s == '[CPT Code + Modifiers]'))\n elem = modifier_condition ? [proc_code, service.service_modifier1 , service.service_modifier2 ,\n service.service_modifier3 , service.service_modifier4] : [proc_code]\n elem = Output835.trim_segment(elem)\n elem.join(@delimiter)\n end", "def getcardprogramgroupname()\r\n return getvalue(SVTags::CARD_PROGRAM_GROUP_NAME)\r\n end", "def pc_system_type_decode(type)\n case type\n when 4 then \"Enterprise Server\" # most likely so first\n when 0 then \"Unspecified\"\n when 1 then \"Desktop\"\n when 2 then \"Mobile\"\n when 3 then \"Workstation\"\n when 5 then \"SOHO Server\"\n when 6 then \"Appliance PC\"\n when 7 then \"Performance Server\"\n when 8 then \"Maximum\"\n end\n end", "def parse_oleprocinfo(proc_info)\n\t\tcommand = proc_info.Name\n\t\tpid = proc_info.ProcessId\n\t\tuid = 0\n\t\tcmdline = proc_info.CommandLine\n\t\trss = proc_info.MaximumWorkingSetSize\n\t\ttime = proc_info.KernelModeTime.to_i + proc_info.UserModeTime.to_i\n\n\t\t{\n\t\t\t:pid => pid,\n\t\t\t:uid => uid,\n\t\t\t:command => command,\n\t\t\t:cmdline => cmdline,\n\t\t\t:mem => rss,\n\t\t\t:cpu => time,\n\t\t}\n\tend", "def parse_ie_selection_mode(payload_data)\n selection_mode = {}\n \n selection_mode_val = payload_data.unpack(\"H*\")[0].hex\n\n case selection_mode_val\n when 0 then selection_mode_data = \"MS or network provided APN, subscribed verified\"\n when 1 then selection_mode_data = \"MS provided APN, subscription not verified\"\n when 2 then selection_mode_data = \"Network provided APN, subscription not verified\"\n else selection_mode_data = \"For future use\"\n end\n \n selection_mode[:selection_mode] = selection_mode_data\n \n return selection_mode\n end", "def service_prov_identification\n code, qual = nil, nil\n claim = eob.claim_information\n\n if (claim && !claim.provider_npi.blank?)\n code = claim.provider_npi\n qual = 'XX'\n Output835.log.info \"Provider NPI from the 837 is chosen\"\n elsif (claim && !claim.provider_ein.blank?)\n code = claim.provider_ein\n qual = 'FI'\n Output835.log.info \"Provider TIN from 837 is chosen\"\n elsif !facility.facility_npi.blank?\n code = facility.facility_npi\n qual = 'XX'\n Output835.log.info \"facility NPI from FC is chosen\"\n elsif !facility.facility_tin.blank?\n code = facility.facility_tin\n qual = 'FI'\n Output835.log.info \"facility TIN from FC is chosen\"\n end\n\n return code, qual\n end", "def activity_id\n case freckle_entry.description\n when /review/i\n DEV\n when /meeting/i\n MEETING\n when /stand ?up/i\n MEETING\n when /merge/i\n DEV\n when /requirement/i\n REQUIREMENTS\n else\n DEV\n end\n end", "def cpuvendor\n \"CPU Vendor: #{@sysHash[\"Processor 
Name\"].split(' ')[0]}\" \n end", "def parse_program(prog)\n prog_bytes = to_bytes(prog)\n data = {}\n raise \"Invalid program\" unless prog[0, 4] == 'PROG'\n name = prog[4...16]\n data[:name] = program_name(prog)\n\n HR_PARAMS.each do |(key, ms_offset, ls_offset, ls_pos, units)|\n # single byte value\n value = prog_bytes[ms_offset]\n data[key] = value\n # high resolution value\n value_hr = (value << 2) | ((prog_bytes[ls_offset] >> ls_pos) & 0x03)\n data[:\"#{key}_hr\"] = value_hr\n if units\n # converted value:\n data[:\"#{key}_#{units}\"] = CONVERTERS[units][value_hr]\n end\n end\n\n CONV_PARAMS.each do |(key, offset, bit_pos, bit_len, units)|\n value = bits(prog_bytes[offset], bit_pos, bit_len)\n data[key] = CONVERTERS[units][value]\n if value != data[key]\n data[:\"#{key}_value\"] = value\n end\n end\n\n data[:seq_notes] = (96..426).step(22).map{|offset| note_name prog_bytes[offset]}\n (data[:step_length]...data[:seq_notes].size).each do |i|\n data[:seq_notes][i] = ''\n end\n # puts data[:seq_notes].join(' ')\n\n data[:lfo_rate_vis] = data[:lfo_bpm_sync] == 'ON' ? data[:lfo_rate_bpm] : data[:lfo_rate_hr]\n data[:eg_int_abs] = data[:eg_int_signed].abs\n data[:lfo_int_abs] = data[:lfo_int_signed].abs\n data\nend", "def program_name(qc_inspection_type_code)\n ProgramFunction.generic_program_name( 'QC', qc_inspection_type_code )\n end", "def get_pt_type(device)\n fs_check = Mixlib::ShellOut.new(\"blkid -c /dev/null #{device}\")\n fs_check.run_command\n match = fs_check.stdout.match(/\\sPTTYPE=\"(.*?)\"/)\n match = '' if match.nil?\n\n Chef::Log.info(\"Partition type for device #{device}: #{match[1]}\")\n match[1]\nend", "def get_frame_specs(frame)\n \tinfo = execute_cmd(\"lssyscfg -r sys -m #{frame}\")\n\tattributes = info.chomp.split(\",\")\n\tframe_hash = {}\n\tattributes.each do |line|\n\t\tatt,val = line.split(\"=\")\n\t\tcase att\n\t\twhen \"name\"\n\t\t\tframe_hash[:name]=val\n\t\twhen \"type_model\"\n\t\t\tframe_hash[:type_model]=val\n\t\twhen \"serial_num\"\n\t\t\tframe_hash[:serial_num]=val\n\t\tend\n\tend\n\t\t\n\treturn frame_hash\n end", "def get_pt_type(device)\n fs_check = Mixlib::ShellOut.new(\"blkid -c /dev/null #{device}\")\n fs_check.run_command\n match = fs_check.stdout.match(/\\sPTTYPE=\\\"(.*?)\\\"/)\n match = '' if match.nil?\n\n Chef::Log.info(\"Partition type for device #{device}: #{match[1]}\")\n match[1]\nend", "def service_payee_identification\n code, qual = nil, nil\n claim = eob.claim_information\n fac = facility\n\n if (claim && !claim.payee_npi.blank?)\n code = claim.payee_npi\n qual = 'XX'\n Output835.log.info \"Payee NPI from the 837 is chosen\"\n elsif (claim && !claim.payee_tin.blank?)\n code = claim.payee_tin\n qual = 'FI'\n Output835.log.info \"Payee TIN from 837 is chosen\"\n elsif !fac.facility_npi.blank?\n code = fac.facility_npi\n qual = 'XX'\n Output835.log.info \"facility NPI from FC is chosen\"\n elsif !fac.facility_tin.blank?\n code = fac.facility_tin\n qual = 'FI'\n Output835.log.info \"facility TIN from FC is chosen\"\n end\n\n return code, qual\n end", "def compare_sdc\n @attribute_all.each{|attribute_name,attribute_data|\n case attribute_name\n when \"define_clock\"\n attribute_data[0].each_value{|each|\n sdc_signal = \"chiptop.chip.\" + each.Signal_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == attribute_name && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n when \"xc_pulldown\", \"xc_pullup\"\n attribute_data[0].each_value{|each|\n sdc_signal = 
\"chiptop.chip.\" + each.Pin_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == \"define_attribute\" && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n when \"syn_keep\"\n attribute_data[0].each_value{|each|\n sdc_signal = \"chiptop.chip.\" + each.Pin_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == \"define_attribute\" && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n end\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns either 'Active', 'Reserves' or 'National Guard' based on the service branch
def convert_to_service_component(service_branch) service_branch = service_branch.downcase return 'Reserves' if service_branch.include?('reserves') return 'National Guard' if service_branch.include?('national guard') 'Active' end
[ "def hca_branch_of_service\n HCA_SERVICE_BRANCHES[branch_of_service_code] || 'other'\n end", "def determine_branch_type(branch)\n return branch if (branch.downcase == \"develop\")\n branch =~ /^([a-zA-Z]+)-/\n if $1 && (%w(rc hotfix).member? $1.downcase)\n return $1.downcase\n else\n raise \"unrecognised branch prefix in '#{branch}'. Should be hotfix or rc\"\n end\nend", "def get_vehicle_status\n if (@age < 5)\n return \"a service.\"\n end\n return \"to be scrapped.\"\n end", "def service_branch_used_in_disability(military_service_episode)\n category = case military_service_episode.personnel_category_type_code\n when 'A'\n ''\n when 'N'\n 'National Guard'\n when 'V' || 'Q'\n 'Reserve'\n else\n ''\n end\n\n service_name = \"#{military_service_episode.branch_of_service} #{category}\".strip\n service_name.gsub!('Air Force National Guard', 'Air National Guard')\n service_name if EVSS_COMBINED_SERVICE_BRANCHES.include? service_name\n end", "def current_branch\n return @branch unless @branch.blank?\n @branch = case @svninfo.url\n when /trunk/ then \"trunk\"\n when /branches\\/private\\/([^\\/]+)/ then $1\n when /branches\\/([^\\/]+)/ then $1\n when /tags\\/([^\\/]+)/ then $1\n else @config_source[\"branch\"]\n end \n @branch || \"unkown\"\n end", "def status\n branch&.status\n end", "def component_branch(cmp)\n component(cmp).fetch('branch', CONF_DEFAULT_BRANCH)\nend", "def state_group\n state = @current_policy ? @current_policy.tax_state_abbr : @company_info.hq_state\n case state\n when nil\n 'All'\n when 'CA'\n 'California'\n else\n 'non_California'\n end\n end", "def status_str\n case self.status\n when ACTIVE\n \"Active\"\n when INACTIVE\n \"Inactive\"\n when CLOSED\n \"Closed\"\n when NO_STRIPE\n \"No Stripe Account\"\n when UNKNOWN\n \"Unknown\"\n else\n \"Invalid\"\n end\n end", "def get_reservation_status(reservation)\n owner = reservation.item_owner\n case reservation.status\n when \"pending_owner\"\n is_current_user?(owner) ? \"awaiting_acceptance_from_you\" : \"awaiting_acceptance_from_other_party\"\n when \"pending_reserver\"\n is_current_user?(owner) ? \"awaiting_acceptance_from_other_party\" : \"awaiting_acceptance_from_you\"\n else\n \"reservation_\" + reservation.status\n end \n end", "def branch_info_for_location\n vendor = user_valid_for_viewing?('Vendor', ['Vendor', 'Agent'])\n #vendor = user_valid_for_viewing?('Agent')\n ### Either a vendor or a premium developer\n if !vendor.nil? && (vendor.class.to_s == 'Vendor' || (vendor.class.to_s == 'Agents::Branches::AssignedAgent' && vendor.is_developer ))\n #if true\n count = Agents::Branch.unscope(where: :is_developer).where(district: params[:location]).count\n #results = Agents::Branch.unscope(where: :is_developer).where(district: params[:location]).limit(20).offset(20*(params[:p].to_i)).map do |branch|\n results = Agents::Branch.unscope(where: :is_developer).where(district: params[:location]).order('name ASC').map do |branch|\n agent_count = Agents::Branches::AssignedAgent.where(branch_id: branch.id).count\n agent_count == 0 ? 
agent_count = 0 : agent_count -= 1\n {\n logo: branch.image_url,\n name: branch.name,\n address: branch.address,\n phone_number: branch.phone_number,\n email: branch.email,\n website: branch.website,\n branch_id: branch.id,\n agent_count: agent_count,\n branch_stats: branch.branch_specific_stats\n }\n end\n render json: { branches: results, count: count }, status: 200\n else\n render json: { message: 'Authorization failed' }, status: 401\n end\n end", "def state_or_province\n end", "def status\n if self.renewal?\n 'Renewal'\n else\n 'Newbusiness'\n end\n end", "def state_or_province\n end", "def derive_branch_name\n @config[:git][:branch] == 'preview' ? preview_branches.last : @config[:git][:branch]\n end", "def branch_location branch_name\n case\n when branch_name.start_with?('origin/') then 'remote'\n when branch_name.start_with?('upstream/') then 'upstream'\n else 'local'\n end\nend", "def composer_status_as_public_site_string\n result = \"\"\n if composer_status == Contributor.composer_statuses[:tier_2]\n result = \"Fully Represented SOUNZ Composer\"\n end\n result\n end", "def show_git_branch_for_staging\n \"<p class='header-top-notice'>Current branch: <strong> #{render :partial => 'git'}</strong></p>\".html_safe if Rails.env == 'staging'\n end", "def get_status(state)\n state ||= 'info'\n if state.downcase.include? \"fail\"\n 'danger'\n elsif state.downcase.include?(\"complete\")||state.downcase.include?(\"ready\")\n 'success'\n else\n 'info'\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /variant_images GET /variant_images.json
def index @variant_images = VariantImage.all respond_to do |format| format.html # index.html.erb format.json { render json: @variant_images } end end
[ "def variant_image(variant)\n variant_image = shopify_images.detect { |image| image.variant_ids.include?(variant.id) }\n return if variant_image.blank?\n\n variant_image.to_json\n end", "def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end", "def show\n @image_variant = ImageVariant.cs(self.current_scope).find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_variant }\n end\n end", "def show\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n\n respond_to do |format|\n format.html # show.html.erb\n format.js\n format.json { render json: @variant_image }\n end\n end", "def show\n render json: @img_url\n end", "def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end", "def index\n render json: @variants\n end", "def images\n response = JSON.parse( self.class.get(\"#{BASE_URL}/contest/#{@api_key}/images\") )\n end", "def image_list\n @images = Picture.where(album_id: params[:album_id])\n respond_to do |format|\n format.json { render json: @images.to_json(methods: [:path])}\n end\n end", "def images(params = {})\n response, status = BeyondApi::Request.get(@session, \"/shop/images\", params)\n\n handle_response(response, status)\n end", "def show\n @product_option_value_image = ProductOptionValueImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product_option_value_image }\n end\n end", "def product_images\n user_id, product = params[:user_id], params[:id]\n return bad_request if !user_id || !product\n # returns all images for a given user and product\n images = UserProduct.find_images(user_id, product)\n # create json array\n img = images ? images.collect { |i| i.js_serialize } : []\n render :json => img\n end", "def show\n @image = Image.find(params[:id])\n\n render json: @image\n end", "def index\n @img_urls = ImgUrl.all\n\n render json: @img_urls\n end", "def index\n @product_images = ProductImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @product_images }\n end\n end", "def get_all_images(env)\n images_json = get(env, \"#{@session.endpoints[:image]}/images\")\n images = JSON.parse(images_json)['images']\n\n return images if images.empty?\n\n is_v1 = false\n unless images[0].key? 'visibility'\n is_v1 = true\n images_json = get(env, \"#{@session.endpoints[:image]}/images/detail\")\n images = JSON.parse(images_json)['images']\n end\n\n images.map do |i|\n i['visibility'] = i['is_public'] ? 
'public' : 'private' if is_v1\n Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])\n end\n end", "def _state_photos(state_id)\n get('state/photos', state_id, options: { type: :array })\n end", "def images\n images = []\n JSON.parse(resource['/offerings/image'].get)[\"images\"].each do |img|\n images << Image.new(img)\n end\n return images\n end", "def index\n @pictures = Picture.where(foodscape_id: params[:foodscape_id])\n render json: @pictures\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /variant_images/1 GET /variant_images/1.json
def show @variant_image = VariantImage.find(params[:id]) @variant = @variant_image.variant respond_to do |format| format.html # show.html.erb format.js format.json { render json: @variant_image } end end
[ "def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end", "def show\n @image_variant = ImageVariant.cs(self.current_scope).find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_variant }\n end\n end", "def variant_image(variant)\n variant_image = shopify_images.detect { |image| image.variant_ids.include?(variant.id) }\n return if variant_image.blank?\n\n variant_image.to_json\n end", "def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end", "def show\n render json: @img_url\n end", "def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end", "def show\n @image = Image.find(params[:id])\n\n render json: @image\n end", "def show\n @product_option_value_image = ProductOptionValueImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product_option_value_image }\n end\n end", "def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/photos/#{params[:id]}.json?token=#{ENV['API_KEY']}\"\n\t response = RestClient.get(url)\n\t @photo = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/photos.json?token=#{ENV['API_KEY']}\"\n response = RestClient.get(url)\n @photos = JSON.parse(response.body)\t\t \n\tend\n end", "def show\n @client_image = ClientImage.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @client_image }\n end\n end", "def image_list\n @images = Picture.where(album_id: params[:album_id])\n respond_to do |format|\n format.json { render json: @images.to_json(methods: [:path])}\n end\n end", "def show\n @images_product = ImagesProduct.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @images_product }\n end\n end", "def show\n render json: @model_full_image\n end", "def index\n @pictures = Picture.where(foodscape_id: params[:foodscape_id])\n render json: @pictures\n end", "def show\n @vendor_image = VendorImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @vendor_image }\n end\n end", "def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end", "def show\n render json: @default_bike_image\n end", "def index\n render json: @variants\n end", "def show\n @image_set = ImageSet.find(params[:id])\n\n render json: @image_set\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /variant_images/new GET /variant_images/new.json
def new @variant_image = VariantImage.new @variant = Variant.find(params[:variant_id]) @variant_image.variant_id = @variant.id @product = @variant.product respond_to do |format| format.html # new.html.erb format.js # new.js.erb format.json { render json: @variant_image } end end
[ "def new\n @image_variant = ImageVariant.cs(self.current_scope).new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_variant }\n end\n end", "def new\n @title = t('view.images.new_title')\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image_url = ImageUrl.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_url }\n end\n end", "def new\n @get_image = GetImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @get_image }\n end\n end", "def new\n @images_product = ImagesProduct.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @images_product }\n end\n end", "def new\n @variant = Variant.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @variant }\n end\n end", "def new\n @variant = Variant.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @variant }\n end\n end", "def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @image_file = ImageFile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_file }\n end\n end", "def new\n @property_image = PropertyImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_image }\n end\n end", "def new\n @product_option_value_image = ProductOptionValueImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_option_value_image }\n end\n end", "def new\n @client_image = ClientImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @client_image }\n end\n end", "def new\n @image_upload = ImageUpload.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_upload }\n end\n end", "def new\n @shop_image = ShopImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @shop_image }\n end\n end", "def new\n @image_template = ImageTemplate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @image_template }\n end\n end", "def new\n @motivational_image = MotivationalImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @motivational_image }\n end\n end", "def new\n @tl_image = TlImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tl_image }\n end\n end", "def new\n @image_gallery = ImageGallery.new\n\n respond_to do |format|\n format.html 
# new.html.erb\n format.json { render json: @image_gallery }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /variant_images POST /variant_images.json
def create params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts @variant_image = VariantImage.new(params[:variant_image]) @variant = @variant_image.variant @product = @variant.product respond_to do |format| if @variant_image.save format.html { redirect_to @variant, notice: 'Image added successfully.' } format.js { redirect_to @variant_image, notice: 'Image added successfully.' } format.json { render json: @variant_image, status: :created, location: @variant_image } else format.html { render action: "new" } format.js { render action: "new" } format.json { render json: @variant_image.errors, status: :unprocessable_entity } end end end
[ "def create\n @image_variant = ImageVariant.cs(self.current_scope).new(params[:image_variant])\n \n respond_to do |format|\n if @image_variant.save\n format.html { redirect_to @image_variant, notice: 'Image variant was successfully created.' }\n format.json { render json: @image_variant, status: :created, location: @image_variant }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image_variant.errors, status: :unprocessable_entity }\n end\n end\n end", "def variant_image(variant)\n variant_image = shopify_images.detect { |image| image.variant_ids.include?(variant.id) }\n return if variant_image.blank?\n\n variant_image.to_json\n end", "def upload_image_file(args = {}) \n post(\"/files.json/captiveportal/images\", args)\nend", "def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end", "def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end", "def create\n respond_to do |format|\n format.json do\n if @variant.save\n render json: @variant\n else\n render json: { :errors => @variant.errors.messages }, status: :unprocessable_entity\n end\n end\n end\n end", "def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end", "def upload_floor_plan(args = {}) \n post(\"/files.json/floorplan/images\", args)\nend", "def create\n image = Image.create(image_params)\n\n if image.new_record?\n render json: { errors: image.errors.messages }, status: 422\n else\n render json: image, status: 201\n end\n end", "def get_images_data\t\t\t\t\t\t\n\t\t{ payload: { 'makes' => makes_dropdown, 'images' => images_array('none').compact}, success: true}\n\tend", "def add_image\n obtain_product_image_params\n pi = ProductImage.new(picture: @image_params)\n @product.product_images << pi\n render json: @product.simple_info, status: :ok\n rescue => e\n render json: { error: e }, status: :bad_request\n end", "def create\n brand = Brand.new brand_params\n\n if params[:brand][:images].present?\n params[:brand][:images].each do |image|\n req = Cloudinary::Uploader.upload image\n brand.images << req[\"public_id\"]\n end\n end\n\n brand.save\n redirect_to brand_path\n end", "def create\n create_params = product_image_params || {product_uuid: @product.uuid}\n @product_image = ProductImage.new(product_image_params)\n if @product_image.save\n render json: @product_image, status: 201\n else\n render_error 400, @product_image.errors.full_messages\n end\n end", "def add_image\n if request.post? 
== false\n render :json => { :message => \"Error\" }\n return\n end\n\n # Add an image to a collection\n collection = Collection.new\n collection.byId( params[ :collection_id ] )\n image = Image.new\n image.byId( params[ :image_id ] )\n collection.add( :images, image.urn )\n render :json => { \n :message => \"Success\", \n :collection => collection.all \n }\n end", "def upload_image\n delete_image() unless @slide.image.nil?\n\n @image = @slide.build_image(slide_image_params)\n\n if @image.save\n render json: @slide, status: :ok#, location: @collection\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def upload_image\n res = []\n params[:images].each do |image|\n image = ContentFile.new(image: image, tmp_key: params[:tmp_images_key])\n if image.save\n res << {id: image.id, url: image.image.url(:thumb)}\n else\n res << {errors: image.errors.full_messages.join(', ')}\n end\n end\n render json: res\n end", "def new\n @image_variant = ImageVariant.cs(self.current_scope).new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_variant }\n end\n end", "def upload_image\n delete_image() unless @collection.image.nil?\n\n @image = @collection.build_image(collection_image_params)\n\n if @image.save\n render json: @collection, status: :ok#, location: @collection\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def upload\n image_file = ImageFile.new(params)\n @image = @product.images.build({ extension: image_file.extension })\n\n unless @image.save\n render json: @image.errors, status: :unprocessable_entity; return\n end\n\n image_file.name = @image._id\n image_processor = ProductImageProcessor.new(collection_id, product_id, image_file)\n\n if image_processor.save_image\n render json: @product, status: :ok#, location: @collection\n else\n render json: image_processor.errors, status: :unprocessable_entity\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /variant_images/1 PUT /variant_images/1.json
def update @variant_image = VariantImage.find(params[:id]) respond_to do |format| if @variant_image.update_attributes(params[:variant_image]) format.html { redirect_to @variant_image, notice: 'Variant image was successfully updated.' } format.json { head :ok } else format.html { render action: "edit" } format.json { render json: @variant_image.errors, status: :unprocessable_entity } end end end
[ "def update\n @image_variant = ImageVariant.cs(self.current_scope).find(params[:id])\n \n respond_to do |format|\n if @image_variant.update_attributes(params[:image_variant])\n format.html { redirect_to @image_variant, notice: 'Image variant was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image_variant.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n #we can allow updating an image name and description and unit, but not the image data. for that we need to create a new image\n update_params = image_params\n update_params.delete(\"image\")\n\n if @image.update(update_params)\n head :no_content\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def update\n params[:image].delete :created_at\n params[:image].delete :updated_at\n params[:image].delete :id\n @image = Image.find(params[:id])\n if @image.update_attributes(params[:image])\n render json: @image\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def variant_image(variant)\n variant_image = shopify_images.detect { |image| image.variant_ids.include?(variant.id) }\n return if variant_image.blank?\n\n variant_image.to_json\n end", "def update\n authorize @thing, :update_image?\n if @thing_image.update(thing_image_update_params)\n head :no_content\n else\n render json: @thing_image.errors, status: :unprocessable_entity\n end\n end", "def update_image\n if params[:id].present?\n event_assets = params[:event_assets]\n event = Event.find(params[:id])\n array = event.event_assets\n image = array.push(event_assets)\n image_update = event.update(event_assets: image)\n render json: { message: \"Images are updated Successfully\" }, status: :ok\n else\n render json: { message: \"Give the Event ID\" }, status: :unprocessable_entity \n end\n end", "def create\n @image_variant = ImageVariant.cs(self.current_scope).new(params[:image_variant])\n \n respond_to do |format|\n if @image_variant.save\n format.html { redirect_to @image_variant, notice: 'Image variant was successfully created.' 
}\n format.json { render json: @image_variant, status: :created, location: @image_variant }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image_variant.errors, status: :unprocessable_entity }\n end\n end\n end", "def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end", "def update\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n if @estate_agent_image.update_attributes(params[:property_image])\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n if @variant.update_attributes(params[:variant])\n format.html { redirect_to @variant, notice: 'Variant was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end", "def set_image_values( version, params )\n Rails.logger.info 'versions_helper - set_image_values() - params: '\\\n + params.to_s\n\n design_template = version.design_template\n images = get_images_array( design_template )\n\n image_count = params[ 'image_count' ]\n image_count = if image_count != ''\n image_count.to_i\n else\n 0\n end\n\n # go through all of the images. 
For each, determine if its an uploaded file\n # or an instagam.\n image_count.times do |i|\n p_name = 'type' + i.to_s\n type = params[ p_name ]\n\n p_name = 'url' + i.to_s\n url = params[ p_name ]\n\n p_name = 'replacement_image' + i.to_s\n replacement_image = params[ p_name ]\n\n p_name = 'image_name' + i.to_s\n image_name = params[ p_name ]\n\n p_name = 'collage_query_string' + i.to_s\n query_string = params[ p_name ]\n\n p_name = 'collage_query_type' + i.to_s\n query_type = params[ p_name ]\n\n query = { instagram: { type: query_type, query_string: query_string } }\n query = query.to_json\n\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - type: '\\\n + type.to_s\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - image_name: '\\\n + image_name.to_s\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - replacement_image: '\\\n + replacement_image.to_s\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - query: '\\\n + query.to_s\n\n\n if type == 'upload'\n if replacement_image\n my_file = replacement_image[ 'uploaded_file' ]\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - my_file: '\\\n + my_file.to_s\n\n if my_file\n clear_image_associations( image_name, version )\n replacement_image = version.replacement_images.create( { uploaded_file: my_file,\n image_name: image_name } )\n# replacement_image.image_name = image_name\n replacement_image.save\n\n # this will set version.values to reflect any user-set properties\n # for this version, these values will eventually be read by\n # the AI script\n add_replacement_image_to_version( replacement_image,\\\n image_name, version )\n end # my_file\n end # we have a replacement_image\n elsif( type == 'web' )\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - WEB URL!'\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - url: ' + url.to_s\n\n clear_image_associations( image_name, version )\n replacement_image = version.replacement_images.create( { image_name: image_name,\n url: url } )\n replacement_image.save\n\n fetch_image( replacement_image )\n\n # this will set version.values to reflect any user-set properties\n # for this version, these values will eventually be read by\n # the AI script\n add_replacement_image_to_version( replacement_image,\\\n image_name, version )\n\n\n else\n # type = instagram collage\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - Instagram collage!'\n c = get_collage( image_name, version )\n\n if( c.nil? 
|| c.query != query )\n # Either there was no associated collage, or the query has changed.\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - Building a new Collage.'\n o = { query: query }\n clear_image_associations( image_name, version )\n collage = version.collages.create( o )\n collage.image_name = image_name\n collage.save\n build_collage_folder( collage )\n add_collage_to_version( collage, image_name, version )\n else\n Rails.logger.info 'VERSIONS_HELPER - set_image_values() - Keeping the old Collage'\n end\n end\n end # image_count times\n end", "def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end", "def update\n respond_to do |format|\n # spot = HTTParty.get(\"http://localhost:3000/spots/#{params[:id]}\" )\n if @spot.update(spot_params)\n HTTParty.patch(\"http://localhost:3000/spots/#{params[:id]}?name=#{spot_params[:name]}&lat=#{spot_params[:lat]}&lon=#{spot_params[:lon]}&description=#{spot_params[:description]}&features=#{spot_params[:features]}&spot_type=#{spot_params[:spot_type]}&img=#{spot_params[:img]}\")\n @spot.spot_photos.attach(params[:spot][:spot_photos])\n format.html { redirect_to @spot, notice: 'Spot was successfully updated.' }\n format.json { render :show, status: :ok, location: @spot }\n else\n format.html { render :edit }\n format.json { render json: @spot.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n @variant_image.destroy\n\n respond_to do |format|\n format.html { redirect_to @variant.product }\n format.json { head :ok }\n end\n end", "def update\n respond_to do |format|\n if @taxi_image.update(taxi_image_params)\n format.html { redirect_to @taxi_image, notice: 'Taxi image was successfully updated.' }\n format.json { render :show, status: :ok, location: @taxi_image }\n else\n format.html { render :edit }\n format.json { render json: @taxi_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @s3_image = S3Image.find(params[:id])\n\n respond_to do |format|\n if @s3_image.update_attributes(params[:s3_image])\n format.html { redirect_to @s3_image, notice: 'S3 image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @s3_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @vehicle_type.update(vehicle_type_params)\n images\n\n format.html { redirect_to @vehicle_type, notice: 'Vehicle type was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @vehicle_type.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @product_option_value_image = ProductOptionValueImage.find(params[:id])\n\n respond_to do |format|\n if @product_option_value_image.update_attributes(params[:product_option_value_image])\n format.html { redirect_to @product_option_value_image, notice: 'Product option value image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @product_option_value_image.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /variant_images/1 DELETE /variant_images/1.json
def destroy @variant_image = VariantImage.find(params[:id]) @variant = @variant_image.variant @variant_image.destroy respond_to do |format| format.html { redirect_to @variant.product } format.json { head :ok } end end
[ "def destroy\n @image_variant = ImageVariant.cs(self.current_scope).find(params[:id])\n @image_variant.destroy\n \n respond_to do |format|\n format.html { redirect_to image_variants_url }\n format.json { head :ok }\n end\n end", "def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n @image.delete_img(@image.name)\n \n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n render json: {status: \"success\"}, status: :ok\n end", "def destroy\n image = @product.images.find(image_id)\n image.destroy\n\n render json: @product, status: :ok#, location: @collection\n end", "def destroy\n @image.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n @estate_agent_image.destroy\n\n respond_to do |format|\n format.html { redirect_to estate_agent_image_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @variant = Variant.find(params[:id])\n @variant.destroy\n\n respond_to do |format|\n format.html { redirect_to variants_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @variant.destroy\n respond_to do |format|\n format.html { redirect_to edit_admin_good_url(@variant.good, anchor: \"variants\") }\n format.json { head :no_content }\n end\n end", "def delete_image\n if request.post? == false\n render :json => { :message => \"Error\" }\n return\n end\n image = Image.new\n image.byId( params[ :image_id ] )\n collection = Collection.new\n collection.byId( params[ :collection_id ] )\n collection.delete( :images, image.urn )\n render :json => { \n :message => \"Success\", \n :collection => collection.all \n }\n end", "def destroy\n @vmimage.destroy\n respond_to do |format|\n format.html { redirect_to vmimages_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @imagedemo.destroy\n respond_to do |format|\n format.html { redirect_to imagedemos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @dish_image.destroy\n respond_to do |format|\n format.html { redirect_to dish_images_url }\n format.json { head :no_content }\n end\n end", "def deleteThumbImage\n render json: EventsHlp.deleteThumbImage(params[:ids])\n end", "def destroy\n @product_option_value_image = ProductOptionValueImage.find(params[:id])\n @product_option_value_image.destroy\n\n respond_to do |format|\n format.html { redirect_to product_option_value_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @sub_collection_image.destroy\n respond_to do |format|\n format.html { redirect_to sub_collection_images_url, notice: 'Sub collection image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @sample_photo.destroy\n render json: {message: 'Foto Excluida'} , status: :ok\n end", "def destroy\n @step_image.destroy\n respond_to do |format|\n format.html { redirect_to step_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n id = @taxi_image.taxi_sevice_id\n @taxi_image.destroy\n respond_to do |format|\n format.html { redirect_to \"/taxi_sevices/\" + id.to_s, notice: 'Taxi image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return an absolute path within the working directory. The working directory is determined as follows: the value of the BREWED_WORKING_DIR env var; when run_mode is :daemon, the working dir is state_dir; otherwise, the current directory.
def working_dir(*path) if _working_dir.nil? @_working_dir = ENV['PROJECT_WORKING_DIR'] if _working_dir != nil @_working_dir = Pathname.new(expand_variables _working_dir) Dir.chdir _working_dir.to_s elsif run_mode == :daemon @_working_dir = state_dir Dir.chdir _working_dir.to_s else @_working_dir = Pathname.getwd end raise "working_dir not a directory: #{_working_dir.safe_s}" unless _working_dir.directory? end [_working_dir, *path].reduce(:+) end
[ "def working_dir\n ENV['PWD'] || Dir.pwd\n end", "def working_dir\n ENV['PWD'] || Dir.pwd\n end", "def working_dir\n if chatterbot_helper?\n Dir.getwd\n else\n File.dirname($0)\n #Dir.pwd\n end\n end", "def working_directory\n @options[:working_directory]\n end", "def dir\n @working_directory\n end", "def working_path\n @repo.working_dir\n end", "def getWorkingDir\n if(@workingDir != nil)\n return @workingDir\n end\n currDir = Dir.pwd\n dr = \"\"\n currDir.split(\"/\").each{ |entry|\n dr = dr+entry+\"/\"\n #puts dr\n if(File.directory? dr+\".hoster\")\n @workingDir = dr+\".hoster\"\n end\n }\n @workingDir\n end", "def working_directory\n check_for_active_task\n\n dir = nil\n\n @task.Definition.Actions.each do |action|\n dir = action.WorkingDirectory if action.Type == 0\n end\n\n dir\n end", "def getWorkingDir\n currDir = Dir.pwd\n dr = \"\"\n currDir.split(\"/\").each{ |entry|\n dr = dr+entry+\"/\"\n #puts dr\n if(File.directory? dr+\".hoster\")\n @workingDir = dr+\".hoster\"\n end\n }\n @workingDir\n end", "def git_directory_path\n base = defined?(@working_dir) ? @working_dir.to_s : nil\n\n File.expand_path(execute(git_cmd('rev-parse', '--git-dir')), base)\n end", "def get_working_path(curriculum)\n \"#{Rails.root}/repos/#{curriculum.creator.username}/#{curriculum.cur_name}/working/#{curriculum.cur_name}\"\n end", "def parent_directory\r\n File.join(%w{C: work})\r\n end", "def work_dir\n File.join(tmpdir, @gem_spec.gem_dirname)\n end", "def cwd\n @cwd ||= begin\n exec! 'pwd'\n rescue => e\n raise e\n '/'\n end\n end", "def run_dir\n result = File.join(osw_dir, 'run')\n if @workflow_json\n begin\n result = @workflow_json.absoluteRunDir.to_s\n rescue StandardError\n end\n end\n result\n end", "def run_dir\r\n result = File.join(osw_dir, 'run')\r\n if @workflow_json\r\n begin\r\n result = @workflow_json.absoluteRunDir.to_s\r\n rescue\r\n end\r\n end\r\n result\r\n end", "def guess_working_path\n unless File.directory?(File.join(Dir.pwd, '.git'))\n raise \"Current working directory doesn't seem to be a Git working directory.\"\n end\n Dir.pwd\nend", "def cwd(pid)\n\tFile.readlink(\"/proc/#{pid}/cwd\")\nend", "def work_dir\n # The directory is not stored in a variable so it can be overridden\n # in specs.\n File.join(base_dir, \"ruby-#{RUBY_VERSION}\", \"rbs-#{RBS::VERSION}\", \"solargraph-#{Solargraph::VERSION}\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
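The working_dir record above ends with `[_working_dir, *path].reduce(:+)`, a Pathname idiom that folds a base directory and any trailing segments into one absolute path. A minimal, self-contained sketch of just that joining step; the directory and file names are hypothetical, not taken from the record:

    require 'pathname'

    # Pathname#+ appends one segment, so reduce(:+) chains a base dir
    # with any number of extra segments into a single Pathname.
    base     = Pathname.new('/tmp/project')     # hypothetical working dir
    segments = ['state', 'cache.yml']
    puts [base, *segments].reduce(:+)           # => /tmp/project/state/cache.yml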
Provide the absolute path to this Brewed's lib dir.
def libdir() LIBDIR end
[ "def lib_path\n File.dirname(File.expand_path(__FILE__))\n end", "def lib_path\n File.expand_path '..', File.dirname(__FILE__)\n end", "def lib\n path '/lib'\n end", "def _lib_dir\n File.join(get_pref(\"sketchbook.path\"), \"libraries\")\n end", "def lib_dir\n File.join(root, 'lib')\n end", "def path\n @backend.lib_dir + name_on_disk\n end", "def libdir\n if brewed? || from_osx?\n if @min_version.major == 3\n prefix/\"lib/#{xy}/config-#{version.major}.#{version.minor}m\"\n else\n prefix/\"lib/#{xy}/config\"\n end\n else\n Pathname.new(`#{binary} -c \"from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))\"`.strip)\n end\n end", "def lib_path\n File.join( solr_home, 'lib' )\n end", "def library_path(library_name)\n File.join(_lib_dir, library_name)\n end", "def library_path(library_name)\n Pathname.new(lib_dir) + library_name\n end", "def lib_path\n File.join( solr_home, 'lib' )\n end", "def lib_dependencies_directory\n FilePath.new(@root_directory, \"dependencies\", \"lib\")\n end", "def libraries_path\n @local_path + @contrib_path + 'libraries'\n end", "def get_lib_root_dir\n return @LibRootDir\n end", "def app_library_dir\n base_dir = app_sandbox_dir\n if base_dir.nil?\n nil\n else\n File.join(base_dir, 'Library')\n end\n end", "def app_library_dir\n base_dir = app_sandbox_dir\n if base_dir.nil?\n nil\n else\n File.join(base_dir, 'Library')\n end\n end", "def moab_lib\n Pathname.new(@server['lib'].to_s)\n end", "def libpath( *args )\n args.empty? ? LIBPATH.to_s : File.join(LIBPATH.to_s, args.flatten)\n end", "def libraryPath(sourcePath)\n\t'../evothings-libraries/' + sourcePath\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Moon phases and Zodiac: calculates the phase of the moon. 0 = New Moon, 1 = Waxing Crescent, 2 = First Quarter, 3 = Waxing Gibbous, 4 = Full Moon, 5 = Waning Gibbous, 6 = Last Quarter, 7 = Waning Crescent.
def moonphase(time=nil) # in UTC time = pbGetTimeNow if !time transitions = [ 1.8456618033125, 5.5369854099375, 9.2283090165625, 12.9196326231875, 16.6109562298125, 20.3022798364375, 23.9936034430625, 27.6849270496875] yy = time.year-((12-time.mon)/10.0).floor j = (365.25*(4712+yy)).floor + (((time.mon+9)%12)*30.6+0.5).floor + time.day+59 j -= (((yy/100.0)+49).floor*0.75).floor-38 if j>2299160 j += (((time.hour*60)+time.min*60)+time.sec)/86400.0 v = (j-2451550.1)/29.530588853 v = ((v-v.floor)+(v<0 ? 1 : 0)) ag = v*29.53 for i in 0...transitions.length return i if ag<=transitions[i] end return 0 end
[ "def calcphase(arg)\n \n # some constants to allow for the moon phase calculation. although, using local variables.\n ages = [18, 0, 11, 22, 3, 14, 25, 6, 17, 28, 9, 20, 1, 12, 23, 4, 15, 26, 7]\n offsets = [-1, 1, 0, 1, 2, 3, 4, 5, 7, 7, 9, 9]\n description = [\"new (totally dark)\",\n \"waxing crescent (increasing to full)\",\n \"in its first quarter (increasing to full)\",\n \"waxing gibbous (increasing to full)\",\n \"full (full light)\",\n \"waning gibbous (decreasing from full)\",\n \"in its last quarter (decreasing from full)\",\n \"waning crescent (decreasing from full)\"]\n \n # split the string argument into its components separated by spaces\n m, d, y = arg.split(/\\s/)\n # make them into integers for calculations\n month = m.to_i\n day = d.to_i\n year = y.to_i\n \n # adjust day for a 30 calendar. I think.\n day = 1 if day == 31\n \n days_into_phase = ((ages[(year + 1) % 19] + ((day + offsets[month-1]) % 30) + (if year < 1900 then 1 else 0 end)) % 30)\n\n index = ((days_into_phase + 2) * 16/59.0).to_i\n\n index = 7 if index > 7\n \n # modify phase\n self.phase = description[index]\n \n end", "def _moon_phase_text(phase)\n if phase < 0.05\n 'new moon'\n elsif phase < 0.20\n 'waxing crescent moon'\n elsif phase < 0.30\n 'first quarter moon'\n elsif phase < 0.45\n 'waxing gibbous moon'\n elsif phase < 0.55\n 'full moon'\n elsif phase < 0.70\n 'waning gibbous moon'\n elsif phase < 0.80\n 'last quarter moon'\n elsif phase < 0.95\n 'waning crescent moon'\n else\n 'new moon'\n end\n end", "def moon_phase(location)\n response = get('astronomy', location)\n {\n age: response['moon_phase']['ageOfMoon'].to_i,\n illumination: response['moon_phase']['percentIlluminated'].to_i\n }\n end", "def time_phase_advance\r\n if @pze_hour >= 6 && @pze_hour < 18 #night\r\n @pze_hour = 18\r\n else #morning\r\n @pze_day += 1 if @pze_hour >= 18 \r\n @pze_hour = 6\r\n if PZE::CLOCK == 0\r\n $game_temp.pze_time_reset = true\r\n $scene = Scene_Phase_Message.new \r\n end\r\n end\r\n @pze_count = 1\r\n @pze_minute = 0\r\n @pze_hour_prog = get_prog_hour(@pze_hour, @pze_day)\r\n @pze_minute_prog = get_prog_minute(@pze_minute, @pze_hour, @pze_day)\r\n @pze_night = !@pze_night\r\n if $game_temp.pze_time_increment != 0\r\n @pze_time_increment = $game_temp.pze_time_increment\r\n $game_temp.pze_time_increment = 0\r\n end\r\n $game_temp.pze_storm = 0\r\n $game_screen.reset_tone_weather\r\n return\r\n end", "def phases\n phs = ''\n PHASES.each do |ph|\n rating = self[\"power#{ph}_rating\".to_sym]\n phs += ph unless rating.nil? 
|| rating.to_f == 0\n end\n phs\n end", "def horni\n @y + @hrana/2\n end", "def mafia_night()\n\n\t$is_morning = false\n\t$cat = false\n\t$mafia_night_counter += 1\n\t$mafia_night = $mafia_night_counter.to_s + ordinal($mafia_night_counter)\n\t\nend", "def set_moon_clocks\n now = set_clocks\n \n # Has new or full moon expired?\n if now > $tnew1 || now > $tfull1\n $tnew0, $tnew1, $tfull0, $tfull1 = LunarYear.date_of_moons(now)\n end\n \n $last_new_moon.value = format_days(now - $tnew0)\n $next_new_moon.value = format_days($tnew1 - now)\n $last_full_moon.value = format_days(now - $tfull0)\n $next_full_moon.value = format_days($tfull1 - now)\nend", "def tonic\n modal = {\n ionian: 0,\n dorian: 2,\n phrygian: 4,\n lydian: 5,\n mixolydian: 7,\n aeolian: 9,\n locrian: 11\n }\n (@pitch.pitch_class - modal[@mode.to_sym]) % 12\n end", "def lunar_phase(tee)\n phi = (lunar_longitude(tee) - solar_longitude(tee)) % 360\n # puts \"Tee: #{tee}, Lunar long: #{lunar_longitude(tee)}; Solar long: #{solar_longitude(tee)}\"\n t0 = nth_new_moon(0)\n n = ((tee - t0) / MEAN_SYNODIC_MONTH).round\n phi_prime = (360.degrees *\n (((tee - nth_new_moon(n)) / MEAN_SYNODIC_MONTH) % 1))\n # puts \"nth: #{nth_new_moon(n)}; mod: #{((tee - nth_new_moon(n)) / MEAN_SYNODIC_MONTH) % 1}\"\n # puts \"lunar_phase: phi: #{phi}, t0: #{t0}; n: #{n}; phi_prime: #{phi_prime}\" \n if (phi - phi_prime).abs > 180.degrees\n return phi_prime\n else\n return phi\n end\n end", "def phases(wod)\n options = {\n wod: wod,\n response: api_connection.connection.get(\"phases/\"),\n directory: \"fpl_data/pulled_data/phases\",\n filename: \"phases_#{DateTime.current.strftime(\"%C%y-%m-%d\")}\"\n }\n\n CoreUtility::DataToJSON.write_or_display_data(options)\n end", "def zodiac(month,day)\n time = [\n 3,21,4,19, # Aries\n 4,20,5,20, # Taurus\n 5,21,6,20, # Gemini\n 6,21,7,20, # Cancer\n 7,23,8,22, # Leo\n 8,23,9,22, # Virgo \n 9,23,10,22, # Libra\n 10,23,11,21, # Scorpio\n 11,22,12,21, # Sagittarius\n 12,22,1,19, # Capricorn\n 1,20,2,18, # Aquarius\n 2,19,3,20 # Pisces\n ]\n for i in 0...12\n return i if month==time[i*4] && day>=time[i*4+1]\n return i if month==time[i*4+2] && day<=time[i*4+3]\n end\n return 0\nend", "def phasea_phase_five_1(group)\n #TODO implement for phase 5_1 in like manner\n set = {'2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, '0': 10, 'J': 11, 'Q': 12, 'K': 13 }\ncolors = {\n 'H': 'r',\n 'D': 'r',\n 'C': 'b',\n 'S': 'b'\n}\nwilds = ['AH', 'AD', 'AC', 'AS']\ncard_counter = 0\nwilds_counter = 0\ndeck_color = nil\nsequence = nil\n\n group.each do |card|\n if wilds.include?(card)\n wilds_counter += 1\n sequence += 1 unless sequence.nil? \n elsif sequence.nil? && deck_color.nil?\n sequence = set[card[0].to_sym] + 1\n deck_color = colors[card[1].to_sym]\n card_counter += 1\n elsif set[card[0].to_sym] == sequence && deck_color == colors[card[1].to_sym]\n sequence += 1\n card_counter += 1\n end\n end\n\n group.length == 4 && card_counter >= 2 && (card_counter + wilds_counter == 4) && wilds_counter != 0 ? 
5.1 : nil\nend", "def update_phase\n new_topic.num_players_alive = num_mafia + num_town\n if phase == 1 || phase == -1\n phase = 0\n time_left = day_timelimit\n elsif phase == 0\n phase = 1\n time_left = night_timelimit\n end\n if new_topic.num_mafia >= new_topic.num_town\n gameover = true\n who_won = 0\n elsif num_mafia == 0\n gameover = true\n who_won = 1\n # else if other win condition \n end\n redirect_to root_path\n end", "def test_Complex_InstanceMethods_phase\n\t\tassert_equal(0.9272952180016122, Complex(3,4).phase)\n\t\tassert_equal(2.214297435588181, Complex(-3,4).phase)\n\tend", "def omega() \r\n # delta_equinox()[ 3 ]\r\n Celes.faom03(@ta) \r\n end", "def phasehunt(date = nil)\n date = DateTime.now unless date\n sdate = date.ajd\n\n adate = sdate - 45\n ad1 = DateTime.jd(adate)\n\n k1 = ((ad1.year + ((ad1.month - 1) *\n (1.0 / 12.0)) - 1900) * 12.3685).floor\n\n adate = nt1 = meanphase(adate, k1)\n\n loop do\n adate += SYNMONTH\n k2 = k1 + 1\n nt2 = meanphase(adate, k2)\n break if nt1 <= sdate && nt2 > sdate\n nt1 = nt2\n k1 = k2\n end\n\n PhaseHunt.new(*[\n truephase(k1, 0.0),\n truephase(k1, 0.25),\n truephase(k1, 0.5),\n truephase(k1, 0.75),\n truephase(k2, 0.0)\n ].map do |_|\n _.new_offset(date.offset)\n end)\n end", "def get_time_tone\r\n #Storm tone\r\n if $game_temp.pze_storm != 0\r\n return Tone.new(-100,-50,50,50)\r\n end\r\n if PZE::CLOCK == 0 #Majora's Mask Mode\r\n case $game_system.pze_day\r\n when 1 #1st Day\r\n case $game_system.pze_hour\r\n #the 1st Day begins - daytime\r\n when 6\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-28,-18,48,45) #6:00\r\n else \r\n return Tone.new(-25,-16,46,43) #6:30\r\n end\r\n when 7\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-23,-14,43,40) #7:00\r\n else\r\n return Tone.new(-19,-12,41,35) #7:30\r\n end\r\n when 8\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-15,-10,38,30) #8:00\r\n else\r\n return Tone.new(-11,-8,32,25) #8:30\r\n end\r\n when 9\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-7,-5,25,18) #9:00\r\n else\r\n return Tone.new(-4,-2,18,10) #9:30\r\n end\r\n when 10\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-2,7,5) #10:00\r\n else\r\n return Tone.new(0,0,0,0) #10:30\r\n end\r\n when 11,12\r\n return Tone.new(0,0,0,0)\r\n when 13\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,0,0,0) #13:00\r\n else\r\n return Tone.new(2,1,-5,2) #13:30\r\n end\r\n when 14\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(5,3,-8,4) #14:00\r\n else\r\n return Tone.new(9,5,-10,6) #14:30\r\n end\r\n when 15\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(12,8,-12,9) #15:00\r\n else\r\n return Tone.new(15,10,-14,12) #15:30\r\n end\r\n when 16\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(18,15,-18,15) #16:00\r\n else\r\n return Tone.new(25,8,-25,23) #16:30\r\n end\r\n when 17\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(40,-10,-60,32) #17:00\r\n else\r\n return Tone.new(55,-18,-50,40) #17:30\r\n end\r\n #nighttime begins\r\n when 18\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(25,-23,-30,45) #18:00\r\n else\r\n return Tone.new(5,-26,-15,48) #18:30\r\n end\r\n when 19\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-20,-30,0,50) #19:00\r\n else\r\n return Tone.new(-35,-32,2,52) #19:30\r\n end\r\n when 20\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-45,-35,5,54) #20:00\r\n else\r\n return Tone.new(-65,-40,10,56) #20:30\r\n end\r\n when 21\r\n if $game_system.pze_minute < 30 \r\n return 
Tone.new(-75,-45,15,58) #21:00\r\n else\r\n return Tone.new(-85,-50,17,61) #21:30\r\n end\r\n when 22\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-90,-55,20,65) #22:00\r\n else\r\n return Tone.new(-95,-60,22,70) #22:30\r\n end\r\n when 23\r\n return Tone.new(-100,-60,25,80)\r\n else\r\n return Tone.new(0,0,0,0) #This will not appear ingame\r\n end\r\n when 2 #2nd Day\r\n case $game_system.pze_hour\r\n #the 2nd Day begins\r\n when 0 \r\n return Tone.new(-100,-65,25,85)\r\n when 1\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-100,-60,25,88) #1:00\r\n else\r\n return Tone.new(-100,-65,25,95) #1:30\r\n end\r\n when 2\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-100,-65,25,100) #2:00\r\n else\r\n return Tone.new(-100,-65,25,105) #2:30\r\n end\r\n when 3\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-95,-62,25,110) #3:00\r\n else\r\n return Tone.new(-90,-60,25,115) #3:30\r\n end\r\n when 4\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-85,-55,25,120) #4:00\r\n else\r\n return Tone.new(-80,-50,25,125) #4:30\r\n end\r\n when 5\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-75,-45,25,130) #5:00\r\n else\r\n return Tone.new(-70,-40,25,135) #5:30\r\n end\r\n #daytime begins\r\n when 6\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-65,-35,25,140) #6:00\r\n else \r\n return Tone.new(-60,-30,25,145) #6:30\r\n end\r\n when 7\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-55,-25,25,150) #7:00\r\n else\r\n return Tone.new(-50,-20,25,150) #7:30\r\n end\r\n when 8\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-45,-15,25,150) #8:00\r\n else\r\n return Tone.new(-43,-13,25,150) #8:30\r\n end\r\n when 9,10\r\n return Tone.new(-40,-10,25,150)\r\n when 11\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-17,0,5,150) #11:00\r\n else\r\n return Tone.new(-15,-2,3,150) #11:30\r\n end\r\n when 12\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-13,-5,0,150) #12:00\r\n else\r\n return Tone.new(-11,-7,-2,150) #12:30\r\n end\r\n when 13\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-10,-9,-5,150) #13:00\r\n else\r\n return Tone.new(-10,-11,-7,150) #13:30\r\n end\r\n when 14\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-10,-13,-10,150) #14:00\r\n else\r\n return Tone.new(-10,-15,-13,150) #14:30\r\n end\r\n when 15\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-10,-17,-16,150) #15:00\r\n else\r\n return Tone.new(-10,-20,-19,150) #15:30\r\n end\r\n when 16\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-10,-23,-22,150) #16:00\r\n else\r\n return Tone.new(0,-25,-25,150) #16:30\r\n end\r\n when 17\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(15,-30,-30,150) #17:00\r\n else\r\n return Tone.new(10,-35,-25,150) #17:30\r\n end\r\n #nighttime begins\r\n when 18\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-10,-40,-10,150) #18:00\r\n else\r\n return Tone.new(-30,-45,0,150) #18:30\r\n end\r\n when 19\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-55,-50,5,150) #19:00\r\n else\r\n return Tone.new(-65,-50,10,145) #19:30\r\n end\r\n when 20\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-70,-50,13,140) #20:00\r\n else\r\n return Tone.new(-75,-55,16,135) #20:30\r\n end\r\n when 21\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-55,19,120) #21:00\r\n else\r\n return Tone.new(-85,-55,21,105) #21:30\r\n end\r\n when 22\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-90,-60,23,95) #22:00\r\n else\r\n return 
Tone.new(-95,-60,25,85) #22:30\r\n end\r\n when 23\r\n return Tone.new(-100,-60,25,80)\r\n else\r\n return Tone.new(0,0,0,0) #This will not appear ingame\r\n end\r\n when 3 #Final Day\r\n case $game_system.pze_hour\r\n #the Final Day begins\r\n when 0 \r\n return Tone.new(-100,-60,25,80)\r\n when 1\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-97,-50,50,50) #1:00\r\n else\r\n return Tone.new(-95,-50,50,50) #1:30\r\n end\r\n when 2\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-90,-50,50,50) #2:00\r\n else\r\n return Tone.new(-85,-50,50,50) #2:30\r\n end\r\n when 3\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-50,50,50) #3:00\r\n else\r\n return Tone.new(-75,-50,50,50) #3:30\r\n end\r\n when 4\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-65,-50,50,50) #4:00\r\n else\r\n return Tone.new(-55,-40,50,50) #4:30\r\n end\r\n when 5\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-45,-30,50,50) #5:00\r\n else\r\n return Tone.new(-35,-20,50,50) #5:30\r\n end\r\n #daytime begins\r\n when 6\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-10,50,25) #6:00\r\n else \r\n return Tone.new(0,-8,40,20) #6:30\r\n end\r\n when 7\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-6,30,15) #7:00\r\n else\r\n return Tone.new(0,-5,20,10) #7:30\r\n end\r\n when 8\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-3,14,8) #8:00\r\n else\r\n return Tone.new(0,-2,7,5) #8:30\r\n end\r\n when 9,10\r\n return Tone.new(0,0,0,0)\r\n when 11\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(2,1,-5,3) #11:00\r\n else\r\n return Tone.new(5,3,-10,6) #11:30\r\n end\r\n when 12\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(10,5,-18,9) #12:00\r\n else\r\n return Tone.new(13,10,-25,12) #12:30\r\n end\r\n when 13\r\n return Tone.new(18,13,-30,15) #13:00\r\n when 14\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(23,15,-35,17) #14:00\r\n else\r\n return Tone.new(27,20,-40,22) #14:30\r\n end\r\n when 15\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(35,20,-50,25) #15:00\r\n else\r\n return Tone.new(45,10,-75,30) #15:30\r\n end\r\n when 16\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(55,0,-90,35) #16:00\r\n else\r\n return Tone.new(75,-10,-110,40) #16:30\r\n end\r\n when 17\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(90,-20,-150,50) #17:00\r\n else\r\n return Tone.new(65,-30,-90,55) #17:30\r\n end\r\n #nighttime begins\r\n when 18\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(25,-40,-30,55) #18:00\r\n else\r\n return Tone.new(-20,-50,0,50) #18:30\r\n end\r\n when 19\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-45,-50,20,50) #19:00\r\n else\r\n return Tone.new(-60,-50,40,50) #19:30\r\n end\r\n when 20\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-50,50,50) #20:00\r\n else\r\n return Tone.new(-90,-50,50,50) #20:30\r\n end\r\n when 21,22,23\r\n return Tone.new(-100,-50,50,50)\r\n else\r\n return Tone.new(0,0,0,0) #This will not appear ingame\r\n end\r\n when 4 #New Day\r\n case $game_system.pze_hour\r\n #First 6 hours of New Day\r\n when 0 \r\n return Tone.new(-100,-50,50,50)\r\n when 1\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-97,-50,50,50) #1:00\r\n else\r\n return Tone.new(-95,-50,50,50) #1:30\r\n end\r\n when 2\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-90,-50,50,50) #2:00\r\n else\r\n return Tone.new(-85,-50,50,50) #2:30\r\n end\r\n when 3\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-50,50,50) 
#3:00\r\n else\r\n return Tone.new(-70,-50,50,50) #3:30\r\n end\r\n when 4\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-55,-50,50,50) #4:00\r\n else\r\n return Tone.new(-45,-40,50,50) #4:30\r\n end\r\n when 5\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-25,-30,50,50) #5:00\r\n else\r\n return Tone.new(-5,-20,50,50) #5:30\r\n end\r\n else\r\n return Tone.new(0,0,0,0) #This will not appear ingame\r\n end\r\n end\r\n else #Standard/No Clock mode\r\n if $game_system.pze_winter #winter\r\n case $game_system.pze_hour\r\n when 23, 0..1 #dusk\r\n return Tone.new(-100, -50, 50, 50)\r\n when 2..4 #night\r\n return Tone.new(-50, -40, 50, 50)\r\n when 5..7 #dawn\r\n return Tone.new(0, -30, 50, 50)\r\n when 8..11 #day\r\n return Tone.new(0, 0, 0, 0)\r\n when 12..14 #midday\r\n return Tone.new(15, 10, 10, 15)\r\n when 15..17 #evening\r\n return Tone.new(10, 25, 10, 35)\r\n when 18..19 #sunset\r\n return Tone.new(20, 0, 10, 100)\r\n when 20..22 #twilight\r\n return Tone.new(-25, -50, 20, 50)\r\n end\r\n else #summer/spring/autumn\r\n case $game_system.pze_hour\r\n #day begins\r\n when 6\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-10,50,25) #6:00\r\n else \r\n return Tone.new(0,-8,40,20) #6:30\r\n end\r\n when 7\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-6,30,15) #7:00\r\n else\r\n return Tone.new(0,-5,20,10) #7:30\r\n end\r\n when 8\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(0,-3,14,8) #8:00\r\n else\r\n return Tone.new(0,-2,7,5) #8:30\r\n end\r\n when 9,10\r\n return Tone.new(0,0,0,0)\r\n when 11\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(2,1,-5,3) #11:00\r\n else\r\n return Tone.new(5,3,-10,6) #11:30\r\n end\r\n when 12\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(10,5,-18,9) #12:00\r\n else\r\n return Tone.new(13,10,-25,12) #12:30\r\n end\r\n when 13\r\n return Tone.new(18,13,-30,15)\r\n when 14\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(23,15,-35,17) #14:00\r\n else\r\n return Tone.new(27,20,-40,22) #14:30\r\n end\r\n when 15\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(35,20,-50,25) #15:00\r\n else\r\n return Tone.new(45,10,-75,30) #15:30\r\n end\r\n when 16\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(55,0,-90,35) #16:00\r\n else\r\n return Tone.new(75,-10,-110,40) #16:30\r\n end\r\n when 17\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(90,-20,-150,50) #17:00\r\n else\r\n return Tone.new(65,-30,-90,55) #17:30\r\n end\r\n #night begins\r\n when 18\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(25,-40,-30,55) #18:00\r\n else\r\n return Tone.new(-20,-50,0,50) #18:30\r\n end\r\n when 19\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-45,-50,20,50) #19:00\r\n else\r\n return Tone.new(-60,-50,40,50) #19:30\r\n end\r\n when 20\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-50,50,50) #20:00\r\n else\r\n return Tone.new(-90,-50,50,50) #20:30\r\n end\r\n when 21,22,23,0 \r\n return Tone.new(-100,-50,50,50)\r\n when 1\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-97,-50,50,50) #1:00\r\n else\r\n return Tone.new(-95,-50,50,50) #1:30\r\n end\r\n when 2\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-90,-50,50,50) #2:00\r\n else\r\n return Tone.new(-85,-50,50,50) #2:30\r\n end\r\n when 3\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-80,-50,50,50) #3:00\r\n else\r\n return Tone.new(-70,-50,50,50) #3:30\r\n end\r\n when 4\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-55,-50,50,50) 
#4:00\r\n else\r\n return Tone.new(-45,-40,50,50) #4:30\r\n end\r\n when 5\r\n if $game_system.pze_minute < 30 \r\n return Tone.new(-25,-30,50,50) #5:00\r\n else\r\n return Tone.new(-5,-20,50,50) #5:30\r\n end\r\n end\r\n end\r\n end\r\n end", "def calculate_hu_moments\n # calculate the central moments\n central_moments = calculate_central_moments\n self.first_moment_HU = (central_moments[2][0] + central_moments[0][2])\n self.second_moment_HU = ((central_moments[2][0] - central_moments[0][2]) ** 2) + (4 * (central_moments[1][1] ** 2))\n self.third_moment_HU = ((central_moments[3][0] - (3 * central_moments[1][2])) ** 2) + ((((3 * central_moments[2][1]) - central_moments[0][3])) ** 2)\n\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
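The moonphase document above returns an index from 0 to 7; callers usually map that index back to a phase name as listed in the query. A short usage sketch, assuming moonphase(time) is defined exactly as in the record (the PHASE_NAMES constant is illustrative, not part of the original code):

    PHASE_NAMES = ['New Moon', 'Waxing Crescent', 'First Quarter', 'Waxing Gibbous',
                   'Full Moon', 'Waning Gibbous', 'Last Quarter', 'Waning Crescent']

    # moonphase expects a UTC time object responding to year/mon/day/hour/min/sec.
    index = moonphase(Time.now.utc)   # Integer in 0..7
    puts PHASE_NAMES[index]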
Calculates the zodiac sign based on the given month and day: 0 is Aries, 11 is Pisces. Month is 1 for January, and so on.
def zodiac(month,day) time = [ 3,21,4,19, # Aries 4,20,5,20, # Taurus 5,21,6,20, # Gemini 6,21,7,20, # Cancer 7,23,8,22, # Leo 8,23,9,22, # Virgo 9,23,10,22, # Libra 10,23,11,21, # Scorpio 11,22,12,21, # Sagittarius 12,22,1,19, # Capricorn 1,20,2,18, # Aquarius 2,19,3,20 # Pisces ] for i in 0...12 return i if month==time[i*4] && day>=time[i*4+1] return i if month==time[i*4+2] && day<=time[i*4+3] end return 0 end
[ "def find_zodiac_sign(birthdate)\n bday_split = birthdate.split(\"/\")\n return Date.new(bday_split[0].to_i,bday_split[1].to_i,bday_split[2].to_i).zodiac_sign\nend", "def zday; '%02d' % mday; end", "def zmonth; '%02d' % month; end", "def zodiacOpposite(sign)\n return (sign+6)%12\nend", "def absolute_day(month, day)\n\n mo = {\n 1 => 31,\n 2 => 28,\n 3 => 31,\n 4 => 30,\n 5 => 31,\n 6 => 30,\n 7 => 31,\n 8 => 31,\n 9 => 30,\n 10 => 31,\n 11 => 30,\n 12 => 31,\n }\n\n # hash v. loop of arrays, computation v. memory tradeoff\n\n total_days = 0\n\n\n (1...month).each do |i| #1...month is (month-1)\n total_days += mo[i]\n end\n\n total_days + day\n\nend", "def ord(day)\n\nend", "def monthDays(y, m)\n if (LunarInfo[y - 1900] & (0x10000 >> m)).zero?\n return 29\n else\n return 30\n end\n end", "def month(input) = (day_of_year(input) - 1) / 30 + 1", "def daysinmonth(year, month)\n return (Date.new(year, 12, 31) << (12-month)).day\n end", "def monthDays(y, m)\n if ((@@lunar_info[y - 1900] & (0x10000 >> m)) == 0)\n return 29\n else\n return 30\n end\n end", "def monthly_dry_days(day)\n month_result_string(day).count('1')\n end", "def days_in_month(month=nil)\n month ||= Date.today.month\n days_in_mon = Date.civil(Date.today.year, month, -1).day\n end", "def month_cardinal(month)\n validate_month(month)\n\n digit = @@months.index(month) + 1\n if (digit < 10)\n digit = \"0#{digit}\"\n end\n\n \"#{digit}\"\n end", "def siguiente_dia(fecha)\r\n d,m,a=descomponer_fechas(fecha)\r\n d_n=dia_sig(d)\r\n fecha_sig=d_n+\"/\"+m+\"/\"+a\r\n if(!fecha_valida(fecha_sig))\r\n m_n=mes_sig(m)\r\n fecha_sig=\"01/\"+m_n+\"/\"+a\r\n if(!fecha_valida(fecha_sig))\r\n a=(a.to_i+1).to_s\r\n fecha_sig=\"01/01/\"+a\r\n end\r\n end\r\n return fecha_sig\r\nend", "def day_of_month\n ordinal = case @time.day\n when 1 then \"st\"\n when 2 then \"nd\"\n when 3 then \"rd\"\n else \"th\"\n end\n \"#{@time.day}#{ordinal}\"\n end", "def days_in_month(date)\n (Date.new(date.year, 12, 31) << (12 - date.month)).day\n end", "def monthdays; end", "def ordinal_day(day = @day)\n day.to_s + case day.to_s\n when '11', '12', '13'\n 'th'\n when /1$/\n 'st'\n when /2$/\n 'nd'\n when /3$/\n 'rd'\n when /(4|5|6|7|8|9|0)$/\n 'th'\n else\n ''\n end\n end", "def count_of_days_in(month)\n 28 + (month + (month / 8)) % 2 + 2 % month + 2 * (1 / month)\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
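A worked check of the zodiac lookup above: month 3 with a day of 21 or later matches the first table row, so the function returns 0 (Aries); month 12 with a day of 22 or later returns 9 (Capricorn). A hedged sketch, assuming zodiac(month, day) is defined as in the record; the SIGNS names follow the conventional ordering rather than coming from the source:

    SIGNS = ['Aries', 'Taurus', 'Gemini', 'Cancer', 'Leo', 'Virgo',
             'Libra', 'Scorpio', 'Sagittarius', 'Capricorn', 'Aquarius', 'Pisces']

    puts zodiac(3, 25)            # => 0, i.e. SIGNS[0] == 'Aries'
    puts SIGNS[zodiac(12, 25)]    # => 'Capricorn' (month 12, day >= 22)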
Returns the opposite of the given zodiac sign. 0 is Aries, 11 is Pisces.
def zodiacOpposite(sign) return (sign+6)%12 end
[ "def reverse_sign(an_integer)\n return 0 - an_integer\nend", "def opposite(number)\n 0 - number\nend", "def opposite(number)\n return 0 - number\nend", "def flip_sign\n self * -1\n end", "def get_sign(coefficient)\r\n if coefficient < 0\r\n '-'\r\n else \r\n '+' \r\n end\r\n end", "def negate\n @value *= -1\n end", "def fixnum_neg\n fmap(fixnum_pos) { |f| f * -1 }\n end", "def negative(number)\n if number.positive?\n return -(number)\n elsif number.negative?\n return number\n elsif number.zero?\n return 0\n end\nend", "def negative?\n @sign < 0\n end", "def negative(number)\n return number if number.negative? || number.zero?\n -number\nend", "def negation\n \"not\" if negate?\n end", "def negate?\n @negate\n end", "def negate?\n @negate\n end", "def opposite(direction)\n if direction & UNDER != 0\n opposite(direction >> UNDER_SHIFT) << UNDER_SHIFT\n else\n case direction\n when N then S\n when S then N\n when E then W\n when W then E\n when NE then SW\n when NW then SE\n when SE then NW\n when SW then NE\n end\n end\n end", "def sign\n self >= 0? 1 : -1\n end", "def negate\n change(:year => year * -1)\n end", "def sign\n if finite?\n 1 / self > 0 ? 1 : -1\n else\n [self].pack(\"G\").unpack1(\"B*\")[0] == \"1\" ? -1 : 1\n end\n end", "def negative(number)\n if number > 0\n -number\n else\n return number\n end\nend", "def opposite; end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
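The zodiacOpposite record above is plain modular arithmetic: adding 6 to a sign index and wrapping at 12 lands on the sign directly across the zodiac wheel. Two quick worked values, assuming the function is defined as in the record:

    puts zodiacOpposite(0)   # => 6, since (0 + 6) % 12 == 6  (Aries -> Libra)
    puts zodiacOpposite(9)   # => 3, since (9 + 6) % 12 == 3  (Capricorn -> Cancer)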
unmark a line item for refulfillment; chose not to increment the inventory count due to the possibility that inventory was refreshed, so better to err on the side of less inventory
def unrefulfill_line_item(line_item_id) li = Spree::LineItem.find(line_item_id) li.refulfill = nil li.save end
[ "def decrease_ingredients_stock line_items\n line_items.each do |item|\n (item.ingredients + Product.find(item.product_id).ingredients).each do |ingredient|\n ingredient.update_column(:quantity, ingredient.quantity - 1)\n if ingredient.quantity == 0\n ingredient.update_column(:active, false)\n end\n end\n end\n end", "def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= quantity_to_be_recovered \n self.save \n \n self.unmark_as_finished\n \n item = self.item \n item.update_ready_quantity\n \n return self \n end", "def refill(item_name:, quantity:)\n total_items_quantity[item_name] += quantity\n items_running_low[item_name] = false\n end", "def update_quantity line_item_id, num\n line_item_id = line_item_id.to_i\n num = num.to_i\n li = line_items.detect{|x| x.id == line_item_id}\n\n return nil unless li\n\n li = set_quantity_of_line li, num\n return false unless li\n\n if li.quantity == 0\n line_items.delete(li)\n li.destroy\n return nil\n end\n\n # Recompute shipping next time around\n shipping_computed_at = nil\n\n li\n end", "def uncomplete_item\n \t@completed_status = false\n end", "def doPurchase(item)\n item.quantity = Integer(item.quantity)\n\t if item.quantity > 0\n\t item.quantity = item.quantity - 1\n\t puts \"Removed 1 \" + item.format + \" of \" + item.title + \" by \" + item.artist + \" from the inventory\"\n\t else\n\t puts \"Sorry, we have no more \" + item.format + \" format of title: \" + item.title + \" by artist: \" + item.artist + \" at this time.\"\n\t end\n end", "def decrement_inventory(variant_id,order_line_item_qty)\n\t\t\tvariant = ShopifyAPI::Variant.find(variant_id)\n\t\t\tvariant.inventory_quantity -= order_line_item_qty\n\t\t\tvariant.save\n\t\tend", "def remove_from_shipment(shipment, line_item, quantity)\n return 0 if quantity == 0 || shipment.shipped?\n\n variant = line_item.variant\n\n shipment_units = shipment.inventory_units_for_item(line_item, variant).reject do |unit|\n unit.state == 'shipped'\n end.sort_by(&:state)\n\n removed_quantity = 0\n\n shipment_units.each do |inventory_unit|\n break if removed_quantity == quantity\n inventory_unit.destroy\n removed_quantity += 1\n end\n\n shipment.destroy if shipment.inventory_units.count == 0\n\n if order.completed?\n shipment.stock_location.restock variant, removed_quantity, shipment\n end\n\n removed_quantity\n end", "def decrement_inventory\n self.menu_items.each do |menu_item_id, amt|\n unless MenuItem.decrement_inventory(menu_item_id, amt)\n # add error msg to form and return false since not enough ingredients\n # exist to fulfill the order\n errors.add_to_base(\"Not enough ingredients left in inventory to complete this order\")\n return false\n end\n end\n end", "def decline_refund\n response = JSON.parse(@client.patch(\"items/#{send(:id)}/decline_refund\").body)\n @attributes = response['items']\n true\n end", "def decrease_order_item_inventory(order_item)\n item = order_item.variant || order_item.product\n item.inventory -= order_item.quantity\n item.save!\n end", "def release_quantity_of_item(item, quantity)\r\n if self.items.include?(item)\r\n item.quantity -= quantity\r\n end\r\n end", "def unlink_reset\n @reset = nil\n if @inventory\n @inventory.items.each do |item|\n item.unlink_reset\n end\n end\n return\n end", "def issue_refer_line_items\n line_items.each do |line|\n logger.debug \"==@@@@==SalesOrder refer_line_id== #{line.refer_line_id}\"\n po_line = LineItem.find(line.refer_line_id)\n po_line.update_attribute(:quantity_issued, po_line.quantity_issued + line.quantity)\n\n 
line.update_attribute(:cart_id, nil)\n end\n end", "def rollback_fulfill_subrequest\n disbursement_quantity = (self[:quantity_disburse].nil?) ? 0 : self[:quantity_disburse]\n loan_quantity = (self[:quantity_loan].nil?) ? 0 : self[:quantity_loan]\n\n @item = self.item\n @item.update!(:quantity => item[:quantity] + disbursement_quantity + loan_quantity)\n @item.update!(:quantity_on_loan => item[:quantity_on_loan] - loan_quantity)\n end", "def withdraw exit_date, estimated_return_date, pickup_company, pickup_company_contact, additional_comments, quantity, folio\n \n return self.status if cannot_withdraw?\n\n if quantity != '' and quantity < self.quantity.to_i\n self.quantity = self.quantity.to_i - quantity\n quantity_withdrawn = quantity\n else\n self.status = InventoryItem::OUT_OF_STOCK\n quantity_withdrawn = self.quantity\n self.quantity = 0\n end\n \n if self.save\n inventory_item = InventoryItem.where( 'actable_id = ? AND actable_type = ?', self.id, 'BulkItem' ).first\n if self.warehouse_locations?\n quantity_left = quantity\n if quantity != '' and quantity < ( self.quantity.to_i + quantity_withdrawn.to_i )\n item_location = self.item_locations.where( 'quantity >= ?', quantity ).first\n location = item_location.warehouse_location\n location.remove_quantity( inventory_item.id, quantity )\n elsif quantity != ''\n while quantity_left > 0\n item_location = self.item_locations.first\n location = item_location.warehouse_location\n if quantity_left >= item_location.quantity \n current_location_quantity = item_location.quantity \n location.remove_item( inventory_item.id )\n self.item_locations.delete( item_location )\n location.update_status\n else\n location.remove_quantity( inventory_item.id, quantity_left )\n end\n quantity_left = quantity_left - current_location_quantity\n end\n else\n item_location = self.item_locations.first\n location = item_location.warehouse_location\n location.remove_item( inventory_item.id )\n self.item_locations.delete( item_location )\n location.update_status\n end\n end\n CheckOutTransaction.create( :inventory_item_id => inventory_item.id, :concept => 'Salida granel', :additional_comments => additional_comments, :exit_date => exit_date, :estimated_return_date => estimated_return_date, :pickup_company => pickup_company, :pickup_company_contact => pickup_company_contact, :quantity => quantity_withdrawn, :folio => folio )\n return true\n end\n\n return false\n end", "def unequip\n @ship_item = ShipItem.find(params[:id])\n if ( current_ship_items_item_unequipped(@ship_item.item_id) != nil )\n current_ship_items_item_unequipped(@ship_item.item_id).update_attribute(:quantity, current_ship_items_item_unequipped(@ship_item.item_id).quantity+1)\n @ship_item.destroy\n \n respond_to do |format|\n flash[:notice] = 'Unequipped '+ Item.find(@ship_item.item_id).name\n format.html { redirect_to(current_ship) }\n format.xml { render :xml => @ship_item }\n end\n else\n @ship_item.update_attribute(:equiped, false)\n \n respond_to do |format|\n flash[:notice] = 'Unequipped '+ Item.find(@ship_item.item_id).name\n format.html { redirect_to(current_ship) }\n format.xml { render :xml => @ship_item }\n end\n end\n end", "def increase_quantity_by_reverting_shipping_committed_line_item!(sales_order_line_item_commitment)\n raise \"Not shipped\" if sales_order_line_item_commitment.unshipped?\n self.quantity += sales_order_line_item_commitment.quantity\n save!\n end", "def mark_monthlyinvitems_for_removal \n monthlyinvitems.each do |monthlyinvitem|\n monthlyinvitem.mark_for_destruction if 
monthlyinvitem.qty.to_f == 0.0\n end \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the current rock-release-based prefix for rock packages
def rock_release_prefix(release_name = nil) release_name ||= rock_release_name if release_name pkg_prefix_base + "-#{release_name}-" else pkg_prefix_base + "-" end end
[ "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n pkg_prefix_base + \"-#{release_name}-\"\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def prefix\n Babushka::BrewHelper.prefix\n end", "def base_prefix\n Starter::Config.read[:prefix]\n end", "def get_queue_prefix(args)\n release_version = args[:release_version]\n\n parts = release_version.split('~')[1].split('.')\n sha = parts[1]\n release_parts = parts[0].split('-')\n if release_parts.size() == 2\n return release_parts[1] + '-' + sha\n else\n return parts[0] + '-' + sha\n end\nend", "def current_prefix; end", "def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend", "def exec_prefix() return @prefix end", "def revision_prefix\n revision[0, 6]\n end", "def prefix\n if brewed?\n # Homebrew since a long while only supports frameworked python\n HOMEBREW_PREFIX/\"opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\"\n elsif from_osx?\n # Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.\n Pathname.new(\"#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\")\n else\n # What Python knows about itself\n Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)\n end\n end", "def set_prefix\n @prefix ||= Octopolo::SemverTagScrubber.scrub_prefix(git.semver_tags.last)\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def prefix_for(ns_uri)\n if namespaces_by_uri().has_key?(ns_uri)\n namespaces_by_uri()[ns_uri].prefix || \"\" # namespace.prefix returns nil if there is no prefix defined (default prefix)\n end\n end", "def version_tag_prefix\n if root_path == repo.root_path\n 'v'\n else\n (repo_rel_path / 'v').to_s\n end\n end", "def get_pname(resource)\n case resource\n when RDF::Node\n return options[:unique_bnodes] ? 
resource.to_unique_base : resource.to_base\n when RDF::URI\n uri = resource.to_s\n else\n return nil\n end\n\n pname = case\n when @uri_to_pname.key?(uri)\n return @uri_to_pname[uri]\n when u = @uri_to_prefix.keys.sort_by {|uu| uu.length}.reverse.detect {|uu| uri.index(uu.to_s) == 0}\n # Use a defined prefix\n prefix = @uri_to_prefix[u]\n unless u.to_s.empty?\n prefix(prefix, u) unless u.to_s.empty?\n log_debug(\"get_pname\") {\"add prefix #{prefix.inspect} => #{u}\"}\n # Escape suffix, as necessary\n RDF::URI(uri).pname(prefixes: {prefix => u})\n end\n when @options[:standard_prefixes] && vocab = RDF::Vocabulary.each.to_a.detect {|v| uri.index(v.to_uri.to_s) == 0}\n prefix = vocab.__name__.to_s.split('::').last.downcase\n @uri_to_prefix[vocab.to_uri.to_s] = prefix\n prefix(prefix, vocab.to_uri) # Define for output\n log_debug(\"get_pname\") {\"add standard prefix #{prefix.inspect} => #{vocab.to_uri}\"}\n RDF::URI(uri).pname(prefixes: {prefix => vocab.to_uri})\n else\n nil\n end\n\n # Make sure pname is a valid pname\n if pname\n md = Terminals::PNAME_LN.match(pname) || Terminals::PNAME_NS.match(pname)\n pname = nil unless md.to_s.length == pname.length\n end\n\n @uri_to_pname[uri] = pname\n end", "def get_pname(resource)\n case resource\n when RDF::Node\n return options[:unique_bnodes] ? resource.to_unique_base : resource.to_base\n when RDF::URI\n uri = resource.to_s\n else\n return nil\n end\n\n #log_debug {\"get_pname(#{resource}), std?}\"}\n pname = case\n when @uri_to_pname.key?(uri)\n return @uri_to_pname[uri]\n when u = @uri_to_prefix.keys.detect {|u| uri.index(u.to_s) == 0}\n # Use a defined prefix\n prefix = @uri_to_prefix[u]\n unless u.to_s.empty?\n prefix(prefix, u) unless u.to_s.empty?\n #log_debug(\"get_pname\") {\"add prefix #{prefix.inspect} => #{u}\"}\n uri.sub(u.to_s, \"#{prefix}:\")\n end\n when @options[:standard_prefixes] && vocab = RDF::Vocabulary.each.to_a.detect {|v| uri.index(v.to_uri.to_s) == 0}\n prefix = vocab.__name__.to_s.split('::').last.downcase\n @uri_to_prefix[vocab.to_uri.to_s] = prefix\n prefix(prefix, vocab.to_uri) # Define for output\n #log_debug {\"get_pname: add standard prefix #{prefix.inspect} => #{vocab.to_uri}\"}\n uri.sub(vocab.to_uri.to_s, \"#{prefix}:\")\n else\n nil\n end\n\n # Make sure pname is a valid pname\n if pname\n md = PNAME_LN.match(pname) || PNAME_NS.match(pname)\n pname = nil unless md.to_s.length == pname.length\n end\n\n @uri_to_pname[uri] = pname\n end", "def prefix\n @prefix ||= self.meta[:prefix]\n return @prefix if @prefix.present?\n\n civet_params = read_cbrain_yaml\n file_args = civet_params[:file_args] || { \"0\" => {} }\n file0 = file_args[\"0\"] || {}\n myprefix = file0[:prefix] || civet_params[:prefix] # new convention || old convention\n\n @prefix = self.meta[:prefix] = myprefix\n end", "def base_prefix\n HaridsyncHelpers.ensure_uppercase_dn_component(group['ou'] || DEFAULT_PREFIX)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the current rock-release-based prefix for rock (ruby) packages
def rock_ruby_release_prefix(release_name = nil)
    rock_release_prefix(release_name) + "ruby-"
end
[ "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n pkg_prefix_base + \"-#{release_name}-\"\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def prefix\n Babushka::BrewHelper.prefix\n end", "def base_prefix\n Starter::Config.read[:prefix]\n end", "def get_queue_prefix(args)\n release_version = args[:release_version]\n\n parts = release_version.split('~')[1].split('.')\n sha = parts[1]\n release_parts = parts[0].split('-')\n if release_parts.size() == 2\n return release_parts[1] + '-' + sha\n else\n return parts[0] + '-' + sha\n end\nend", "def prefix\n if brewed?\n # Homebrew since a long while only supports frameworked python\n HOMEBREW_PREFIX/\"opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\"\n elsif from_osx?\n # Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.\n Pathname.new(\"#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\")\n else\n # What Python knows about itself\n Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)\n end\n end", "def exec_prefix() return @prefix end", "def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def current_prefix; end", "def version_tag_prefix\n if root_path == repo.root_path\n 'v'\n else\n (repo_rel_path / 'v').to_s\n end\n end", "def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + canonize(name)\n else\n \"ruby-\" + canonize(name)\n end\n end", "def set_prefix\n @prefix ||= Octopolo::SemverTagScrubber.scrub_prefix(git.semver_tags.last)\n end", "def rpm_name\n \"#{RPM_PREFIX}#{self.name}\"\n end", "def powershell_prefix\n [\n Mixlib::Install::Generator::PowerShell.get_script(\"helpers.ps1\"),\n \"$platform_architecture = Get-PlatformArchitecture\",\n \"$platform_version = Get-PlatformVersion\",\n ].join(\"\\n\")\n end", "def package_name\n @platform.package_name(self)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create a debian package name from a given plain name according to build type, release name and release_prefix setting
def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)
    if build_type == :ruby
        if with_rock_release_prefix
            rock_release_prefix(release_name) + "ruby-" + Deb.canonize(name)
        else
            pkg_prefix_base + "-ruby-" + Deb.canonize(name)
        end
    else
        if with_rock_release_prefix
            rock_release_prefix(release_name) + Deb.canonize(name)
        else
            pkg_prefix_base + "-" + Deb.canonize(name)
        end
    end
end
[ "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end", "def package_name\n case arch\n when 'any', 'all', nil\n \"#{name}-#{version}\"\n else\n \"#{name}-#{version}-#{arch}\"\n end\n end", "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + canonize(name)\n else\n \"ruby-\" + canonize(name)\n end\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end", "def package_name\n format(node['kong']['package_file'], substitutions)\n end", "def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end", "def package_name(options={})\n if platform = options[:platform]\n \"#{name}-#{version}-#{platform}\"\n else\n \"#{name}-#{version}\"\n end\n end", "def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of Debian package names can only include \" \\\n \"lower case alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n\n converted\n end\n end", "def package_name\n @version ? 
\"#{@name}-#{@version}\" : @name\n end", "def name\n if @name =~ /[A-Z]/\n logger.warn(\"apk packages should not have uppercase characters in their names\")\n @name = @name.downcase\n end\n\n if @name.include?(\"_\")\n logger.warn(\"apk packages should not include underscores\")\n @name = @name.gsub(/[_]/, \"-\")\n end\n\n if @name.include?(\" \")\n logger.warn(\"apk packages should not contain spaces\")\n @name = @name.gsub(/[ ]/, \"-\")\n end\n\n return @name\n end", "def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end", "def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n pkg_prefix_base + \"-#{release_name}-\"\n end", "def full_name\n \"#{@package}.#{parameterized_name}\"\n end", "def format_versioned_package(name, version)\n if version\n \"#{name}-#{version}\"\n else\n name\n end\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The debian name of a meta package, i.e. rock-[release-name]-meta-<name>; the release name can be avoided by setting with_rock_release_prefix to false
def debian_meta_name(name, with_rock_release_prefix = true)
    if with_rock_release_prefix
        rock_release_prefix + "meta-" + Deb.canonize(name)
    else
        pkg_prefix_base + "meta-" + Deb.canonize(name)
    end
end
[ "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n pkg_prefix_base + \"-#{release_name}-\"\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end", "def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + canonize(name)\n else\n \"ruby-\" + canonize(name)\n end\n end", "def rpm_name\n \"#{RPM_PREFIX}#{self.name}\"\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end", "def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end", "def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end", "def package_name\n case arch\n when 'any', 'all', nil\n \"#{name}-#{version}\"\n else\n \"#{name}-#{version}-#{arch}\"\n end\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def built_rpm_names(build)\n build.\n # Just names from the rpms without nvr info\n brew_rpms.map(&:name_nonvr).\n # Remove any duplicates\n uniq.\n # Filter out any debuginfo names\n reject{ |name| name =~ /debuginfo/ }.\n # Remove prefixes if there are any for this product. (Mainly for SCL, see Bug 1003719)\n map { |name| BrewRpmNamePrefix.strip_using_list_of_prefixes(@errata.product.brew_rpm_name_prefixes, name) }\n end", "def package_name\n format(node['kong']['package_file'], substitutions)\n end", "def release_name\n @release_name ||= [\n metadata['name'],\n metadata['version']\n ].join('-')\n end", "def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end", "def use_release_name?\n false\n end", "def package_name\n @package.to_s\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The debian name of a ruby package, i.e. [rock-release-name-]ruby-<name>; the release name prefix can be avoided by setting with_rock_release_prefix to false
def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)
    if with_rock_release_prefix
        rock_ruby_release_prefix(release_name) + Deb.canonize(name)
    else
        "ruby-" + Deb.canonize(name)
    end
end
[ "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + canonize(name)\n else\n \"ruby-\" + canonize(name)\n end\n end", "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end", "def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n pkg_prefix_base + \"-#{release_name}-\"\n end", "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def package_name\n case arch\n when 'any', 'all', nil\n \"#{name}-#{version}\"\n else\n \"#{name}-#{version}-#{arch}\"\n end\n end", "def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end", "def rpm_name\n \"#{RPM_PREFIX}#{self.name}\"\n end", "def package_name\n format(node['kong']['package_file'], substitutions)\n end", "def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end", "def package_name\n case @name\n # The \"opscode\" in /opt/opscode\n when ::ChefUtils::Dist::Org::LEGACY_CONF_DIR\n \"private-chef\"\n else\n @name\n end\n end", "def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end", "def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of Debian package names can only include \" \\\n \"lower case alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n\n converted\n end\n end", "def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of RPM package names can only include \" \\\n \"lowercase alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). 
Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n\n converted\n end\n end", "def fix_name(name)\n if name.start_with?(\"python\")\n # If the python package is called \"python-foo\" strip the \"python-\" part while\n # prepending the package name prefix.\n return [attributes[:python_package_name_prefix], name.gsub(/^python-/, \"\")].join(\"-\")\n else\n return [attributes[:python_package_name_prefix], name].join(\"-\")\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Commit changes of a debian package using dpkg-source --commit in a given directory (or the current one by default)
def dpkg_commit_changes(patch_name, directory = Dir.pwd, prefix: "apaka-", logfile: nil, include_removal: false)
    Dir.chdir(directory) do
        Packager.debug ("commit changes to debian pkg: #{patch_name}")
        # Since dpkg-source will open an editor we have to
        # take this approach to make it pass directly in an
        # automated workflow
        ENV['EDITOR'] = "/bin/true"
        cmd = ["dpkg-source", "--commit"]
        cmd << "--include-removal" if include_removal
        cmd << "."
        cmd << prefix + patch_name

        if !system(*cmd, [:out, :err] => redirection(logfile,"a"), :close_others => true)
            raise RuntimeError, "#{self.class}#{__method__}: failed to commit #{patch_name}"
        end
    end
end
[ "def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.debug (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n system(\"dpkg-source\", \"--commit\", \".\", patch_name, :close_others => true)\n end\n end", "def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.info (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n `dpkg-source --commit . #{patch_name}`\n end\n end", "def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end", "def commit(msg = nil)\n require_cmd! git_cmd\n in_repo { AwesomeSpawn.run \"#{git_cmd} add #{pkg_files.join(' ')}\" }\n super(msg.nil? ? \"updated to #{version}\" : msg)\n self\n end", "def commit(msg=nil)\n in_repo { AwesomeSpawn.run \"#{git_cmd} add #{pkg_files.join(' ')}\" }\n super(msg.nil? ? \"updated to #{version}\" : msg)\n self\n end", "def unpack_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n sudo <<-SUDO\n sh -c '\n cd #{src_dir};\n test -d #{package_dir}.old && rm -fr #{package_dir}.old;\n test -d #{package_dir} && mv #{package_dir} #{package_dir}.old;\n #{src_package[:unpack]}\n chgrp -R #{group} #{package_dir}; \n chmod -R g+w #{package_dir};\n '\n SUDO\n end", "def commit(msg = nil)\n require_dep! 'awesome_spawn'\n require_cmd! 
git_cmd\n\n in_repo { AwesomeSpawn.run \"#{git_cmd} add #{pkg_files.join(' ')}\" }\n super(msg.nil? ? \"updated to #{version}\" : msg)\n self\n end", "def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend", "def commit(opts = {:use_dirstate => true, :update_dirstate => true})\n valid = false # don't update the DirState if this is set!\n \n commit = ((modified || []) + (added || [])).sort\n remove = removed\n xtra = extra.dup\n branchname = xtra[\"branch\"]\n text = description\n \n p1, p2 = parents.map {|p| p.node }\n c1 = repo.changelog.read(p1) # 1 parent's changeset as an array\n c2 = repo.changelog.read(p2) # 2nd parent's changeset as an array\n m1 = repo.manifest.read(c1[0]).dup # 1st parent's manifest\n m2 = repo.manifest.read(c2[0]) # 2nd parent's manifest\n \n if opts[:use_dirstate]\n oldname = c1[5][\"branch\"]\n tests = [ commit.empty?, remove.empty?, !opts[:force],\n p2 == NULL_ID, branchname == oldname ]\n if tests.all?\n UI::status \"nothing changed\"\n return nil\n end\n end\n \n xp1 = p1.hexlify\n xp2 = p2 == NULL_ID ? \"\" : p2.hexlify\n \n Hook.run_hook :pre_commit\n journal = Amp::Mercurial::Journal.new(:opener => repo.store_opener)\n \n fresh = {} # new = reserved haha i don't know why someone wrote \"haha\"\n changed = []\n link_rev = repo.size\n \n (commit + (remove || [])).each {|file| UI::status file }\n \n # foreach file in commit:\n # commit_file file\n # end\n commit.each do |file|\n versioned_file = self[file]\n fresh[file] = versioned_file.commit :manifests => [m1, m2],\n :link_revision => link_rev,\n :journal => journal ,\n :changed => changed\n \n new_flags = versioned_file.flags\n \n # TODO\n # Clean this shit up\n if [ changed.empty? 
|| changed.last != file, \n m2[file] != fresh[file]\n ].all?\n changed << file if m1.flags[file] != new_flags\n end\n m1.flags[file] = new_flags\n \n repo.staging_area.normal file if opts[:use_dirstate]\n end\n \n # add_manifest_entry\n man_entry, updated, added = *add_manifest_entry(:manifests => [m1, m2],\n :changesets => [c1, c2],\n :journal => journal ,\n :link_rev => link_rev,\n :fresh => fresh ,\n :remove => remove ,\n :changed => changed )\n\n # get_commit_text\n text = get_commit_text text, :added => added, :updated => updated,\n :removed => removed, :user => user ,\n :empty_ok => opts[:empty_ok] ,\n :use_dirstate => opts[:use_dirstate]\n \n # atomically write to the changelog\n # add_changelog_entry\n # for the unenlightened, rents = 'rents = parents\n new_rents = add_changelog_entry :manifest_entry => man_entry,\n :files => (changed + removed),\n :text => text,\n :journal => journal,\n :parents => [p1, p2],\n :user => user,\n :date => date,\n :extra => xtra\n \n \n # Write the dirstate if it needs to be updated\n # basically just bring it up to speed\n if opts[:use_dirstate] || opts[:update_dirstate]\n repo.dirstate.parents = new_rents\n removed.each {|f| repo.dirstate.forget(f) } if opts[:use_dirstate]\n repo.staging_area.save\n end\n \n # The journal and dirstates are awesome. Leave them be.\n valid = true\n journal.close\n \n # if an error and we've gotten this far, then the journal is complete\n # and it deserves to stay (if an error is thrown and journal isn't nil,\n # the rescue will destroy it)\n journal = nil\n \n # Run any hooks\n Hook.run_hook :post_commit, :added => added, :modified => updated, :removed => removed, \n :user => user, :date => date, :text => text,\n :revision => repo.changelog.index_size\n return new_rents\n rescue StandardError => e\n if !valid\n repo.dirstate.invalidate!\n end\n if e.kind_of?(AbortError)\n UI::warn \"Abort: #{e}\"\n else\n UI::warn \"Got exception while committing. 
#{e}\"\n UI::warn e.backtrace.join(\"\\n\")\n end\n \n # the journal is a vestigial and incomplete file.\n # destroyzzzzzzzzzzz\n journal.delete if journal\n end", "def upgrade_direct!\n package \"Chef Development Kit v#{package_metadata[:version]}\" do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n end", "def create_deb_file\n log.info(log_key) { \"Creating .deb file\" }\n\n # Execute the build command\n Dir.chdir(Config.package_dir) do\n shellout!(\"fakeroot dpkg-deb #{compression_params} -D --build #{staging_dir} #{package_name}\")\n end\n end", "def download_src(src_package, src_dir)\n deprec.groupadd(group)\n sudo \"test -d #{src_dir} || sudo mkdir #{src_dir}\" \n sudo \"chgrp -R #{group} #{src_dir}\"\n sudo \"chmod -R g+w #{src_dir}\"\n # XXX check if file exists and if we have and MD5 hash or bytecount to compare against\n # XXX if so, compare and decide if we need to download again\n if defined?(src_package[:md5sum])\n md5_clause = \" && echo '#{src_package[:md5sum]}' | md5sum -c - \"\n end\n sudo <<-SUDO\n sh -c \"cd #{src_dir} && test -f #{src_package[:file]} #{md5_clause} || wget --timestamping #{src_package[:url]}\"\n SUDO\n end", "def install_from_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n unpack_src(src_package, src_dir)\n sudo <<-SUDO\n sh -c '\n cd #{package_dir};\n #{src_package[:configure]}\n #{src_package[:make]}\n #{src_package[:install]}\n #{src_package[:post_install]}\n '\n SUDO\n end", "def commit\n svn_exec('commit -m \"Figgy DAO links\"')\n end", "def with_package_source(source_url, source_repos = [], options = {})\n source_prefix = options[:source] ? 'deb-src' : 'deb'\n source_patterns = [source_prefix, source_url] + source_repos \n \n source_contents = File.read '/etc/apt/sources.list'\n sources = source_contents.split(/(\\r|\\n)+/)\n source_exists = sources.any? do |source_line|\n source_frags = source_line.split(' ')\n source_patterns.all? { |pattern| source_frags.any? 
{ |frag| frag == pattern } }\n end\n\n unless source_exists\n File.open('/etc/apt/sources.list', 'a') do |f|\n f.write \"#{source_prefix} #{source_url} #{source_repos.join(' ')}\\n\"\n end\n update_package_metadata\n end\n \n begin\n yield\n ensure\n unless source_exists\n File.open('/etc/apt/sources.list', 'w') { |f| f.write source_contents }\n update_package_metadata \n end\n end\n end", "def raise_pr(namespace)\n branch = \"update-rds-module-#{namespace}\"\n message = \"Update RDS module for #{namespace}\"\n execute \"git checkout -b #{branch}\"\n execute \"git add #{tfdir(namespace)}\"\n execute %(git commit -m \"#{message}\")\n execute %(git push origin #{branch})\n execute %(hub pull-request -m \"#{message}\")\nend", "def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end", "def write_source_patches\n from_directory do\n id = @git.latest_id(\"tetra: sources-tarball\")\n destination_path = File.join(full_path, packages_dir, name)\n @git.format_patch(\"src\", id, destination_path)\n end\n end", "def commit_xml_repo(author_date, array_add=nil, message='')\r\n Dir.chdir(XML_FOLDER)\r\n if array_add and array_add.length > 0\r\n str_add = array_add.reduce('') do |sum, value|\r\n sum + \"\\\"#{value}\\\" \"\r\n end\r\n # puts \"git add #{str_add}\"\r\n system(\"git add #{str_add}\")\r\n else\r\n # puts 'git add .'\r\n system('git add .')\r\n end\r\n # puts \"git commit -am \\\"#{message+author_date}\\\" --quiet --date '#{get_author_date(author_date)}'\"\r\n system(\"git commit -am \\\"#{message+author_date}\\\" --quiet --date '#{get_author_date(author_date)}'\") #\r\n\r\n Dir.chdir('..')\r\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generate the debian/ subfolder including control/rules/install files to prepare the debian package build instructions
def generate_debian_dir(pkginfo, dir, options)
    options, unknown_options = Kernel.filter_options options,
        :distribution => nil,
        :override_existing => true,
        :patch_dir => nil

    distribution = options[:distribution]

    # Prepare fields for template
    package_info = pkginfo
    debian_name = debian_name(pkginfo)
    debian_version = debian_version(pkginfo, distribution)
    versioned_name = versioned_name(pkginfo, distribution)
    short_documentation = pkginfo.short_documentation
    documentation = pkginfo.documentation
    origin_information = pkginfo.origin_information
    source_files = pkginfo.source_files

    upstream_name = pkginfo.name
    copyright = pkginfo.copyright
    license = pkginfo.licenses

    deps = @dep_manager.filtered_dependencies(pkginfo)

    #debian names of rock packages
    deps_rock_packages = deps[:rock]
    deps_osdeps_packages = deps[:osdeps]
    deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact

    dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten
    build_dependencies = dependencies.dup

    this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)
    @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|
        name = debian_name(pkginfo)
        build_dependencies << this_rock_release.packageReleaseName(name)
    end

    # To handle postinstall
    DEFAULT_BUILD_DEPENDENCIES.each do |dep|
        build_dependencies << dep
    end

    DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|
        dependencies << dep
    end

    if pkginfo.build_type == :cmake
        build_dependencies << "cmake"
    elsif pkginfo.build_type == :orogen
        build_dependencies << "cmake"
        orogen_command = pkginfo.orogen_command
    elsif pkginfo.build_type == :autotools
        if pkginfo.using_libtool
            build_dependencies << "libtool"
        end
        build_dependencies << "autotools-dev" # as autotools seems to be virtual...
        build_dependencies << "autoconf"
        build_dependencies << "automake"
        build_dependencies << "dh-autoreconf"
    elsif pkginfo.build_type == :ruby
        if pkginfo.is_bundle?
            build_dependencies << "cmake"
        else
            raise "debian/control: cannot handle ruby package"
        end
    elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package
        build_dependencies << "cmake"
    else
        raise "debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}"
    end

    Packager.info "Required OS Deps: #{deps_osdeps_packages}"
    Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}"

    dir = cleanup_existing_dir(dir, options)
    existing_debian_dir = File.join(pkginfo.srcdir,"debian")
    template_dir =
        if File.directory?(existing_debian_dir)
            existing_debian_dir
        else
            TEMPLATES
        end
    FileUtils.mkdir_p dir

    Find.find(template_dir) do |path|
        next if File.directory?(path)
        template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^w]/, '_'))
        rendered = template.result(binding)

        target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)
        FileUtils.mkdir_p File.dirname(target_path)
        File.open(target_path, "w") do |io|
            io.write(rendered)
        end
    end

    if options[:patch_dir]
        whitelist = [ "debian/rules","debian/control","debian/install" ]
        if patch_pkg_dir(pkginfo.name, options[:patch_dir],
                whitelist: whitelist,
                pkg_dir: pkginfo.srcdir,
                options: patch_options())
            Packager.warn "Overlay patch applied to debian folder of #{pkginfo.name}"
        end
    end

    ########################
    # debian/compat
    ########################
    compatfile = File.join(dir,"compat")
    set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)
end
[ "def create_control_files\n sh \"dpkg-gencontrol\"\n end", "def install_management\n # Needed to play with the configuration database.\n package 'debconf'\n package 'debconf-utils'\n\n # Keys for Debian packages.\n package 'debian-archive-keyring'\n\n # Fetch files via HTTP.\n package 'curl'\n package 'wget'\n\n package 'dpkg-dev' # Builds packages from source.\n package 'openssh-server' # SSH into the box.\n\n # For gems with native extensions.\n package 'build-essential'\n package 'g++'\n\n # Pull code from version control.\n package 'subversion'\n package 'git-core'\n\n package 'avahi-daemon' # mDNS, a.k.a. Bonjour\n package 'ddclient' # dynamic DNS\n end", "def run_install\n require 'fileutils'\n install_path = ARGV.shift || '.'\n FileUtils.mkdir_p install_path unless File.exists?(install_path)\n install_file \"#{CC_ROOT}/config/config.example.yml\", \"#{install_path}/config.yml\"\n install_file \"#{CC_ROOT}/config/config.example.ru\", \"#{install_path}/config.ru\"\n install_file \"#{CC_ROOT}/config/database.example.yml\", \"#{install_path}/database.yml\"\n install_file \"#{CC_ROOT}/actions\", \"#{install_path}/actions\", true\n end", "def setup\n add_pub_key\n set_git\n set_www_dir\n\n puts \"\\nCreate essential files...\"\n \n if not File.exists? \"#{@home}/.bashrc\"\n `touch #{@home}/.bashrc`\n `sudo rm /root/.bashrc`\n `sudo ln -s #{@home}/.bashrc /root/.bashrc`\n end\n\n if not File.exists? \"#{@home}/Plugin.rb\"\n `cp #{@templates_dir}/Plugin.rb #{@home}/Plugin.rb`\n `sudo ln -s #{@templates_dir}/Plugin.rb /root/Plugin.rb`\n end\n\n if File.exists? \"#{@home}/.sealed\"\n File.rename(\"#{@home}/.sealed\", \"#{@home}/.unsealed\")\n end\n\n puts \"\\nSetup complete.\"\n end", "def copy_puppet_install_files\n `cd #{self.path} && mkdir -p puppet/lib/deb`\n `cp -R #{lib_path}/deb/* #{self.path}/puppet/lib/deb/`\n end", "def branded_zone_post_install(options)\n options['zonedir'] = options['zonedir']+\"/\"+options['name']\n if File.directory?(options['zonedir'])\n options['clientdir'] = options['zonedir']+\"/root\"\n var_dir = \"/var/tmp\"\n tmp_dir = options['clientdir']+\"/\"+var_dir\n post_file = tmp_dir+\"/postinstall.sh\"\n tmp_file = \"/tmp/zone_\"+options['name']\n pkg_name = \"pkgutil.pkg\"\n pkg_url = $local_opencsw_mirror+\"/\"+pkg_name\n pkg_file = tmp_dir+\"/\"+pkg_name\n wget_file(options,pkg_url,pkg_file)\n file = File.open(tmp_file,\"w\")\n file.write(\"#!/usr/bin/bash\\n\")\n file.write(\"\\n\")\n file.write(\"# Post install script\\n\")\n file.write(\"\\n\")\n file.write(\"cd #{var_dir} ; echo y |pkgadd -d pkgutil.pkg CSWpkgutil\\n\")\n file.write(\"export PATH=/opt/csw/bin:$PATH\\n\")\n file.write(\"pkutil -i CSWwget\\n\")\n file.write(\"\\n\")\n file.close\n message = \"Information:\\tCreating post install script \"+post_file\n command = \"cp #{tmp_file} #{post_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n else\n handle_output(options,\"Warning:\\tZone #{options['name']} doesn't exist\")\n quit(options)\n end\n return\nend", "def create_pre_and_post_install(type)\n if @plugin.preinstall\n if !File.exists?(@plugin.preinstall)\n puts \"pre-install script '#{@plugin.preinstall}' not found.\"\n raise(Errno::ENOENT, @plugin.preinstall)\n else\n FileUtils.cp(@plugin.preinstall, File.join(@build_dir, 'debian', \"#{@package_name}-#{type}.preinst\"))\n end\n end\n\n if @plugin.postinstall\n if !File.exists?(@plugin.postinstall)\n puts \"post-install script '#{@plugin.postinstall}' not found.\"\n raise(Errno::ENOENT, @plugin.postinstall)\n else\n 
FileUtils.cp(@plugin.postinstall, File.join(@build_dir, 'debian', \"#{@package_name}-#{type}.postinst\"))\n end\n end\n end", "def install!\n require \"sys/uname\"\n require \"fileutils\"\n \n basedir = ::File.dirname(__FILE__)\n \n # Loads and creates the configuration dir\n case Sys::Uname.sysname.downcase.to_sym\n when :freebsd\n etc = \"/usr/local/etc\"\n when :linux\n etc = \"/etc\"\n else\n raise Exception::new(\"You are running on an unknown platform. It cannot be problem, but it's necessary define path to configuration file and define paths in configuration file.\")\n end\n \n etc << \"/rb.rotate\"\n FileUtils.mkdir_p(etc)\n \n # Creates other important directories\n FileUtils.mkdir_p(\"/var/log\")\n FileUtils.mkdir_p(\"/var/lib\")\n \n # Puts configuration files to configuration directory\n source = basedir.dup << \"/install\"\n replacements = { \"%%configuration\" => etc }\n files = [\"rotate.yaml\", \"defaults.yaml\"]\n \n files.each do |file|\n body = ::File.read(source.dup << \"/\" << file << \".initial\")\n replacements.each_pair do |key, value|\n body.gsub! key, value\n end\n ::File.open(etc.dup << \"/\" << file, \"w\") do |io|\n io.write(body)\n end\n end\n \n # Puts to library root path path to configuration directory\n ::File.open(basedir.dup << \"/../paths.conf\", \"w\") do |io|\n io.write(etc.dup << \"/rotate.yaml\")\n end\n end", "def generate_so_dependencies_files\n `cd #{self.path} && mkdir -p puppet/prepare/dependencies/ubuntu`\n ubuntu_dependencies_content = File.read(\"#{Bebox::Project.templates_path}/project/ubuntu_dependencies\")\n File::open(\"#{self.path}/puppet/prepare/dependencies/ubuntu/packages\", \"w\")do |f|\n f.write(ubuntu_dependencies_content)\n end\n end", "def pre_install; end", "def create_deb\n sh \"dpkg-deb --build \\\"#{config.buildroot}\\\" \\\"#{built_deb_path}\\\"\"\n end", "def build_pkg(dist, arch, deps)\n start_dir = Dir.pwd\n build_dir = \"/tmp/rhobuild\"\n version = Rhoconnect::VERSION\n description = '\"Rhoconnect production environment\"'\n prefix = \"/opt/rhoconnect/installer\"\n gem_name = \"rhoconnect-#{version}.gem\"\n\n before_install_script = \"#{build_dir}/unix-like/pre_install.sh\"\n after_install_script = \"#{build_dir}/unix-like/post_install.sh\"\n before_remove_script = \"#{build_dir}/unix-like/pre_uninstall.sh\"\n after_remove_script = \"#{build_dir}/unix-like/post_uninstall.sh\"\n\n `rm -rf #{build_dir}` if File.exist?(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}/unix-like\")\n\n # Copy all necessary Files into the build_dir\n system(\"cp install.sh Gemfile Gemfile.lock #{build_dir}\")\n system(\"cp -r installer/unix-like/*.sh #{build_dir}/unix-like\")\n system(\"cp -r installer/unix-like/*.rb #{build_dir}/unix-like\")\n system(\"cp pkg/#{gem_name} #{build_dir}\")\n\n # cd into the pkg dir so that fpm will create the package into the pkg dir.\n Dir.chdir(\"./pkg\") # it created by build task and should already exist\n\n # Construct fpm command\n fpm_cmd = \"fpm -s dir -t #{dist} -n rhoconnect -v #{version} -a #{arch} -C #{build_dir} --epoch 1 \" +\n \"--before-install #{before_install_script} --after-install #{after_install_script} \" +\n \"--before-remove #{before_remove_script} --after-remove #{after_remove_script} \" +\n \"--prefix #{prefix} --description #{description}\"\n # Add the list of dependencies to the fpm call\n deps.each { |dep| fpm_cmd << \" -d '#{dep}'\" }\n fpm_cmd << \" './'\"\n # Create the package\n system(fpm_cmd)\n # Leave no trace...\n system(\"rm -rf #{build_dir}\")\n 
Dir.chdir(start_dir)\nend", "def make\n dir\n\n # Copy the files over\n copy_files ProScribe.root('data/default/'), dir\n\n # Copy manual files over\n copy_files manual_path, dir, :except => ['Gemfile', 'Gemfile.lock', 'config.ru']\n\n # Merge Scribefile into Protonfile\n File.open(File.join(dir, 'Protonfile'), 'w') { |f| f.write protonfile }\n\n # Extract block comments\n config.files.each do |group|\n ex = ProScribe::Extractor.new(Dir[root(group.source)], root)\n ex.write! File.join(dir, group.prefix || '')\n end\n end", "def generate_pkg_contents\n shellout!(\"pkgsend generate #{source_dir} | pkgfmt > #{pkg_manifest_file}.1\")\n shellout!(\"pkgmogrify -DARCH=`uname -p` #{pkg_manifest_file}.1 #{pkg_metadata_file} #{transform_file} | pkgfmt > #{pkg_manifest_file}.2\")\n end", "def create_deb_file\n log.info(log_key) { \"Creating .deb file\" }\n\n # Execute the build command\n Dir.chdir(Config.package_dir) do\n shellout!(\"fakeroot dpkg-deb #{compression_params} -D --build #{staging_dir} #{package_name}\")\n end\n end", "def prepare_config_files\n #Create .config dir\n #Create tucotuco dir\n #Create short dir\n #Create info file\n end", "def configureInstall\n Dir.chdir(\"nagios-4.0.4\")\n `./configure --with-command-group=nagcmd`\n `make all`\n `make install`\n `make install-init`\n `make install-config`\n `make install-commandmode`\n `make install-webconf`\n end", "def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend", "def build_package_tasks(config)\n # The name of the task to build the package\n package_task_name = \"build_#{config[:package_name]}\"\n\n # Add task name to the list of dependencies for the :deb_packages task\n task :deb_packages => package_task_name\n\n # The path to the package source directory\n pkg_src_dir = File.join(PACKAGE_CONSTRUCTION_DIR, source_dir_name(config))\n\n # Directory task to ensure the existence of the directory\n directory pkg_src_dir\n\n # Create the tarball task\n orig_source_tarball_path = File.join(PACKAGE_CONSTRUCTION_DIR, \"#{orig_tar_ball_name(config)}.orig.tar.gz\")\n\n # The File task to construct the original source tarball.\n file orig_source_tarball_path => PACKAGE_CONSTRUCTION_DIR do\n system \"tar zcf #{orig_source_tarball_path} --directory #{PACKAGE_CONSTRUCTION_DIR} #{source_dir_name(config)}\"\n end\n\n # The path to the debian directory within the extracted source directory\n package_debian_path = File.join(pkg_src_dir, 'debian')\n\n # Directory task to the package debian path to ensure existence.\n directory package_debian_path\n\n # The task that actually constructs the debian package\n task package_task_name => orig_source_tarball_path do\n # Build the spanky little thing.\n debuild_flag = ENV['debuild'] || 'true'\n if debuild_flag == 'true'\n system \"cd #{pkg_src_dir}; debuild -us -uc\"\n else\n puts \"Skipping build; debug flag was set\"\n end\n end\n\n # Ensure we have set up the tasks for all the files to be included\n # in the package.\n config[:exes].each do | exe_name |\n exe_path = File.join(pkg_src_dir, exe_name.split('.').first)\n file exe_path => pkg_src_dir do\n cp exe_name, exe_path\n end\n\n # Add the file path as a dependency of the source tarball\n task orig_source_tarball_path => exe_path\n end\n\n # Create the task to populate the debian directory\n debian_task = 
\"populate_#{config[:package_name]}_debian_files\"\n task debian_task => package_debian_path do\n cp_r \"package_source/#{config[:package_name]}/debian\", pkg_src_dir\n end\n\n # Finally add the debian task as a dependency for the package task.\n task package_task_name => debian_task\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generate the debian_dir for a meta package, i.e. one representing a package set or a full release. Returns [String] the main packages directory
def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: "0.1", distribution: nil)
    existing_debian_dir = File.join("#{name}-#{version}","debian-meta")
    template_dir =
        if File.directory?(existing_debian_dir)
            existing_debian_dir
        else
            TEMPLATES_META
        end

    dir = File.join(base_dir, "debian")
    FileUtils.mkdir_p dir

    debian_name = debian_meta_name(name)
    debian_version = "#{version}"
    if distribution
        debian_version += '~' + distribution
    end

    deps_rock_packages = depends
    deps_osdeps_packages = []
    deps_nonnative_packages = []

    Packager.info "Required OS Deps: #{deps_osdeps_packages}"
    Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}"

    Find.find(template_dir) do |path|
        next if File.directory?(path)
        template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^w]/, '_'))
        begin
            rendered = template.result(binding)
        rescue
            puts "Error in #{path}:"
            raise
        end

        target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)
        FileUtils.mkdir_p File.dirname(target_path)
        File.open(target_path, "w") do |io|
            io.write(rendered)
        end
    end
    return dir
end
[ "def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[:distribution]\n\n # Prepare fields for template\n package_info = pkginfo\n debian_name = debian_name(pkginfo)\n debian_version = debian_version(pkginfo, distribution)\n versioned_name = versioned_name(pkginfo, distribution)\n short_documentation = pkginfo.short_documentation\n documentation = pkginfo.documentation\n origin_information = pkginfo.origin_information\n source_files = pkginfo.source_files\n\n upstream_name = pkginfo.name\n copyright = pkginfo.copyright\n license = pkginfo.licenses\n\n deps = @dep_manager.filtered_dependencies(pkginfo)\n\n #debian names of rock packages\n deps_rock_packages = deps[:rock]\n deps_osdeps_packages = deps[:osdeps]\n deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact\n\n dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten\n build_dependencies = dependencies.dup\n\n this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)\n @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|\n name = debian_name(pkginfo)\n build_dependencies << this_rock_release.packageReleaseName(name)\n end\n\n # To handle postinstall\n DEFAULT_BUILD_DEPENDENCIES.each do |dep|\n build_dependencies << dep\n end\n\n DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|\n dependencies << dep\n end\n\n if pkginfo.build_type == :cmake\n build_dependencies << \"cmake\"\n elsif pkginfo.build_type == :orogen\n build_dependencies << \"cmake\"\n orogen_command = pkginfo.orogen_command\n elsif pkginfo.build_type == :autotools\n if pkginfo.using_libtool\n build_dependencies << \"libtool\"\n end\n build_dependencies << \"autotools-dev\" # as autotools seems to be virtual...\n build_dependencies << \"autoconf\"\n build_dependencies << \"automake\"\n build_dependencies << \"dh-autoreconf\"\n elsif pkginfo.build_type == :ruby\n if pkginfo.is_bundle?\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle ruby package\"\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n dir = cleanup_existing_dir(dir, options)\n existing_debian_dir = File.join(pkginfo.srcdir,\"debian\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES\n end\n FileUtils.mkdir_p dir\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n rendered = template.result(binding)\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n if options[:patch_dir]\n whitelist = [ \"debian/rules\",\"debian/control\",\"debian/install\" ]\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: whitelist,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to debian folder of #{pkginfo.name}\"\n end\n end\n\n ########################\n # debian/compat\n 
########################\n compatfile = File.join(dir,\"compat\")\n set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)\n end", "def package_dir\r\n \"${0%/#{target_name}}\"\r\n end", "def package_dir\n config.package_dir\n end", "def package_install_dir()\n\t\t\t\treturn \"#{$install_base}/packages/#{package_name}\"\n\t\t\tend", "def root_dir_untarfiles\r\n pkg_name = @out_package_down\r\n dir_to_unpack = File.dirname(pkg_name)\r\n name_extr_folder = File.basename(pkg_name).split(\".\").first\r\n return File.join(dir_to_unpack, name_extr_folder)\r\n end", "def output_dir(target_repo = \"\")\n @output_dir ||= File.join(\"deb\", @codename, target_repo)\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end", "def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end", "def meta_dir\n @meta_dir ||= Dir[File.join(root, '{meta,.meta,var}/')].first || '.meta/'\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + canonize(name)\n else\n pkg_prefix_base + \"-\" + canonize(name)\n end\n end", "def create_debian_dir\n deb_dir = File.join(@build_dir, 'debian')\n begin\n FileUtils.mkdir_p(deb_dir)\n rescue => e\n puts \"Could not create directory '#{deb_dir}'\"\n raise e\n end\n end", "def release_dir\n @config[:releases_dir] + '/' + @config[:version_name]\n end", "def create_deb_file\n log.info(log_key) { \"Creating .deb file\" }\n\n # Execute the build command\n Dir.chdir(Config.package_dir) do\n shellout!(\"fakeroot dpkg-deb #{compression_params} -D --build #{staging_dir} #{package_name}\")\n end\n end", "def get_extract_dir(pkg, version, update)\n dir = \"#{pkg == 'jre' ? 'jre' : 'jdk'}1.#{version}.0#{update.empty? ? 
'' : '_'+update}\"\n Chef::Log.info(\"Java package expanded dir: #{dir}\")\n dir\n end", "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def dmg_package_app\n case new_resource.source\n when :direct\n ::File.basename(package_metadata[:url], '.dmg')\n else\n ::File.basename(new_resource.source.to_s, '.dmg')\n end\n end", "def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end", "def packages_path\n\t\t\tcontext.root + \"teapot/packages/#{name}\"\n\t\tend", "def rpm_file\n \"#{staging_dir}/RPMS/#{safe_architecture}/#{package_name}\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A tar gzip version that reproduces the same checksums on the same day when the file content does not change. Required to package the orig.tar.gz
def tar_gzip(archive, tarfile, pkg_time, distribution = nil, logfile: nil)

    # Make sure no distribution information leaks into the package
    if distribution and archive =~ /~#{distribution}/
        archive_plain_name = archive.gsub(/~#{distribution}/,"")
        FileUtils.cp_r archive, archive_plain_name
    else
        archive_plain_name = archive
    end

    Packager.info "Tar archive: #{archive_plain_name} into #{tarfile}"
    # Make sure that the tar files checksum remains the same by
    # overriding the modification timestamps in the tarball with
    # some external source timestamp and using gzip --no-name
    #
    # exclude hidden files an directories
    mtime = pkg_time.iso8601()
    # Exclude hidden files and directories at top level
    cmd_tar = "tar --mtime='#{mtime}' --format=gnu -c --exclude '.+' --exclude-backups --exclude-vcs --exclude #{archive_plain_name}/debian --exclude build #{archive_plain_name} | gzip --no-name > #{tarfile}"

    if system(cmd_tar, [:out,:err] => redirection(logfile, "a"))
        Packager.info "Package: successfully created archive using command '#{cmd_tar}' -- pwd #{Dir.pwd} -- #{Dir.glob("**")}"
        checksum = `sha256sum #{tarfile}`
        Packager.info "Package: sha256sum: #{checksum}"
        return true
    else
        Packager.info "Package: failed to create archive using command '#{cmd_tar}' -- pwd #{Dir.pwd}"
        return false
    end
end
[ "def tar_gzip(archive, tarfile, pkg_time, distribution = nil)\n\n # Make sure no distribution information leaks into the package\n if distribution and archive =~ /~#{distribution}/\n archive_plain_name = archive.gsub(/~#{distribution}/,\"\")\n FileUtils.cp_r archive, archive_plain_name\n else\n archive_plain_name = archive\n end\n\n\n Packager.info \"Tar archive: #{archive_plain_name} into #{tarfile}\"\n # Make sure that the tar files checksum remains the same by\n # overriding the modification timestamps in the tarball with\n # some external source timestamp and using gzip --no-name\n #\n # exclude hidden files an directories\n mtime = pkg_time.iso8601()\n # Exclude hidden files and directories at top level\n cmd_tar = \"tar --mtime='#{mtime}' --format=gnu -c --exclude '.+' --exclude-backups --exclude-vcs --exclude #{archive_plain_name}/debian --exclude build #{archive_plain_name} | gzip --no-name > #{tarfile}\"\n\n if system(cmd_tar)\n Packager.info \"Package: successfully created archive using command '#{cmd_tar}' -- pwd #{Dir.pwd} -- #{Dir.glob(\"**\")}\"\n checksum = `sha256sum #{tarfile}`\n Packager.info \"Package: sha256sum: #{checksum}\"\n return true\n else\n Packager.info \"Package: failed to create archive using command '#{cmd_tar}' -- pwd #{Dir.pwd}\"\n return false\n end\n end", "def tar_gz_file\n \"#{package_name}.tar.gz\"\n end", "def compress_source_tgz(path)\n tarfile = Tempfile.create([\"vagrant\", \".tar\"])\n tarfile.close\n tarfile = File.open(tarfile.path, \"wb+\")\n tgzfile = Tempfile.create([\"vagrant\", \".tgz\"])\n tgzfile.close\n tgzfile = File.open(tgzfile.path, \"wb\")\n tar = Gem::Package::TarWriter.new(tarfile)\n tgz = Zlib::GzipWriter.new(tgzfile)\n if File.file?(path)\n tar.add_file(File.basename(path), File.stat(path).mode) do |io|\n File.open(path, \"rb\") do |file|\n while bytes = file.read(4096)\n io.write(bytes)\n end\n end\n end\n else\n Dir.glob(File.join(path, \"**/**/*\")).each do |item|\n rel_path = item.sub(path, \"\")\n item_mode = File.stat(item).mode\n\n if File.directory?(item)\n tar.mkdir(rel_path, item_mode)\n else\n tar.add_file(rel_path, item_mode) do |io|\n File.open(item, \"rb\") do |file|\n while bytes = file.read(4096)\n io.write(bytes)\n end\n end\n end\n end\n end\n end\n tar.close\n tarfile.rewind\n while bytes = tarfile.read(4096)\n tgz.write bytes\n end\n tgz.close\n tgzfile.close\n tarfile.close\n File.delete(tarfile.path)\n tgzfile.path\n end", "def tar_xz_file\n \"#{package_name}.tar.xz\"\n end", "def extract_tar_gz\n Gem::Package::TarReader.new(Zlib::GzipReader.open(@package)) do |tar|\n\n # Progressbar\n progressbar = TTY::ProgressBar.new(PROGRESSBAR_FORMAT, total: tar.count, frequency: 2, clear: true)\n\n # tar.count move position pointer to end\n tar.rewind\n\n dest_file = nil\n tar.each do |entry|\n\n if entry.full_name == TAR_LONGLINK\n dest_file = File.join(@temp_dir, entry.read.strip)\n next\n end\n\n # Pax header\n # \"%d %s=%s\\n\", <length>, <keyword>, <value>\n if entry.header.typeflag == 'x'\n\n pax_headers = entry.read.split(\"\\n\")\n pax_headers.each do |header|\n meta, value = header.split('=', 2)\n length, keyword = meta.split(' ', 2)\n\n if keyword == 'path'\n dest_file = File.join(@temp_dir, value)\n next\n end\n end\n\n # If there is no header with keyword \"path\"\n next\n end\n\n dest_file ||= File.join(@temp_dir, entry.full_name)\n if entry.directory?\n FileUtils.rm_rf(dest_file) unless File.directory?(dest_file)\n FileUtils.mkdir_p(dest_file, mode: entry.header.mode, verbose: false)\n elsif 
entry.file?\n FileUtils.rm_rf(dest_file) unless File.file?(dest_file)\n File.open(dest_file, 'wb') do |f|\n f.write(entry.read)\n end\n FileUtils.chmod(entry.header.mode, dest_file, verbose: false)\n elsif entry.header.typeflag == '2' # symlink\n File.symlink(entry.header.linkname, dest_file)\n end\n\n dest_file = nil\n progressbar.advance(1)\n end\n\n progressbar.finish\n end\n end", "def extract_tar_gz\n Gem::Package::TarReader.new(Zlib::GzipReader.open(base.package)) do |tar|\n\n # Progressbar\n progressbar = ProgressBar.create(format: PROGRESSBAR_FORMAT, total: tar.count)\n\n # tar.count move position pointer to end\n tar.rewind\n\n dest_file = nil\n tar.each do |entry|\n if entry.full_name == TAR_LONGLINK\n dest_file = File.join(@tmpdir, entry.read.strip)\n next\n end\n dest_file ||= File.join(@tmpdir, entry.full_name)\n if entry.directory?\n FileUtils.rm_rf(dest_file) unless File.directory?(dest_file)\n FileUtils.mkdir_p(dest_file, mode: entry.header.mode, verbose: false)\n elsif entry.file?\n FileUtils.rm_rf(dest_file) unless File.file?(dest_file)\n File.open(dest_file, 'wb') do |f|\n f.write(entry.read)\n end\n FileUtils.chmod(entry.header.mode, dest_file, verbose: false)\n elsif entry.header.typeflag == '2' # symlink\n File.symlink(entry.header.linkname, dest_file)\n end\n\n dest_file = nil\n progressbar.increment\n end\n end\n end", "def archive_tar_gz(treeish = 'master', prefix = nil)\n options = {}\n options[:prefix] = prefix if prefix\n self.git.archive(options, treeish, \"| gzip\")\n end", "def compress\n @env[:ui].info I18n.t(\"vagrant.actions.general.package.compressing\", :tar_path => tar_path)\n File.open(tar_path, Platform.tar_file_options) do |tar|\n Archive::Tar::Minitar::Output.open(tar) do |output|\n begin\n current_dir = FileUtils.pwd\n\n copy_include_files\n\n FileUtils.cd(@env[\"package.directory\"])\n Dir.glob(File.join(\".\", \"**\", \"*\")).each do |entry|\n Archive::Tar::Minitar.pack_file(entry, output)\n end\n ensure\n FileUtils.cd(current_dir)\n end\n end\n end\n end", "def tar_compression_flag(path)\n case path\n when /\\.tar\\.bz2$/\n return \"-j\"\n when /\\.tar\\.gz$|\\.tgz$/\n return \"-z\"\n when /\\.tar\\.xz$/\n return \"-J\"\n else\n return nil\n end\n end", "def tar_bz2_file\n \"#{package_name}.tar.bz2\"\n end", "def checksum_file(build_file)\n basename = File.basename(build_file, '.tar.gz')\n Dir.glob(\"#{basename}.md5\").fetch(0, nil)\n end", "def archive_tar_gz(treeish = 'master', prefix = nil)\n options = {}\n options[:prefix] = prefix if prefix\n @git.archive(options, treeish, \"| gzip -n\")\n end", "def compress_command\n \"tar -czf #{filename} #{@backup.database}\"\n end", "def untar(tarball, &block)\n Rake::Task[\"dependency:archive-tar-minitar\"].invoke\n require \"archive/tar/minitar\"\n tgz = Zlib::GzipReader.new(File.open(tarball))\n # Pull out typesdb\n tar = Archive::Tar::Minitar::Input.open(tgz)\n tar.each do |entry|\n path = block.call(entry)\n next if path.nil?\n parent = File.dirname(path)\n\n mkdir_p parent unless File.directory?(parent)\n\n # Skip this file if the output file is the same size\n if entry.directory?\n mkdir path unless File.directory?(path)\n else\n entry_mode = entry.instance_eval { @mode } & 0777\n if File.exists?(path)\n stat = File.stat(path)\n # TODO(sissel): Submit a patch to archive-tar-minitar upstream to\n # expose headers in the entry.\n entry_size = entry.instance_eval { @size }\n # If file sizes are same, skip writing.\n next if stat.size == entry_size && (stat.mode & 0777) == entry_mode\n end\n puts 
\"Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}\"\n File.open(path, \"w\") do |fd|\n # eof? check lets us skip empty files. Necessary because the API provided by\n # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an\n # IO object. Something about empty files in this EntryStream causes\n # IO.copy_stream to throw \"can't convert nil into String\" on JRuby\n # TODO(sissel): File a bug about this.\n while !entry.eof?\n chunk = entry.read(16384)\n fd.write(chunk)\n end\n #IO.copy_stream(entry, fd)\n end\n File.chmod(entry_mode, path)\n end\n end\n tar.close\nend", "def tgz_file\n \"#{package_name}.tgz\"\n end", "def untar_gzip(file, options={})\n Shell.require_minitar\n untar(ungzip(file, options), options)\n end", "def tar_compression_flag\n case compression\n when :bzip2, \"bzip2\", nil\n \"j\"\n when :gzip, \"gzip\"\n \"z\"\n when :none, \"none\"\n \"\"\n end\n end", "def create_tar\n name_and_version = \"#{@package_name}_#{@plugin.metadata[:version]}\"\n tarfile = \"#{name_and_version}.orig.tar.gz\"\n begin\n PluginPackager.execute_verbosely(@verbose) do\n Dir.chdir(@tmpdir) do\n PluginPackager.safe_system(\"tar -Pcvzf #{File.join(@tmpdir, tarfile)} #{name_and_version}\")\n end\n end\n rescue Exception => e\n puts \"Could not create tarball - #{tarfile}\"\n raise e\n end\n end", "def create_tar\n tarfile = File.join(@tmpdir, \"#{@package_name_and_version}.tgz\")\n begin\n PluginPackager.execute_verbosely(@verbose) do\n Dir.chdir(@tmpdir) do\n PluginPackager.safe_system(\"tar -cvzf #{tarfile} #{@package_name_and_version}\")\n end\n end\n rescue => e\n puts \"Could not create tarball - '#{tarfile}'\"\n raise e\n end\n tarfile\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update the debian directory with overlay and env.sh/env.yml
def update_debian_dir(pkginfo, options)
    # Generate the debian directory
    generate_debian_dir(pkginfo, pkginfo.srcdir, options)

    if options[:patch_dir] && File.exist?(options[:patch_dir])
        if patch_pkg_dir(pkginfo.name, options[:patch_dir],
                whitelist: nil,
                pkg_dir: pkginfo.srcdir,
                options: patch_options())
            Packager.warn "Overlay patch applied to #{pkginfo.name}"
        end
        Dir.chdir(pkginfo.srcdir) do
            process_apaka_control("apaka.control")
        end
    end

    dpkg_commit_changes("overlay", pkginfo.srcdir,
        logfile: options[:logfile],
        include_removal: true)

    envyml = File.join(pkginfo.srcdir, "env.yml")
    Packager.warn("Preparing env.yml #{envyml}")
    patch_yml = {}
    if File.exists?(envyml)
        patch_yml = YAML.load_file(envyml)
    end

    env_data = pkginfo.generate_env_data("APAKA__" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)
    File.open(envyml, "w") do |file|
        file.write(env_data.to_yaml)
    end
    dpkg_commit_changes("envyml", pkginfo.srcdir,
        logfile: options[:logfile])

    envsh = File.join(pkginfo.srcdir, "env.sh")
    Packager.warn("Preparing env.sh #{envsh}")
    File.open(envsh, "a") do |file|
        env_txt = pkginfo.envsh(env_data)
        file.write(env_txt)
    end
    dpkg_commit_changes("envsh", pkginfo.srcdir,
        logfile: options[:logfile])

    # Run dpkg-source
    # Use the new tar ball as source
    if !system("dpkg-source", "-I", "-b", pkginfo.srcdir,
            [:out, :err] => redirection(options[:logfile],"a"),
            :close_others => true)
        Packager.warn "Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}"
        raise RuntimeError, "Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}"
    end
    ["#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz",
     "#{plain_versioned_name(pkginfo)}.orig.tar.gz",
     "#{versioned_name(pkginfo, options[:distribution])}.dsc"]
end
[ "def setup debian_repository\n write_config debian_repository\n restart\n end", "def etc_update \n announcing 'Running etc-update' do\n system(\"chroot #{$chrootdir} /scripts/run.sh update_configs\")\n end\n send_to_state('build', 'etc_update')\n end", "def apt_update(vm)\n vm.provision \"shell\", inline: <<-SHELL\n if [ ! -f /root/apt.updated ]; then\n apt-get -y update\n apt-get -y purge exim4-* libcairo*\n apt-get -y autoremove\n #apt-get -y upgrade\n #apt-get -y dist-upgrade\n apt-get -y install htop tree vim aufs-tools screen curl\n touch /root/apt.updated\n fi\n SHELL\nend", "def update_env_yml\n append_file = File.join(APAKA_PKG_INSTALL_DIR, \"env.yml.append\")\n return unless File.exist?(append_file)\n\n env_file = File.join(APAKA_PKG_INSTALL_DIR,\"env.yml\")\n env_data = YAML.load_file(env_file) || {}\n\n File.open(append_file).each_line do |line|\n variable, value = line.strip.split(\" \")\n if env_data.include?(variable)\n env_data[variable][:values] << value.strip\n else\n env_data[variable] = { \n :type => :add,\n :values => [ value.strip ] \n }\n end\n env_data[variable][:values].uniq!\n end\n File.open(env_file,\"w\") do |file|\n file.write(env_data.to_yaml)\n end\n\n FileUtils.rm(append_file)\nend", "def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend", "def update_env_file!\n present_env = ssh_read_file(app_username, \"#{ app_name }/.env\").to_s\n\n env = {\n \"RAILS_ENV\" => \"production\",\n \"PATH\" => \"/home/#{ app_username }/.rvm/wrappers/#{ app_name }:$PATH\",\n \"SECRET_KEY_BASE\" => SecureRandom.hex(64),\n }.merge(\n Dotenv::Parser.call(present_env)\n ).merge(\n options[:env] || {}\n ).map { |k, v|\n \"export #{ k }=#{ v.inspect }\\n\"\n }.join(\"\")\n\n ssh_write_file(app_username, \"#{ app_name }/.env\", env)\n end", "def update_chroot\n update_scripts\n update_configs\n end", "def mirror_env_to_profile_d env_file\n if /opensuse|sles-/.match?(self[:platform])\n @logger.debug(\"mirroring environment to /etc/profile.d on opensuse/sles platform host\")\n cur_env = exec(Beaker::Command.new(\"cat #{env_file}\")).stdout\n shell_env = ''\n cur_env.each_line do |env_line|\n shell_env << \"export #{env_line}\"\n end\n # here doc it over\n exec(Beaker::Command.new(\"cat << EOF > #{self[:profile_d_env_file]}\\n#{shell_env}EOF\"))\n # set permissions\n exec(Beaker::Command.new(\"chmod +x #{self[:profile_d_env_file]}\"))\n # keep it current\n exec(Beaker::Command.new(\"source #{self[:profile_d_env_file]}\"))\n else\n # noop\n @logger.debug(\"will not mirror environment to /etc/profile.d on non-sles platform host\")\n end\n end", "def setup_path\n # The Java Buildpack for WLS creates the complete domain structure and other linkages during staging.\n # The directory used for staging is at /tmp/staged/app\n # But the actual DEA execution occurs at /home/vcap/app. 
This discrepancy can result in broken paths and non-startup of the server.\n # So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution\n # Also, this script needs to be invoked before starting the server as it will create the links and also tweak the server args\n # (to listen on correct port, use user supplied jvm args)\n\n File.open(@application.root.to_s + '/' + SETUP_ENV_SCRIPT, 'w') do |f|\n\n f.puts '#!/bin/sh '\n f.puts '# There are 4 things handled by this script '\n f.puts ' '\n f.puts '# 1. Create links to mimic staging env and update scripts with jvm options '\n f.puts '# The Java Buildpack for WLS creates complete domain structure and other linkages during staging at '\n f.puts '# /tmp/staged/app location '\n f.puts '# But the actual DEA execution occurs at /home/vcap/app. '\n f.puts '# This discrepancy can result in broken paths and non-startup of the server. '\n f.puts '# So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution '\n f.puts '# Create paths that match the staging env, as otherwise scripts will break!! '\n f.puts ' '\n f.puts 'if [ ! -d \\\"/tmp/staged\\\" ]; then '\n f.puts ' /bin/mkdir /tmp/staged '\n f.puts 'fi; '\n f.puts 'if [ ! -d \\\"/tmp/staged/app\\\" ]; then '\n f.puts ' /bin/ln -s `pwd` /tmp/staged/app '\n f.puts 'fi; '\n f.puts ' '\n f.puts ' '\n end\n end", "def update_aptly\n distribution = settings.distribution\n puts \"Distribution: #{distribution}\"\n puts 'DEBUG: @config.settings'\n pp @config.settings\n\n deb_path = File.join @config.output, distribution\n deb_abs_path = File.absolute_path deb_path\n\n repo = \"#{@config.aptly_repo}-#{distribution}\"\n\n begin\n result = @aptly.repo_create name: repo, default_distribution: distribution\n puts \"Repo #{repo} created\"\n puts result\n rescue Aptly::ExistsError\n puts \"Repo #{repo} already exists\"\n end\n\n upload_deb directory: deb_abs_path, repo: repo unless settings.skip_package_upload\n update_repo repo: repo\n\n puts 'Done'\n end", "def security_updates\n configure(:unattended_upgrade => {:allowed_origins => [distro_unattended_security_origin].compact})\n unattended_config = <<-CONFIG\nAPT::Periodic::Update-Package-Lists \"#{configuration[:unattended_upgrade][:package_lists]||1}\";\nAPT::Periodic::Unattended-Upgrade \"#{configuration[:unattended_upgrade][:interval]||1}\";\nCONFIG\n\n package 'unattended-upgrades', :ensure => :latest\n file '/etc/apt/apt.conf.d/10periodic',\n :ensure => :present,\n :mode => '644',\n :content => unattended_config\n file '/etc/apt/apt.conf.d/50unattended-upgrades',\n :ensure => :present,\n :mode => '644',\n :content => template(File.join(File.dirname(__FILE__), \"templates\", \"unattended_upgrades.erb\"))\n end", "def create_debian_dir\n deb_dir = File.join(@build_dir, 'debian')\n begin\n FileUtils.mkdir_p(deb_dir)\n rescue => e\n puts \"Could not create directory '#{deb_dir}'\"\n raise e\n end\n end", "def create_debootstrap_rootfs()\n header(\"Creating basic rootfs using debootstrap\")\n\n size_mb = 1024 + (@overlay_dir ? 
size_of_dir(@overlay_dir) : 0)\n self.on_mounted_tmpfs(size_mb) do |tempdir|\n add_dummy_fstab(tempdir)\n run_debootstrap(tempdir)\n remove_dummy_fstab(tempdir)\n add_apt_sources(tempdir)\n add_eth0_interface(tempdir)\n overlay_files(tempdir)\n customize_rootfs(tempdir)\n package_rootfs(tempdir)\n end\n end", "def update_apt_if_needed\n if self['platform'] =~ /debian|ubuntu|cumulus/\n if @apt_needs_update\n execute(\"apt-get update\")\n @apt_needs_update = false\n end\n end\n end", "def setup_env(env, codename)\n cmd = 'sudo mozyutil stop'\n cmd += '; sudo mozyutil unlink'\n cmd += \"; cd #{PATH}\"\n cmd += '; rm -r LinuxTestFiles'\n cmd += '; sudo mozyutil clearbackupdirs'\n cmd += \"; sudo sh changenetwork.sh -n #{env} -c #{codename} -x\"\n cmd += '; sudo service mozybackup restart'\n ssh_linux_machine(cmd)\n end", "def upgrade_repo!\n package 'apt-transport-https'\n include_recipe \"apt-chef::#{new_resource.channel}\"\n package('chefdk') { action :upgrade }\n end", "def init_base\n echo \"Prepare the debian base\"\n x \"rm -rf modit/*\"\n unless x(\"debootstrap --arch=i386 #{RELEASE} modit\") &&\n init_base_setup() &&\n x(\"chroot modit bash setup.sh\")\n failed()\n end\n \n cache = Dir.glob(\"modit/var/cache/apt/a*/*.deb\")\n \n failed() unless File.exist?(\"modit/home/live\")\nrescue\n failed()\nend", "def update_apt_if_needed\n return unless /debian|ubuntu|cumulus|huaweios/.match?(self['platform'])\n return unless @apt_needs_update\n\n execute(\"apt-get update\")\n @apt_needs_update = false\n end", "def update_compose_file()\r\n file_path = \"#{@path}docker-compose.yml\"\r\n obj = YAML.load_file(file_path)\r\n obj = update_db_element(obj)\r\n obj = add_net_element(obj)\r\n obj = update_virtual_host(obj)\r\n @compose.write_config_to_disk(obj, file_path)\r\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Install a package by name, where pkg_name is the Debian package name
def install(pkg_name)
    begin
        pkg_build_dir = packaging_dir(pkg_name)
        filepath = Dir.glob("#{pkg_build_dir}/*.deb")
        if filepath.size < 1
            raise RuntimeError, "No debian file found for #{pkg_name} in #{pkg_build_dir}: #{filepath}"
        elsif filepath.size > 1
            raise RuntimeError, "More than one debian file available in #{pkg_build_dir}: #{filepath}"
        else
            filepath = filepath.first
            Packager.info "Found package: #{filepath}"
        end
        install_debfile(filepath)
    rescue Exception => e
        raise RuntimeError, "Installation of package '#{pkg_name} failed -- #{e}"
    end
end
[ "def install_package host, package_name\n host.install_package package_name\n end", "def install_package host, package_name, package_version = nil\n host.install_package package_name, '', package_version\n end", "def package_if_necessary(pkg)\n if !package_is_installed?(pkg)\n banner \"#{pkg}...\"\n run \"apt-get -y install #{pkg}\"\n end\n end", "def install_package(package_name, version=nil)\n update_package_list\n \n if package_installed?(package_name)\n say_status \"package already installed\", package_name\n else\n package_name = \"#{package_name}=#{version}\" if version\n track_modification!\n as_root do\n exec(\"DEBIAN_FRONTEND=noninteractive apt-get -q -y install #{package_name}\", :no_pty => true)\n end\n say_status \"installed package\", package_name.gsub('=', ' ')\n end\n end", "def install\n yaourt \"--noconfirm\", \"-Sy\", @resource[:name]\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\"Could not find package %s\" % self.name)\n end\n end", "def install!\n src = package_source\n chk = package_checksum\n dmg_package 'Chef Development Kit' do\n app ::File.basename(src, '.dmg')\n volumes_dir 'Chef Development Kit'\n source \"#{'file://' if src.start_with?('/')}#{src}\"\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum chk\n end\n end", "def install\n pacman \"--noconfirm\", \"--noprogressbar\", \"-Sy\", @resource[:name]\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\"Could not find package %s\" % self.name)\n end\n end", "def install_software!( opts = {} )\r\n raise ArgumentError \"missing :package\" unless opts[:package]\r\n \r\n args = { :package_name => opts[:package] }\r\n args[:no_validate] = true if opts[:no_validate]\r\n args[:unlink] = true if opts[:unlink]\r\n \r\n got = @ndev.rpc.request_package_add( args ).parent\r\n errcode = got.xpath('package-result').text.to_i\r\n return true if errcode == 0\r\n \r\n # otherwise return the output error message\r\n got.xpath('output').text.strip \r\n end", "def software_install!( opts = {} )\r\n raise ArgumentError \"missing :package\" unless opts[:package]\r\n \r\n args = { :package_name => opts[:package] }\r\n args[:no_validate] = true if opts[:no_validate]\r\n args[:unlink] = true if opts[:unlink]\r\n args[:reboot] = true if opts[:reboot]\r\n \r\n got = @ndev.rpc.request_package_add( args ).parent\r\n errcode = got.xpath('package-result').text.to_i\r\n return true if errcode == 0\r\n \r\n # otherwise return the output error message\r\n got.xpath('output').text.strip \r\n end", "def install\n cd_and_sh( pkg_dir, install_commands )\n end", "def install!\n src = package_source\n chk = package_checksum\n windows_package 'Chef Development Kit' do\n source src\n checksum chk\n end\n end", "def install_package(target_package_path); raise NotImplementedError; end", "def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend", "def install()\n\t\tos = Cfruby::OS::OSFactory.new.get_os()\n\t\tpkg_manager = os.get_package_manager()\n\n\t\tself.each() { |pkgname|\n\t\t\tpkg_manager.install(pkgname)\n\t\t}\n\tend", "def install\n up2date \"-u\", @resource[:name]\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\n \"Could not find package #{self.name}\"\n )\n end\n end", "def install(package)\n wait_until(\"Installing package\") do\n get 
\"/invoke/wm.server.packages/packageInstall?activateOnInstall=true&file=#{CGI.escape package.to_s}\"\n end\n end", "def pkg_default_install\n bsdstyle = @bsdstyle\n make = @make\n sudo_cmd = ''\n\n if bsdstyle == true\n sudo_cmd = 'sudo'\n end\n if make.length == 0\n make = $bsyscfg.get_make\n end\n\n <<INSTALL\n#{sudo_cmd} #{make} DESTDIR=#{$project_rootdir}/ install\nINSTALL\n end", "def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend", "def install_custom!\n do_dmg_package_resource!\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the ruby arch setup. For passing through sed, escaping is required; for use with file rendering, no escaping is required.
def ruby_arch_setup(do_escape = false)
    Packager.info "Creating ruby env setup"
    if do_escape
        setup = Regexp.escape("arch=$(shell gcc -print-multiarch)\n")
        # Extract the default ruby version to build for on that platform
        # this assumes a proper setup of /usr/bin/ruby
        setup += Regexp.escape("ruby_ver=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'ruby_version\']\\\")" + Regexp.escape("\n")
        setup += Regexp.escape("ruby_arch_dir=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'archdir\']\\\")" + Regexp.escape("\n")
        setup += Regexp.escape("ruby_libdir=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'rubylibdir\']\\\")" + Regexp.escape("\n")
        setup += Regexp.escape("rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n")
        setup += Regexp.escape("rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n")
    else
        setup = "arch=$(shell gcc -print-multiarch)\n"
        # Extract the default ruby version to build for on that platform
        # this assumes a proper setup of /usr/bin/ruby
        setup += "ruby_ver=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'ruby_version\']\")\n"
        setup += "ruby_arch_dir=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'archdir\']\")\n"
        setup += "ruby_libdir=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'rubylibdir\']\")\n"
        setup += "rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n"
        setup += "rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n"
    end
    Packager.info "Ruby env setup is:\n#{setup}"
    setup
end
[ "def ruby_arch_setup(do_escape = false)\n Packager.info \"Creating ruby env setup\"\n if do_escape\n setup = Regexp.escape(\"arch=$(shell gcc -print-multiarch)\\n\")\n # Extract the default ruby version to build for on that platform\n # this assumes a proper setup of /usr/bin/ruby\n setup += Regexp.escape(\"ruby_ver=$(shell ruby --version)\\n\")\n setup += Regexp.escape(\"ruby_arch_dir=$(shell ruby -r rbconfig -e \") + \"\\\\\\\"print RbConfig::CONFIG[\\'archdir\\']\\\\\\\")\" + Regexp.escape(\"\\n\")\n setup += Regexp.escape(\"ruby_libdir=$(shell ruby -r rbconfig -e \") + \"\\\\\\\"print RbConfig::CONFIG[\\'rubylibdir\\']\\\\\\\")\" + Regexp.escape(\"\\n\")\n\n setup += Regexp.escape(\"rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\\n\")\n setup += Regexp.escape(\"rockruby_libdir=$(subst /usr,,$(ruby_libdir))\\n\")\n else\n setup = \"arch=$(shell gcc -print-multiarch)\\n\"\n # Extract the default ruby version to build for on that platform\n # this assumes a proper setup of /usr/bin/ruby\n setup += \"ruby_ver=$(shell ruby --version)\\n\"\n setup += \"ruby_arch_dir=$(shell ruby -r rbconfig -e \\\"print RbConfig::CONFIG[\\'archdir\\']\\\")\\n\"\n setup += \"ruby_libdir=$(shell ruby -r rbconfig -e \\\"print RbConfig::CONFIG[\\'rubylibdir\\']\\\")\\n\"\n\n setup += \"rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\\n\"\n setup += \"rockruby_libdir=$(subst /usr,,$(ruby_libdir))\\n\"\n end\n Packager.info \"Setup is: #{setup}\"\n setup\n end", "def extract_arch\n if linux?\n extracted = path.to_s.split('/')[-2]\n else\n extracted = version64? ? X86_64 : I386\n end\n extracted\n end", "def bin_arch\n case node['kernel']['machine']\n when 'x86_64'\n '-x64'\n when 'armv6l', 'armv7l', 'armhf'\n '-arm'\n when 'aarch64'\n '-arm64'\n else\n # X86 32 bit has no string identifier in the upstream filename\n '' \n end\n end", "def archs_for_command cmd\n cmd = cmd.to_s # If we were passed a Pathname, turn it into a string.\n cmd = `/usr/bin/which #{cmd}` unless Pathname.new(cmd).absolute?\n cmd.gsub! ' ', '\\\\ ' # Escape spaces in the filename.\n\n archs = IO.popen(\"/usr/bin/file #{cmd}\").readlines.inject([]) do |archs, line|\n case line\n when /Mach-O (executable|dynamically linked shared library) ppc/\n archs << :ppc7400\n when /Mach-O 64-bit (executable|dynamically linked shared library) ppc64/\n archs << :ppc64\n when /Mach-O (executable|dynamically linked shared library) i386/\n archs << :i386\n when /Mach-O 64-bit (executable|dynamically linked shared library) x86_64/\n archs << :x86_64\n else\n archs\n end\n end\n archs.extend(ArchitectureListExtension)\nend", "def archs_for_command cmd\n cmd = cmd.to_s # If we were passed a Pathname, turn it into a string.\n cmd = `/usr/bin/which #{cmd}` unless Pathname.new(cmd).absolute?\n cmd.gsub! 
' ', '\\\\ ' # Escape spaces in the filename.\n\n lines = `/usr/bin/file -L #{cmd}`\n archs = lines.to_a.inject([]) do |archs, line|\n case line\n when /Mach-O (executable|dynamically linked shared library) ppc/\n archs << :ppc7400\n when /Mach-O 64-bit (executable|dynamically linked shared library) ppc64/\n archs << :ppc64\n when /Mach-O (executable|dynamically linked shared library) i386/\n archs << :i386\n when /Mach-O 64-bit (executable|dynamically linked shared library) x86_64/\n archs << :x86_64\n else\n archs\n end\n end\n archs.extend(ArchitectureList)\n end", "def GetArchOfELF(filename)\n bash_out = Convert.to_map(\n SCR.Execute(\n path(\".target.bash_output\"),\n Ops.add(Ops.add(Directory.ybindir, \"/elf-arch \"), filename)\n )\n )\n return \"unknown\" if Ops.get_integer(bash_out, \"exit\", 1) != 0\n Builtins.deletechars(Ops.get_string(bash_out, \"stdout\", \"unknown\"), \"\\n\")\n end", "def yum_syntax(name, version, arch)\n s = name\n s += \"-#{version}\" if version\n s += \".#{arch}\" if arch\n s\n end", "def parse_architecture_from_file_name(filename)\n #\n # We first map the different variations of architectures that we have\n # used historically to our final set.\n #\n if %w{ x86_64 amd64 x64 }.fuzzy_include?(filename)\n \"x86_64\"\n elsif %w{ i386 x86 i86pc i686 }.fuzzy_include?(filename)\n \"i386\"\n elsif %w{ powerpc }.fuzzy_include?(filename)\n \"powerpc\"\n elsif %w{ sparc sun4u sun4v }.fuzzy_include?(filename)\n \"sparc\"\n # Note that ppc64le should come before ppc64 otherwise our search\n # will think ppc64le matches ppc64. Ubuntu also calls it ppc64el.\n elsif %w{ ppc64le ppc64el }.fuzzy_include?(filename)\n \"ppc64le\"\n elsif %w{ ppc64 }.fuzzy_include?(filename)\n \"ppc64\"\n #\n # From here on we need to deal with historical versions\n # that we have published without any architecture in their\n # names.\n #\n #\n # All dmg files are published for x86_64\n elsif filename.end_with?(\".dmg\")\n \"x86_64\"\n #\n # The msi files we catch here are versions that are older than the\n # ones which we introduced 64 builds. Therefore they should map to\n # i386\n elsif filename.end_with?(\".msi\")\n \"i386\"\n #\n # sh files are the packaging format we were using before dmg on Mac.\n # They map to x86_64\n elsif filename.end_with?(\".sh\")\n \"x86_64\"\n #\n # We have two common file names for solaris packages. 
E.g:\n # chef-11.12.8-2.solaris2.5.10.solaris\n # chef-11.12.8-2.solaris2.5.9.solaris\n # These were build on two boxes:\n # Solaris 9 => sparc\n # Solaris 10 => i386\n elsif filename.end_with?(\".solaris2.5.10.solaris\")\n \"i386\"\n elsif filename.end_with?(\".solaris2.5.9.solaris\")\n \"sparc\"\n else\n raise UnknownArchitecture,\n \"architecture can not be determined for '#{filename}'\"\n end\n end", "def arch_string\n \"ruby-#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}/#{Gem::Platform.local.to_s}\"\n end", "def set_archflags_for_osx\n archflags = []\n fullpath = `which convert`\n fileinfo = `file #{fullpath}`\n\n # default ARCHFLAGS\n archs = $ARCH_FLAG.scan(/-arch\\s+(\\S+)/).flatten\n\n archs.each do |arch|\n if fileinfo.include?(arch)\n archflags << \"-arch #{arch}\"\n end\n end\n\n if archflags.length != 0\n $ARCH_FLAG = archflags.join(' ')\n end\n end", "def canonical_arch\n Config::CONFIG['arch'].sub(/[\\.0-9]*$/, '')\n end", "def makefile_am_contents(dir)\n\n # Makefile.am header.\n contents = \"\\\n#------------------------------------------------------------------------------\n# Syneight - A soft-realtime transaction monitor\n# Copyright (C) 2003-2004 The Syneight Group\n#\n# TODO: License.\n#------------------------------------------------------------------------------\n\n# Generated by autoautoconf #{$version}.\"\n \n contents += \"\\n\\n\"\n\n # Get variables.\n subdirs = get_subdirs(dir)\n cpps = get_files(dir, \"^[-a-zA-Z0-9_]+\\\\.cpp$\")\n headers = get_headers(dir)\n sources = get_files(dir, \"^[-a-zA-Z0-9_]+\\\\.[ch]pp$\")\n tests = get_files(dir, \"^[-a-zA-Z0-9_]+\\\\.t\\\\.cpp$\")\n testsupport = get_files(dir, \"^[-a-zA-Z0-9_]+\\\\.ts\\\\.[ch]pp$\")\n binary = get_files(dir, \"^[-a-zA-Z0-9_]+\\\\.b\\\\.cpp$\").strip\n dirname = File.basename(dir)\n \n # Subdirs.\n if (subdirs != \"\")\n contents += \"## Subdirectories to process.\\n\"\n contents += \"SUBDIRS = #{beautify(subdirs)}\\n\"\n contents += \"\\n\"\n end\n\n # Headers.\n if (headers != \"\")\n contents += \"## Header files.\\n\"\n contents += \"noinst_HEADERS = #{beautify(headers)}\\n\"\n contents += \"\\n\"\n end\n\n # Library.\n if (cpps != \"\" or testsupport != \"\")\n ltlibs = \"\"\n ltlibs += \"lib#{dirname}.la \" if (sources != \"\")\n ltlibs += \"libts#{dirname}.la \" if (testsupport != \"\")\n\n contents += \"## Not to be installed helper-libraries.\\n\"\n contents += \"noinst_LTLIBRARIES = #{ltlibs}\\n\"\n #contents += \"lib_LTLIBRARIES = #{ltlibs}\\n\"\n contents += \"\\n\"\n\n contents += \"## Helper-library sources.\\n\"\n contents += \"lib#{dirname}_la_SOURCES = #{beautify(sources)}\\n\"\n contents += \"\\n\"\n\n subdirlibs = \"\"\n if (subdirs != \"\")\n subdirs.split.each do |subdir|\n\tif (get_files(\"#{dir}/#{subdir}\", \"^[-a-zA-Z0-9_]+\\\\.cpp$\") != \"\")\n \tsubdirlibs += \"#{subdir}/lib#{subdir}.la \"\n\tend\n end\n subdirlibs.strip! 
if (subdirlibs != \"\")\n end\n\n # Libraries (not in subdirectories) we need to link against.\n otherlibs = get_dependencies(dir)\n\n libs = \"#{subdirlibs} #{otherlibs}\".strip\n\n if (libs != \"\")\n contents += \"## Libraries we link against.\\n\"\n contents += \"lib#{dirname}_la_LIBADD = #{beautify(libs)}\\n\"\n contents += \"\\n\"\n end\n\n # Test support library.\n if (testsupport != \"\")\n contents += \"## Testsupport library sources.\\n\"\n contents += \"libts#{dirname}_la_SOURCES = #{beautify(testsupport)}\\n\"\n contents += \"\\n\"\n end\n end\n\n # Programs.\n if (binary != \"\" or tests != \"\")\n bins = \"\"\n bins += \"#{binary[0..-5]} \" if (binary != \"\")\n tests.split.each do |test|\n bins += \"#{test[0..-5]} \"\n end\n bins.strip!\n\n contents += \"## The executable programs to be built.\\n\"\n contents += \"bin_PROGRAMS = #{beautify(bins)}\\n\"\n contents += \"\\n\"\n end\n\n if (binary != \"\")\n contents += \"## Binary '#{binary[0..-7]}'.\\n\"\n binary2 = binary[0..-5].gsub(/\\./, \"_\")\n contents += \"#{binary2}_SOURCES = #{binary}\\n\"\n contents += \"#{binary2}_LDADD = lib#{dirname}.la\\n\"\n contents += \"\\n\"\n end\n\n if (tests != \"\")\n tests.split.each do |test|\n contents += \"## Test for '#{test[0..-7]}'.\\n\"\n test2 = test[0..-5].gsub(/\\./, \"_\")\n contents += \"#{test2}_SOURCES = #{test}\\n\"\n if (testsupport != \"\")\n contents += \"#{test2}_LDADD = lib#{dirname}.la libts#{dirname}.la\\n\"\n else\n contents += \"#{test2}_LDADD = lib#{dirname}.la\\n\"\n end\n contents += \"\\n\"\n end\n end\n\n # Recursive targets.\n contents += get_recursive_targets(dir)\n\n # Makefile.common include.\n contents += \"# Include some common Makefile fragments.\\n\"\n contents += \"include $(top_srcdir)/Makefile.common\\n\"\n contents += \"\\n\"\n\n return contents\nend", "def arch_for_filename(path)\n file = File.basename(path, File.extname(path))\n\n case file\n when /686/, /386/\n '32-bit'\n when /86_64/, /amd64/\n '64-bit'\n else\n parts = file.split('_')\n\n if parts.empty?\n raise \"Could not determine arch for filename `#{file}'!\"\n end\n\n parts.last.capitalize\n end\n end", "def esc_seq!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n \n # - - - - main rule block - - - -\n # at line 346:5: ( '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' ) | UNICODE_ESC | OCTAL_ESC )\n alt_2 = 3\n look_2_0 = @input.peek( 1 )\n\n if ( look_2_0 == 0x5c )\n case look_2 = @input.peek( 2 )\n when 0x22, 0x27, 0x5c, 0x62, 0x65, 0x66, 0x6e, 0x72, 0x74 then alt_2 = 1\n when 0x75 then alt_2 = 2\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37 then alt_2 = 3\n else\n raise NoViableAlternative( \"\", 2, 1 )\n end\n else\n raise NoViableAlternative( \"\", 2, 0 )\n end\n case alt_2\n when 1\n # at line 346:9: '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' )\n match( 0x5c )\n if @input.peek(1) == 0x22 || @input.peek(1) == 0x27 || @input.peek(1) == 0x5c || @input.peek(1) == 0x62 || @input.peek( 1 ).between?( 0x65, 0x66 ) || @input.peek(1) == 0x6e || @input.peek(1) == 0x72 || @input.peek(1) == 0x74\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 347:9: UNICODE_ESC\n unicode_esc!\n\n when 3\n # at line 348:9: OCTAL_ESC\n octal_esc!\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n end", "def ruby_image(ruby)\n if ruby == YJIT_RUBY\n 
ruby.sub(\"yjit:\", \"\")\n else\n ruby\n end\nend", "def esc_seq!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n \n # - - - - main rule block - - - -\n # at line 295:5: ( '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' ) | UNICODE_ESC | OCTAL_ESC )\n alt_2 = 3\n look_2_0 = @input.peek( 1 )\n\n if ( look_2_0 == 0x5c )\n case look_2 = @input.peek( 2 )\n when 0x22, 0x27, 0x5c, 0x62, 0x65, 0x66, 0x6e, 0x72, 0x74 then alt_2 = 1\n when 0x75 then alt_2 = 2\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37 then alt_2 = 3\n else\n raise NoViableAlternative( \"\", 2, 1 )\n end\n else\n raise NoViableAlternative( \"\", 2, 0 )\n end\n case alt_2\n when 1\n # at line 295:9: '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' )\n match( 0x5c )\n if @input.peek(1) == 0x22 || @input.peek(1) == 0x27 || @input.peek(1) == 0x5c || @input.peek(1) == 0x62 || @input.peek( 1 ).between?( 0x65, 0x66 ) || @input.peek(1) == 0x6e || @input.peek(1) == 0x72 || @input.peek(1) == 0x74\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 296:9: UNICODE_ESC\n unicode_esc!\n\n when 3\n # at line 297:9: OCTAL_ESC\n octal_esc!\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n end", "def firmware_and_environment\n\t\t'#{firmware_revision}' + '--' + '#{environment}'\n\tend", "def static_arch_detect_js\r\n %Q|\r\n var arches = {};\r\n arches['#{ARCH_ARMLE}'] = /arm/i;\r\n arches['#{ARCH_MIPSLE}'] = /mips/i;\r\n arches['#{ARCH_X86}'] = /x86/i;\r\n\r\n var arch = null;\r\n for (var name in arches) {\r\n if (navigator.platform.toString().match(arches[name])) {\r\n arch = name;\r\n break;\r\n }\r\n }\r\n\r\n if (arch) {\r\n // load the script with the correct arch\r\n var script = document.createElement('script');\r\n script.setAttribute('src', '#{get_uri}/#{Rex::Text::rand_text_alpha(5)}.js?arch='+arch);\r\n script.setAttribute('type', 'text/javascript');\r\n\r\n // ensure body is parsed and we won't be in an uninitialized state\r\n setTimeout(function(){\r\n var node = document.body \\|\\| document.head;\r\n node.appendChild(script);\r\n }, 100);\r\n }\r\n |\r\n end", "def arch_for_filename(path)\n file = File.basename(path, File.extname(path))\n\n case file\n when /686/, /386/\n \"32-bit\"\n when /86_64/, /amd64/\n \"64-bit\"\n else\n parts = file.split(\"_\")\n\n if parts.empty?\n raise \"Could not determine arch for filename `#{file}'!\"\n end\n\n parts.last.capitalize\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Define the default compat level
def set_compat_level(compatlevel = DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile = "debian/compat")
    if File.exist?(compatfile)
        existing_compatlevel = `cat #{compatfile}`.strip
        Packager.warn "Apaka::Packaging::Debian::set_compat_level: existing '#{compatfile}' with compatlevel #{existing_compatlevel}"
    end
    Packager.info "Setting debian compat level to: #{compatlevel}"
    `echo #{compatlevel} > #{compatfile}`
end
[ "def compat\n\t\tmodule_info['Compat'] || {}\n\tend", "def compatibility_mode(compatibility = 1)\n @compatibility = compatibility\n end", "def compatibility_mode=(value)\n @compatibility_mode = value\n end", "def custom_compatibility_version\n @custom_compatibility_version\n end", "def init_compat\n\t\tc = module_info['Compat']\n\n\t\tif (c == nil)\n\t\t\tc = module_info['Compat'] = Hash.new\n\t\tend\n\n\t\t# Initialize the module sub compatibilities\n\t\tc['Payload'] = Hash.new if (c['Payload'] == nil)\n\t\tc['Encoder'] = Hash.new if (c['Encoder'] == nil)\n\t\tc['Nop'] = Hash.new if (c['Nop'] == nil)\n\n\t\t# Update the compat-derived module specific compatibilities from\n\t\t# the specific ones to make a uniform view of compatibilities\n\t\tc['Payload'].update(module_info['PayloadCompat'] || {})\n\t\tc['Encoder'].update(module_info['EncoderCompat'] || {})\n\t\tc['Nop'].update(module_info['NopCompat'] || {})\n\tend", "def min_android_security_patch_level=(value)\n @min_android_security_patch_level = value\n end", "def level\n @configuration.default_level\n end", "def enforcement_level=(value)\n @enforcement_level = value\n end", "def default_log_level(env = ENV)\n\t\t\tif level = (env['CONSOLE_LEVEL'] || env['CONSOLE_LOG_LEVEL'])\n\t\t\t\tLogger::LEVELS[level.to_sym] || Logger.warn\n\t\t\telsif $DEBUG\n\t\t\t\tLogger::DEBUG\n\t\t\telsif $VERBOSE.nil?\n\t\t\t\tLogger::WARN\n\t\t\telse\n\t\t\t\tLogger::INFO\n\t\t\tend\n\t\tend", "def android_security_patch_level=(value)\n @android_security_patch_level = value\n end", "def default_log_level\n\t\t\tif $DEBUG\n\t\t\t\tLogger::DEBUG\n\t\t\telsif $VERBOSE\n\t\t\t\tLogger::INFO\n\t\t\telse\n\t\t\t\tLogger::WARN\n\t\t\tend\n\t\tend", "def default_log_level(env = ENV)\n\t\t\tif level = env['CONSOLE_LOG_LEVEL']\n\t\t\t\tLEVELS[level] || Logger.warn\n\t\t\telsif $DEBUG\n\t\t\t\tLogger::DEBUG\n\t\t\telsif $VERBOSE.nil?\n\t\t\t\tLogger::WARN\n\t\t\telse\n\t\t\t\tLogger::INFO\n\t\t\tend\n\t\tend", "def minor_version_default!\n self.minor_version = 0\n end", "def default!\n self.severity = :DEFAULT\n end", "def safe_level(*) end", "def effective_version_threshold\n version_threshold || course.version_threshold\n end", "def access_level=(new_level)\n super new_level.to_s\n end", "def set_log_level( level )\n Naether::Java.exec_static_method('com.tobedevoured.naether.util.LogUtil', 'changeLevel', ['com.tobedevoured', level] )\n end", "def safe_level\n safe? ? 4 : 0\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the build dependencies for a package info object. Return [Array] the list of dependencies.
def build_dependencies(pkginfo)
    dependencies = []
    pkgdeps = pkginfo.dependencies
    deps = pkgdeps[:rock_pkginfo].select do |pkginfo|
        pkg_name = debian_name(pkginfo, true)
        !rock_release_platform.ancestorContains(pkg_name)
    end .map { |p| p.name }

    gems = pkgdeps[:nonnative].select do |gem,version|
        pkg_ruby_name = debian_ruby_name(gem, false)
        pkg_prefixed_name = debian_ruby_name(gem, true)

        !( rock_release_platform.ancestorContains(gem) ||
           rock_release_platform.ancestorContains(pkg_ruby_name) ||
           rock_release_platform.ancestorContains(pkg_prefixed_name))
    end .map{ |p| p[0] }

    deps.concat(gems)
    deps
end
[ "def dependencies options = {}\n return [] unless exist?\n deps = Array.new\n if options.has_key?(:subprojects) and (not options[:subprojects])\n reload_project_info unless @info_file and @info_file.exist?\n info_files = [@info_file]\n else\n info_files = Dir[\"#{local_path}/**/*.info\"]\n end\n info_files.each do |info|\n f = File.open(info, \"r\").read\n f.each_line do |l|\n matchdata = l.match(/^\\s*dependencies\\s*\\[\\s*\\]\\s*=\\s*[\"']?([^\\s(\"']+)/)\n if nil != matchdata\n deps << matchdata[1].strip\n end\n matchdata = l.match(/^\\s*base +theme\\s*=\\s*(.+)$/)\n if nil != matchdata\n d = matchdata[1].strip\n deps << d.gsub(/\\A[\"']|[\"']\\Z/, '') # Strip leading and trailing quotes\n end\n end\n end\n # Remove duplicates and self-dependency\n deps.uniq!\n deps.delete(name)\n return deps\n end", "def build_depends\n data.build_dependencies\n end", "def compute_revdeps\n result = Hash.new { |h, k| h[k] = Set.new }\n each_autobuild_package do |pkg|\n pkg.dependencies.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n pkg.optional_dependencies.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n pkg.os_packages.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n end\n result\n end", "def list_build_dependencies\n @project.components.map(&:build_requires).flatten.uniq - @project.components.map(&:name)\n end", "def go_list_deps\n args = [\"-deps\"]\n args << \"-mod=vendor\" if config.dig(\"go\", \"mod\") == \"vendor\"\n\n # the CLI command returns packages in a pretty-printed JSON format but\n # not separated by commas. this gsub adds commas after all non-indented\n # \"}\" that close root level objects.\n # (?!\\z) uses negative lookahead to not match the final \"}\"\n deps = package_info_command(*args).gsub(/^}(?!\\z)$/m, \"},\")\n JSON.parse(\"[#{deps}]\")\n end", "def sysdeps\n res = []\n @build.each do |x|\n x.sysdep.each { |k,v| res.concat v }\n end\n res.sort.uniq\n end", "def generate_dependencies_info\n components.each_with_object({}) do |component, hsh|\n hsh.merge!(component.get_dependency_hash)\n end\n end", "def dependencies(pkg)\n pkg.resolve_optional_dependencies\n deps_rock_packages = pkg.dependencies.map do |pkg_name|\n debian_name(Autoproj.manifest.package(pkg_name).autobuild)\n end.sort\n\n pkg_osdeps = Autoproj.osdeps.resolve_os_dependencies(pkg.os_packages)\n # There are limitations regarding handling packages with native dependencies\n #\n # Currently gems need to converted into debs using gem2deb\n # These deps dependencies are updated here before uploading a package\n # \n # Generation of the debian packages from the gems can be done in postprocessing step\n # i.e. 
see convert_gems\n \n deps_osdeps_packages = []\n native_package_manager = Autoproj.osdeps.os_package_handler\n _, native_pkg_list = pkg_osdeps.find { |handler, _| handler == native_package_manager }\n\n deps_osdeps_packages += native_pkg_list if native_pkg_list\n\n # Update global list\n @osdeps += deps_osdeps_packages\n\n non_native_handlers = pkg_osdeps.collect do |handler, pkg_list|\n if handler != native_package_manager\n [handler, pkg_list]\n end\n end.compact\n\n non_native_handlers.each do |pkg_handler, pkg_list|\n # Convert native ruby gems package names to rock-xxx \n if pkg_handler.kind_of?(Autoproj::PackageManagers::GemManager)\n pkg_list.each do |name,version|\n @ruby_gems << [name,version]\n deps_osdeps_packages << debian_ruby_name(name)\n end\n else\n raise ArgumentError, \"cannot package #{pkg.name} as it has non-native dependencies (#{pkg_list}) -- #{pkg_handler.class} #{pkg_handler}\"\n end\n end\n\n # Remove duplicates\n @osdeps.uniq!\n @ruby_gems.uniq!\n\n # Return rock packages and osdeps\n [deps_rock_packages, deps_osdeps_packages]\n end", "def depends_on\n if self.depends\n self.depends.split(', ').sort.inject([]) do |res, name|\n if p = self.search_class_name.where(name: name.split(/ /)[0]).first\n res << p\n else\n logger.info(\"No package #{name.split(/ /)[0]} for #{self.depends} in #{self.inspect}\")\n end\n res\n end\n else\n []\n end\n end", "def package_deps(package_name)\n FileUtils.mkdir_p pkg\n \n info_file = File.join pkg, \"#{package_name}.tcz.dep\"\n unless File.exist? info_file\n info_url = 'http://distro.ibiblio.org/tinycorelinux/4.x/x86/tcz/' +\n \"#{package_name}.tcz.dep\"\n download_file info_url, info_file\n end\n \n deps = []\n File.read(info_file).split.map do |dep|\n break unless /\\.tcz\\Z/ =~ dep\n deps << dep.sub(/\\.tcz\\Z/, '')\n end\n deps\n end", "def dependencies_array(leaf, processed = {})\n return processed[leaf] if processed[leaf]\n\n deps_array = []\n processed[leaf] = deps_array\n\n leaf.each do |pack, versions|\n a = []\n versions.each do |version, deps|\n perms = []\n sub_perms = dependencies_array(deps, processed)\n if sub_perms == []\n perms += [version]\n else\n sub_perms[0].each do |perm|\n perms << [version] + [perm].flatten\n end\n end\n a += perms\n end\n deps_array << a\n end\n\n deps_array\n end", "def build_components_object\n info = @scan_report.to_h.fetch(:info)\n return [] unless info[:dependencies]\n\n components = []\n\n info[:dependencies].each do |dependency|\n components << parse_dependency(dependency)\n end\n components\n end", "def dependencies(name)\n dependencies = []\n submodule = submodule(name)\n if submodule.has_key?(:dependencies)\n submodule[:dependencies].each do |dependency|\n dependencies << dependency\n dependencies << dependencies(dependency)\n end\n end\n\n dependencies.flatten.uniq.sort\n end", "def build_dependency_string(data)\n dependencies = []\n PluginPackager.filter_dependencies('debian', data[:dependencies]).each do |dep|\n if dep[:version] && dep[:revision]\n dependencies << \"#{dep[:name]} (>=#{dep[:version]}-#{dep[:revision]})\"\n elsif dep[:version]\n dependencies << \"#{dep[:name]} (>=#{dep[:version]})\"\n else\n dependencies << dep[:name]\n end\n end\n\n if data[:plugindependency]\n dependencies << \"#{data[:plugindependency][:name]} (= ${binary:Version})\"\n end\n\n dependencies.join(', ')\n end", "def remaining_dependencies\n dependencies = []\n @current_packages.each do |_, package|\n package.spec.dependencies.each do |dep|\n next if satisfy? 
dep\n dependencies << dep\n end\n end\n dependencies\n end", "def dependencies_array(tree, processed = {})\n return processed[tree] if processed[tree]\n\n deps_array = []\n processed[tree] = deps_array\n \n tree.each do |pack, versions|\n a = []\n versions.each do |version, deps|\n perms = []\n sub_perms = dependencies_array(deps, processed)\n if sub_perms == []\n perms += [version]\n else\n sub_perms[0].each do |perm|\n perms << [version] + [perm].flatten\n end\n end\n a += perms\n end\n deps_array << a\n end\n\n deps_array\n end", "def dependencies_build\n ret = {}\n\n bpm_build.each do |target_name, opts|\n next unless opts.is_a?(Hash)\n \n minifier = opts['minifier']\n case minifier\n when String\n ret[minifier] = '>= 0'\n when Hash\n ret.merge! minifier\n end\n end\n ret\n end", "def module_dependencies_from_metadata\n metadata = module_metadata\n return [] unless metadata.key?('dependencies')\n\n dependencies = []\n metadata['dependencies'].each do |d|\n \n if d['private'] == true\n next\n end\n \n tmp = { module_name: d['name'].sub('/', '-') }\n \n if d.key?('version_requirement')\n tmp[:version] = module_version_from_requirement(tmp[:module_name],\n d['version_requirement'])\n end\n dependencies.push(tmp)\n end\n\n dependencies\n end", "def dependencies_for(package, version)\n fetch_package_info(package) # probably already done, can't hurt\n\n spec = @specs_by_package_version[package.name][version.to_s]\n\n raise \"missing spec #{package.inspect} / #{version.inspect}\" if spec.nil?\n\n info = spec.info\n\n available_platforms = info.map(&:first)\n matching_platform = Gel::Platform.match(package.platform, available_platforms)\n\n info = info.select { |p, i| p == matching_platform }\n\n # FIXME: ruby_constraints ???\n\n info.flat_map { |_, i| i[:dependencies] }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Newline-delimited list of actions
def encode(serialized_actions = [])
    serialized_actions.join("\n")
end
[ "def get_actions\n\n actions = []\n\n unless self.actions.nil? or self.actions.empty?\n actions = self.actions.split(\"\\n\").collect {|entry| entry.split('-') }\n end\n return actions\n end", "def action_add(line)\n @actionplan << \"#{line}\"\n end", "def actions(*list)\n return @actions if list.empty?\n @actions = list.flatten\n end", "def action_text\n @action_text || action.try(:join, \",\")\n end", "def actions\n # Get our ordered list if an order has been specified\n if @action_priority\n list = @action_priority.collect {|a| to_action_class(a)}\n else\n list = []\n end\n \n # Add in all remaining actions at the end of the list\n list += (@actions.collect {|a| to_action_class(a)} - list)\n \n # Ensure that the main help action (--help) and version action (--version) are at end of the list\n built_ins = [Console::VersionAction, Console::HelpAction]\n list = [Console::HelpAction] + (list - built_ins) + [Console::VersionAction]\n \n # Done!\n list\n end", "def actions!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n type = ACTIONS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 142:11: 'actions'\n match( \"actions\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n end", "def actions!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = ACTIONS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 141:11: 'actions'\n match( \"actions\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end", "def aside_grouped_actions\n [\n general_actions,\n # supporter_actions,\n start_up_actions,\n investor_actions,\n # partners_actions,\n # tools_actions\n ]\n end", "def process_actions(input_actions_string)\n action_string = \"\"\n actions = []\n\n beg_idx = 2\n fin_idx = input_actions_string.index(\"}\")\n while beg_idx\n action_string = input_actions_string[(beg_idx+1)..(fin_idx-1)]\n beg_idx = input_actions_string.index(\"{\", fin_idx)\n fin_idx = input_actions_string.index(\"}\", beg_idx) if beg_idx\n actions << process_action(action_string)\n end\n\n actions\nend", "def make_command_list\n add_command(nil, :actor)\n add_command(nil, :recipe)\n end", "def action_list(args = {})\n name = args[:name]\n\n # Generate and perform necessary Actions for the\n # selected, referenced additional resources.\n @@markerselector = ApexMarkerSelector.new if @@markerselector.nil?\n args[:selector] = @@markerselector\n alist = super(args)\n\n # Apex put marker callouts in normal paragraphs.\n # <p><Vidoe02></p>\n # Captured all paragraphs and now needed to\n # look at the contents to see if match.\n new_alist = []\n alist.each do |marker_action|\n content = marker_action.fragment.node.text\n new_alist << marker_action if content.match?(/\\<insert[ ]+[^\\>]+\\>/)\n end\n return new_alist\n end", "def itemactions itemindex = 0, text = ''\r\n item = nil\r\n actionlist = Array.new()\r\n if (text == '')\r\n item = emc_data_list_item(itemindex)\r\n else\r\n item = emc_data_list_item_by_text(text)\r\n end \r\n item.element(:tag_name => 'action').buttons.each do |button|\r\n p button.text\r\n actionlist.push(button.text)\r\n end\r\n return actionlist\r\n end", "def actions\n @actions ||= []\n end", "def 
actions(*action_names)\n [*action_names].each do |name| \n if Arsenal.actions[name]\n # strategy_name, type, name\n @run_list << [self.name, Arsenal.actions[name].type, name]\n end\n end\n end", "def printable_commands(*)\n item = []\n item << banner\n item << (desc ? \"# #{desc.gsub(/\\s+/m, ' ')}\" : \"\")\n [item]\n end", "def list_actions(actions)\n\t\t\t\t@params[:list_actions] = actions\n\t\t\tend", "def action_steps(ordered: true)\n steps(ordered: ordered).map do |step|\n matches = step.match(/^((.*?)[.?!])\\s+(.+)/)\n [matches[1], matches[3]]\n end\n end", "def with_actions( action_names )\n action_names.each do |action_name|\n action_items << ActionItem.new(name: action_name )\n end\n end", "def category_actions\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n array = nil\n act = nil\n next_act = nil\n # - - - - @init action - - - -\n array = Array.new\n\n begin\n # at line 84:10: ';' ( WS )? 'actions' '=' '\\\"' act= action_location ( ( WS )? next_act= action_location )* '\\\"'\n match(T__11, TOKENS_FOLLOWING_T__11_IN_category_actions_441)\n # at line 84:14: ( WS )?\n alt_16 = 2\n look_16_0 = @input.peek(1)\n\n if (look_16_0 == WS)\n alt_16 = 1\n end\n case alt_16\n when 1\n # at line 84:14: WS\n match(WS, TOKENS_FOLLOWING_WS_IN_category_actions_443)\n\n end\n match(T__20, TOKENS_FOLLOWING_T__20_IN_category_actions_446)\n match(T__13, TOKENS_FOLLOWING_T__13_IN_category_actions_448)\n match(T__14, TOKENS_FOLLOWING_T__14_IN_category_actions_450)\n @state.following.push(TOKENS_FOLLOWING_action_location_IN_category_actions_454)\n act = action_location\n @state.following.pop\n # --> action\n array << (act && @input.to_s(act.start, act.stop))\n # <-- action\n # at line 85:10: ( ( WS )? next_act= action_location )*\n while true # decision 18\n alt_18 = 2\n look_18_0 = @input.peek(1)\n\n if (look_18_0.between?(WS, DIGIT) || look_18_0 == T__10 || look_18_0 == T__13 || look_18_0.between?(T__28, T__42))\n alt_18 = 1\n\n end\n case alt_18\n when 1\n # at line 85:12: ( WS )? next_act= action_location\n # at line 85:12: ( WS )?\n alt_17 = 2\n look_17_0 = @input.peek(1)\n\n if (look_17_0 == WS)\n alt_17 = 1\n end\n case alt_17\n when 1\n # at line 85:12: WS\n match(WS, TOKENS_FOLLOWING_WS_IN_category_actions_470)\n\n end\n @state.following.push(TOKENS_FOLLOWING_action_location_IN_category_actions_475)\n next_act = action_location\n @state.following.pop\n # --> action\n array << (next_act && @input.to_s(next_act.start, next_act.stop))\n # <-- action\n\n else\n break # out of loop for decision 18\n end\n end # loop for decision 18\n match(T__14, TOKENS_FOLLOWING_T__14_IN_category_actions_482)\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end\n\n return array\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Maximum content length before a URL: 140 (the maximum length of a Twitter message) minus 21 (a space plus 20 characters for a t.co URL), or minus 22 with the https protocol
def max_content_length @@max_content_length ||= Config.web =~ /^https/ ? 118 : 119 end
[ "def url_char_length\n 23 #assume https for good measure\n end", "def validate_url_length\n raise UrlLengthError unless to_url(:validation => false).size <= URL_MAX_LENGTH\n end", "def too_long? content\n content.length > MaxTweetLength\n end", "def length\n [default&.length, max_content_length].compact.max\n end", "def gift_message_max_length\n raise \"Method not implemented!\"\n end", "def get_max_length\n return @payload.get_path(\"max_length\"){-1}\n end", "def tweet_lengths_below_max\n errors.add(:enter_tweet, 'Too long') if enter_tweet && (enter_tweet.length + screen_name.length) > MAX_TWEET_CHARS\n errors.add(:exit_tweet, 'Too long') if exit_tweet && (exit_tweet.length + screen_name.length) > MAX_TWEET_CHARS\n end", "def render_length_limit; end", "def long_content? num=MAX_SIZE\r\n content.length > num\r\n end", "def content_length\n @content_length ||= ((s = self[HttpClient::CONTENT_LENGTH]) &&\n (s =~ /^(\\d+)$/)) ? $1.to_i : nil\n end", "def truncate(text, url)\n return text if text.bytesize <= TEXT_BYTESIZE_MAX\n tail = \"...\\n<#{url}|Read more at Qiita:Team...>\"\n text.byteslice(0, TEXT_BYTESIZE_MAX - tail.bytesize).scrub(\"\") + tail\n end", "def comment_length\n\t120\nend", "def tag_content_max_length\n limits_yaml = YAML.load_data!(\"#{ECON_DATA_PATH}/limits.yml\")\n return limits_yaml['tag_content_max_length']\n end", "def char_limit; end", "def url_truncate(length)\n \n begin\n \n # parse URL\n uri = URI.parse(self)\n \n # determine relevant parts of URL\n prefix = ((uri.scheme.blank? || uri.scheme == 'http') ? '' : (uri.scheme.to_s + '://')) +\n uri.host.to_s +\n (((uri.port.blank? || uri.port == uri.default_port) ? '' : (':' + uri.port.to_s)))\n parts = uri.path.split(/\\//).map { |c| c.blank? ? nil : c }.compact\n if parts.empty?\n file = ''\n path = ''\n else\n file = '/' + parts.delete_at(-1)\n path = parts.empty? ? '' : ('/' + parts.join('/'))\n end\n \n # determine result width\n result_width = length * DEFAULT_WIDTH\n \n # determine widths\n prefix_width = prefix.width\n path_width = path.width\n file_width = file.width\n dots_width = '/...'.width\n \n # return complate URL if possible\n if prefix_width + path_width + file_width <= result_width\n return prefix + path + file\n end\n \n # determine which parts of URL can be used\n if (path_width > dots_width) # it makes sense to truncate path\n if prefix_width + dots_width + file_width <= result_width # it's sufficient to truncate path\n result_path_width = result_width - prefix_width - file_width\n return prefix + path.truncate(result_path_width / DEFAULT_WIDTH) + file\n else # it's NOT sufficient to truncate path\n result_file_width = result_width - prefix_width - dots_width\n if result_file_width > dots_width # it make sense to truncate file\n return (prefix + '/...' + file).truncate(length)\n else # it make NO sense to truncate file\n return (prefix + '/...').truncate(length)\n end\n end\n else # it makes NO sense to truncate path\n return (prefix + path + file).truncate(length)\n end\n \n rescue\n if ENV['RAILS_ENV'] == 'production'\n return self.truncate\n else\n raise $!\n end\n end\n \n end", "def set_maximum_message_length\n Colloquy.maximum_message_length = (@options[:flows][:maximum_message_length] || 160).to_i\n end", "def http_content_length_limit(limit)\n @options[:http_content_length_limit] = limit\n end", "def max_length\n return @max_length\n end", "def long_content?\n content.length > CONTENT_LENGTH rescue nil\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Wrapper for the brand method. ActiveMerchant returns master or american_express; Banwire requires mastercard and amex
def get_brand(brand) case brand when "master" return "mastercard" when "american_express" return "amex" else return brand end end
[ "def get_brand(card_bin)\n Cielo::API30::Request::QueryBrandRequest.new(merchant, environment).execute(card_bin)\n end", "def is_brand? brand\n ActiveDevice::Handset.is_brand? request.user_agent, brand\n end", "def is_brand? brand\n Handset.is_brand? request.user_agent, brand\n end", "def brand_name_to_use_when_getting_distributors\n brand_name = @website.brand.name.downcase\n case brand_name\n when \"duran audio\"\n brand_name = \"axys tunnel by jbl\"\n when \"audio architect\"\n brand_name = \"bss\"\n else\n brand_name\n end\n\n brand_name\n end", "def make_brand\n\tprint_ASCII(\"Brands\")\n\tbrands\nend", "def matching_brand?(number, brand)\n brand?(number) == brand\n end", "def company_brand_code\n self.dig_for_string(\"agentSummary\", \"office\", \"brandCode\")\n end", "def device_brand\n ActiveDevice::Brand.mobile_brand request.user_agent\n end", "def brand_with_model\n fetch('camera.brand_with_model')\n end", "def brand_for_mailer\n if self.brands && self.brands.length > 0\n self.brands.first\n elsif self.initial_brand.present?\n self.initial_brand\n elsif Brand.count > 0\n Brand.first\n else\n Brand.new\n end\n end", "def brand_code\n self.dig_for_string(\"companySummary\", \"brandCode\")\n end", "def brand_with_model; end", "def credit_card_brand_options\n SUPPORTED_CARDTYPES.map { |brand| [I18n.t(brand, scope: 'activemerchant.credit_card.brand'), brand] }\n end", "def brand?\n @product.property('brand').present?\n end", "def feature_pay_mastercard\n details? ? details[\"Features\"][\"payMasterCard\"] : ''\n end", "def brand_code\n self.dig_for_string(\"officeSummary\", \"brandCode\")\n end", "def get_brand(id)\n nutritionix_request('brand',::CGI::escape(id), {})\n end", "def organisation_brand(organisation)\n return unless organisation\n\n brand = organisation[\"details\"][\"brand\"]\n brand = \"executive-office\" if executive_order_crest?(organisation)\n brand\n end", "def default_payment_method?\n card_brand.present?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /farms GET /farms.json
def index @farms = Farm.all respond_with(@users,@farms) end
[ "def farms\n user = User.find(params[:id])\n\n @farms = []\n # Find which farms this user is authorized to access\n if (user.is_hog_owner?)\n @farms = user.owner.farms\n elsif user.is_barn_manager?\n @farms << user.owner.barn.location.farm\n elsif user.is_site_manager?\n @farms << user.owner.location.farm\n elsif user.is_farm_owner?\n @farms << user.owner.farm\n elsif user.is_admin?\n @farms = Farm.all\n end\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @farms }\n end\n end", "def index\n @farms = Farm.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @farms }\n format.json { render :json => @farms }\n end\n end", "def index\n @farms = Farm.all\n end", "def show\n @farming = Farming.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @farming }\n end\n end", "def index\n @farmings = Farming.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @farmings }\n end\n end", "def index\n @mastfarms = Mastfarm.limit(100)\n end", "def index\n @community_farms = CommunityFarm.all\n end", "def new\n @farm = Farm.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @farm }\n end\n end", "def show\n @foam = Foam.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @foam }\n end\n end", "def new\n @farm = Farm.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @farm }\n end\n end", "def index\n @favorite_routes = FavoriteRoute.all\n render json: @favorite_routes\n end", "def show\n @firearm = Firearm.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @firearm }\n end\n end", "def show\n @feast = Feast.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @feast }\n end\n end", "def new\n @farming = Farming.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @farming }\n end\n end", "def show\n @flight = Flight.find(params[:id])\n render json: @flight\n end", "def show\n @fmr = Fmr.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fmr }\n end\n end", "def show\n @farmer = Farmer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @farmer }\n end\n end", "def show\n @films_box = FilmsBox.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @films_box }\n end\n end", "def index\n @sub_farms = SubFarm.all\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
def cache(path, original_filename, output_filename, data=nil)
def cache(input_path, output_path, data=nil) path = input_path @new_hashes[input_path] = hash(@input_directory, input_path) if data @data[path] = data if data @wildcard_dependencies[path] = data[:wildcard_dependencies] if data[:wildcard_dependencies] @dependencies[path] = data[:dependencies] if data[:dependencies] end FileUtils.mkdir_p(File.dirname(cached_path_for(path))) if File.exist? File.join(@output_directory, output_path) FileUtils.cp(File.join(@output_directory, output_path), cached_path_for(path)) else FileUtils.cp(File.join(@input_directory, input_path), cached_path_for(input_path)) end end
[ "def cache_stored_file!; end", "def cache_stored_file!\n cache!\n end", "def cache_data filename, data, mtime = Iodine.time\n\t\t\tCACHE_LOCK.synchronize { CACHE_STORE[filename] = CacheObject.new( data, mtime ) }\n\t\t\tdata\n\t\tend", "def caching\n @caching = \"data_update[#{data_path}]\"\n end", "def cache!(new_file)\n super\n @old_tmp_file = new_file\n end", "def put_into_cache(data, filename)\n if @options.has_key?('cache')\n @options['cache'].each { |scheme|\n case(scheme)\n when 'storable'\n @@cache.save_storable(data, filename)\n when 'mem_share'\n @@cache.save_mem_share(data, filename)\n when 'mem_copy'\n @@cache.save_mem_copy(data, filename)\n else\n raise ArgumentError, \"Unsupported caching scheme: <#{scheme}>.\"\n end\n }\n end\n end", "def cache file_obj,data_result,url,username,password\n data_result[:uuid]=UUID.generate\n key=generate_key url,username,password \n \n begin\n data_result[:data_tmp_path] = store_data_to_tmp file_obj,data_result[:uuid]\n data_result[:time_stored]=Time.now\n @@file_cache[key]=data_result\n rescue Exception=>e \n @@file_cache[key]=nil\n end \n end", "def cache(file_obj, data_result, url, username, password)\n data_result[:uuid] = UUID.generate\n key = generate_key url, username, password\n\n begin\n data_result[:data_tmp_path] = store_data_to_tmp file_obj, data_result[:uuid]\n data_result[:time_stored] = Time.now\n @@file_cache[key] = data_result\n rescue Exception => e\n @@file_cache[key] = nil\n end\n end", "def store_cachefile(cache_filename, template)\n s = template.script\n s = \"\\#@ARGS #{template.args.join(',')}\\n#{s}\" if template.args\n tmp_filename = \"#{cache_filename}.#{rand()}\"\n File.open(tmp_filename, 'w') {|f| f.write(s) }\n File.rename(tmp_filename, cache_filename)\n end", "def cache_path; end", "def write_cached_file\n remove_cached_file\n FileUtils.mkdir_p File.dirname(cached_filename)\n File.open(cached_filename, File::CREAT|File::TRUNC|File::WRONLY, 0644) do |f|\n f.write data\n f.close\n end\n end", "def cache(filename, max_age, options=nil)\r\n\t\tdata = self.from_cache(filename, max_age, options)\r\n\t\t\r\n\t\tif(data != nil)\r\n\t\t\treturn data\r\n\t\telse\r\n\t\t\tdata = yield\r\n\t\t\tself.to_cache filename, data, options\r\n\t\t\treturn data\r\n\t\tend\r\n\r\n\tend", "def cache(path, &block)\n return yield unless cache? # Skip cache\n path = cache_path.join(path)\n if cache_ttl.zero? or (path.exist? 
and (Time.now - path.mtime) < cache_ttl)\n logger.debug \"Cache hit: #{path}\"\n return path.read \n end\n logger.debug \"Cache miss: #{path}\"\n path.parent.mkpath unless path.parent.exist?\n yield.tap { |s| path.open('w') { |f| f << s.to_s } }\n end", "def cache_file(input)\n key = Digest.hexencode(Digest::SHA2.digest(input.to_yaml))\n return @directory + \"/\" + key + \".yaml\"\n end", "def cache_path(for_file = T.unsafe(nil)); end", "def cache_file\n File.join( cache_dir, Dir.pwd.hash.to_s )\nend", "def cache(uri, obj)\n filename=cacheFileName(uri)\n print(\"Creating #{filename}\\n\")\n File.open(filename, 'w') {|f| f.write(obj) }\nend", "def cache_write_file(cache_file, content) \n # only cache GET request [http://rack.rubyforge.org/doc/classes/Rack/Request.html#M000239]\n if request.get?\n FileUtils.mkdir_p(File.dirname(cache_file)) rescue \"ERROR: could NOT create the cache directory: [ #{File.dirname(cache_file)} ]\"\n File.open(cache_file, 'wb'){ |f| f << content}\n end\n return content\n end", "def cache_at_client(filename,s)\n s.puts(\"CACHE:\\n\")\n s.puts(filename)\n s.puts(\"\\n\")\n data = s.read\n\tdest_file = File.open(filename, 'wb')\n\tdest_file.print(data)\n\tdest_file.close\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create L2VPN session Create L2VPN session and bind to a L2VPNService
def create_l2_vpn_session(l2_vpn_session, opts = {}) data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts) data end
[ "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling VpnApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_l2_vpn_session(l2_vpn_session, opts = {})\n data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts)\n return data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session ...'\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_or_patch_l2_vpn(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts = {})\n create_or_patch_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n nil\n end", "def create_l2_vpn_service_with_http_info(l2_vpn_service, opts = {})\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_service ...\"\n end\n # verify the required parameter 'l2_vpn_service' is set\n if @api_client.config.client_side_validation && l2_vpn_service.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_service' when calling VpnApi.create_l2_vpn_service\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/services\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_service)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnService')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_service\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_or_replace_l2_vpn(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts = {})\n data, _status_code, _headers = create_or_replace_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n data\n end", "def create_or_patch_tier1_l2_vpn_session_0(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n create_or_patch_tier1_l2_vpn_session_0_with_http_info(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session, opts)\n nil\n end", "def create_or_replace_l2_vpn_0(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts = {})\n data, _status_code, _headers = create_or_replace_l2_vpn_0_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n data\n end", "def create_openvpn_server\n\n\t\treturn if self.status == \"Online\"\n\n\t\tbegin\n\t\t\tloop_until_server_online\n\t\trescue Exception => e\n\t\t\tif self.retry_count <= 3 then\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.status = \"Pending\" # keep status set to pending\n\t\t\t\tsave!\n\t\t\t\t# delete the existing cloud server instance\n\t\t\t\tif not self.cloud_server_id_number.nil? then\n\t\t\t\t\tdelete_cloud_server(self.cloud_server_id_number)\n\t\t\t\tend\n\t\t\t\tsleep 10\n\t\t\t\tcreate_cloud_server\n\t\t\t\treturn\n\t\t\tend\n\t\tend\n\n\t\tvpn_server=OpenvpnConfig::Server.new(self.external_ip_addr, self.internal_ip_addr, self.server_group.domain_name, self.server_group.vpn_network, self.server_group.vpn_subnet, \"root\", self.server_group.ssh_key_basepath)\n\t\tvpn_server.logger=Logger.new(STDOUT)\n\t\tvpn_server.install_openvpn\n\t\tif vpn_server.configure_vpn_server(self.name) then\n\t\t\tself.status = \"Online\"\n\t\t\tsave\n\n\t\t\tovpn_server_val=0\n\t\t\t# use 'f' on SQLite\n\t\t\tif Server.connection.adapter_name =~ /SQLite/ then\n\t\t\t\tovpn_server_val=\"f\"\n\t\t\tend\n\t\t\tServer.find(:all, :conditions => [\"server_group_id = ? 
AND openvpn_server = ?\", self.server_group_id, ovpn_server_val]).each do |vpn_client|\n\t\t\t\tvpn_client.create_openvpn_client\n\t\t\tend\n\t\telse\n\t\t\tfail_and_raise \"Failed to install OpenVPN on the server.\"\n\t\tend\n\n\tend", "def create_or_update_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation && tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#create_or_update_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_l2_vpn_service_with_http_info(l2_vpn_service, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnServicesApi.create_l2_vpn_service ...'\n end\n # verify the required parameter 'l2_vpn_service' is set\n if @api_client.config.client_side_validation && l2_vpn_service.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_service' when calling ManagementPlaneApiVpnL2VpnServicesApi.create_l2_vpn_service\"\n end\n # resource path\n 
local_var_path = '/vpn/l2vpn/services'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_service)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnService')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnServicesApi#create_l2_vpn_service\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_openvpn_client\n\n\t\treturn if self.status == \"Online\"\n\n\t\tvpn_server=Server.find(:first, :conditions => [\"server_group_id = ? AND openvpn_server = ?\", self.server_group_id, true])\n\n\t\tbegin\n\t\t\tloop_until_server_online\n\n\t\t\t# server is online but can't ping OpenVPN servers .10 IP\n\t\t\tif not ping_test(vpn_server.internal_ip_addr) then\n\t\t\t\tcs_conn=self.cloud_server_init\n\t\t\t\tcs_conn.reboot_server(self.cloud_server_id_number)\n\t\t\t\tself.add_error_message(\"Server failed ping test.\")\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.save\n\t\t\t\tsleep 20\n Server.create_vpn_client_for_type(self)\n\t\t\t\treturn\n\t\t\tend\n\n\t\trescue Exception => e\n\t\t\tif self.retry_count <= 3 then\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.status = \"Pending\" # keep status set to pending\n\t\t\t\tsave!\n\t\t\t\t# delete the existing cloud server instance\n\t\t\t\tif not self.cloud_server_id_number.nil? then\n\t\t\t\t\tdelete_cloud_server(self.cloud_server_id_number)\n\t\t\t\t\tself.cloud_server_id_number=nil\n\t\t\t\t\tsave!\n\t\t\t\tend\n\t\t\t\tsleep 10\n\t\t\t\tAsyncExec.run_job(CreateCloudServer, self.id, true)\n\t\t\t\treturn\n\t\t\tend\n\t\tend\n\n\t\tvpn_server_config=OpenvpnConfig::Server.new(vpn_server.external_ip_addr, vpn_server.internal_ip_addr, self.server_group.domain_name, vpn_server.server_group.vpn_network, vpn_server.server_group.vpn_subnet, vpn_server.server_group.vpn_device, vpn_server.server_group.vpn_proto, \"root\", self.server_group.ssh_key_basepath)\n\n\t\tclient=OpenvpnConfig::LinuxClient.new(vpn_server_config, self.external_ip_addr, \"root\", self.server_group.ssh_key_basepath)\n\t\tclient.logger=Rails.logger\n\t\tclient.install_openvpn\n\t\tself.vpn_network_interfaces.each_with_index do |vni, index|\n\t\t\tclient_name = (index == 0) ? 
self.name : \"#{self.name}-#{index.to_s}\"\n\t\t\tif not client.configure_client_vpn(client_name, vni.vpn_ip_addr, vni.ptp_ip_addr) then\n\t\t\t\tfail_and_raise \"Failed to configure OpenVPN on the client.\"\n\t\t\tend\n\t\tend\n\n\t\tif self.server_command then\n\t\t\tif not Util::Ssh.run_cmd(self.external_ip_addr, self.server_command.command, \"root\", self.server_group.ssh_key_basepath)\n\t\t\t\tfail_and_raise \"Failed to run post install command.\"\n\t\t\tend\n\t\tend\n\n\t\tif not client.start_openvpn then\n\t\t\tfail_and_raise \"Failed to configure OpenVPN on the client.\"\n\t\tend\n\n\t\t# mark the client as online\n\t\tself.status = \"Online\"\n\t\tsave\n\t\t\n\tend", "def create_or_patch_l2_vpn_session_0(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n create_or_patch_l2_vpn_session_0_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts)\n nil\n end", "def initialize(options)\n pass = nil\n \n # Parsing Options - Set to default values if missing\n if !options.has_key?(\"Host\")\n options[\"Host\"] = \"localhost\"\n end\n \n if !options.has_key?(\"Port\")\n options[\"Port\"] = 1234\n end\n \n if !options.has_key?(\"Timeout\")\n options[\"Timeout\"] = 10 \n end\n \n if options.has_key?(\"Password\")\n pass = options[\"Password\"]\n options.delete(\"Password\")\n end\n \n # Add Prompt to options\n options[\"Prompt\"] = />INFO:OpenVPN.*\\n/\n \n # Create Socket Telnet Connection\n @sock = Net::Telnet::new(options)\n\n # Password Management\n # ----------------------\n # This is just a little trick. \n # The openvpn telnet server for management requests just password without username. \n # The Net::Telnet client wait first for username prompt indeed, so we have to deceive it\n # that there is a user without pass, and this is made inverting the prompt values and \n # sending just pass prompt and pass value :)\n \n\t if !pass.nil?\n \t @sock.login(\"LoginPrompt\" => /ENTER PASSWORD:/, \"Name\" => pass) \n end\n end", "def create_or_patch_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation && tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 
'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PATCH, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#create_or_patch_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def start\n return ThreadComm.new{|comm_in, comm_out|\n halt = false\n retry_cnt= 0\n\n Net.delete_namespace(@config.namespace) if @config.isolate\n while !halt do\n begin\n retry_str = retry_cnt > 0 ? 
\" - retry #{retry_cnt}\" : \"\"\n\n # (1) Main thread: Write out the auth file for OpenVPN to read\n File.open(@config.auth, \"w\", 0600){|f|\n f.puts(@config.login.user)\n f.puts(@config.login.pass)\n }\n\n # (2) Main thred: Create network namespace if required\n namespace_exec = \"\"\n if @config.isolate\n Log.warn(\"Starting isolated VPN connection#{retry_str}\")\n if @config.nameservers.any?\n network = Net::Network.new(nil, nil, nil, @config.nameservers)\n Net.create_namespace(@config.namespace, network: network)\n else\n Net.create_namespace(@config.namespace)\n end\n namespace_exec = \"ip netns exec #{@config.namespace} \"\n msg = \"Namespace #{@config.namespace} can't reach google.com\"\n raise(Exception, msg) if !Net.namespace_connectivity?(@config.namespace, 'google.com')\n else\n Log.warn(\"Starting machine wide VPN connection#{retry_str}\")\n end\n\n # (3) OpenVPN thread: manages the openvpn process (inside/outside namespace)\n Log.info(\"Using OpenVPN config #{@config.ovpn.colorize(:cyan)}\")\n ovpndir = \"/etc/openvpn/client\"\n env_path = \"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"\n cmd = \"openvpn --nobind --auth-nocache --setenv '#{env_path}'\"\n cmd += \" --config #{@config.ovpn} --auth-user-pass #{@config.auth}\"\n\n # Add dynamic DNS updates from VPN connection\n if !@config.nameservers || !@config.nameservers.any?\n resolved_script = \"/etc/openvpn/scripts/update-systemd-resolved\"\n cmd += \" --script-security 2 --up #{resolved_script} --down #{resolved_script} --down-pre\"\n end\n\n stdin, stdout, openvpn_thread = Open3.popen2e(\"#{namespace_exec}bash -c '#{Net.proxy_export}#{cmd}'\", chdir: ovpndir)\n\n # (4) Log thread: Start a separate log thread drain stdout to logs\n log_thread = Thread.new{\n while line=stdout.gets do\n if line.include?(\"Initialization Sequence Completed\")\n comm_in << ThreadMsg.new(Model::CommCmd.vpn_up)\n end\n if msg = Model::FailMessages.find{|x| line.include?(x)}\n comm_in << ThreadMsg.new(Model::CommCmd.fail, msg)\n end\n\n # Trim out OVPN timestamps e.g. 
Fri Jul 20 07:14:55 2018\n line = line[line.index(Time.now.year.to_s)+5..-1] if line.include?(Time.now.year.to_s)\n Log.info(\"#{'OVPN:'.colorize(:yellow)} #{line.strip}\")\n end\n }\n\n # (5) Main thread: Manage thread communication and actions\n wait_cnt = 0\n vpn_started = false\n while true do\n if !comm_in.empty?\n msg = comm_in.pop\n\n # Wait for tunnel to be created\n if msg.cmd == Model::CommCmd.vpn_up && !vpn_started\n vpn_started = true\n Log.info(\"VPN #{@config.name.colorize(:cyan)} is up and running\")\n comm_out << ThreadMsg.new(Model::CommCmd.vpn_up)\n\n # Add any routes from the configuration\n if @config.routes && @config.routes.any?\n tun_route = Socket.getifaddrs.find{|x| x.addr and x.name == 'tun0' and x.addr.ipv4?}.dstaddr.ip_address\n @config.routes.each{|route|\n Log.info(\"Adding route #{route.colorize(:cyan)} for #{tun_route.colorize(:cyan)} on tun0\", newline:false)\n exec_with_status(\"ip route add #{route} via #{tun_route} dev tun0\")\n }\n end\n\n # Drop default route to avoid isolation leaks\n if @config.isolate\n Log.info(\"Dropping #{'default'.colorize(:cyan)} route\", newline:false)\n exec_with_status(\"ip netns exec #{@config.namespace} ip route del default\")\n end\n\n # Launch apps in the namespace\n (@config.apps || []).each{|app| self.launch_app_in_namespace(app)}\n end\n\n # Terminate on halt, potentially retry on errors\n if msg.cmd == Model::CommCmd.halt\n Log.warn(\"Halting the management thread\")\n raise SystemExit\n elsif msg.cmd == Model::CommCmd.fail\n Log.error(msg.value)\n raise(Exception, \"OpenVPN connection failed\")\n end\n else\n sleep(0.1)\n end\n\n # Bail if vpn is down and log is drained\n if !openvpn_thread.alive?\n sleep(0.1)\n raise(Exception, \"OpenVPN connection failed\") if comm_in.empty?\n elsif !vpn_started\n Log.info(\"Waiting for vpn #{@config.name.colorize(:cyan)}...\") if wait_cnt % 10 == 0\n wait_cnt += 1\n end\n end\n\n # Allow for aborting\n rescue SystemExit, Interrupt\n halt = true\n\n # Log exceptions\n rescue Exception => e\n Log.error(e.to_s)\n comm_out << ThreadMsg.new(Model::CommCmd.tun_down)\n halt = true if !@config.retry\n\n # Clean up on errors and try again\n ensure\n File.delete(@config.auth) if File.exist?(@config.auth)\n stdin.close if !stdin.nil?\n stdout.close if !stdout.nil?\n self.stop(openvpn_thread)\n\n # Wait for log to drain then send out halt notification\n sleep(0.25)\n while !Log.empty? do\n sleep(0.25)\n end\n comm_out << ThreadMsg.new(Model::CommCmd.halt) if halt\n end\n\n # Retry sleep logic\n if halt\n Log.warn(\"System exit or user terminated!\")\n else\n retry_cnt += 1\n Log.warn(\"OpenVPN terminated/disconnected waiting 10\")\n sleep(10)\n end\n end\n }\n end", "def vpn( command = nil )\n log.info(x) { \"VPN connection command #{command} has been issued.\" }\n vpn_uc = SafeDb::Vpn.new\n vpn_uc.command = command if command\n vpn_uc.flow()\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create L2VPN session Create L2VPN session and bind to a L2VPNService
def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session ...' end # verify the required parameter 'l2_vpn_session' is set if @api_client.config.client_side_validation && l2_vpn_session.nil? fail ArgumentError, "Missing the required parameter 'l2_vpn_session' when calling ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session" end # resource path local_var_path = '/vpn/l2vpn/sessions' # query parameters query_params = {} # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = @api_client.object_to_http_body(l2_vpn_session) auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:POST, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => 'L2VpnSession') if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#create_l2_vpn_session\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
[ "def create_l2_vpn_session(l2_vpn_session, opts = {})\n data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts)\n data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling VpnApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_l2_vpn_session(l2_vpn_session, opts = {})\n data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts)\n return data\n end", "def create_or_patch_l2_vpn(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts = {})\n create_or_patch_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n nil\n end", "def create_l2_vpn_service_with_http_info(l2_vpn_service, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_service ...\"\n end\n # verify the required parameter 'l2_vpn_service' is set\n if @api_client.config.client_side_validation && l2_vpn_service.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_service' when calling VpnApi.create_l2_vpn_service\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/services\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_service)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnService')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_service\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_or_replace_l2_vpn(tier_0_id, 
locale_service_id, l2vpn_id, l2_vpn, opts = {})\n data, _status_code, _headers = create_or_replace_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n data\n end", "def create_or_patch_tier1_l2_vpn_session_0(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n create_or_patch_tier1_l2_vpn_session_0_with_http_info(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session, opts)\n nil\n end", "def create_or_replace_l2_vpn_0(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts = {})\n data, _status_code, _headers = create_or_replace_l2_vpn_0_with_http_info(tier_0_id, locale_service_id, l2vpn_id, l2_vpn, opts)\n data\n end", "def create_openvpn_server\n\n\t\treturn if self.status == \"Online\"\n\n\t\tbegin\n\t\t\tloop_until_server_online\n\t\trescue Exception => e\n\t\t\tif self.retry_count <= 3 then\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.status = \"Pending\" # keep status set to pending\n\t\t\t\tsave!\n\t\t\t\t# delete the existing cloud server instance\n\t\t\t\tif not self.cloud_server_id_number.nil? then\n\t\t\t\t\tdelete_cloud_server(self.cloud_server_id_number)\n\t\t\t\tend\n\t\t\t\tsleep 10\n\t\t\t\tcreate_cloud_server\n\t\t\t\treturn\n\t\t\tend\n\t\tend\n\n\t\tvpn_server=OpenvpnConfig::Server.new(self.external_ip_addr, self.internal_ip_addr, self.server_group.domain_name, self.server_group.vpn_network, self.server_group.vpn_subnet, \"root\", self.server_group.ssh_key_basepath)\n\t\tvpn_server.logger=Logger.new(STDOUT)\n\t\tvpn_server.install_openvpn\n\t\tif vpn_server.configure_vpn_server(self.name) then\n\t\t\tself.status = \"Online\"\n\t\t\tsave\n\n\t\t\tovpn_server_val=0\n\t\t\t# use 'f' on SQLite\n\t\t\tif Server.connection.adapter_name =~ /SQLite/ then\n\t\t\t\tovpn_server_val=\"f\"\n\t\t\tend\n\t\t\tServer.find(:all, :conditions => [\"server_group_id = ? 
AND openvpn_server = ?\", self.server_group_id, ovpn_server_val]).each do |vpn_client|\n\t\t\t\tvpn_client.create_openvpn_client\n\t\t\tend\n\t\telse\n\t\t\tfail_and_raise \"Failed to install OpenVPN on the server.\"\n\t\tend\n\n\tend", "def create_or_update_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation && tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_update_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#create_or_update_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_l2_vpn_service_with_http_info(l2_vpn_service, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnServicesApi.create_l2_vpn_service ...'\n end\n # verify the required parameter 'l2_vpn_service' is set\n if @api_client.config.client_side_validation && l2_vpn_service.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_service' when calling ManagementPlaneApiVpnL2VpnServicesApi.create_l2_vpn_service\"\n end\n # resource path\n 
local_var_path = '/vpn/l2vpn/services'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_service)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnService')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnServicesApi#create_l2_vpn_service\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_openvpn_client\n\n\t\treturn if self.status == \"Online\"\n\n\t\tvpn_server=Server.find(:first, :conditions => [\"server_group_id = ? AND openvpn_server = ?\", self.server_group_id, true])\n\n\t\tbegin\n\t\t\tloop_until_server_online\n\n\t\t\t# server is online but can't ping OpenVPN servers .10 IP\n\t\t\tif not ping_test(vpn_server.internal_ip_addr) then\n\t\t\t\tcs_conn=self.cloud_server_init\n\t\t\t\tcs_conn.reboot_server(self.cloud_server_id_number)\n\t\t\t\tself.add_error_message(\"Server failed ping test.\")\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.save\n\t\t\t\tsleep 20\n Server.create_vpn_client_for_type(self)\n\t\t\t\treturn\n\t\t\tend\n\n\t\trescue Exception => e\n\t\t\tif self.retry_count <= 3 then\n\t\t\t\tself.retry_count += 1\n\t\t\t\tself.status = \"Pending\" # keep status set to pending\n\t\t\t\tsave!\n\t\t\t\t# delete the existing cloud server instance\n\t\t\t\tif not self.cloud_server_id_number.nil? then\n\t\t\t\t\tdelete_cloud_server(self.cloud_server_id_number)\n\t\t\t\t\tself.cloud_server_id_number=nil\n\t\t\t\t\tsave!\n\t\t\t\tend\n\t\t\t\tsleep 10\n\t\t\t\tAsyncExec.run_job(CreateCloudServer, self.id, true)\n\t\t\t\treturn\n\t\t\tend\n\t\tend\n\n\t\tvpn_server_config=OpenvpnConfig::Server.new(vpn_server.external_ip_addr, vpn_server.internal_ip_addr, self.server_group.domain_name, vpn_server.server_group.vpn_network, vpn_server.server_group.vpn_subnet, vpn_server.server_group.vpn_device, vpn_server.server_group.vpn_proto, \"root\", self.server_group.ssh_key_basepath)\n\n\t\tclient=OpenvpnConfig::LinuxClient.new(vpn_server_config, self.external_ip_addr, \"root\", self.server_group.ssh_key_basepath)\n\t\tclient.logger=Rails.logger\n\t\tclient.install_openvpn\n\t\tself.vpn_network_interfaces.each_with_index do |vni, index|\n\t\t\tclient_name = (index == 0) ? 
self.name : \"#{self.name}-#{index.to_s}\"\n\t\t\tif not client.configure_client_vpn(client_name, vni.vpn_ip_addr, vni.ptp_ip_addr) then\n\t\t\t\tfail_and_raise \"Failed to configure OpenVPN on the client.\"\n\t\t\tend\n\t\tend\n\n\t\tif self.server_command then\n\t\t\tif not Util::Ssh.run_cmd(self.external_ip_addr, self.server_command.command, \"root\", self.server_group.ssh_key_basepath)\n\t\t\t\tfail_and_raise \"Failed to run post install command.\"\n\t\t\tend\n\t\tend\n\n\t\tif not client.start_openvpn then\n\t\t\tfail_and_raise \"Failed to configure OpenVPN on the client.\"\n\t\tend\n\n\t\t# mark the client as online\n\t\tself.status = \"Online\"\n\t\tsave\n\t\t\n\tend", "def create_or_patch_l2_vpn_session_0(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n create_or_patch_l2_vpn_session_0_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts)\n nil\n end", "def initialize(options)\n pass = nil\n \n # Parsing Options - Set to default values if missing\n if !options.has_key?(\"Host\")\n options[\"Host\"] = \"localhost\"\n end\n \n if !options.has_key?(\"Port\")\n options[\"Port\"] = 1234\n end\n \n if !options.has_key?(\"Timeout\")\n options[\"Timeout\"] = 10 \n end\n \n if options.has_key?(\"Password\")\n pass = options[\"Password\"]\n options.delete(\"Password\")\n end\n \n # Add Prompt to options\n options[\"Prompt\"] = />INFO:OpenVPN.*\\n/\n \n # Create Socket Telnet Connection\n @sock = Net::Telnet::new(options)\n\n # Password Management\n # ----------------------\n # This is just a little trick. \n # The openvpn telnet server for management requests just password without username. \n # The Net::Telnet client wait first for username prompt indeed, so we have to deceive it\n # that there is a user without pass, and this is made inverting the prompt values and \n # sending just pass prompt and pass value :)\n \n\t if !pass.nil?\n \t @sock.login(\"LoginPrompt\" => /ENTER PASSWORD:/, \"Name\" => pass) \n end\n end", "def create_or_patch_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation && tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # verify the required parameter 
'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.create_or_patch_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PATCH, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#create_or_patch_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def start\n return ThreadComm.new{|comm_in, comm_out|\n halt = false\n retry_cnt= 0\n\n Net.delete_namespace(@config.namespace) if @config.isolate\n while !halt do\n begin\n retry_str = retry_cnt > 0 ? 
\" - retry #{retry_cnt}\" : \"\"\n\n # (1) Main thread: Write out the auth file for OpenVPN to read\n File.open(@config.auth, \"w\", 0600){|f|\n f.puts(@config.login.user)\n f.puts(@config.login.pass)\n }\n\n # (2) Main thred: Create network namespace if required\n namespace_exec = \"\"\n if @config.isolate\n Log.warn(\"Starting isolated VPN connection#{retry_str}\")\n if @config.nameservers.any?\n network = Net::Network.new(nil, nil, nil, @config.nameservers)\n Net.create_namespace(@config.namespace, network: network)\n else\n Net.create_namespace(@config.namespace)\n end\n namespace_exec = \"ip netns exec #{@config.namespace} \"\n msg = \"Namespace #{@config.namespace} can't reach google.com\"\n raise(Exception, msg) if !Net.namespace_connectivity?(@config.namespace, 'google.com')\n else\n Log.warn(\"Starting machine wide VPN connection#{retry_str}\")\n end\n\n # (3) OpenVPN thread: manages the openvpn process (inside/outside namespace)\n Log.info(\"Using OpenVPN config #{@config.ovpn.colorize(:cyan)}\")\n ovpndir = \"/etc/openvpn/client\"\n env_path = \"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"\n cmd = \"openvpn --nobind --auth-nocache --setenv '#{env_path}'\"\n cmd += \" --config #{@config.ovpn} --auth-user-pass #{@config.auth}\"\n\n # Add dynamic DNS updates from VPN connection\n if !@config.nameservers || !@config.nameservers.any?\n resolved_script = \"/etc/openvpn/scripts/update-systemd-resolved\"\n cmd += \" --script-security 2 --up #{resolved_script} --down #{resolved_script} --down-pre\"\n end\n\n stdin, stdout, openvpn_thread = Open3.popen2e(\"#{namespace_exec}bash -c '#{Net.proxy_export}#{cmd}'\", chdir: ovpndir)\n\n # (4) Log thread: Start a separate log thread drain stdout to logs\n log_thread = Thread.new{\n while line=stdout.gets do\n if line.include?(\"Initialization Sequence Completed\")\n comm_in << ThreadMsg.new(Model::CommCmd.vpn_up)\n end\n if msg = Model::FailMessages.find{|x| line.include?(x)}\n comm_in << ThreadMsg.new(Model::CommCmd.fail, msg)\n end\n\n # Trim out OVPN timestamps e.g. 
Fri Jul 20 07:14:55 2018\n line = line[line.index(Time.now.year.to_s)+5..-1] if line.include?(Time.now.year.to_s)\n Log.info(\"#{'OVPN:'.colorize(:yellow)} #{line.strip}\")\n end\n }\n\n # (5) Main thread: Manage thread communication and actions\n wait_cnt = 0\n vpn_started = false\n while true do\n if !comm_in.empty?\n msg = comm_in.pop\n\n # Wait for tunnel to be created\n if msg.cmd == Model::CommCmd.vpn_up && !vpn_started\n vpn_started = true\n Log.info(\"VPN #{@config.name.colorize(:cyan)} is up and running\")\n comm_out << ThreadMsg.new(Model::CommCmd.vpn_up)\n\n # Add any routes from the configuration\n if @config.routes && @config.routes.any?\n tun_route = Socket.getifaddrs.find{|x| x.addr and x.name == 'tun0' and x.addr.ipv4?}.dstaddr.ip_address\n @config.routes.each{|route|\n Log.info(\"Adding route #{route.colorize(:cyan)} for #{tun_route.colorize(:cyan)} on tun0\", newline:false)\n exec_with_status(\"ip route add #{route} via #{tun_route} dev tun0\")\n }\n end\n\n # Drop default route to avoid isolation leaks\n if @config.isolate\n Log.info(\"Dropping #{'default'.colorize(:cyan)} route\", newline:false)\n exec_with_status(\"ip netns exec #{@config.namespace} ip route del default\")\n end\n\n # Launch apps in the namespace\n (@config.apps || []).each{|app| self.launch_app_in_namespace(app)}\n end\n\n # Terminate on halt, potentially retry on errors\n if msg.cmd == Model::CommCmd.halt\n Log.warn(\"Halting the management thread\")\n raise SystemExit\n elsif msg.cmd == Model::CommCmd.fail\n Log.error(msg.value)\n raise(Exception, \"OpenVPN connection failed\")\n end\n else\n sleep(0.1)\n end\n\n # Bail if vpn is down and log is drained\n if !openvpn_thread.alive?\n sleep(0.1)\n raise(Exception, \"OpenVPN connection failed\") if comm_in.empty?\n elsif !vpn_started\n Log.info(\"Waiting for vpn #{@config.name.colorize(:cyan)}...\") if wait_cnt % 10 == 0\n wait_cnt += 1\n end\n end\n\n # Allow for aborting\n rescue SystemExit, Interrupt\n halt = true\n\n # Log exceptions\n rescue Exception => e\n Log.error(e.to_s)\n comm_out << ThreadMsg.new(Model::CommCmd.tun_down)\n halt = true if !@config.retry\n\n # Clean up on errors and try again\n ensure\n File.delete(@config.auth) if File.exist?(@config.auth)\n stdin.close if !stdin.nil?\n stdout.close if !stdout.nil?\n self.stop(openvpn_thread)\n\n # Wait for log to drain then send out halt notification\n sleep(0.25)\n while !Log.empty? do\n sleep(0.25)\n end\n comm_out << ThreadMsg.new(Model::CommCmd.halt) if halt\n end\n\n # Retry sleep logic\n if halt\n Log.warn(\"System exit or user terminated!\")\n else\n retry_cnt += 1\n Log.warn(\"OpenVPN terminated/disconnected waiting 10\")\n sleep(10)\n end\n end\n }\n end", "def vpn( command = nil )\n log.info(x) { \"VPN connection command #{command} has been issued.\" }\n vpn_uc = SafeDb::Vpn.new\n vpn_uc.command = command if command\n vpn_uc.flow()\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Delete a L2VPN session Delete a specific L2VPN session. If there are any logical switch ports attached to it, those need to be deleted first.
def delete_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.delete_l2_vpn_session ...' end # verify the required parameter 'l2vpn_session_id' is set if @api_client.config.client_side_validation && l2vpn_session_id.nil? fail ArgumentError, "Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.delete_l2_vpn_session" end # resource path local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s) # query parameters query_params = {} # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = nil auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names) if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#delete_l2_vpn_session\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
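As a side note, a minimal Ruby usage sketch for the generated method in this record follows. The gem and module name (nsxt_manager_client / NSXTManagerClient), the host, credentials, and the session id are illustrative assumptions only; the call itself mirrors the signature shown above.

# Usage sketch (assumptions: gem/module name, host, credentials, session id).
require 'nsxt_manager_client'            # hypothetical gem name

config = NSXTManagerClient::Configuration.new
config.host     = 'nsx-mgr.example.com'  # placeholder NSX Manager host
config.username = 'admin'                # placeholder credentials
config.password = 'CHANGE_ME'

api = NSXTManagerClient::ManagementPlaneApiVpnL2VpnSessionsApi.new(
  NSXTManagerClient::ApiClient.new(config)
)

# Detach any logical switch ports first (see the description above), then
# delete the session; the _with_http_info variant returns [data, status, headers].
_data, status, _headers =
  api.delete_l2_vpn_session_with_http_info('11111111-2222-3333-4444-555555555555')
puts "delete returned HTTP #{status}"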
[ "def delete_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.delete_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.delete_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#delete_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def delete\n if @session\n @session.destroy\n @session = nil\n end\n end", "def delete\n if @session\n @session.destroy\n @session = nil\n end\n end", "def delete_tier1_l2_vpn_session(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n delete_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts)\n nil\n end", "def deleteSession(name,node=nil)\n Jiocloud::Utils.put(sessionurl + '/destroy/' + getSessionID({:name => name,:node => node}),'')\n end", "def destroy\n close_gateway\n response = @@client[\"tunnels/#{@id}\"].delete.body\n refresh!\n end", "def delete_session(session_id)\n delete(\"session:#{session_id}\")\n end", "def destroy_session\n response_handler(rest_delete('/rest/login-sessions'))\n self\n end", "def delete_session(session_id)\n if item = find(:session_id => session_id)\n item.delete\n end\n end", "def delete_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.delete_tier1_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_1_id' is set\n if @api_client.config.client_side_validation && tier_1_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_1_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.delete_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.delete_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.delete_tier1_l2_vpn_session\"\n end\n 
# verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.delete_tier1_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-1s/{tier-1-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-1-id' + '}', tier_1_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#delete_tier1_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def delete_session(req, sid, _options)\n # Get the current database record for this session then delete it.\n find_session_model(req, sid).delete\n # Generate a new one and return it's ID\n find_session_model(req).session_id\n end", "def delete_sessions(node)\n @sessions.delete_all(node)\n end", "def destroy\n set_session\n\n if @session.destroy\n respond_to do |format|\n format.html { redirect_back allow_other_host: false, fallback_location: batch_connect_sessions_url, notice: t(\"dashboard.batch_connect_sessions_status_blurb_delete_success\") }\n format.json { head :no_content }\n end\n else\n respond_to do |format|\n format.html { redirect_back allow_other_host: false, fallback_location: batch_connect_sessions_url, alert: t(\"dashboard.batch_connect_sessions_status_blurb_delete_failure\") }\n format.json { render json: @session.errors, status: :unprocessable_entity }\n end\n end\n end", "def delete\n client_opts = {}\n client_opts[:vpn_connection_id] = vpn_connection_id\n client.delete_vpn_connection(client_opts)\n nil\n end", "def destroy\n set_session\n\n if @session.destroy\n respond_to do |format|\n format.html { redirect_to batch_connect_sessions_url, notice: t('dashboard.batch_connect_sessions_status_blurb_delete_success') }\n format.json { head :no_content }\n end\n else\n respond_to do |format|\n format.html { redirect_to batch_connect_sessions_url, alert: t('dashboard.batch_connect_sessions_status_blurb_delete_failure') }\n format.json { render json: @session.errors, status: :unprocessable_entity }\n end\n end\n end", "def delete_tier1_ip_sec_vpn_session(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n delete_tier1_ip_sec_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts)\n nil\n end", "def destroy\n @tunnel = Tunnel.find(params[:id])\n @tunnel.destroy\n\n respond_to do |format|\n format.html { redirect_to tunnels_url }\n format.json { head 
:no_content }\n end\n end", "def remove_guest\n session.delete(:guest_id)\n end", "def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get a L2VPN session Get a specific L2VPN session
def get_l2_vpn_session(l2vpn_session_id, opts = {}) data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts) data end
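A short usage sketch of this convenience wrapper follows, assuming the same hypothetical client gem and configuration as in the earlier sketch; the module name, credentials, and session id are placeholders, not part of this record.

# Usage sketch (names and credentials are assumptions).
require 'nsxt_manager_client'

NSXTManagerClient.configure do |c|
  c.host     = 'nsx-mgr.example.com'
  c.username = 'admin'
  c.password = 'CHANGE_ME'
end

api = NSXTManagerClient::ManagementPlaneApiVpnL2VpnSessionsApi.new
# The wrapper returns only the deserialized L2VpnSession model.
session = api.get_l2_vpn_session('11111111-2222-3333-4444-555555555555')
puts session.inspect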
[ "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if 
@api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling VpnApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n data, _status_code, _headers = get_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts)\n data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session ...'\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n data\n end", "def get_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation 
&& tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_1_id' is set\n if @api_client.config.client_side_validation && tier_1_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_1_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required 
parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-1s/{tier-1-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-1-id' + '}', tier_1_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#get_tier1_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? 
&& opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def get_session(id)\n return @sessions[id.to_s]\n end", "def getSession(args = {})\n if args.key?(:id) && ! args[:id].nil?\n session = Jiocloud::Utils.get(sessionurl + '/info/' + args[:id])\n elsif args.key?(:name) && ! args[:name].nil?\n if args.key?(:node) && ! 
args[:node].nil?\n sessions = Jiocloud::Utils.get(sessionurl + '/node/' + args[:node])\n else\n sessions = Jiocloud::Utils.get(sessionurl + '/list')\n end\n session = sessions.select {|session| session['Name'] == args[:name]}\n end\n\n if session.empty?\n return {}\n elsif session.count > 1\n raise(\"Multiple matching (#{session.count}) Consul Sessions found for #{args[:name]}\")\n else\n return session[0]\n end\n end", "def read_l2_vpn(tier_0_id, locale_service_id, l2vpn_id, opts = {})\n data, _status_code, _headers = read_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, opts)\n data\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_remote_macs_for_ls ...\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling VpnApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{session-id}/remote-mac\".sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_session\n @sessions = new_session_list if !@sessions\n @active_session = @sessions[0] if !@active_session\n @active_session\n end", "def create_l2_vpn_session(l2_vpn_session, opts = {})\n data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts)\n return data\n end", "def get_session(arg1, arg2=nil)\n\t\tif arg2.nil?\n\t\t\t# 1.\n\t\t\taddr = arg1\n\t\telse\n\t\t\t# 2.\n\t\t\thost = arg1\n\t\t\tport = arg2\n\t\t\taddr = Address.new(host, port)\n\t\tend\n\n\t\t@pool[addr] ||= Session.new(@builder, addr, @loop)\n\tend", "def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_peer_codes ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session_peer_codes\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n 
header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionPeerCodes')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_peer_codes\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get a L2VPN session Get a specific L2VPN session
def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...' end # verify the required parameter 'l2vpn_session_id' is set if @api_client.config.client_side_validation && l2vpn_session_id.nil? fail ArgumentError, "Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session" end # resource path local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s) # query parameters query_params = {} # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = nil auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:GET, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => 'L2VpnSession') if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
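A brief sketch of calling the lower-level _with_http_info variant shown above, useful when the HTTP status code or response headers are also needed; the gem/module name, client configuration, and session id remain illustrative assumptions.

# Usage sketch (client configured as in the earlier sketches; names are assumptions).
require 'nsxt_manager_client'

api = NSXTManagerClient::ManagementPlaneApiVpnL2VpnSessionsApi.new  # uses the default ApiClient

# Returns the deserialized model plus the raw status code and headers.
session, status, _headers =
  api.get_l2_vpn_session_with_http_info('11111111-2222-3333-4444-555555555555')
puts "HTTP status: #{status}"
puts session.inspect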
[ "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n data\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.create_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling VpnApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n data, _status_code, _headers = get_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, 
session_id, opts)\n data\n end", "def create_l2_vpn_session_with_http_info(l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session ...'\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling ManagementPlaneApiVpnL2VpnSessionsApi.create_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#create_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n data\n end", "def get_l2_vpn_session_with_http_info(tier_0_id, locale_service_id, service_id, session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_0_id' is set\n if @api_client.config.client_side_validation && tier_0_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_0_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-0s/{tier-0-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-0-id' + '}', tier_0_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # 
header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session ...'\n end\n # verify the required parameter 'tier_1_id' is set\n if @api_client.config.client_side_validation && tier_1_id.nil?\n fail ArgumentError, \"Missing the required parameter 'tier_1_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'locale_service_id' is set\n if @api_client.config.client_side_validation && locale_service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'locale_service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi.get_tier1_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/global-infra/tier-1s/{tier-1-id}/locale-services/{locale-service-id}/l2vpn-services/{service-id}/sessions/{session-id}'.sub('{' + 'tier-1-id' + '}', tier_1_id.to_s).sub('{' + 'locale-service-id' + '}', locale_service_id.to_s).sub('{' + 'service-id' + '}', service_id.to_s).sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: 
PolicyNetworkingNetworkServicesVPNL2VPNSessionsApi#get_tier1_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def get_session(id)\n return @sessions[id.to_s]\n end", "def getSession(args = {})\n if args.key?(:id) && ! args[:id].nil?\n session = Jiocloud::Utils.get(sessionurl + '/info/' + args[:id])\n elsif args.key?(:name) && ! args[:name].nil?\n if args.key?(:node) && ! 
args[:node].nil?\n sessions = Jiocloud::Utils.get(sessionurl + '/node/' + args[:node])\n else\n sessions = Jiocloud::Utils.get(sessionurl + '/list')\n end\n session = sessions.select {|session| session['Name'] == args[:name]}\n end\n\n if session.empty?\n return {}\n elsif session.count > 1\n raise(\"Multiple matching (#{session.count}) Consul Sessions found for #{args[:name]}\")\n else\n return session[0]\n end\n end", "def read_l2_vpn(tier_0_id, locale_service_id, l2vpn_id, opts = {})\n data, _status_code, _headers = read_l2_vpn_with_http_info(tier_0_id, locale_service_id, l2vpn_id, opts)\n data\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_remote_macs_for_ls ...\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling VpnApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{session-id}/remote-mac\".sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_session\n @sessions = new_session_list if !@sessions\n @active_session = @sessions[0] if !@active_session\n @active_session\n end", "def create_l2_vpn_session(l2_vpn_session, opts = {})\n data, _status_code, _headers = create_l2_vpn_session_with_http_info(l2_vpn_session, opts)\n return data\n end", "def get_session(arg1, arg2=nil)\n\t\tif arg2.nil?\n\t\t\t# 1.\n\t\t\taddr = arg1\n\t\telse\n\t\t\t# 2.\n\t\t\thost = arg1\n\t\t\tport = arg2\n\t\t\taddr = Address.new(host, port)\n\t\tend\n\n\t\t@pool[addr] ||= Session.new(@builder, addr, @loop)\n\tend", "def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_peer_codes ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session_peer_codes\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n 
header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionPeerCodes')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_peer_codes\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get peer codes for the L2VpnSession Get peer codes for the L2VPN session to program the remote side of the tunnel.
def get_l2_vpn_session_peer_codes(l2vpn_session_id, opts = {}) data, _status_code, _headers = get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts) data end
[ "def get_l2_vpn_session_peer_codes(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_peer_codes ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session_peer_codes\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionPeerCodes')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_peer_codes\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session_peer_codes ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session_peer_codes\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionPeerCodes')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session_peer_codes\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = 
list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n data\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_remote_macs_for_ls ...\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling VpnApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{session-id}/remote-mac\".sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_cvv2_code()\n return @RESPONSE_HASH['CVV2']\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi.get_l2_vpn_session_remote_macs_for_ls ...'\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{session-id}/remote-mac'.sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: 
ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_or_patch_tier1_l2_vpn_session_from_peer_codes_create_with_peer_code(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session_data, opts = {})\n create_or_patch_tier1_l2_vpn_session_from_peer_codes_create_with_peer_code_with_http_info(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session_data, opts)\n nil\n end", "def voip_response_code\n voip_response(:response_code)\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n data\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = 
['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session_remote_macs_for_ls(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n data, _status_code, _headers = get_tier1_l2_vpn_session_remote_macs_for_ls_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts)\n data\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions ...'\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? 
&& opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def peer_list\n ip_list = []\n raw_peers.each_slice(6) { |e| ip_list << e if e.length == 6 }\n\n ip_list.map! { |e| { :ip => e[0..3].join('.'), :port => (e[4] * 256) + e[5] } }\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n data\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get peer codes for the L2VpnSession Get peer codes for the L2VPN session to program the remote side of the tunnel.
def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session_peer_codes ...' end # verify the required parameter 'l2vpn_session_id' is set if @api_client.config.client_side_validation && l2vpn_session_id.nil? fail ArgumentError, "Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session_peer_codes" end # resource path local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s) # query parameters query_params = {} # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = nil auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:GET, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => 'L2VpnSessionPeerCodes') if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session_peer_codes\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
[ "def get_l2_vpn_session_peer_codes(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts)\n data\n end", "def get_l2_vpn_session_peer_codes(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def get_l2_vpn_session_peer_codes_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_peer_codes ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session_peer_codes\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}/peer-codes\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionPeerCodes')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_peer_codes\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n data\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session_remote_macs_for_ls ...\"\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling VpnApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{session-id}/remote-mac\".sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body 
=> post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_cvv2_code()\n return @RESPONSE_HASH['CVV2']\n end", "def get_l2_vpn_session_remote_macs_for_ls_with_http_info(session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi.get_l2_vpn_session_remote_macs_for_ls ...'\n end\n # verify the required parameter 'session_id' is set\n if @api_client.config.client_side_validation && session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'session_id' when calling ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi.get_l2_vpn_session_remote_macs_for_ls\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{session-id}/remote-mac'.sub('{' + 'session-id' + '}', session_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'logical_switch_id'] = opts[:'logical_switch_id'] if !opts[:'logical_switch_id'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VPNSessionRemoteMacs')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnStatisticsL2VpnRemoteMacsApi#get_l2_vpn_session_remote_macs_for_ls\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_or_patch_tier1_l2_vpn_session_from_peer_codes_create_with_peer_code(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session_data, opts = {})\n create_or_patch_tier1_l2_vpn_session_from_peer_codes_create_with_peer_code_with_http_info(tier_1_id, locale_service_id, service_id, session_id, l2_vpn_session_data, opts)\n nil\n end", "def voip_response_code\n voip_response(:response_code)\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n data\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.get_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = 
@api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_l2_vpn_session(l2vpn_session_id, opts = {})\n data, _status_code, _headers = get_l2_vpn_session_with_http_info(l2vpn_session_id, opts)\n return data\n end", "def get_l2_vpn_session_with_http_info(l2vpn_session_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session ...'\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.get_l2_vpn_session\"\n end\n # resource path\n local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#get_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? 
&& opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_tier1_l2_vpn_session_remote_macs_for_ls(tier_1_id, locale_service_id, service_id, session_id, opts = {})\n data, _status_code, _headers = get_tier1_l2_vpn_session_remote_macs_for_ls_with_http_info(tier_1_id, locale_service_id, service_id, session_id, opts)\n data\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions ...'\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? 
&& opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def peer_list\n ip_list = []\n raw_peers.each_slice(6) { |e| ip_list << e if e.length == 6 }\n\n ip_list.map! { |e| { :ip => e[0..3].join('.'), :port => (e[4] * 256) + e[5] } }\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n data\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get all L2VPN sessions Get paginated list of all L2VPN sessions
def list_l2_vpn_sessions(opts = {}) data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts) data end
[ "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions ...'\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? 
&& opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = '/vpn/l2vpn/sessions'\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiVpnL2VpnSessionsApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_sessions\n response = send(op:\"ls-sessions\").first\n response[\"sessions\"]\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n data\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n return data\n end", "def sessions\n ipc_returning(command('list-sessions'), Session)\n end", "def iscsi_sessions_list()\n api_call = {\n :method => \"ListISCSISessions\",\n :params => {}\n }\n answer = query_sf(api_call)\n return answer[\"sessions\"]\n end", "def index\n sessions = request do\n api.request(\n :expects => 200,\n :headers => headers,\n :method => :get,\n :path => \"/oauth/sessions\"\n ).body\n end\n styled_header(\"OAuth Sessions\")\n styled_array(sessions.map { |session|\n [session[\"description\"], session[\"id\"]]\n })\n end", "def index\n @lift_sessions = LiftSession.all\n end", "def list_my_sessions(opts = {})\n data, _status_code, _headers = list_my_sessions_with_http_info(opts)\n data\n end", "def list(options={})\n param = { :uniq_id => @uniq_id }.merge options\n Storm::Base::SODServer.remote_list '/VPN/list', param do |u|\n user = VPNUser.new\n user.from_hash u\n user\n end\n end", "def listtokens(session)\n\tbegin\n\t\tprint_status(\"Getting Tokens...\")\n\t\tdt = ''\n\t\tsession.core.use(\"incognito\")\n\t\ti = 0\n\t\tdt << \"****************************\\n\"\n\t\tdt << \" List of Available Tokens\\n\"\n\t\tdt << \"****************************\\n\\n\"\n\t\twhile i < 2\n\t\t\ttokens = session.incognito.incognito_list_tokens(i)\n\t\t\tif i == 0\n\t\t\t\ttType = \"User\"\n\t\t\telse\n\t\t\t\ttType = \"Group\"\n\t\t\tend\n\t\t\tdt << \"#{tType} Delegation Tokens Available \\n\"\n\t\t\tdt << 
\"======================================== \\n\"\n\n\t\t\ttokens['delegation'].each_line{ |string|\n\t\t\t\tdt << string + \"\\n\"\n\t\t\t}\n\n\t\t\tdt << \"\\n\"\n\t\t\tdt << \"#{tType} Impersonation Tokens Available \\n\"\n\t\t\tdt << \"======================================== \\n\"\n\n\t\t\ttokens['impersonation'].each_line{ |string|\n\t\t\t\tdt << string + \"\\n\"\n\t\t\t}\n\t \t\ti += 1\n\t \t\tbreak if i == 2\n\t\tend\n\t\tprint_status(\"All tokens have been processed\")\n\trescue ::Exception => e\n\t\tprint_status(\"Error Getting Tokens: #{e.class} #{e}\")\n\tend\n\tdt\n\nend", "def index\n @api_v1_mentorship_sessions = Api::V1::MentorshipSession.all\n end", "def list_tier1_l2_vpn_sessions(tier_1_id, locale_service_id, service_id, opts = {})\n data, _status_code, _headers = list_tier1_l2_vpn_sessions_with_http_info(tier_1_id, locale_service_id, service_id, opts)\n data\n end", "def list_l2_vpn_sessions(tier_0_id, locale_service_id, service_id, opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(tier_0_id, locale_service_id, service_id, opts)\n data\n end", "def index\n @stolen_sessions = StolenSession.all\n end", "def index\n @user_sessions = UserSession.all\n end", "def index\n @user_session_tokens = UserSessionToken.all\n\n render json: @user_session_tokens\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get all L2VPN sessions Get paginated list of all L2VPN sessions
def list_l2_vpn_sessions_with_http_info(opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions ...' end if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000 fail ArgumentError, 'invalid value for "opts[:"page_size"]" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.' end if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0 fail ArgumentError, 'invalid value for "opts[:"page_size"]" when calling ManagementPlaneApiVpnL2VpnSessionsApi.list_l2_vpn_sessions, must be greater than or equal to 0.' end # resource path local_var_path = '/vpn/l2vpn/sessions' # query parameters query_params = {} query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil? query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil? query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil? query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil? query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil? query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil? # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = nil auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:GET, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => 'L2VpnSessionListResult') if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#list_l2_vpn_sessions\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
[ "def list_l2_vpn_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.list_l2_vpn_sessions ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] > 1000\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be smaller than or equal to 1000.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'page_size'].nil? && opts[:'page_size'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"page_size\"]\" when calling VpnApi.list_l2_vpn_sessions, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions\"\n\n # query parameters\n query_params = {}\n query_params[:'cursor'] = opts[:'cursor'] if !opts[:'cursor'].nil?\n query_params[:'included_fields'] = opts[:'included_fields'] if !opts[:'included_fields'].nil?\n query_params[:'l2vpn_service_id'] = opts[:'l2vpn_service_id'] if !opts[:'l2vpn_service_id'].nil?\n query_params[:'page_size'] = opts[:'page_size'] if !opts[:'page_size'].nil?\n query_params[:'sort_ascending'] = opts[:'sort_ascending'] if !opts[:'sort_ascending'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSessionListResult')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#list_l2_vpn_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n data\n end", "def list_l2_vpn_sessions(opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(opts)\n return data\n end", "def list_sessions\n response = send(op:\"ls-sessions\").first\n response[\"sessions\"]\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n data\n end", "def list_ip_sec_vpn_sessions(opts = {})\n data, _status_code, _headers = list_ip_sec_vpn_sessions_with_http_info(opts)\n return data\n end", "def sessions\n ipc_returning(command('list-sessions'), Session)\n end", "def iscsi_sessions_list()\n api_call = {\n :method => \"ListISCSISessions\",\n :params => {}\n }\n answer = query_sf(api_call)\n return answer[\"sessions\"]\n end", "def index\n sessions = request do\n api.request(\n :expects => 200,\n :headers => headers,\n :method => :get,\n :path => \"/oauth/sessions\"\n ).body\n end\n styled_header(\"OAuth Sessions\")\n styled_array(sessions.map { |session|\n [session[\"description\"], session[\"id\"]]\n })\n end", "def index\n @lift_sessions = LiftSession.all\n end", "def list_my_sessions(opts = {})\n data, _status_code, _headers = list_my_sessions_with_http_info(opts)\n data\n end", "def 
list(options={})\n param = { :uniq_id => @uniq_id }.merge options\n Storm::Base::SODServer.remote_list '/VPN/list', param do |u|\n user = VPNUser.new\n user.from_hash u\n user\n end\n end", "def listtokens(session)\n\tbegin\n\t\tprint_status(\"Getting Tokens...\")\n\t\tdt = ''\n\t\tsession.core.use(\"incognito\")\n\t\ti = 0\n\t\tdt << \"****************************\\n\"\n\t\tdt << \" List of Available Tokens\\n\"\n\t\tdt << \"****************************\\n\\n\"\n\t\twhile i < 2\n\t\t\ttokens = session.incognito.incognito_list_tokens(i)\n\t\t\tif i == 0\n\t\t\t\ttType = \"User\"\n\t\t\telse\n\t\t\t\ttType = \"Group\"\n\t\t\tend\n\t\t\tdt << \"#{tType} Delegation Tokens Available \\n\"\n\t\t\tdt << \"======================================== \\n\"\n\n\t\t\ttokens['delegation'].each_line{ |string|\n\t\t\t\tdt << string + \"\\n\"\n\t\t\t}\n\n\t\t\tdt << \"\\n\"\n\t\t\tdt << \"#{tType} Impersonation Tokens Available \\n\"\n\t\t\tdt << \"======================================== \\n\"\n\n\t\t\ttokens['impersonation'].each_line{ |string|\n\t\t\t\tdt << string + \"\\n\"\n\t\t\t}\n\t \t\ti += 1\n\t \t\tbreak if i == 2\n\t\tend\n\t\tprint_status(\"All tokens have been processed\")\n\trescue ::Exception => e\n\t\tprint_status(\"Error Getting Tokens: #{e.class} #{e}\")\n\tend\n\tdt\n\nend", "def index\n @api_v1_mentorship_sessions = Api::V1::MentorshipSession.all\n end", "def list_tier1_l2_vpn_sessions(tier_1_id, locale_service_id, service_id, opts = {})\n data, _status_code, _headers = list_tier1_l2_vpn_sessions_with_http_info(tier_1_id, locale_service_id, service_id, opts)\n data\n end", "def list_l2_vpn_sessions(tier_0_id, locale_service_id, service_id, opts = {})\n data, _status_code, _headers = list_l2_vpn_sessions_with_http_info(tier_0_id, locale_service_id, service_id, opts)\n data\n end", "def index\n @stolen_sessions = StolenSession.all\n end", "def index\n @user_sessions = UserSession.all\n end", "def index\n @user_session_tokens = UserSessionToken.all\n\n render json: @user_session_tokens\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Edit a L2VPN session Edit a specific L2VPN session
def update_l2_vpn_session_with_http_info(l2vpn_session_id, l2_vpn_session, opts = {}) if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: ManagementPlaneApiVpnL2VpnSessionsApi.update_l2_vpn_session ...' end # verify the required parameter 'l2vpn_session_id' is set if @api_client.config.client_side_validation && l2vpn_session_id.nil? fail ArgumentError, "Missing the required parameter 'l2vpn_session_id' when calling ManagementPlaneApiVpnL2VpnSessionsApi.update_l2_vpn_session" end # verify the required parameter 'l2_vpn_session' is set if @api_client.config.client_side_validation && l2_vpn_session.nil? fail ArgumentError, "Missing the required parameter 'l2_vpn_session' when calling ManagementPlaneApiVpnL2VpnSessionsApi.update_l2_vpn_session" end # resource path local_var_path = '/vpn/l2vpn/sessions/{l2vpn-session-id}'.sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s) # query parameters query_params = {} # header parameters header_params = {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = {} # http body (model) post_body = @api_client.object_to_http_body(l2_vpn_session) auth_names = ['BasicAuth'] data, status_code, headers = @api_client.call_api(:PUT, local_var_path, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => 'L2VpnSession') if @api_client.config.debugging @api_client.config.logger.debug "API called: ManagementPlaneApiVpnL2VpnSessionsApi#update_l2_vpn_session\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end
[ "def update_l2_vpn_session_with_http_info(l2vpn_session_id, l2_vpn_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: VpnApi.update_l2_vpn_session ...\"\n end\n # verify the required parameter 'l2vpn_session_id' is set\n if @api_client.config.client_side_validation && l2vpn_session_id.nil?\n fail ArgumentError, \"Missing the required parameter 'l2vpn_session_id' when calling VpnApi.update_l2_vpn_session\"\n end\n # verify the required parameter 'l2_vpn_session' is set\n if @api_client.config.client_side_validation && l2_vpn_session.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_vpn_session' when calling VpnApi.update_l2_vpn_session\"\n end\n # resource path\n local_var_path = \"/vpn/l2vpn/sessions/{l2vpn-session-id}\".sub('{' + 'l2vpn-session-id' + '}', l2vpn_session_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_vpn_session)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2VpnSession')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VpnApi#update_l2_vpn_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def update\n\t\tif(signed_in?)\n \t@triton_session = TritonSession.find(params[:id])\n\t respond_to do |format|\n\t if @triton_session.update_attributes(params[:triton_session])\n\t format.html { redirect_to @triton_session, notice: 'Triton session was successfully updated.' }\n\t format.json { head :no_content }\n\t else\n\t format.html { render action: \"edit\" }\n\t format.json { render json: @triton_session.errors, status: :unprocessable_entity }\n\t end\n\t end\n\t\telse\n\t\t\tredirect_to signin_path\n\t\tend\n end", "def update\n respond_to do |format|\n if @otg_sess.update(otg_sess_params)\n format.html { redirect_to @otg_sess, notice: 'Otg sess was successfully updated.' }\n format.json { render :show, status: :ok, location: @otg_sess }\n else\n format.html { render :edit }\n format.json { render json: @otg_sess.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @lift_session.update(lift_session_params)\n format.html { redirect_to @lift_session, notice: 'Lift session was successfully updated.' }\n format.json { render :show, status: :ok, location: @lift_session }\n else\n format.html { render :edit }\n format.json { render json: @lift_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @ykt_session = YktSession.find(params[:id])\n\n respond_to do |format|\n if @ykt_session.update_attributes(params[:ykt_session])\n format.html { redirect_to @ykt_session, notice: 'Ykt session was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ykt_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\t\t\t#need to set the id first\n\t\t\t$session.update(to_xml('update'))\n\t\tend", "def update\n @term_session = TermSession.find(params[:id])\n \n respond_to do |format|\n if @term_session.update_attributes(params[:term_session])\n flash[:notice] = 'Session was successfully updated.'\n format.html { redirect_to @term_session.term}\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @term_session.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @edit_session = EditSession.find(params[:id])\n\n respond_to do |format|\n if @edit_session.update_attributes(params[:edit_session])\n flash[:notice] = 'EditSession was successfully updated.'\n format.html { redirect_to(@edit_session) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @edit_session.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @ergo_session.update(ergo_session_params)\n format.html { redirect_to @ergo_session, notice: 'Ergo session was successfully updated.' }\n format.json { render :show, status: :ok, location: @ergo_session }\n else\n format.html { render :edit }\n format.json { render json: @ergo_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @stolen_session.update(stolen_session_params)\n format.html { redirect_to @stolen_session, notice: 'Stolen session was successfully updated.' }\n format.json { render :show, status: :ok, location: @stolen_session }\n else\n format.html { render :edit }\n format.json { render json: @stolen_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @robot_instance_session.update(robot_instance_session_params)\n format.html { redirect_to @robot_instance_session, notice: 'Robot instance session was successfully updated.' }\n format.json { render :show, status: :ok, location: @robot_instance_session }\n else\n format.html { render :edit }\n format.json { render json: @robot_instance_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @agent_session.update(agent_session_params)\n format.html { redirect_to @agent_session, notice: 'Agent session was successfully updated.' }\n format.json { render :show, status: :ok, location: @agent_session }\n else\n format.html { render :edit }\n format.json { render json: @agent_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @session_plan_category.session_plans.find(params[:id]).update(session_plan_params)\n redirect_to session_plan_category_session_plans_path(@session_plan_category), notice: \"Session Plan updated successfully.\"\n else\n render 'edit'\n end\n end", "def update\n respond_to do |format|\n if @tunnel.update(tunnel_params)\n format.html { redirect_to @tunnel, notice: 'Tunnel was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @tunnel }\n else\n format.html { render :edit }\n format.json { render json: @tunnel.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @tunnel = Tunnel.find(params[:id])\n\n respond_to do |format|\n if @tunnel.update_attributes(params[:tunnel])\n format.html { redirect_to @tunnel, notice: 'El Tunel ha sido actualizado exitosamente.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @tunnel.errors, status: :unprocessable_entity }\n end\n end\n end", "def session_update(fname, lname, dob)\n session[:app_fname] = fname\n session[:app_lname] = lname\n session[:app_dob] = dob\n end", "def update\n respond_to do |format|\n if @sevone_session.update(sevone_session_params)\n format.html { redirect_to @sevone_session, notice: 'Sevone session was successfully updated.' }\n format.json { render :show, status: :ok, location: @sevone_session }\n else\n format.html { render :edit }\n format.json { render json: @sevone_session.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @tsession = Tsession.find(params[:id])\n\n respond_to do |format|\n if @tsession.update_attributes(params[:tsession])\n format.html { redirect_to @tsession, notice: 'Tsession was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @tsession.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @inventory_session = InventorySession.find(params[:id])\n\n respond_to do |format|\n if @inventory_session.update_attributes(inventory_session_params)\n format.html { redirect_to [:adm, @inventory_session], notice: 'Inventory session was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @inventory_session.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /mbrackets GET /mbrackets.json
def index @mbrackets = Mbracket.all end
[ "def index\n @cnmrbrackets = Cnmrbracket.all\n end", "def list_assets\n get('/video/v1/assets')\n end", "def show\n @player_color_bucket = PlayerColorBucket.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @player_color_bucket }\n end\n end", "def index\n @assets = Asset.all\n\n render json: @assets\n end", "def manifest\n @manifest ||= Sprockets::Manifest.new(sprockets, File.join(public_folder, prefix))\n end", "def new\n @bucket = Bucket.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bucket }\n end\n end", "def index\n @brags = Brag.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @brags }\n end\n end", "def index\n @brainstorms = Brainstorm.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @brainstorms }\n end\n end", "def destroy\n @cnmrbracket.destroy\n respond_to do |format|\n format.html { redirect_to cnmrbrackets_url, notice: 'Cnmrbracket was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def show\n @bootstrap = Bootstrap.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bootstrap }\n end\n end", "def index\n @blocks = Block.all\n\n render json: @blocks\n end", "def index\n @videos = Video.all\n render json: @videos\n end", "def index\n @api_javascripts = Api::Javascript.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @api_javascripts }\n end\n end", "def index\n @media_files = MediaFile.all\n render json: @media_files\n end", "def new\n @bucketlist = Bucketlist.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bucketlist }\n end\n end", "def index\n @materials = Material.all\n\n render json: @materials\n end", "def show\n @mkb = Mkb.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @mkb }\n end\n end", "def index\n @themes = Theme.all\n\n render json: @themes\n end", "def index\n @bmps = Bmp.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @bmps }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /mbrackets POST /mbrackets.json
def create @mbracket = Mbracket.new(mbracket_params) respond_to do |format| if @mbracket.save format.html { redirect_to @mbracket, notice: 'Mbracket was successfully created.' } format.json { render :show, status: :created, location: @mbracket } else format.html { render :new } format.json { render json: @mbracket.errors, status: :unprocessable_entity } end end end
[ "def create\n @bucket = Bucket.new(params[:bucket])\n\n respond_to do |format|\n if @bucket.save\n format.html { redirect_to @bucket, notice: 'Bucket was successfully created.' }\n format.json { render json: @bucket, status: :created, location: @bucket }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @abucket = @account.abuckets.build(params[:abucket])\n respond_to do |format|\n if @abucket.save\n flash[:notice] = 'Bucket was successfully assigned.'\n format.html { redirect_to(account_abuckets_path(@account)) }\n format.xml { render :xml => @abucket, :status => :created, :location => @abucket }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @abucket.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bucket = Bucket.new(bucket_params)\n\n respond_to do |format|\n if @bucket.save\n format.html { redirect_to @bucket, notice: 'Bucket was successfully created.' }\n format.json { render :show, status: :created, location: @bucket }\n else\n format.html { render :new }\n format.json { render json: @bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n puts \"params #{params}\"\n params[:asset][:objects] = JSON.parse(params[:asset][:objects])\n\n puts \"params after #{params}\"\n @asset = Asset.new(asset_params)\n\n if @asset.save\n render json: @asset, status: 200, location: @asset\n else\n render json: @asset.errors, status: :unprocessable_entity\n end\n end", "def create\n @cnmrbracket = Cnmrbracket.new(cnmrbracket_params)\n\n respond_to do |format|\n if @cnmrbracket.save\n format.html { redirect_to @cnmrbracket, notice: 'Cnmrbracket was successfully created.' }\n format.json { render :show, status: :created, location: @cnmrbracket }\n else\n format.html { render :new }\n format.json { render json: @cnmrbracket.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @player_color_bucket = PlayerColorBucket.new(params[:player_color_bucket])\n\n respond_to do |format|\n if @player_color_bucket.save\n format.html { redirect_to @player_color_bucket, notice: 'Player color bucket was successfully created.' }\n format.json { render json: @player_color_bucket, status: :created, location: @player_color_bucket }\n else\n format.html { render action: \"new\" }\n format.json { render json: @player_color_bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @cnmrbracket.destroy\n respond_to do |format|\n format.html { redirect_to cnmrbrackets_url, notice: 'Cnmrbracket was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def create\n @account = @user.accounts.find(params[:account_id])\n @bucket = @account.buckets.new(bucket_params)\n respond_to do |format|\n if @bucket.save\n flash[:success] = \"Successfully added a bucket!\"\n format.html { redirect_to account_path(@account) }\n else\n flash.now[:error] = @bucket.errors.messages.values.flatten.uniq\n format.js { render action: 'new' }\n end\n end\n end", "def create\n session[:bracket_params].deep_merge!(params[:bracket]) if params[:bracket]\n @bracket = Bracket.new(session[:bracket_params])\n @bracket.user_id = current_user.id\n\n respond_to do |format|\n format.js\n\n format.html {\n if @bracket.save\n redirect_to @bracket, notice: 'Bracket was successfully created.'\n session[:bracket_params] = nil\n else\n render action: \"new\"\n end\n }\n format.json {\n if @bracket.save\n render json: @bracket, status: :created, location: @bracket\n session[:bracket_params] = nil\n else\n render json: @bracket.errors, status: :unprocessable_entity\n end\n }\n\n end\n end", "def create\n @bucketlist = Bucketlist.new(params[:bucketlist])\n\n respond_to do |format|\n if @bucketlist.save\n format.html { redirect_to @bucketlist, notice: 'Bucketlist was successfully created.' }\n format.json { render json: @bucketlist, status: :created, location: @bucketlist }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bucketlist.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @bucket = Bucket.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bucket }\n end\n end", "def create\n @bracelet_action = BraceletAction.new(bracelet_action_params)\n\n respond_to do |format|\n if @bracelet_action.save\n format.html { redirect_to @bracelet_action, notice: 'Bracelet action was successfully created.' }\n format.json { render :show, status: :created, location: @bracelet_action }\n else\n format.html { render :new }\n format.json { render json: @bracelet_action.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @kb = Kb.new(params[:kb])\n\n respond_to do |format|\n if @kb.save\n format.html { redirect_to @kb, :notice => 'Kb was successfully created.' }\n format.json { render :json => @kb, :status => :created, :location => @kb }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @kb.errors, :status => :unprocessable_entity }\n end\n end\n end", "def upload_to_s3(json)\n p json\nend", "def create\n @kebab = Kebab.new(kebab_params)\n\n respond_to do |format|\n if @kebab.save\n format.html { redirect_to @kebab, notice: 'Kebab was successfully created.' }\n format.json { render :show, status: :created, location: @kebab }\n else\n format.html { render :new }\n format.json { render json: @kebab.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bootstrap = Bootstrap.new(params[:bootstrap])\n\n respond_to do |format|\n if @bootstrap.save\n format.html { redirect_to @bootstrap, notice: 'Bootstrap was successfully created.' }\n format.json { render json: @bootstrap, status: :created, location: @bootstrap }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bootstrap.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @brave_burst = BraveBurst.new(brave_burst_params)\n\n respond_to do |format|\n if @brave_burst.save\n format.html { redirect_to @brave_burst, notice: 'Brave burst was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @brave_burst }\n else\n format.html { render action: 'new' }\n format.json { render json: @brave_burst.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @media_asset = MediaAsset.new media_asset_params\n\n if @media_asset.save\n respond_to do |format|\n format.html { redirect_to(url_for(action: 'index')) }\n format.json { render json: @media_asset }\n end\n end\n end", "def new\n @mkb = Mkb.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @mkb }\n format.js\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /mbrackets/1 PATCH/PUT /mbrackets/1.json
def update respond_to do |format| if @mbracket.update(mbracket_params) format.html { redirect_to @mbracket, notice: 'Mbracket was successfully updated.' } format.json { render :show, status: :ok, location: @mbracket } else format.html { render :edit } format.json { render json: @mbracket.errors, status: :unprocessable_entity } end end end
[ "def update\n respond_to do |format|\n if @abucket.update_attributes(params[:abucket])\n flash[:notice] = 'Bucket was successfully updated.'\n format.html { redirect_to(account_abuckets_path(@account)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @abucket.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bucket = Bucket.find(params[:id])\n\n respond_to do |format|\n if @bucket.update_attributes(params[:bucket])\n format.html { redirect_to @bucket, notice: 'Bucket was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @bucket_bloc.update(bucket_bloc_params)\n format.html { redirect_to @bucket_bloc, notice: 'Bucket bloc was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @bucket_bloc.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bucket = Bucket.find(params[:id])\n\n respond_to do |format|\n if @bucket.update_attributes(params[:bucket])\n format.html { redirect_to @bucket, notice: 'Bucket was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @boilerplate.update(boilerplate_params)\n format.json { render :show, status: :ok, location: @api_v1_boilerplate }\n else\n format.json { render json: @boilerplate.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @bucket.update(bucket_params)\n format.html { redirect_to @bucket, notice: 'Bucket was successfully updated.' }\n format.json { render :show, status: :ok, location: @bucket }\n else\n format.html { render :edit }\n format.json { render json: @bucket.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @bootstrap.update(bootstrap_params)\n format.html { redirect_to @bootstrap, notice: 'Bootstrap was successfully updated.' }\n format.json { render :show, status: :ok, location: @bootstrap }\n else\n format.html { render :edit }\n format.json { render json: @bootstrap.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bootstrap = Bootstrap.find(params[:id])\n\n respond_to do |format|\n if @bootstrap.update_attributes(params[:bootstrap])\n format.html { redirect_to @bootstrap, notice: 'Bootstrap was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bootstrap.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bundle = Bundle.find(params[:id])\n @bundle.update_attributes(:title => params[:bundlename]) if !params[:bundlename].blank?\n @bundle.update_attributes(:description => params[:bundlescription]) if !params[:bundlescription].blank?\n r_ids_array = []\n params[:resources].split(\",\").slice!(1..-1).each do |resource|\n r_ids_array << resource.slice!(9..-1)\n end\n r_ids_array.each do |resource_id|\n resource = Resource.find(resource_id)\n if !@bundle.resources.include?(resource)\n @bundle.resources << resource\n end\n end\n\n\n respond_to do |format|\n # if @bundle.resources.update_attributes(params[:])\n format.html { redirect_to bundle_path(@bundle), notice: 'Bundle was successfully updated.' }\n format.json { head :no_content }\n # else\n # format.html { render action: \"edit\" }\n # format.json { render json: @bundle.errors, status: :unprocessable_entity }\n # end\n end\n end", "def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end", "def patch(path, params: {}, headers: {})\n request_json :patch, path, params, headers\n end", "def update\n respond_to do |format|\n if @bundl.update(bundl_params)\n format.html { redirect_back fallback_location: root_path }\n format.json { head :no_content }\n else\n format.html { render :edit }\n format.json { render json: @bundl.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @pull_manifest.update(pull_manifest_params)\n format.html { redirect_to @pull_manifest, notice: 'Pull manifest was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @pull_manifest.errors, status: :unprocessable_entity }\n end\n end\n end", "def put\n request_method('PUT')\n end", "def update\n @kb = Kb.find(params[:id])\n\n respond_to do |format|\n if @kb.update_attributes(params[:kb])\n format.html { redirect_to @kb, :notice => 'Kb was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @kb.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @js_bin.update(js_bin_params)\n format.html { redirect_to edit_js_bin_path(@js_bin, anchor: :preview), notice: 'Js bin was successfully updated.' }\n format.json { render :show, status: :ok, location: @js_bin }\n else\n format.html { render :edit }\n format.json { render json: @js_bin.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @climb_app.update(climb_app_params)\n format.html { redirect_to @climb_app, notice: 'ClimbApp was successfully updated.' }\n format.json { render :show, status: :ok, location: @climb_app }\n else\n format.html { render :edit }\n format.json { render json: @climb_app.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @green_kub_box.update(green_kub_box_params)\n format.html { redirect_to @green_kub_box, notice: 'Green kub box was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @green_kub_box }\n else\n format.html { render :edit }\n format.json { render json: @green_kub_box.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @fabrics_rack.update(fabrics_rack_params)\n format.html do\n redirect_to @fabrics_rack,\n notice: 'On rack was successfully updated.'\n end\n format.json do\n render :show, status: :ok, location: @fabrics_rack\n end\n else\n format.html { render :edit }\n format.json do\n render json: @fabrics_rack.errors, status: :unprocessable_entity\n end\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Read a save file and attempt to initialize a game from it
def load_game print_saves begin read_save rescue IOError, SystemCallError puts 'File not found' load_game end end
[ "def load_game\n data = File.read(\"save.txt\")\n load_game = Game.from_json data\n request_save = load_game.play\n\n if request_save\n save_file request_save\n end\n end", "def load_saved_game\n game_data = get_saved_game_file\n if game_data\n @file.secret_word = game_data[:secret_word] || @file.pick_new_word\n @used_letters = game_data[:used_letters] || []\n @hearts_left = game_data[:hearts] || 6\n else\n puts \"There is no saved file, starting new game\"\n end\n end", "def load_game\n file = File.open(\"../saves/log.yml\", \"r\")\n s_log = file.read.to_i\n file.close\n if s_log == 0\n puts \"No save data.\"\n else\n puts \"Select a save file (1 to #{s_log}).\"\n s_choice = gets.chomp.to_i\n if s_choice <= s_log\n puts \"Loading save game # #{s_choice}...\"\n game_state = YAML.load(File.read(\"../saves/save_#{s_choice}.yml\"))\n game_state.play\n else\n puts \"Selected save file does not exist.\"\n end\n end\n end", "def load_game_from_save\n #TODO implement game state saving and loading\n end", "def load_game\n puts \"What is the name of your game?\"\n name = gets.chomp\n if File.exist?(\"saved/#{name}\")\n loaded_game = Marshal.load(File.open(\"saved/#{name}\", 'r'))\n loaded_game.play\n else\n puts \"No game with that name found\"\n return\n end\n \n end", "def load_game\n\t\tputs \"please enter a name for your saved game.\"\n\t\tfilename = gets.chomp + '.yaml'\n\t\tFile.open(filename, 'r') do |f|\n\t\tx = YAML::load(f)\n\t\t@count = x[:count]\n\t\t@secret_word = x[:secret_word]\n\t\t@selection_array = x[:selection_array]\n\t\t@blanks = x[:blanks]\n\t\tend\n\tend", "def load_game\n if File.exists?(@save_file)\n File.open(@save_file, 'r') do |f|\n @hero = Marshal.load(f)\n end\n @cmd_window.setpos(0,0)\n @cmd_window << \"Game loaded!\".rjust(CMD_WIDTH)\n else\n @cmd_window.setpos(0,0)\n @cmd_window << \"No save file found!\".rjust(CMD_WIDTH)\n end\n end", "def load_game\n save_state = YAML.load_file(\"save.yaml\")\n save_state.loaded_game\n end", "def load_into_cog(filename: 'savefile')\n game_state = nil\n File.open(\"saves/#{filename}.bin\", 'rb') do |f|\n game_state = Marshal.load(f)\n end\n save_cog(game_state: game_state, filename: filename)\n end", "def load_save\n file_name = ask_save\n save_obj = {}\n File.open(\"saves/#{file_name}.yaml\", 'r') do |file|\n save_obj = YAML.safe_load(file, permitted_classes: [Save])\n end\n save_obj\n end", "def load\n parsed_state = $gtk.deserialize_state('game_state.txt')\n if !parsed_state\n set_story_line \"no game to load. 
press save first.\"\n else\n $gtk.args.state = parsed_state\n end\n end", "def init_gamedata\r\n file = File.open(@filename, \"r\")\r\n @time_stamp = file.mtime\r\n @characters = Marshal.load(file)\r\n @frame_count = Marshal.load(file)\r\n @game_system = Marshal.load(file)\r\n @game_switches = Marshal.load(file)\r\n @game_variables = Marshal.load(file)\r\n @total_sec = @frame_count / Graphics.frame_rate\r\n file.close\r\n end", "def load_game\n \t\t#Get information for game to load\n \t\tget_username\n \t\tfile_name = get_saved_games\n \t\treturn if file_name==\"menu\"\n \t\tload = Marshal.load File.read(file_name)\n\n \t\t#Load the instance variables\n \t\t@start_time = Time.now\n \t\t@save_time = load[:save_time]\n \t\t@top_card = load[:top_card]\n \t\t@number_of_hint = load[:number_of_hint]\n \t\t@number_of_correct = load[:number_of_correct]\n \t\t@number_of_wrong= load[:number_of_correct]\n \t\t@deck = load[:deck]\n \t\t@hand = load[:hand]\n \t\t@username = load[:username]\n \t\t@total_hint=load[:total_hint]\n \t\t@is_end = false\n\n \t\t#Output a message for the progress\n \t\tmsg1 = \"You have completed #{@number_of_correct} sets (roughly #{(@number_of_correct*100).fdiv(27).truncate(2)}%)\"\n \t\tmsg2 = \"You have #{@total_hint-@number_of_hint} hints left. Lets Continue!\"\n \t\tputs\n \t\t(msg1.length+10).times {print \"*\"}\n \t\tputs \"\\n**** \"+msg1+\" ****\"\n \t\tputs \"**** \"+msg2.center(msg1.length)+\" ****\"\n \t\t(msg1.length+10).times {print \"*\"}\n \t\tputs\n\n \t\tcontinue_game\n \tend", "def load_state \n begin\n directory = File.join(File.dirname(File.expand_path(__FILE__)),'..','saves')\n entries = Dir.entries(directory)\n entries.reject!{|en| (en=~/\\A\\d{4}_\\d{2}_\\d{2}_\\d{2}_\\d{2}\\.yaml\\z/).nil?}\n load_from = entries[entries.size - 1]\n # yml = W.load_state(YAML.parse_file load_from)\n yml = YAML.load_file(File.join(directory,load_from))\n puts \"Loading #{directory}/#{load_from}...\"\n raise \"Yaml is nil\" unless yml\n @master_players, @rooms, @items, @mobiles, @default_room= yml\n @rooms.each_pair { |n,r| r.on_load }\n @master_players.each_pair { |n,p| p.on_load }\n puts \"Loaded Game.\"\n rescue Object => e\n puts 'There was an error wtih loading'\n puts \"************************************************\"\n puts e.backtrace\n puts \"************************************************\"\n puts 'running the scaffolding script instead.'\n initialize_state\n Migrator.script('scaffold.rb')\n end\n end", "def load_game\n\t\tall_saved_games = yaml_load(SAVED_FILENAME)\n\t\tgame_name = get_game_name(all_saved_games, \"load\")\n\t\treturn if game_name.nil?\n\n\t\tsaved_game = YAML::load(all_saved_games[game_name])\n\t\tmessage_then_enter \"'#{ game_name }' successfully loaded.\"\n\t\tsaved_game.play_game\n\tend", "def initialize(filename, ondisk=true, players=[])\n if ondisk\n # Game object exists on disk. Read it from file.\n self.read_data(filename)\n else\n # Create new Game object with starting values.\n @filename = filename\n @created_at = DateTime.now\n @last_updated = DateTime.now\n @mode = players.length\n @players = players\n self.initialize_scoreboard\n self.write_data\n end\n @plist = @players.map {|x| x.downcase}\n end", "def load_file(filename)\n file = File.open(filename, \"r\")\n grid = JSON.parse(file.read)\n file.close\n return Game.new(grid.length, grid[0].length, grid)\nend", "def open_saved\n\t\tprint Dir[\"./save/*\"]\n\t\tputs \"\"\n\t\tputs \"Type in the name of the file you would like to open. 
Only the part in front of '.json'.\"\n\t\topening = interact\n\t\tfile = File.read(\"./save/#{opening}.json\")\n\t\tparsed = JSON.parse(file)\n\t\t@board = parsed.values_at(\"@board\")[0]\n\t\t@missed_letters\t= parsed.values_at(\"@missed_letters\")[0]\n\t\t@turns = parsed.values_at(\"@turns\")[0]\n\t\t@secret_word = parsed.values_at(\"@secret_word\")[0]\n\t\t@winner\t= parsed.values_at(\"@winner\")[0]\n\tend", "def initialize(*saved_info)\n\t\tif saved_info.empty?\n\t\t\tnew_game\n\t\telse\n\t\t\tload_game(saved_info)\n\t\tend\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create slicing planes which are perpendicular to the reference segment.
def slicing_planes(segmenter) plane_normal = segmenter.segment.line[1] segmenter.points.map { |point| [point.clone, plane_normal.clone] } end
[ "def get_plane\n Plane.construct_with_three_points(point_a, point_b, point_c)\n end", "def create_perpendicular\n p1dot = self * P1\n p2dot = self * P2\n p1dot = -p1dot if p1dot < 0\n p2dot = -p2dot if p2dot < 0\n p1dot < p2dot ? (self % P1) : (self % P2)\n end", "def plane\n return nil if @vertices.length < 3\n Plane.three_points(*@vertices[0..2])\n end", "def project_to_plane(plane)\n end", "def screen_to_plane\n corners = plane_corners\n origin = corners[0]\n xaxis = corners[1] - corners[0]\n yaxis = corners[2] - corners[0]\n zaxis = (xaxis * yaxis).normalize\n\n # REVIEW: Is this logical pixels or physical pixels? I want logical!\n xaxis.length /= Sketchup.active_model.active_view.vpwidth\n yaxis.length /= Sketchup.active_model.active_view.vpheight\n\n new_transformation(origin, xaxis, yaxis, zaxis)\n end", "def initialize(p1, p2, p3)\n v1 = p1.vector_to(p2)\n v2 = p1.vector_to(p3)\n normal = v1.cross_product(v2)\n\n @point = p1\n @normal = normal\n\n # keep around an arbitrary vector on the plane for convenience later\n @vec_on_plane = v1\n end", "def any_point_on_plane\n \n unless (@c == 0)\n return Vector[0, 0, @d/@c]\n end\n \n unless (@b == 0)\n return Vector[0, @d/@b, 0]\n end\n \n unless (@a == 0)\n return Vector[@d/@a, 0, 0]\n end\n \n # Actually if we get to this point, the plane undetermined and all of R3 satisfies the definition\n return Vector[0,0,0]\n end", "def unit_perpendicular_vector(options={})\n return [self[1], -self[0]].unit_vector if options[:counter_clockwise]\n [-self[1], self[0]].unit_vector\n end", "def create_plane(xsegs, ysegs)\r\n # STEP 1: create all the relevant data\r\n \r\n @positions = [] # arrayList to hold positions\r\n @tex_coords = [] # arrayList to hold texture coordinates\r\n\r\n usegsize = 1 / xsegs.to_f # horizontal stepsize\r\n vsegsize = 1 / ysegs.to_f # vertical stepsize\r\n\r\n xsegs.times do |x|\r\n ysegs.times do |y|\r\n u = x / xsegs.to_f\r\n v = y / ysegs.to_f\r\n\r\n # generate positions for the vertices of each cell (-0.5 to center the shape around the origin)\r\n positions << Vec3D.new(u - 0.5, v - 0.5, 0)\r\n positions << Vec3D.new(u + usegsize - 0.5, v - 0.5, 0)\r\n positions << Vec3D.new(u + usegsize - 0.5, v + vsegsize - 0.5, 0)\r\n positions << Vec3D.new(u - 0.5, v + vsegsize - 0.5, 0)\r\n\r\n # generate texture coordinates for the vertices of each cell\r\n tex_coords << Vec2D.new(u, v)\r\n tex_coords << Vec2D.new(u + usegsize, v)\r\n tex_coords << Vec2D.new(u + usegsize, v + vsegsize)\r\n tex_coords << Vec2D.new(u, v + vsegsize)\r\n end\r\n end\r\n\r\n # STEP 2: put all the relevant data into the PShape\r\n\r\n texture_mode(NORMAL) # set texture_mode to normalized (range 0 to 1)\r\n tex = load_image(data_path 'Texture01.jpg')\r\n \r\n mesh = create_shape # create the initial PShape\r\n renderer = ShapeRender.new(mesh) # initialize the shape renderer\r\n mesh.begin_shape(QUADS) # define the PShape type: QUADS\r\n mesh.no_stroke\r\n mesh.texture(tex) # set a texture to make a textured PShape\r\n # put all the vertices, uv texture coordinates and normals into the PShape\r\n positions.each_with_index { |p, i|\r\n t = tex_coords[i]\r\n p.to_vertex_uv(renderer, t.x, t.y)\r\n }\r\n mesh.end_shape\r\n\r\n return mesh # our work is done here, return DA MESH! 
-)\r\nend", "def find_normal_vector()\n Vector.construct_with_plane(self)\n end", "def offset_vector(p1, p2, plane, length=nil)\r\n fp = p2.project_to_plane(plane)\r\n v = p1.vector_to(fp)\r\n v.length = length unless length.nil?\r\n v\r\n end", "def planeproj(vec, norm, assist = [0,0,0])\n # ensure paramters are Geom::Vector3d \n vec = Geom::Vector3d.new(vec)\n norm = Geom::Vector3d.new(norm).normalize\n assist = Geom::Vector3d.new(assist)\n \n # deal with zero vector\n return vec if vec.length == 0\n \n # if vector parallel to normal of face\n vec = assist if vec.parallel?(norm)\n \n # get the projection\n return vec - norm.transform(Geom::Transformation.scaling(vec % norm))\n end", "def perpendicular\n Vector2d.new(-@y, @x)\n end", "def perpendicular\n Vector2d.new(-y, x)\n end", "def perpendicular_bisector(point=nil)\n l = self.perpendicular_line(self.midpoint)\n \n if !point.nil?\n point = Point.new(point[0], point[1]) if point.is_a?(Array) \n raise TypeError, \"This method is not defined for #{ point.class }\" unless point.is_a?(Point)\n return Segment.new(point, self.midpoint) if l.contains?(point)\n end\n\n return l\n end", "def polygon_vertices(sides, size)\n return [CP::Vec2.new(-size, size), CP::Vec2.new(size, size), CP::Vec2.new(size, -size), CP::Vec2.new(-size, -size)]\n end", "def init_surface\n\tfor u in 0..3\n\t\tfor v in 0..3\n\t\t\t$ctlpoints[u][v][0] = 2.0*(u - 1.5)\n\t\t\t$ctlpoints[u][v][1] = 2.0*(v - 1.5)\n\t\t\t\n\t\t\tif ( (u == 1 || u == 2) && (v == 1 || v == 2))\n\t\t\t\t$ctlpoints[u][v][2] = 3\n\t\t\telse\n\t\t\t\t$ctlpoints[u][v][2] = -3\n\t\t\tend\n\t\tend\n\tend\nend", "def construct_separation_vector\n separationVector = getSeparationVector\n if(separationVector.zero?)\n separationVector\n else\n normalizedSeparationVector = normalize_vector(separationVector)\n totalX = $SEPARATIONWEIGHT * normalizedSeparationVector.x\n totalY = $SEPARATIONWEIGHT * normalizedSeparationVector.y\n Posn.new(totalX, totalY)\n end\n end", "def create_segments\n Segment.split_to_segments(build_waypoints)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a date range and breaks it into an array of ranges by month. The first and last items may be partial months (e.g. starting in the middle and ending at the end), and the intermediate items are always full months (1st to last of the month).
def months(date_range) results = [] results << (date_range.begin..date_range.begin.end_of_month) current = date_range.begin.end_of_month + 1.day while current < date_range.end.beginning_of_month month_start = current.beginning_of_month month_end = current.end_of_month results << (month_start..month_end) current = month_end + 1.day end results << (date_range.end.beginning_of_month..date_range.end) if current < date_range.end results end
[ "def range_to_month_array\n Function.new do |range|\n range.select { |date| date.day == 1 }\n end\n end", "def months\n (Periods::Month.for(start_date)..Periods::Month.for(end_date)).to_a\n end", "def months\n temporale = CalendariumRomanum::Temporale.new(@config.year)\n first_month = temporale.start_date.month\n last_month = temporale.end_date.month\n\n (first_month .. 12)\n .collect {|m| Date.new(@config.year, m, 1) } \\\n + (1 .. last_month)\n .collect {|m| Date.new(@config.year + 1, m, 1) }\n end", "def extract_month_ranges( ranges )\n\t\treturn self.extract_ranges( :month, ranges, 0, MONTHNAMES.size - 1 ) do |val|\n\t\t\tself.map_integer_value( :month, val, [ABBR_MONTHNAMES, MONTHNAMES] )\n\t\tend\n\tend", "def merch_year_and_month_from_dates(start_date, end_date)\n merch_months = []\n\n middle_of_start_month = Date.new(start_date.year, start_date.month, 14)\n middle_of_end_month = Date.new(end_date.year, end_date.month, 14)\n date = middle_of_start_month\n \n while date <= middle_of_end_month do\n merch_months.push(date_conversion(date))\n date = date >> 1\n end\n merch_months\n end", "def month_range(month, year)\n next_month = month == 12 ? 1 : month + 1\n next_year = month == 12 ? year + 1 : year\n Date.new(year, month, 1)..Date.new(next_year, next_month, 1)\n end", "def get_months( entries )\n return [] if entries.empty?\n first_time = entries.collect { |e| e.created }.min\n last_time = entries.collect { |e| e.created }.max\n start = Time.mktime( first_time.year, first_time.month, 1 )\n stop = Time.mktime( last_time.year, last_time.month, last_time.day )\n months = []\n until start > stop\n next_year, next_month = start.year, start.month + 1\n if next_month > 12\n next_year += next_month / 12\n next_month %= 12\n end\n month_end = Time.mktime( next_year, next_month, 1 ) - 1\n months << [ start, month_end, start.strftime( \"/%Y/%m/\" ) ] unless find( :after => start, :before => month_end).empty?\n start = month_end + 1\n end\n months\n end", "def all_dates_in_month(month, year)\n first_day = Date.civil(year, month, 1)\n last_day = Date.civil(year, month, -1)\n\n (first_day..last_day).to_a\n end", "def all_month\n beginning_of_month..end_of_month\n end", "def all_month\n beginning_of_month..end_of_month\n end", "def generate_months_view_dates(date_month_start, date_month_end)\n dates_in_month_view = []\n ((date_month_start.wday - delta_start_of_weekday_from_sunday) % 7).times do |day|\n dates_in_month_view = dates_in_month_view + [(date_month_start - (((date_month_start.wday - delta_start_of_weekday_from_sunday) % 7) - day))]\n end\n\n date_month_end.day.times do |day|\n dates_in_month_view = dates_in_month_view + [date_month_start + day]\n end\n\n (6 - date_month_end.wday + delta_start_of_weekday_from_sunday).times do |day|\n dates_in_month_view = dates_in_month_view + [date_month_end + day + 1]\n end\n\n dates_in_month_view\n end", "def generate_monthly_report(start_date, end_date)\n dstart = start_date.beginning_of_month\n dend = end_date.beginning_of_month\n\n array_of_months = (dstart..dend).select {|d| d.day == 1}\n\n array_of_months.map! 
do |day|\n start_of_month = day\n start_of_next_month = day >> 1 # >> is a special method in Date that changes month\n {:start => start_of_month, :end => start_of_next_month}\n end\n\n return generate_report(array_of_months, {:date_format => \"%Y-%m\", :date_name => \"Month\"})\n end", "def extractMonths(dates)\n last = dates[0]\n first = dates[-1]\n\n first_month = first[4...6].to_i\n first_year = ad_to_tw(first[0, 4].to_i)\n\n last_month = last[4...6].to_i\n last_year = ad_to_tw(last[0, 4].to_i)\n return calculateWholeMonths(first_year, first_month, last_year, last_month)\nend", "def year_month_ranges year\n\t\t\tfrom = Date.new(year, Time.now.month, Time.now.beginning_of_month.day) - 1.year\n\t\t\tto = Date.new(year, Time.now.month, Time.now.end_of_month.day)\n\t\t\t(from..to).group_by(&:month).map { |n,v| v.first.beginning_of_month.to_s..(v.first.end_of_month + 1.day).midnight.to_s }\n\t\tend", "def month_subtimeframes\n (from.year..to.yesterday.year).map do |year|\n (1..12).map do |month|\n Timeframe.new(:year => year, :month => month) & self\n end\n end.flatten.compact\n end", "def get_periods(first_start, first_end) \n\t\tperiods = []\n\n\t\tnot_ends = \n\t\t\t(@until.is_a? Fixnum) ?\n\t\t\tlambda {|i,d| i < @until} :\n\t\t\tlambda {|i,d| d < @until}\n\n\t\tduration = first_end - first_start\n\t\tcurrent_start = first_start\n\n\t\ti = 0\n\t\tcount = 0\n\t\twhile not_ends.call(count, current_start)\n\t\t\tperiods << [current_start, current_start + duration]\n\n\t\t\ti += 1\n\t\t\t# get first month that has given day number\n\t\t\ti+=1 while (first_start << -(i * @period_duration)).day != first_start.day\n\n\t\t\tcurrent_start = (first_start << -(i * @period_duration))\n\t\t\tcount += 1 \n\t\tend\n\n\t\tperiods\n\tend", "def months\n return @month_list if not @month_list.nil?\n @month_list = []\n\n month_collection = {}\n\n #group days by month\n for d in days(@rate_search_container.nights)\n month_collection[Date.new(d.date.year,d.date.month,1)] = [] if month_collection[Date.new(d.date.year,d.date.month,1)].nil?\n month_collection[Date.new(d.date.year,d.date.month,1)] << d\n end\n\n month_collection = month_collection.sort\n # now, month_collection is an array!! http://www.ruby-doc.org/core/classes/Hash.html#M000176\n\n for m in month_collection\n @month_list << RateMonthDrop.new(m[1])\n end\n \n return @month_list\n\n end", "def each_months(n=1,offset=0,dur=1)\n build_subrange do |s|\n s.step = n\n s.adjust_range { |r| day_range(r) }\n s.offset { |dt| dt.to_date >> offset }\n s.increment { |dt,i| dt.to_date >> i }\n s.span { |dt| dt.to_date >> dur }\n end\n end", "def months_to_date(end_date)\n date_loop = self\n last_date = end_date\n dates = []\n begin\n dates << date_loop\n if date_loop.next_month.beginning_of_month > last_date.beginning_of_month\n date_loop = last_date.tomorrow\n elsif date_loop.next_month.beginning_of_month == last_date.beginning_of_month\n date_loop = last_date\n else\n date_loop = date_loop.next_month\n end\n end while last_date >= date_loop\n dates\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET|POST /sites/filter GET|POST /projects/:project_id/sites/filter
def filter do_authorize_class get_project_if_exists do_authorize_instance(:show, @project) unless @project.nil? filter_response, opts = Settings.api_response.response_advanced( api_filter_params, list_permissions, Site, Site.filter_settings ) respond_filter(filter_response, opts) end
[ "def filter\n @project = Project.find(params[:project_id])\n end", "def filter\n do_authorize_class\n get_project_if_exists\n get_harvest_if_exists\n\n filter_response, opts = Settings.api_response.response_advanced(\n api_filter_params,\n list_permissions,\n HarvestItem,\n HarvestItem.filter_settings\n )\n respond_filter(filter_response, opts)\n end", "def apply_project_filters(project)\n handle_action_exceptions(__method__) do\n cmd_line = [\"applyprojectfilters '#{project}'\"]\n cmd_line << 'json' if @json\n\n handle_return(@toolshck_ether.cmd(cmd_line.join(' ')))\n end\n end", "def select\n @filter = current_user.company.task_filters.find(params[:id])\n\n if @filter.user == current_user or @filter.shared?\n target_filter = current_task_filter\n target_filter.qualifiers.clear\n target_filter.keywords.clear\n target_filter.unread_only = @filter.unread_only\n\n @filter.qualifiers.each { |q| target_filter.qualifiers << q.clone }\n @filter.keywords.each do |kw|\n # N.B Shouldn't have to pass in all these values, but it\n # doesn't work when we don't, so...\n target_filter.keywords.build(:task_filter => target_filter,\n :company => current_user.company,\n :word => kw.word)\n end\n target_filter.save!\n else\n flash[:notice] = _\"You don't have access to that task filter\"\n end\n if request.xhr?\n render :partial => \"search_filter_keys\"\n else\n redirect_to '/tasks/list'\n end\n end", "def project_id_filter\n index.filter(term: {project_id: project.id})\n end", "def filter_ministry_project\n # if we aren't an admin or ministry project admin we shouldn't be here\n record_not_found and return if !admin? and !ministry_project_admin?\n\n # clear filters if requested\n if params[:clear] == 'true'\n session[:registration_type] = nil\n session[:status] = nil\n session[:district_id] = nil\n session[:region_id] = nil\n session[:building_id] = nil\n session[:ministry_project_id] = nil\n session[:group_leader] = nil\n session[:grade] = nil\n session[:division_id] = nil\n\n flash[:notice] = 'All filters have been cleared.'\n else\n # update session values from passed in params\n session[:registration_type] = params[:registration_type] unless params[:registration_type].blank?\n session[:status] = params[:status] unless params[:status].blank?\n session[:district_id] = params[:district_id] unless params[:district_id].blank?\n session[:region_id] = params[:region_id] unless params[:region_id].blank?\n session[:building_id] = params[:building_id] unless params[:building_id].blank?\n session[:ministry_project_id] = params[:ministry_project_id] unless params[:ministry_project_id].blank?\n session[:group_leader] = params[:group_leader] unless params[:group_leader].blank?\n session[:grade] = params[:grade] unless params[:grade].blank?\n session[:division_id] = params[:division_id] unless params[:division_id].blank?\n\n # remove filters if none is passed\n session[:registration_type] = nil if params[:registration_type] == 'none'\n session[:status] = nil if params[:status] == 'none'\n session[:district_id] = nil if params[:district_id] == 'none'\n session[:region_id] = nil if params[:region_id] == 'none'\n session[:building_id] = nil if params[:building_id] == 'none'\n session[:ministry_project_id] = nil if params[:ministry_project_id] == 'none'\n session[:group_leader] = nil if params[:group_leader] == 'none'\n session[:grade] = nil if params[:grade] == 'none'\n session[:division_id] = nil if params[:division_id] == 'none'\n\n flash[:notice] = 'Filters updated successfully.'\n end\n\n respond_to do |format|\n 
format.html {\n redirect_to(ministry_project_participant_registrations_url)\n }\n end\n end", "def filter\n index\n end", "def project_params\n FilterProjectParams.call params: params\n end", "def report_task_filter\n locals = {\n :redirect_action => \"list\", \n :redirect_params => params\n }\n return render(:partial => \"/task_filters/search_filter\", :locals => locals)\n end", "def filter\n # We only care about retrieving the ids of the assets.\n query_fields = [2]\n\n # Pull out the orgs and ensure that they are viewable by the current_user\n orgs = query_params[:orgs]\n viewable_orgs = current_user.viewable_organizations.pluck(:id)\n orgs = orgs.blank? ? viewable_orgs : (viewable_orgs & orgs)\n\n # Pull out the filters and convert them to hash Objects\n query_filters = query_params[:filters]\n query_filters.map!{ |f| f.to_h.symbolize_keys }\n\n # Create a new query and get a list of asset ids that match the filter.\n query = SavedQuery.new\n query.organization_list = orgs\n query.parse_query_fields query_fields, query_filters\n\n # Convert those asset ids into Asset Objects\n if summary_only \n assets = query.data.map{ |i| base_asset_class.find(i[\"id\"]).summary_api_json }\n else\n assets = query.data.map{ |i| convert(base_asset_class.find(i[\"id\"])).api_json }\n end\n render json: {data: {count: query.data.size, assets: assets}}\n end", "def initialize_available_filters\n add_available_filter \"spent_on\", :type => :date_past\n\n add_available_filter(\"project_id\",\n :type => :list, :values => lambda { project_values }\n ) if project.nil?\n\n if project && !project.leaf?\n ################\n # Smile specific #768560: V4.0.0 : Time entries list : access to hidden BAR values\n # Smile specific : subproject_values + current project param\n add_available_filter \"subproject_id\",\n :type => :list_subprojects,\n :values => lambda { subproject_values(true) }\n end\n\n ################\n # Smile specific #271407 Time Entries : filter by BU\n # + BU\n unless project\n bu_projects = Project.bu_projects.sort\n\n if bu_projects.any?\n add_available_filter \"bu_project\",\n :label => :label_bu,\n :type => :list_optional,\n :values => ( bu_projects.collect{|p| [p.name, p.id.to_s]} )\n end\n end\n # END -- Smile specific #271407 Time Entries : filter by BU\n #######################\n\n ################\n # Smile specific #355842 Rapport temps passé : filtre projet mis-à-jour\n # No way to filter projects of sub-request for project_updated_on, if no project\n # + PROJECT UPDATED ON\n add_available_filter('project_updated_on',\n :type => :date_past,\n :name => \"#{l(:label_project)} #{l(:field_updated_on)}\"\n ) if project\n # END -- Smile specific #355842 Rapport temps passé : filtre projet mis-à-jour\n #######################\n\n ################\n # Smile specific : new filters, only if on project\n # Smile comment : Too heavy for all projects query\n if advanced_filters\n if project\n ################\n # Smile specific #423277 Rapport : Filtre sur tâche parente et racine\n # + ROOT_ID, PARENT_ID\n add_available_filter 'root_id',\n :name => l('field_issue_root_id'),\n :type => :list_optional,\n :values => lambda {\n calc_project_and_children_issues\n @children_root_issues_id_and_label\n }\n\n add_available_filter 'parent_id',\n :name => l('field_issue_parent_id'),\n :type => :list_optional,\n :values => lambda {\n calc_project_and_children_issues\n @children_parent_issues_id_and_label\n }\n\n ################\n # Smile specific #247451 Entrées de temps et Rapport : filtre par demande\n # * 
Unable to know if we are on an issue :\n # scope modified afterwards by the controller to filter on issue\n # => possible to filter on an issue that is not the current one\n # => obviously will return no result\n # + ISSUE_ID\n add_available_filter 'issue_id',\n :type => :list_optional,\n :values => lambda {\n calc_project_and_children_issues\n @children_issues_id_and_label\n }\n # END -- Smile specific #247451 Entrées de temps et Rapport : filtre par demande\n #######################\n\n #---------------\n # Smile specific #147568 Filter on parent task\n # children_count\n # Works even if NO project specified\n add_available_filter 'children_count', :type => :integer\n\n #---------------\n # Smile specific #226967 Filter 'number of parents'\n add_available_filter 'level_in_tree',\n :type => :integer\n end\n else\n # NATIVE source code\n add_available_filter(\"issue_id\", :type => :tree, :label => :label_issue)\n end\n # END -- Smile specific : new filters, only if on project\n #######################\n\n\n ################\n # Smile specific : issue created on filter\n # + ISSUE CREATED ON\n add_available_filter \"issue_created_on\", :type => :date_past, :name =>\"#{l(:field_issue)} #{l(:field_created_on)}\"\n\n add_available_filter(\"issue.tracker_id\",\n :type => :list,\n :name => l(\"label_attribute_of_issue\", :name => l(:field_tracker)),\n :values => lambda { trackers.map {|t| [t.name, t.id.to_s]} })\n add_available_filter(\"issue.status_id\",\n :type => :list,\n :name => l(\"label_attribute_of_issue\", :name => l(:field_status)),\n :values => lambda { issue_statuses_values })\n add_available_filter(\"issue.fixed_version_id\",\n :type => :list,\n :name => l(\"label_attribute_of_issue\", :name => l(:field_fixed_version)),\n :values => lambda { fixed_version_values })\n add_available_filter \"issue.category_id\",\n :type => :list_optional,\n :name => l(\"label_attribute_of_issue\", :name => l(:field_category)),\n :values => lambda { project.issue_categories.collect{|s| [s.name, s.id.to_s] } } if project\n\n add_available_filter(\"user_id\",\n :type => :list_optional, :values => lambda { author_values }\n )\n\n ################\n # Smile specific #831010: Time Report Query : new time entry user filter, me\n if User.current.logged?\n add_available_filter(\"user_id_me\",\n :type => :list_optional, :values => lambda { [[\"<< #{l(:label_me)} >>\", 'me']] }, :name =>\"#{l(:field_user)} (#{l(:label_me)})\"\n )\n end\n\n ################\n # Smile specific : starting from Redmine ~ 4\n if TimeEntry.instance_methods.include?(:author)\n add_available_filter(\"author_id\",\n :type => :list_optional, :values => lambda { author_values }\n )\n end\n\n ################\n # Smile specific #831010: Time Report Query : new time entry user filter, me\n # + AUTHOR_ID_ME\n if Redmine::VERSION::MAJOR >= 4\n if User.current.logged?\n add_available_filter(\"author_id_me\",\n :type => :list_optional, :values => lambda { [[\"<< #{l(:label_me)} >>\", 'me']] }, :name =>\"#{l(:field_author)} (#{l(:label_me)})\"\n )\n end\n end\n\n ################\n # Smile specific #473776 Spent Time Report : Filter on Assignee's group\n # + MEMBER OF GROUP\n add_available_filter(\"member_of_group\",\n :type => :list_optional, :values => lambda { Group.givable.order(:lastname).collect {|g| [g.name, g.id.to_s]} }\n )\n # END -- Smile specific #473776 Spent Time Report : Filter on Assignee's group\n #######################\n\n activities = (project ? 
project.activities : TimeEntryActivity.shared)\n add_available_filter(\"activity_id\",\n :type => :list, :values => activities.map {|a| [a.name, a.id.to_s]}\n )\n\n add_available_filter(\"project.status\",\n :type => :list,\n :name => l(:label_attribute_of_project, :name => l(:field_status)),\n :values => lambda { project_statuses_values }\n ) if project.nil? || !project.leaf?\n\n add_available_filter \"comments\", :type => :text\n\n ################\n # Smile specific\n # + ESTIMATED HOURS\n add_available_filter \"estimated_hours\", :type => :float\n\n ################\n # Smile specific #994 Budget and Remaining enhancement\n # If we display all issue, display budget_hours and remaining_hours columns\n # + BUDGET HOURS\n budget_and_remaining_enabled = Project.respond_to?('b_a_r_module') &&\n ( self.project.nil? || self.project.budget_and_remaining_enabled )\n\n add_available_filter \"budget_hours\", :type => :float if budget_and_remaining_enabled\n # END -- Smile specific #994 Budget and Remaining enhancement\n #######################\n\n add_available_filter \"hours\", :type => :float\n\n add_available_filter \"spent_hours_for_issue_and_user\", :type => :float\n add_available_filter \"spent_hours_for_issue\", :type => :float\n add_available_filter \"spent_hours_for_user\", :type => :float\n\n add_available_filter \"spent_hours_for_issue_and_user_this_month\", :type => :float\n add_available_filter \"spent_hours_for_issue_this_month\", :type => :float\n add_available_filter \"spent_hours_for_user_this_month\", :type => :float\n\n add_available_filter \"spent_hours_for_issue_and_user_previous_month\", :type => :float\n add_available_filter \"spent_hours_for_issue_previous_month\", :type => :float\n add_available_filter \"spent_hours_for_user_previous_month\", :type => :float\n\n ################\n # Smile specific #994 Budget and Remaining enhancement\n # + REMAINING HOURS\n add_available_filter \"remaining_hours\", :type => :float if budget_and_remaining_enabled\n # END -- Smile specific #994 Budget and Remaining enhancement\n #######################\n\n #################\n # Added by plugin\n list_yes_no =\n [\n [l(:general_text_Yes), '1'], # index must be a string !\n [l(:general_text_No), '0'],\n ]\n\n add_available_filter \"is_last_time_entry_for_issue_and_user\", :type => :list_optional, :values => list_yes_no\n add_available_filter \"is_last_time_entry_for_issue\", :type => :list_optional, :values => list_yes_no\n add_available_filter \"is_last_time_entry_for_user\", :type => :list_optional, :values => list_yes_no\n\n add_custom_fields_filters(TimeEntryCustomField)\n add_associations_custom_fields_filters :project\n add_custom_fields_filters(issue_custom_fields, :issue)\n add_associations_custom_fields_filters :user\n end", "def apply_filter\n end", "def filter\n tag_list=params[:tags].split(/,/)\n @projects = Project.visible.find_all_for_all_tags(tag_list)\n @tags = Tag.\n for_projects(@projects).\n reject { |tag| tag_list.include?(tag.name) }\n @tag_count = Hash.new(0)\n @projects.each do |project|\n project.tags.each do |tag|\n @tag_count[tag.name] += 1\n end\n end\n @tag_sum = 0\n @tags.each { |tag| @tag_sum += @tag_count[tag.name] }\n render :layout => 'base'\n end", "def query_filters; end", "def filter\n do_authorize_class\n\n filter_response, opts = Settings.api_response.response_advanced(\n api_filter_params,\n Access::ByPermission.dataset_items(current_user, dataset_id: params[:dataset_id]),\n DatasetItem,\n DatasetItem.filter_settings(:reverse_order)\n )\n\n 
respond_filter(filter_response, opts)\n end", "def index\n #keep filter for next time\n session[:req_filter] = params[:req_filter] if params[:req_filter]\n session[:scope_filter] = params[:scope_filter] if params[:scope_filter]\n session[:status_filter] = params[:status_filter] if params[:status_filter]\n\n #uses named scopes chained together to implement filter\n \t@requisitions = Requisition.by_req(session[:req_filter]).\\\n\t by_scope(session[:scope_filter]).by_status(session[:status_filter]).\\\n\t where(\"project = ?\", session[:project]).order('req_num').\\\n\t paginate(:page => params[:page])\n\t \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @requisitions }\n end\n end", "def query\n respond_to do |format|\n format.html{ redirect_to root_path }\n format.json{\n render :json => @filter.run(params)\n }\n end\n end", "def setup_task_filters\n f = params[:filter]\n\n filter_ids = [ params[:filter] ].flatten.compact\n if filter_ids.empty? or filter_ids.include?(TaskFilter::ALL_TASKS)\n session[:filter_customer] = \"0\"\n session[:filter_milestone] = \"0\"\n session[:filter_project] = 0\n else\n customers = []\n milestones = []\n projects = []\n \n filter_ids.each do |id|\n code = id[0, 1]\n id = id[1, id.length]\n\n customers << id if code == \"c\"\n projects << id if code == \"p\"\n milestones << id if code == \"m\"\n # this is a bit of hack - we're using (- projectid - 1) to signify\n # \"milestones that are unassigned in the project with that id\"\n # We need to subtract 1 more as -1 is used by views to signify any unassigned\n # milestone.\n milestones << - (id.to_i + 1) if code == \"u\"\n end\n \n session[:filter_customer] = customers\n session[:filter_project] = projects\n session[:filter_milestone] = milestones\n end\n\n # These filters use the query menu html, but can only have a single value.\n # Just grab the first value to use that. \n single_filters = [ :sort, :group_by, :colors, :icons ]\n single_filters.each do |filter|\n values = params[filter] || []\n session[filter] = values.first\n end\n \n filter_names = [:filter_user, :filter_hidden, :filter_status,\n :hide_deferred, :hide_dependencies, :filter_type, \n :filter_severity, :filter_priority,\n :show_all_unread ]\n filter_names.each do |filter|\n session[filter] = params[filter]\n end\n\n # set any filters on custom properties\n current_user.company.properties.each do |prop|\n filter = prop.filter_name\n session[filter] = params[filter]\n end\n\n current_user.last_filter = session[:filter_hidden]\n current_user.last_milestone_id = session[:filter_milestone]\n current_user.last_project_id = session[:filter_project]\n session[:last_project_id] = session[:filter_project]\n current_user.save\n\n redirect_to(params[:redirect_action])\n end", "def filter\n query = build_query()\n @lancamentos = query\n\n\n render :layout => nil\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
include Override Object#include so that modules extending Module instance can define include.
def include( *modules ) super if defined?( super ) original_include( *modules ) end
[ "def add_include(include)\n object_class.record_location self\n return include unless @document_self\n object_class.add_include include\n end", "def add_include include\n add_to @includes, include\n\n include\n end", "def include( *args )\n includeSubobject( *args )\n end", "def include(mod)\n mod.included(self)\n end", "def included(mod); end", "def included(a_module)\n end", "def on_include(&block)\n self.inclusion = block\n self\n end", "def set_include_statement\n super\n end", "def included_modules() end", "def add_include(permitted = [])\n @include = JsonApiServer::Include.new(request, model, permitted)\n self\n end", "def _blockenspiel_custom_include(*modules_) # :nodoc:\n _blockenspiel_default_include(*modules_)\n modules_.reverse_each do |mod_|\n mod_.public_instance_methods.each do |method_|\n _blockenspiel_auto_dsl_method(method_)\n end\n end\n end", "def do_includes\n @content.scan(/rb_include_module\\s*\\(\\s*(\\w+?),\\s*(\\w+?)\\s*\\)/) do |c,m|\n next unless cls = @classes[c]\n m = @known_classes[m] || m\n\n comment = new_comment '', @top_level, :c\n incl = cls.add_include RDoc::Include.new(m, comment)\n incl.record_location @top_level\n end\n end", "def include_in_object(obj, include_ancestry = false)\n (include_ancestry ? @rmixins : @mixins).each { |m| obj.extend(m) }\n self\n end", "def include_in_object(obj_, include_ancestry_=false)\n (include_ancestry_ ? @rmixins : @mixins).each{ |m_| obj_.extend(m_) }\n self\n end", "def include_in_front(mixin)\n inherit_from_self!\n include mixin\n end", "def include(*modules)\n `for(var i=0,l=modules.length;i<l;++i){var mod=modules[i];mod.m$appendFeatures(this);mod.m$included(this);mod.__includers__[this.__name__]=true;}`\n return self\n end", "def includes(name, options = {}, &block)\n self.include[name] = ARSerialization.register(options, &block)\n end", "def include(header)\n @inc << \"#include #{header}\"\n end", "def add_include(path)\n @includes << path\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
extend_object Override Module.extended so that modules extending Module can define extend_object.
def extend_object( hooked_instance ) original_extend_object( hooked_instance ) super if defined?( super ) end
[ "def _extend_(mod)\n mod.__send__(:extend_object, self)\n end", "def extend_object(obj) end", "def representer_module_for(object, *args)\n # TODO: cache this!\n evaluate_option(:extend, object) # TODO: pass args? do we actually have args at the time this is called (compile-time)?\n end", "def extended(object)\n super\n\n case object\n when Class\n raise RuntimeError, \"extending classes with feature flags is not currently supported\"\n when Module\n Flaggable.setup_flaggable_object(object)\n\n # Add the feature check methods to the module that was extended.\n #\n object.internal_feature_checks_module.include internal_feature_checks_module\n\n # Because we added feature check methods above, extend again to make them available.\n #\n object.extend internal_feature_checks_module\n\n # Add the feature methods to the module's internal feature module.\n #\n object.internal_feature_module.include internal_feature_module\n\n # Add our feature flags to the object's feature flags.\n #\n object.feature_flags.concat(feature_flags)\n end\n end", "def extend_object( hooked_instance )\n\n ::Module::Cluster.ensure_parser_constructed_module_evaluated( self )\n ::Module::Cluster.evaluate_cluster_stack( :before_extend, hooked_instance, self )\n \n super if defined?( super )\n \n end", "def add_extend ext\n add_to @extends, ext\n\n ext\n end", "def extended_modules; end", "def extended( instance )\n \n super if defined?( super )\n \n configuration_modules.each { |this_member| instance.extend( this_member ) }\n \n end", "def add_extends out, extends\n add_extension_modules out, 'Extended by', extends\n end", "def extension\n self[:extend]\n end", "def extended(object)\n object.send(:setup_decorations)\n super\n end", "def mix_into_object obj, module_name = nil\n if module_name\n module_name = \"#{module_name}Mixin\"\n else\n module_name = object_mixin_class(obj)\n raise ArgumentError, \"module_name unspecified\" unless module_name\n end\n\n # Don't mixin more than once.\n mixins = obj.extended_by\n\n if @verbose\n $stderr.puts \" ### #{self.class}.mix_into_object(#{obj.class.name}, #{module_name})\" if @verbose\n $stderr.puts \" ### #{self}: ancestors =\\n#{self.class.ancestors.pretty_inspect}\" if @verbose\n end\n\n (@self_class_ancestors ||= self.class.ancestors.freeze).reverse_each do | cls |\n mixin = cls_const_get(cls, module_name)\n # $stderr.puts \" *** #{obj} << #{mixin.inspect}\" if mixin\n\n if mixin && ! mixins.include?(mixin)\n $stderr.puts \" ### #{self}: \\#<#{obj.class.name} #{obj.object_id} #{obj.id || :NEW}>.extend(#{mixin})\" if @verbose # || true\n obj.extend(mixin)\n end\n # $stderr.puts \" ### callers\\n#{caller * \"\\n\"}\" if @verbose\n end\n\n # Start observing events on the object.\n case obj\n when Content, Content::Version, ContentKey, ContentKey::Version\n observe_content!(obj)\n end\n\n $stderr.puts \" ### #{self}: \\#<#{obj.class.name} #{obj.object_id} #{obj.id || :NEW}>.extended_by =\\n#{obj.extended_by.pretty_inspect}\" if @verbose #\n\n self\n end", "def write_object(object)\n extend_for(super)\n end", "def extend_with(target, mod)\n runtime.logger.trace(\"Extending '#{target}' with '#{mod}'\")\n target.send(:extend, mod)\n end", "def extend! 
extensions, &extension_block\n subr = to_resource.extending(*extensions)\n subr = DeclarationBuilder.new(subr).extension_exec(&extension_block) if extension_block\n __class__.new subr, *(@extensions+[subr.__extension_module__])\n end", "def included(submodule)\n super\n submodule.instance_eval { extend Extension }\n submodule\n end", "def extend(mod, *filters); end", "def add(module_object)\n @additional_modules << module_object\n end", "def _extended?(_module)\n _extended.include?(_module)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure supplied GitHub usernames are real
def test_github_profile_exists people.each do |name, info| github_username = info.fetch("github", false) if github_username assert_kind_of Sawyer::Resource, octokit.user(github_username), "#{github_username} is not a real GitHub account" end end end
[ "def username_exists?(github_username)\n !!User.all.find_by(github_username: github_username)\n end", "def github_usernames\n @github_usernames ||= cloud9_workspaces.map do |ws|\n next unless ws[:scmurl]\n\n info = github_info_from_url(ws[:scmurl])\n\n next unless info\n\n info[:user]\n end\n .reject(&:nil?)\n end", "def test_username_with_invalid_examples\n person = @valid_person\n invalid_usernames = %w{rails/rocks web2.0 javscript:something ME}\n invalid_usernames.each do |username|\n person.username = username\n assert !person.valid?, \"#{username} shouldn't pass validation, but does\"\n end\n end", "def translate_github_user_to_jira_user (github_user_object)\n #Example: msolomon@thrillist.com\n #user_email_domain = thrillist.com\n #user_email_prefix = msolomon\n if github_user_object[\"email\"] != nil\n user_email_domain = github_user_object[\"email\"].split('@')[1]\n user_email_prefix = github_user_object[\"email\"].split('@')[0]\n\n #convert prefix to JIRA markdown or a link to github name if email domain is not thrillist\n if user_email_domain == \"thrillist.com\"\n user = user_email_prefix.insert(0, \"[~\") + \"]\"\n end\n else\n user = \"[\"+github_user_object[\"login\"]+\"|\"+github_user_object[\"html_url\"]+\"]\"\n end\n\n #overwrite special cases\n case github_user_object[\"login\"]\n when \"kpeltzer\"\n user = \"[~kpeltzer]\"\n when \"ken\"\n user = \"[~kpeltzer]\"\n when \"kwadwo\"\n user = \"[~kboateng]\"\n when \"tarasiegel\"\n user = \"[~tsiegel]\"\n when \"samiamorwas\"\n user = \"[~mhaarhaus]\"\n when \"patrick\"\n user = \"[~plange]\"\n when \"pfunklange\"\n user = \"[~plange]\"\n when \"stefsic\"\n user = \"[~ssicurelli]\"\n when \"lmon\"\n user = \"[~lukemonaco]\"\n when \"schuylerpenny\"\n user = \"[~spenny]\"\n when \"khalid-richards\"\n user = \"[~krichards]\"\n when \"THRILL-jacinto\"\n user = \"[~jacinto]\"\n when \"emchale\"\n user = \"[~emchale]\"\n when \"mpriscella\"\n user = \"[~mpriscella]\"\n when \"vtapia5070\"\n user = \"[~vtapia]\"\n when \"bethannezink\"\n user = \"[~bzink]\"\n when \"boskonyc\"\n user = \"[~bblagojevic]\"\n when \"fabriceTOUSSAINT\"\n user = \"[~ftoussaint]\"\n when \"mdgreenwald\"\n user = \"[~mgreenwald]\"\n when \"morelme\"\n user = \"[~mmorel]\"\n when \"vitaly-rozman\"\n user = \"[~vrozman]\"\n end\n\n return user\nend", "def ensure_user(user, followers = true, orgs = true)\n # Github only supports alpa-nums and dashes in its usernames.\n # All other sympbols are treated as emails.\n if not user.match(/^[\\w\\-]*$/)\n begin\n name, email = user.split(\"<\")\n email = email.split(\">\")[0]\n name = name.strip unless name.nil?\n email = email.strip unless email.nil?\n rescue StandardError\n warn \"Not a valid email address: #{user}\"\n return\n end\n\n unless is_valid_email(email)\n warn \"Extracted email(#{email}) not valid for user #{user}\"\n end\n u = ensure_user_byemail(email, name)\n else\n u = ensure_user_byuname(user)\n ensure_user_followers(user) if followers\n ensure_orgs(user) if orgs\n end\n return u\n end", "def github_user\n if USER.empty?\n abort \"** No GitHub user set. See #{LGHCONF}\"\n else\n USER\n end\n end", "def github_username(url)\n GITHUB_URL_REGEX.match(url) ? GITHUB_URL_REGEX.match(url)[1] : nil\n end", "def check_author_names\n if self.errors[:author_names].empty?\n inserted_author_name = self.author_names.split(',')\n inserted_author_name.collect!{|ia| ia.strip} \n if inserted_author_name.empty? or inserted_author_name.include?(\"\")\n self.errors.add(:author_names, 'are not entered properly. 
Please check again.') \n else \n for a_name in inserted_author_name\n if /[\\,\\\"\\?\\!\\;\\:\\#\\$\\%\\&\\(\\)\\*\\+\\-\\/\\<\\>\\=\\@\\[\\]\\\\\\^\\_\\{\\}\\|\\~0-9]/.match(a_name)\n self.errors.add(:author_names, 'name should not contain any special characters or numbers.')\n return\n end\n end\n end\n end\n end", "def cleanup_usernames\n \n #\n # Handle a string of comma separated (coming from the webui\n #\n self.usernames = self.usernames.split(\",\") if self.usernames.kind_of? String\n\n #\n # Make sure there's no spaces in the usernames\n #\n self.usernames.each {|u| u.gsub!(/\\s+/, \"\")}\n \n #\n # Make sure all usernames are lowercase\n #\n self.usernames.each {|u| u.downcase!}\n \n #\n # Make sure we don't have multiple same usernames\n #\n self.usernames.uniq!\n end", "def cleanup_usernames\n \n #\n # Handle a string of comma separated (coming from the webui\n #\n self.usernames = self.usernames.split(\",\") if self.usernames.kind_of? String\n\n #\n # Make sure there's no spaces in the usernames\n #\n self.usernames.each {|u| u.gsub!(/\\s+/, \"\")}\n \n #\n # Make sure all usernames are lowercase\n #\n self.usernames.each {|u| u.downcase!}\n \n #\n # Make sure we don't have multiple same usernames\n #\n self.usernames.uniq!\n end", "def github_check\n begin\n unless github.blank?\n RestClient.get \"https://github.com/#{github}\"\n end\n rescue\n errors.add :base, \"Invalid Github account.\"\n end\n end", "def private_namespace_and_team_available\n ns = Namespace.make_valid(username)\n\n if ns.nil?\n errors.add(:username, \"'#{username}' cannot be transformed into a \" \\\n \"valid namespace name\")\n elsif Namespace.exists?(name: ns)\n clar = (ns != username) ? \" (modified so it's valid)\" : \"\"\n errors.add(:username, \"cannot be used: there is already a namespace \" \\\n \"named '#{ns}'#{clar}\")\n elsif Team.exists?(name: username)\n errors.add(:username, \"cannot be used: there is already a team named \" \\\n \"like this\")\n end\n end", "def getGitHubURLForUsername(username)\n\treturn \"https://github.com/\" + username\nend", "def github_url_for(username)\n \"http://github.com/#{username}\"\n end", "def github_url_must_be_valid\n if github.present? && github.downcase.strip.index(\"https://github.com/\") != 0\n errors.add(:github, \"Github URL is invalid\")\n end\n end", "def github_user_and_project(github_url)\n matches = /github\\.com.(.*?)\\/(.*)/.match(github_url)\n matches ? [matches[1], matches[2].sub(/\\.git\\z/, '')] : [nil, nil]\n end", "def unknown_github_logins\n pull_request_commits.select(&:author).map(&:author).reject do |author|\n signed_a_cla?(author.login)\n end.map(&:login).uniq\n end", "def test_ID_25847_edit_profile_name_limit\n login_as_user1\n go_to_edit_profile_page\n verify_user_names_are_not_required_to_be_unique \"Jame Smith\"\n end", "def github_user_from_gitconfig\n results = `git config github.user`.chomp\n return nil if results.empty?\n results\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the next lower item in the list.
def lower_item return nil unless in_list? acts_as_list_class.find(:first, :conditions => "position = #{(send(:position).to_i + 1).to_s}" ) end
[ "def lower_item\n return nil unless in_list?\n lower_items(1).first\n end", "def lower_item\n return nil unless in_list?\n @lower_item ||= self.class.branch(self).find(:first, :conditions => {:position => position+1})\n end", "def find_lt(list, item, &block)\r\n\t\ti = bisect_left(list, item, &block)\r\n\t\treturn list[i - 1] unless 0 == i\r\n\tend", "def next_item\n return unless in_list?\n items_in_list.where(position_field => self[position_field]+1).first\n end", "def find_le(list, item, &block)\r\n\t\ti = bisect_right(list, item, &block)\r\n\t\treturn list[i - 1] unless 0 == i\r\n\tend", "def move_lower\n return unless lower_item\n\n acts_as_list_class.transaction do\n lower_item.decrement_position\n increment_position\n end\n end", "def move_lower\n return unless lower_item\n\n acts_as_list_class.transaction do\n if lower_item.current_position != current_position\n swap_positions_with(lower_item)\n else\n lower_item.decrement_position\n increment_position\n end\n end\n end", "def higher_item\n return nil unless in_list?\n higher_items(1).first\n end", "def prev_item\n @current -= 1 if @current > 0\n\n items\n end", "def move_lower\n acts_as_list_class.transaction do\n lower_item.try(:decrement_position) and increment_position\n end\n end", "def previous_item\n return unless in_list?\n items_in_list.where(position_field => self[position_field]-1).first\n end", "def find_min\r\n return nil if !@head\r\n cursor = @head\r\n min = cursor.data\r\n while cursor\r\n if cursor.data < min\r\n min = cursor.data\r\n end\r\n cursor = cursor.next\r\n end\r\n return min\r\n end", "def prev_item\n @current -= 1 if @current > 0\n\n items\n end", "def next\n units_sorted = self.order.units.sort_by {|unit| unit.id}\n if units_sorted.find_index(self) < units_sorted.length\n return units_sorted[units_sorted.find_index(self)+1]\n else\n return nil\n end\n end", "def next\n units_sorted = self.order.units.sort_by {|unit| unit.id}\n if units_sorted.find_index(self) < units_sorted.length\n return units_sorted[units_sorted.find_index(self)+1]\n else\n return nil\n end\n end", "def next_item\n @current += 1 if @current < last\n\n items\n end", "def higher_item\n return nil unless in_list?\n @higher_item ||= self.class.branch(self).find(:first, :conditions => {:position => position-1})\n end", "def next_item\n @current += 1 if @current < last\n\n items\n end", "def next\n index = @@list.index(self)\n if (!index)\n raise \"ERROR: Can't find given player\"\n end\n\n return index + 1 < @@list.length ? @@list[index + 1] : @@list[0]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Decrease the position of this item without adjusting the rest of the list.
def decrement_position return unless in_list? update_attribute :position, self.send(:position).to_i - 1 end
[ "def decrement_position\n return unless in_list?\n set_list_position(current_position - 1)\n end", "def decrement_position\n return unless in_list?\n update_attribute position_column, self.send(position_column).to_i - 1\n end", "def decrement_position\n return unless in_list?\n update_position( self.send(position_column).to_i-1 )\n end", "def move_higher\n return unless higher_item\n\n acts_as_list_class.transaction do\n higher_item.increment_position\n decrement_position\n end\n end", "def move_lower\n acts_as_list_class.transaction do\n lower_item.try(:decrement_position) and increment_position\n end\n end", "def move_higher\n return unless higher_item\n\n acts_as_list_class.transaction do\n if higher_item.current_position != current_position\n swap_positions_with(higher_item)\n else\n higher_item.increment_position\n decrement_position\n end\n end\n end", "def move_higher\n return unless higher_item\n higher_item.increment_position\n decrement_position\n end", "def move_item_up\n if @active_item > 0\n pitem = @items[@active_item - 1]\n @items[@active_item - 1] = @items[@active_item]\n @items[@active_item] = pitem\n @active_item = @active_item - 1\n draw_items\n end\n end", "def move_lower\n return unless lower_item\n\n acts_as_list_class.transaction do\n lower_item.decrement_position\n increment_position\n end\n end", "def move_higher\n high_item = higher_item\n return unless high_item\n\n high_item.increment_position\n decrement_position\n end", "def decrement_queue_position\n return unless in_queue?\n update_attribute queue_position_column, self.send(queue_position_column).to_i - 1\n end", "def remove_from_position\n return if self.position.nil?\n renumber_from = self.position\n self.position = nil\n self.update_column(:position, nil)\n Requirement.where(requirements_template_id: self.requirements_template_id).\n where(\"position > ?\", renumber_from).\n update_all(\"position = position - 1\")\n end", "def decrement_position_on_reorder(old_position)\n UserStory\n .where('position > ? AND position <= ? AND id <> ?', old_position, self.position, self.id)\n .where(sprint_id: self.sprint_id, board_id: self.board_id).update_all('position = position - 1')\n end", "def decrease_sell_in\n @item.sell_in -= 1\n end", "def decrement(records)\n records.update_all(['position = position - 1'])\n end", "def decrease \n _multiplier = ($multiplier == 0 ? 1 : $multiplier )\n delta = _multiplier\n $log.debug \"XXXX decrease got mult #{$_multiplier} \"\n c = @current_component \n # if decreasing last component then increase previous\n # otherwise always increase the next\n n = get_next_component || get_prev_component \n return unless n # if no other, don't allow\n if @orientation == :HORIZONTAL_SPLIT\n c.height -= delta\n n.height += delta\n # TODO\n else\n if c.width > 3 + delta\n c.width -= delta\n n.width += delta\n end\n end\n @repaint_required = true\n self\n end", "def decrease_sell_in(item)\n item.sell_in -= 1\n end", "def remove_item\n @parent.remove_item(self)\n end", "def destroy_item_at(position)\n # TODO Stubbed - Requires definition and implementation\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }