query (stringlengths 7–9.5k) | document (stringlengths 10–1.07M) | negatives (sequencelengths 19) | metadata (dict)
---|---|---|---|
Checks to see if any new promotions have been applied to the order. | def new_promotions?
!new_promotions.empty?
end | [
"def existing_promotions?\n !existing_promotions.empty?\n end",
"def pending_promotions?\n !pending_promotions.empty?\n end",
"def checkout_promotions?\n !checkout_promotions.empty?\n end",
"def check_promotions\n line_items.each do |item|\n Spree::ItemAdjustments.new(item).calculate_promo_total\n end\n end",
"def related_promotions?\n !related_promotions.empty?\n end",
"def apply_promotions!\n raise PromotionApplyError unless pending_promotions.empty?\n @promotion_results = Promotion.active.apply!(self)\n apply_adjustments!\n\n # Convert all the applied promotions into an array of decorated\n # promotions.\n @pending_promotions = applied_promotions.map {|p| ::Promotions::Decorator.new(p.promotion)}\n\n # If no promotions have been added to the order, they're all new.\n # Otherwise generate a new collection which is the difference between the\n # existing ones and the new ones.\n if previous_promotion_ids.empty?\n @new_promotions = pending_promotions\n else\n @new_promotions = pending_promotions.reject {|p| previous_promotion_ids.include?(p.id)}\n @existing_promotions = pending_promotions.select {|p| previous_promotion_ids.include?(p.id)}\n end\n\n pending_promotions\n end",
"def promotions\n @promotions ||= order.promotions\n end",
"def can_be_added?(order)\n eligible?(order) &&\n (can_combine?(order) || order.promotion_credits.empty?) &&\n Credit.count(:conditions => {\n :adjustment_source_id => self.id,\n :adjustment_source_type => self.class.name,\n :order_id => order.id\n }) == 0\n end",
"def payment_required?\n order = self.dup\n order.just_a_quote = true\n order.order_items.map(&:set_price)\n order.calc_subtotal\n order.apply_discount\n\n order.calc_shipping(true) if order.shipping_price.blank?\n\n order.subtotal != 0 or order.shipping_price != 0\n end",
"def apply_promotions!\n transaction do\n adjustments.destroy_all\n order_items.each { |order_item| order_item.adjustments.destroy_all }\n\n billing_group.promotions.active.each do |promotion|\n promotion.apply!(self)\n end\n activated_promotions.live.each do |promotion|\n promotion.apply!(self)\n end\n end\n end",
"def check_prepaid_orders(_orders)\n now = Time.zone.now\n this_months_orders = _orders.select do |ord|\n status = ord.status == 'QUEUED'\n scheduled_this_month = (\n ord.scheduled_at > Date.today.to_time.strftime('%F %T') &&\n ord.scheduled_at < now.end_of_month.strftime('%F %T')\n )\n is_prepaid = ord.is_prepaid == 1\n [status, scheduled_this_month, is_prepaid].all?\n end\n order_check = false\n # puts this_months_orders.inspect\n if this_months_orders != []\n this_months_orders.each do |order|\n if order.scheduled_at > now.strftime('%F %T')\n order_check = true\n break\n end\n end\n elsif all_orders_sent?(_orders)\n puts \"no queued orders found for sub: #{subscription_id}, all orders sent!\"\n order_check = true\n end\n return order_check\n end",
"def need_deliver_order_items\n order_items.select(&:need_deliver?)\n end",
"def apply!(order)\n result = check(order)\n if result.successful?\n order.applied_promotions.build(:promotion => self)\n result.effects = effects.map {|e| e.apply!(order, result.conditions)}\n end\n\n result\n end",
"def update_orders\n @data.applied_amounts.each do |order_id, applied|\n applied = unfmt_money(applied)\n if (@data.original_pays[order_id] - applied).abs > 0.001\n update_order(order_id, applied)\n end\n end\n end",
"def punch_out_order_message?\n !punch_out_order_message.nil?\n end",
"def confirm!\n no_stock_of = self.order_items.select(&:validate_stock_levels)\n unless no_stock_of.empty?\n raise Shoppe::Errors::InsufficientStockToFulfil, :order => self, :out_of_stock_items => no_stock_of\n end\n \n run_callbacks :confirmation do\n # If we have successfully charged the card (i.e. no exception) we can go ahead and mark this\n # order as 'received' which means it can be accepted by staff.\n self.status = 'received'\n self.received_at = Time.now\n self.save!\n\n self.order_items.each(&:confirm!)\n\n # Send an email to the customer\n deliver_received_order_email\n end\n \n # We're all good.\n true\n end",
"def set_has_promotion\n if self.price_group_id_changed? or self.prices.any?(&:changed?)\n if (self.prices + self.price_group_prices).empty?\n self.has_promotion = false\n elsif (self.prices + self.price_group_prices).detect(&:promo_amount)\n self.has_promotion = true\n end\n end\n nil\n end",
"def check_order\n if order.order_lines.count == 0\n order.destroy\n end\n end",
"def orders?\n return false unless orders.any?\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks to see if there are any promotions pending/applied to the order. | def pending_promotions?
!pending_promotions.empty?
end | [
"def checkout_promotions?\n !checkout_promotions.empty?\n end",
"def existing_promotions?\n !existing_promotions.empty?\n end",
"def related_promotions?\n !related_promotions.empty?\n end",
"def need_deliver_order_items\n order_items.select(&:need_deliver?)\n end",
"def check_prepaid_orders(_orders)\n now = Time.zone.now\n this_months_orders = _orders.select do |ord|\n status = ord.status == 'QUEUED'\n scheduled_this_month = (\n ord.scheduled_at > Date.today.to_time.strftime('%F %T') &&\n ord.scheduled_at < now.end_of_month.strftime('%F %T')\n )\n is_prepaid = ord.is_prepaid == 1\n [status, scheduled_this_month, is_prepaid].all?\n end\n order_check = false\n # puts this_months_orders.inspect\n if this_months_orders != []\n this_months_orders.each do |order|\n if order.scheduled_at > now.strftime('%F %T')\n order_check = true\n break\n end\n end\n elsif all_orders_sent?(_orders)\n puts \"no queued orders found for sub: #{subscription_id}, all orders sent!\"\n order_check = true\n end\n return order_check\n end",
"def new_promotions?\n !new_promotions.empty?\n end",
"def checkout_allowed?\n order_items.count > 0\n end",
"def get_promotions\n if @purchasing_items.any?\n get_using_promotions\n get_eligible_codes\n get_eligible_quantities\n end\n end",
"def check_promotions\n line_items.each do |item|\n Spree::ItemAdjustments.new(item).calculate_promo_total\n end\n end",
"def hasReceiptsButNoneProcessedYet\n hasReceipts && !hasProcessedReceipts\n end",
"def pending_changes?\n !update_approvals(false).empty?\n end",
"def capture_pending_payments\n success = true\n order.payments.pending.each do |payment|\n unless payment.capture!\n copy_errors(payment)\n success = false\n end\n end\n success\n end",
"def has_any_pending_tickets?\r\n TicketOrder.where(user: self).requires_attention.any?\r\n end",
"def punch_out_order_message?\n !punch_out_order_message.nil?\n end",
"def ready_to_approve?\n status = self.units.map(&:unit_status) & ['condition', 'copyright', 'unapproved']\n return status.empty?\n end",
"def promotions\n @promotions ||= order.promotions\n end",
"def pending?\n (pending.length > 0) ? true : false\n end",
"def not_paid_at_all\n\t\tget_cart_pending_balance == get_cart_price\n\tend",
"def void_pending_purchase_orders\n self.purchase_orders.select(&:pending?).each {|o| o.void}\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks for any promotions that have already been applied to the order. | def existing_promotions?
!existing_promotions.empty?
end | [
"def promotions\n @promotions ||= order.promotions\n end",
"def check_promotions\n line_items.each do |item|\n Spree::ItemAdjustments.new(item).calculate_promo_total\n end\n end",
"def checkout_promotions?\n !checkout_promotions.empty?\n end",
"def pending_promotions?\n !pending_promotions.empty?\n end",
"def apply_promotions!\n raise PromotionApplyError unless pending_promotions.empty?\n @promotion_results = Promotion.active.apply!(self)\n apply_adjustments!\n\n # Convert all the applied promotions into an array of decorated\n # promotions.\n @pending_promotions = applied_promotions.map {|p| ::Promotions::Decorator.new(p.promotion)}\n\n # If no promotions have been added to the order, they're all new.\n # Otherwise generate a new collection which is the difference between the\n # existing ones and the new ones.\n if previous_promotion_ids.empty?\n @new_promotions = pending_promotions\n else\n @new_promotions = pending_promotions.reject {|p| previous_promotion_ids.include?(p.id)}\n @existing_promotions = pending_promotions.select {|p| previous_promotion_ids.include?(p.id)}\n end\n\n pending_promotions\n end",
"def related_promotions?\n !related_promotions.empty?\n end",
"def apply!(order)\n result = check(order)\n if result.successful?\n order.applied_promotions.build(:promotion => self)\n result.effects = effects.map {|e| e.apply!(order, result.conditions)}\n end\n\n result\n end",
"def new_promotions?\n !new_promotions.empty?\n end",
"def apply_promotions!\n transaction do\n adjustments.destroy_all\n order_items.each { |order_item| order_item.adjustments.destroy_all }\n\n billing_group.promotions.active.each do |promotion|\n promotion.apply!(self)\n end\n activated_promotions.live.each do |promotion|\n promotion.apply!(self)\n end\n end\n end",
"def checkout_promotions\n @checkout_promotions ||= Promotion.active.code_based\n end",
"def need_deliver_order_items\n order_items.select(&:need_deliver?)\n end",
"def get_promotions\n if @purchasing_items.any?\n get_using_promotions\n get_eligible_codes\n get_eligible_quantities\n end\n end",
"def applicable?(promotable)\n promotable.is_a?(Spree::Order)\n end",
"def before_confirm\n return if defined?(SpreeProductAssembly)\n return unless @order.checkout_steps.include? 'delivery'\n\n packages = @order.shipments.map(&:to_package)\n @differentiator = Spree::Stock::Differentiator.new(@order, packages)\n @differentiator.missing.each do |variant, quantity|\n @order.contents.remove(variant, quantity)\n end\n end",
"def apply!(order)\n promotion_handler.apply!(order, matching_items(order))\n end",
"def refresh_promotions!\n @promotions = _promotions\n end",
"def check_order\n if order.order_lines.count == 0\n order.destroy\n end\n end",
"def payment_required?\n order = self.dup\n order.just_a_quote = true\n order.order_items.map(&:set_price)\n order.calc_subtotal\n order.apply_discount\n\n order.calc_shipping(true) if order.shipping_price.blank?\n\n order.subtotal != 0 or order.shipping_price != 0\n end",
"def ensure_not_referenced_by_any_order_item\n\n if order_items.count.zero?\n return true\n else\n errors.add(:base, 'Order Items present')\n return false\n end\n\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
An array of IDs of the promotions that have been applied, used to dump the previous promotion state to session. | def promotion_id_dump
pending_promotions.map(&:id)
end | [
"def previous_promotion_ids\n @previous_promotion_ids ||= []\n end",
"def promotions\n @promotions ||= order.promotions\n end",
"def apply_promotions!\n raise PromotionApplyError unless pending_promotions.empty?\n @promotion_results = Promotion.active.apply!(self)\n apply_adjustments!\n\n # Convert all the applied promotions into an array of decorated\n # promotions.\n @pending_promotions = applied_promotions.map {|p| ::Promotions::Decorator.new(p.promotion)}\n\n # If no promotions have been added to the order, they're all new.\n # Otherwise generate a new collection which is the difference between the\n # existing ones and the new ones.\n if previous_promotion_ids.empty?\n @new_promotions = pending_promotions\n else\n @new_promotions = pending_promotions.reject {|p| previous_promotion_ids.include?(p.id)}\n @existing_promotions = pending_promotions.select {|p| previous_promotion_ids.include?(p.id)}\n end\n\n pending_promotions\n end",
"def checkout_promotions\n @checkout_promotions ||= Promotion.active.code_based\n end",
"def promotions_to_remove\n []\n end",
"def refresh_promotions!\n @promotions = _promotions\n end",
"def pids\n @pids ||= []\n end",
"def pids\n list = []\n @processes.each do |p|\n list.push(p.pid)\n end\n list\n end",
"def get_promotions\n if @purchasing_items.any?\n get_using_promotions\n get_eligible_codes\n get_eligible_quantities\n end\n end",
"def all_promotions\n result = []\n result += self.promotions\n self.ticket_types.each { |tt| result += tt.promotions }\n result.sort_by { |promo| promo[:created_at] }.reverse!\n end",
"def biz_process_ids\n biz_processes.map { |bp| bp.id }\n end",
"def set_discounts_arr\n cart.discounts.where(kind: 'set').map { |set| set[:product_ids].push(set[:id].to_s + 's') }\n end",
"def pids\n @pids_lock.synchronize { @pids }\n end",
"def workflow_ids\n approval_access = RBAC::Access.new('workflows', 'approve').process\n approval_access.send(:generate_ids)\n\n Rails.logger.info(\"Approvable workflows: #{approval_access.id_list}\")\n\n approval_access.id_list\n end",
"def apply_promotions!\n transaction do\n adjustments.destroy_all\n order_items.each { |order_item| order_item.adjustments.destroy_all }\n\n billing_group.promotions.active.each do |promotion|\n promotion.apply!(self)\n end\n activated_promotions.live.each do |promotion|\n promotion.apply!(self)\n end\n end\n end",
"def procedure_id_counts\n return Array.new\n end",
"def promoted_products\n promoted.is_a?(Product) ? [promoted] : promoted.products.all\n end",
"def prisoner_ids\n @prisoner_ids || prisoners.collect{|p| p.id}\n end",
"def current_cart_pids\n pids = {}\n\n @gears.each do |gear|\n gear.carts.values.each do |cart|\n Dir.glob(\"#{$home_root}/#{gear.uuid}/#{cart.directory}/{run,pid}/*.pid\") do |pid_file|\n $logger.info(\"Reading pid file #{pid_file} for cart #{cart.name}\")\n pid = IO.read(pid_file).chomp\n proc_name = File.basename(pid_file, \".pid\")\n\n pids[proc_name] = pid\n end\n end\n end\n\n pids\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the IDs of promotions that were previously applied to the order. | def previous_promotion_ids
@previous_promotion_ids ||= []
end | [
"def promotions\n @promotions ||= order.promotions\n end",
"def printer_order_ids\n poids = []\n self.order_line_items.each do |li|\n poids << li.printer_order_id unless poids.include?(li.printer_order_id)\n end\n\n return poids\n end",
"def promotion_id_dump\n pending_promotions.map(&:id)\n end",
"def checkout_promotions\n @checkout_promotions ||= Promotion.active.code_based\n end",
"def apply_promotions!\n raise PromotionApplyError unless pending_promotions.empty?\n @promotion_results = Promotion.active.apply!(self)\n apply_adjustments!\n\n # Convert all the applied promotions into an array of decorated\n # promotions.\n @pending_promotions = applied_promotions.map {|p| ::Promotions::Decorator.new(p.promotion)}\n\n # If no promotions have been added to the order, they're all new.\n # Otherwise generate a new collection which is the difference between the\n # existing ones and the new ones.\n if previous_promotion_ids.empty?\n @new_promotions = pending_promotions\n else\n @new_promotions = pending_promotions.reject {|p| previous_promotion_ids.include?(p.id)}\n @existing_promotions = pending_promotions.select {|p| previous_promotion_ids.include?(p.id)}\n end\n\n pending_promotions\n end",
"def promotions_to_remove\n []\n end",
"def pids\n list = []\n @processes.each do |p|\n list.push(p.pid)\n end\n list\n end",
"def get_undelivered_order_ids\n\n #enable this one for previous restricted conditioned(pause/resume)\n #orders =self.orders.select('id, delivery_date').where('spree_orders.delivery_date > ?', Time.now).limit(3)\n orders = self.orders.select('id, delivery_date').where('spree_orders.state in (?) ', ['confirm', 'placed']).limit(3)\n\n order_ids = {}\n\n orders.each_with_index do |order, index|\n order_ids.merge!(index => order.id)\n end\n\n order_ids\n end",
"def compute_ids\n compute_messages_id\n compute_cookies_id\n compute_sequences_id\n @sequences.values.each do |seq|\n seq.compute_shots_id\n end\n changed\n end",
"def pids\n @pids_lock.synchronize { @pids }\n end",
"def removed_transactions_ids\n @removed_transactions\n end",
"def pids\n @pids ||= []\n end",
"def current_cart_pids\n pids = {}\n\n @gears.each do |gear|\n gear.carts.values.each do |cart|\n Dir.glob(\"#{$home_root}/#{gear.uuid}/#{cart.directory}/{run,pid}/*.pid\") do |pid_file|\n $logger.info(\"Reading pid file #{pid_file} for cart #{cart.name}\")\n pid = IO.read(pid_file).chomp\n proc_name = File.basename(pid_file, \".pid\")\n\n pids[proc_name] = pid\n end\n end\n end\n\n pids\n end",
"def biz_process_ids\n biz_processes.map { |bp| bp.id }\n end",
"def prereq_ids\n return [] unless scoped_course\n scoped_course.prereq_ids\n end",
"def variant_ids\n order_items.collect{|oi| oi.variant_id }\n end",
"def refresh_promotions!\n @promotions = _promotions\n end",
"def tipster_ids_in_cart\n initial_cart_session if session[:cart].nil?\n session[:cart][:tipster_ids].uniq\n end",
"def operator_ids\n @operator_ids ||= extract_operator_ids\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks to see if there is a code promotion applied to the order and that it is successful. | def code_promotion_successful?
promotion_results.code_based.successful?
end | [
"def code_promotion_failed?\n !promo_code.blank? and promotion_results.code_based.failed?\n end",
"def has_promotion_code?\n begin\n return (!order_payload[:custom_fields][:netsuite_custbody][\"coupon_code\"].blank? or !sales_order.promo_code.internal_id.blank?)\n rescue\n return false\n end\n end",
"def apply_coupon\n if @order.update_attributes(object_params)\n\n if @order.coupon_code.present?\n\n if (Spree::Promotion.exists?(:code => @order.coupon_code))\n if (Spree::Promotion.where(:code => @order.coupon_code).last.eligible?(@order) && Spree::Promotion.where(:code => @order.coupon_code).last.order_activatable?(@order))\n fire_event('spree.checkout.coupon_code_added', :coupon_code => @order.coupon_code)\n # If it doesn't exist, raise an error!\n # Giving them another chance to enter a valid coupon code\n @message = \"Coupon applied\"\n else\n # Check why the coupon cannot be applied (at least a few checks)\n promotion = Spree::Promotion.where(:code => @order.coupon_code).last\n if promotion.expired?\n @message = \"The coupon is expired or not yet active.\"\n elsif promotion.usage_limit_exceeded?(@order)\n @message = \"The coupon cannot be applied, it's usage limit has been exceeded.\"\n elsif promotion.created_at.to_i > @order.created_at.to_i\n @message = \"The coupon cannot be applied because it has been created after the order.\"\n else\n @message = \"The coupon cannot be applied to this order.\"\n end\n end\n else\n @message = t(:promotion_not_found)\n end\n end\n # Need to reload the order, otherwise total is not updated\n @order.reload\n respond_with(@order, @message)\n end\n end",
"def test_say_if_is_discounted\n setup_new_order_with_items()\n promo = promotions(:percent_rebate)\n \n assert !@order.is_discounted?\n @order.promotion_code = promo.code\n assert @order.is_discounted?\n end",
"def normal_and_standalone_promo?\n # Guard clause to ensure promo code is definitely a standalone promo\n return unless promotion.stand_alone_promo(self[:promotion_id])\n if !promotions.empty? && !promotions.map(&:standalone).include?(true)\n return errors.add(:promotion_id, '..other promo exists!')\n end\n end",
"def test_cleanup_promotion_order_finished\n # Setup - add promotion and complete order\n setup_new_order_with_items()\n promo = promotions(:fixed_rebate)\n @o.promotion_code = promo.code\n @o.order_status_code = order_status_codes(:ordered_paid_to_ship)\n assert @o.save\n \n # Now expire the promo\n assert promo.update_attributes({\n :start => Date.today - 2.weeks,\n :end => Date.today - 1.week\n })\n promo.reload\n @o.reload\n \n # Update something on the order, like an admin would.\n # Maybe we shipped out the order and changed the status code.\n @o.order_status_code = order_status_codes(:sent_to_fulfillment)\n assert !@o.should_promotion_be_applied?(promo)\n assert @o.save\n \n @o.reload\n \n # Check to see if promotion is still applied (it should be!)\n assert_equal promo, @o.promotion\n assert_kind_of OrderLineItem, @o.promotion_line_item\n end",
"def code_promotion_unchecked?\n promo_code.blank?\n end",
"def perform(options = {})\n order = options[:order]\n return if promotion_credit_exists?(order)\n\n amount = compute_amount(order)\n order.adjustments.create!(\n amount: amount,\n order: order,\n source: self,\n promotion_code: options[:promotion_code],\n label: I18n.t('spree.adjustment_labels.order', promotion: Spree::Promotion.model_name.human, promotion_name: promotion.name)\n )\n true\n end",
"def test_box8love_coupon_not_applicable_on_outlet\n cart_items = {\n \"cart_items\": [\n {\n \"product_id\": 1,\n \"quantity\": 1,\n \"unit_cost\": 200\n }\n ]\n }\n response = Order.new(cart_items).apply_promocode('BOX8LOVE', 7)\n assert_equal(false, response[:valid])\n assert_equal(Coupon::APPLICABLE_RESPONSE[:not_applicable_on_outlet], response[:message])\n assert_equal(0.0, response[:discount])\n assert_equal(0.0, response[:cashback])\n end",
"def perform(options = {})\n order = options[:order]\n return unless eligible? order\n\n action_taken = false\n promotion_action_line_items.each do |item|\n current_quantity = order.quantity_of(item.variant)\n next unless current_quantity < item.quantity && item_available?(item)\n\n line_item = Spree::Dependencies.cart_add_item_service.constantize.call(order: order,\n variant: item.variant,\n quantity: item.quantity - current_quantity).value\n action_taken = true if line_item.try(:valid?)\n end\n action_taken\n end",
"def apply_coupon_code\n # find promotion code\n coupon_code = params[:coupon_code].to_s.gsub(/\\s+/, '').downcase\n promotion_code = Spree::PromotionCode.find_by value: coupon_code\n\n unless promotion_code\n return flow_render_coupon_error('Coupon code not found')\n end\n\n promotion = Spree::Promotion.find promotion_code.promotion_id\n experience_key = @order.flow_order.dig('experience', 'key')\n forbiden_keys = promotion.flow_data.dig('filter', 'experience') || []\n\n if experience_key.present? && !forbiden_keys.include?(experience_key)\n return flow_render_coupon_error('Promotion is not available in current country')\n end\n\n # authorize! :update, @order, order_token\n # all good, apply coupon to Solidus as Flow is not present\n\n @order.coupon_code = params[:coupon_code]\n @handler = Spree::PromotionHandler::Coupon.new(@order).apply\n\n if @handler.successful?\n render \"spree/api/promotions/handler\", status: 200\n else\n logger.error(\"apply_coupon_code_error=#{@handler.error.inspect}\")\n render \"spree/api/promotions/handler\", status: 422\n end\n end",
"def can_be_added?(order)\n eligible?(order) &&\n (can_combine?(order) || order.promotion_credits.empty?) &&\n Credit.count(:conditions => {\n :adjustment_source_id => self.id,\n :adjustment_source_type => self.class.name,\n :order_id => order.id\n }) == 0\n end",
"def refresh_promo_code\n if ! self.order.destroyed? && ! self.gift?\n order_item = self.order.reload\n order_item.apply_promo_code(order_item.promo_code) if order_item.promo_code.present?\n end\n rescue Shoppe::Errors::InvalidPromoCode\n order_item.clear_promo_code unless order_item.destroyed?\n end",
"def buy_promotion(promo)\n page = _get_page promo[:buy_page]\n _page_indicates_success? page\n end",
"def perform(options = {})\n order = options[:order]\n return if order.promotion_credit_exists?(self.promotion)\n\n self.create_adjustment(\"#{I18n.t(:promotion)} (#{promotion.name})\", order, order)\n end",
"def check_for_coupon\n coupon = self.orders.where(state: \"paused\").first.coupon\n valid_coupon = Coupon.is_valid?(coupon.coupon_code) if coupon.present?\n valid_coupon.present? ? valid_coupon.coupon_code : nil\n end",
"def activate(payload)\n return unless order_activatable? payload[:order]\n\n # Regular style promotion with single code per promo\n if code.present?\n event_code = payload[:coupon_code].to_s.strip.downcase\n return unless event_code == self.code.to_s.strip.downcase\n end\n \n # Regular content based promo\n if path.present?\n return unless path == payload[:path]\n end\n \n # Groupon style promo - check if order.coupon_code is associated with this promotion and has not already been used.\n if payload[:event_name] == 'spree.checkout.groupon_code_added'\n return unless groupon_codes.unused.where(:code => payload[:coupon_code])\n # There may be more than one groupon campaign active at any one time, and there mau be more than one activator selected, so guard against that\n return unless self.id == payload[:promotion_id].to_i\n end\nRails.logger.warn(\"++++++++ #{self.id}, #{payload[:promotion_id]}, #{self.id == payload[:promotion_id].to_i}\")\n actions.each do |action|\n action.perform(payload)\n end\n \n # For groupon style codes - mark them as used after the action have been applied\n if payload[:event_name] == 'spree.checkout.groupon_code_added'\n begin\n groupon_code = Spree::GrouponCode.where(:code => payload[:coupon_code]).first\n groupon_code.update_attributes(:order_id => payload[:order].id)\n rescue\n raise \"Can't update groupon code - please check\"\n end\n end\n end",
"def apply!(order)\n result = check(order)\n if result.successful?\n order.applied_promotions.build(:promotion => self)\n result.effects = effects.map {|e| e.apply!(order, result.conditions)}\n end\n\n result\n end",
"def compute_on_promotion?\n @compute_on_promotion ||= calculable.respond_to?(:promotion)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks to see if there is a promotion code set against this order. If it is nil, then code promotions are pending i.e. haven't been checked yet. | def code_promotion_unchecked?
promo_code.blank?
end | [
"def has_promotion_code?\n begin\n return (!order_payload[:custom_fields][:netsuite_custbody][\"coupon_code\"].blank? or !sales_order.promo_code.internal_id.blank?)\n rescue\n return false\n end\n end",
"def code_promotion_failed?\n !promo_code.blank? and promotion_results.code_based.failed?\n end",
"def get_eligible_codes\n conjunction_false_promotion_applied = false\n code_promotion_applied = false\n using_promotions_ids = @using_promotions.pluck(:promotion_id)\n\n # if a promotion with conjunction=false applied, no other Code promotions eligible\n if Promotion.where(id: using_promotions_ids, conjunction: false).any?\n conjunction_false_promotion_applied = true\n # if any promotion applied, no Code promotions with conjuction=false eligible\n elsif Promotion.where(id: using_promotions_ids, type: 'Code').any?\n code_promotion_applied = true\n end\n\n if !conjunction_false_promotion_applied\n if code_promotion_applied\n @codes = Code.where(\"id NOT IN (#{using_promotions_ids.join(',')}) AND conjunction = ?\", true)\n else\n @codes = Code.where(\"id NOT IN (#{using_promotions_ids.join(',')})\")\n end\n end\n end",
"def code_promotion_successful?\n promotion_results.code_based.successful?\n end",
"def check_for_coupon\n coupon = self.orders.where(state: \"paused\").first.coupon\n valid_coupon = Coupon.is_valid?(coupon.coupon_code) if coupon.present?\n valid_coupon.present? ? valid_coupon.coupon_code : nil\n end",
"def normal_and_standalone_promo?\n # Guard clause to ensure promo code is definitely a standalone promo\n return unless promotion.stand_alone_promo(self[:promotion_id])\n if !promotions.empty? && !promotions.map(&:standalone).include?(true)\n return errors.add(:promotion_id, '..other promo exists!')\n end\n end",
"def checkout_promotions\n @checkout_promotions ||= Promotion.active.code_based\n end",
"def compute_on_promotion?\n @compute_on_promotion ||= calculable.respond_to?(:promotion)\n end",
"def pending_promotions?\n !pending_promotions.empty?\n end",
"def checkout_promotions?\n !checkout_promotions.empty?\n end",
"def standalone_promo?\n errors.add(:promotion_id, \"#{unique} exists!\") if !promotions.empty? && promotions.map(&:standalone).include?(true)\n end",
"def set_has_promotion\n if self.price_group_id_changed? or self.prices.any?(&:changed?)\n if (self.prices + self.price_group_prices).empty?\n self.has_promotion = false\n elsif (self.prices + self.price_group_prices).detect(&:promo_amount)\n self.has_promotion = true\n end\n end\n nil\n end",
"def promotion_credit_exists?(promotion)\n !! adjustments.promotion.reload.detect { |credit| credit.originator.promotion.id == promotion.id }\n end",
"def ready_to_approve?\n status = self.units.map(&:unit_status) & ['condition', 'copyright', 'unapproved']\n return status.empty?\n end",
"def buy_promotion(promo)\n page = _get_page promo[:buy_page]\n _page_indicates_success? page\n end",
"def get_promo_code order\n order ? order[:promo_code] ? order[:promo_code] : nil : nil\n end",
"def refresh_promo_code\n if ! self.order.destroyed? && ! self.gift?\n order_item = self.order.reload\n order_item.apply_promo_code(order_item.promo_code) if order_item.promo_code.present?\n end\n rescue Shoppe::Errors::InvalidPromoCode\n order_item.clear_promo_code unless order_item.destroyed?\n end",
"def get_promotions\n if @purchasing_items.any?\n get_using_promotions\n get_eligible_codes\n get_eligible_quantities\n end\n end",
"def can_be_added?(order)\n eligible?(order) &&\n (can_combine?(order) || order.promotion_credits.empty?) &&\n Credit.count(:conditions => {\n :adjustment_source_id => self.id,\n :adjustment_source_type => self.class.name,\n :order_id => order.id\n }) == 0\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks to see if a promotion code has been entered, promotions have been applied and that the order has failed to qualify for any code based promotions. | def code_promotion_failed?
!promo_code.blank? and promotion_results.code_based.failed?
end | [
"def normal_and_standalone_promo?\n # Guard clause to ensure promo code is definitely a standalone promo\n return unless promotion.stand_alone_promo(self[:promotion_id])\n if !promotions.empty? && !promotions.map(&:standalone).include?(true)\n return errors.add(:promotion_id, '..other promo exists!')\n end\n end",
"def code_promotion_successful?\n promotion_results.code_based.successful?\n end",
"def validate_order_approval\n units_beings_prepared = self.has_units_being_prepared\n if not units_beings_prepared.empty?\n errors[:order_status] << \"cannot be set to approved because units #{units_beings_prepared.map(&:id).join(', ')} are neither approved nor canceled\"\n end\n end",
"def validate_order_approval\n units_beings_prepared = self.has_units_being_prepared\n if not units_beings_prepared.empty?\n errors[:order_status] << \"cannot be set to approved because units #{units_beings_prepared.map(&:id).join(', ')} are neither approved nor canceled\"\n end\n end",
"def code_promotion_unchecked?\n promo_code.blank?\n end",
"def has_promotion_code?\n begin\n return (!order_payload[:custom_fields][:netsuite_custbody][\"coupon_code\"].blank? or !sales_order.promo_code.internal_id.blank?)\n rescue\n return false\n end\n end",
"def is_valid?(options={})\n raise UsedPromocode.new 'promocode.messages.already_used' if self.status == Promo::STATUS[:used]\n raise ExpiredPromocode.new 'promocode.messages.expired' if is_expired?\n raise InvalidPromocode.new 'promocode.messages.invalid' if self.status != Promo::STATUS[:valid]\n\n # Validating use with a specific product associated\n if self.has_product?\n logger.debug \"#------------ Promocode associated with a product\"\n raise InvalidPromocode.new 'promocode.messages.invalid_use' if options[:product_list].nil?\n products = options[:product_list].collect{|p| p.product }\n if self.product && !products.include?(self.product)\n logger.debug \"#--------------- Product associated not found on the list\"\n raise InvalidPromoProduct.new 'promocode.messages.not_valid_for'\n end\n end\n\n # Validating use with when a class of product is associated with the promocode\n # not a specific product (no product_id defined)\n if self.product_id.nil? && !self.product_type.nil?\n logger.debug \"#------------ Promocode associated with a class\"\n raise InvalidPromocode.new 'promocode.messages.invalid_use' if options[:product_list].nil?\n products = options[:product_list].collect{|p| p.product }\n if products.none? { |p| p.class.to_s == self.product_type }\n logger.debug \"#--------------- Class associated not found on the list\"\n raise InvalidPromoProduct.new 'promocode.messages.must_have_course'\n end\n end\n\n # Validating use according to cart value if minimum purchase value is specified\n if !self.minimum_purchase_value.nil?\n raise InsufficientCartValue.new 'promocode.messages.insufficient_cart_value' if options[:purchase_value] < self.minimum_purchase_value\n end\n # Returns the promocode if it's valid\n self\n end",
"def standalone_promo?\n errors.add(:promotion_id, \"#{unique} exists!\") if !promotions.empty? && promotions.map(&:standalone).include?(true)\n end",
"def verify_all_codes_reported(reported_result, pop_key, sup_key, options)\n reported_codes = reported_result[:supplemental_data][pop_key][sup_key]\n required_codes = REQUIRED_CODES[sup_key]\n missing_codes = required_codes - reported_codes.keys\n return if missing_codes.empty?\n\n msg = \"For CMS eligible clinicians and eligible professionals programs, all #{sup_key} codes present in the value set must be reported,\" \\\n 'even if the count is zero. If an eCQM is episode-based, the count will reflect the patient count rather than the episode count.'\n add_error(msg, file_name: options[:file_name])\n @missing_codes[sup_key] = true\n end",
"def verify_all_codes_reported(reported_result, pop_key, sup_key, options)\n reported_codes = reported_result[:supplemental_data][pop_key][sup_key]\n required_codes = REQUIRED_CODES[sup_key]\n missing_codes = required_codes - reported_codes.keys\n return if missing_codes.empty?\n\n msg = \"For CMS eligible clinicians and eligible professionals programs, all #{sup_key} codes present in the value set must be reported,\" \\\n 'even if the count is zero. If an eCQM is episode-based, the count will reflect the patient count rather than the episode count.'\n add_error(msg, file_name: options[:file_name])\n @missing_codes[sup_key] = true\n end",
"def red_pencil_promotion?\n\t\t if valid_date? && valid_discount_amount? && valid_promotion_length?\n\t\t \treturn true\n\t\t else\n\t\t \treturn false\t\t \t\n\t\t end \n\tend",
"def test_cleanup_promotion_order_finished\n # Setup - add promotion and complete order\n setup_new_order_with_items()\n promo = promotions(:fixed_rebate)\n @o.promotion_code = promo.code\n @o.order_status_code = order_status_codes(:ordered_paid_to_ship)\n assert @o.save\n \n # Now expire the promo\n assert promo.update_attributes({\n :start => Date.today - 2.weeks,\n :end => Date.today - 1.week\n })\n promo.reload\n @o.reload\n \n # Update something on the order, like an admin would.\n # Maybe we shipped out the order and changed the status code.\n @o.order_status_code = order_status_codes(:sent_to_fulfillment)\n assert !@o.should_promotion_be_applied?(promo)\n assert @o.save\n \n @o.reload\n \n # Check to see if promotion is still applied (it should be!)\n assert_equal promo, @o.promotion\n assert_kind_of OrderLineItem, @o.promotion_line_item\n end",
"def calculate_eligibility\n if !finalized? && source && promotion?\n source.promotion.eligible?(adjustable, promotion_code: promotion_code)\n else\n eligible?\n end\n end",
"def confirm!\n no_stock_of = self.order_items.select(&:validate_stock_levels)\n unless no_stock_of.empty?\n raise Shoppe::Errors::InsufficientStockToFulfil, :order => self, :out_of_stock_items => no_stock_of\n end\n \n run_callbacks :confirmation do\n # If we have successfully charged the card (i.e. no exception) we can go ahead and mark this\n # order as 'received' which means it can be accepted by staff.\n self.status = 'received'\n self.received_at = Time.now\n self.save!\n\n self.order_items.each(&:confirm!)\n\n # Send an email to the customer\n deliver_received_order_email\n end\n \n # We're all good.\n true\n end",
"def calculate_eligibility\n if !finalized? && source && promotion?\n source.promotion.eligible?(adjustable, promotion_code: promotion_code)\n else\n eligible?\n end\n end",
"def reject_promotion_if_not_enough_items\n promotion = Promotion.find(@basket_item.promotion_id)\n if !promotion.item_id.nil?\n items_count = BasketItem.where( basket_id: @basket_item.basket_id,\n item_id: promotion.item_id)\n .count\n # count pieces of the item for which promotions already applied\n item_promotions_count = 0\n Promotion.where(item_id: promotion.item_id).each do | p |\n item_promotions_count += BasketItem.where(basket_id: @basket_item.basket_id,\n promotion_id: p.id)\n .count * promotion.item_quantity\n end\n\n if items_count - item_promotions_count < promotion.item_quantity\n flash[:danger] = \"Not enough items in the basket to apply the promotion!\"\n @reject = true\n end\n end\n end",
"def test_should_not_create_invalid_promotion\n a_promotion = Promotion.new\n assert !a_promotion.valid?\n assert a_promotion.errors.invalid?(:code)\n assert a_promotion.errors.invalid?(:description)\n # It defaults to 0, so it will never happen.\n # assert a_promotion.errors.invalid?(:discount_type)\n # It defaults to 0.0, so it will never happen.\n # assert a_promotion.errors.invalid?(:discount_amount)\n # A promotion must have a code, a description, a type and an amount.\n assert_equal \"can't be blank\", a_promotion.errors.on(:code)\n assert_equal \"can't be blank\", a_promotion.errors.on(:description)\n\n a_promotion.discount_type = 2\n # If the item_id is empty when discount_type is 2, it cannot be saved.\n assert !a_promotion.valid?\n assert a_promotion.errors.invalid?(:item_id)\n assert_equal \"Please add an item for the 'Buy [n] get 1 free' promotion\", a_promotion.errors.on(:item_id)\n \n a_promotion.code = \"PERCENT_REBATE\"\n assert !a_promotion.valid?\n assert a_promotion.errors.invalid?(:code)\n # A promotion must have an unique code.\n assert_equal \"has already been taken\", a_promotion.errors.on(:code)\n\n assert !a_promotion.save\n end",
"def validate\n !discount_code.nil? && discount.nil? ? raise(InvalidDiscountCode, \"There is no discount with that code\") : true\n end",
"def can_be_added?(order)\n eligible?(order) &&\n (can_combine?(order) || order.promotion_credits.empty?) &&\n Credit.count(:conditions => {\n :adjustment_source_id => self.id,\n :adjustment_source_type => self.class.name,\n :order_id => order.id\n }) == 0\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Attempts to apply promotions to this order. It'll return any promotions it successfully applies. | def apply_promotions!
raise PromotionApplyError unless pending_promotions.empty?
@promotion_results = Promotion.active.apply!(self)
apply_adjustments!
# Convert all the applied promotions into an array of decorated
# promotions.
@pending_promotions = applied_promotions.map {|p| ::Promotions::Decorator.new(p.promotion)}
# If no promotions have been added to the order, they're all new.
# Otherwise generate a new collection which is the difference between the
# existing ones and the new ones.
if previous_promotion_ids.empty?
@new_promotions = pending_promotions
else
@new_promotions = pending_promotions.reject {|p| previous_promotion_ids.include?(p.id)}
@existing_promotions = pending_promotions.select {|p| previous_promotion_ids.include?(p.id)}
end
pending_promotions
end | [
"def apply!(order)\n result = check(order)\n if result.successful?\n order.applied_promotions.build(:promotion => self)\n result.effects = effects.map {|e| e.apply!(order, result.conditions)}\n end\n\n result\n end",
"def apply_promotions!\n transaction do\n adjustments.destroy_all\n order_items.each { |order_item| order_item.adjustments.destroy_all }\n\n billing_group.promotions.active.each do |promotion|\n promotion.apply!(self)\n end\n activated_promotions.live.each do |promotion|\n promotion.apply!(self)\n end\n end\n end",
"def promotions\n @promotions ||= order.promotions\n end",
"def apply!(order)\n promotion_handler.apply!(order, matching_items(order))\n end",
"def get_promotions\n if @purchasing_items.any?\n get_using_promotions\n get_eligible_codes\n get_eligible_quantities\n end\n end",
"def check_promotions\n line_items.each do |item|\n Spree::ItemAdjustments.new(item).calculate_promo_total\n end\n end",
"def apply_payments\n\n @context.no_going_back\n\n @data.apply_details.each do |ad|\n if ad.amount > 0.0\n @data.payment.apply_to_order(@session.user, ad.order, ad.amount, @session)\n end\n\n if ad.force_ship\n ad.order.force_ship(@session)\n end\n end\n\n if @data.payment.used_up\n tidy_up_for_real\n else\n look_for_match\n end\n end",
"def apply_promotional_rules\n @promotional_rules.each do |rule|\n @total = rule.call(@items, @total)\n end\n end",
"def refresh_promotions!\n @promotions = _promotions\n end",
"def process_adjustments\n promo_rules.each do |rule|\n @amount = amount - rule.adjustment(self)\n end\n end",
"def apply!(order, items)\n tax_included = order.includes_tax?\n price_method = tax_included ? :price_with_tax : :price_sans_tax\n subtotal_method = tax_included ? :subtotal_with_tax : :subtotal_sans_tax\n\n items_by_price = flatten(items).sort { |a, b|\n b.send(price_method) <=> a.send(price_method)\n }\n\n items_by_price.each_slice(required_items) do |slice|\n break if slice.size < required_items\n bundle = recombine(slice)\n subtotals = {}\n bundle_total = bundle.map { |item|\n subtotals[item.id] = item.send(subtotal_method)\n }.sum\n difference = items_total - bundle_total\n applied = 0.to_money\n\n bundle.each_with_index do |item, i|\n\n # The last item gets the remainder to avoid rounding errors.\n # Others calculate a portion and add it to applied amounts.\n amount = if i == bundle.count - 1\n difference - applied\n else\n subtotals[item.id] / bundle_total * difference\n end\n applied += amount\n\n # Discount as a Price object to include taxation metadata.\n discount = Price.new(amount, tax_included, item.tax_rate)\n item.adjustments.create(\n source: promotion,\n label: promotion.description,\n amount: item.price_includes_tax? ? discount.with_tax : discount.sans_tax\n )\n end\n end\n end",
"def update_adjustments\n order.adjustments.reload.each { |adjustment| adjustment.update! }\n choose_best_promotion_adjustment\n LineItemDiscount::PromotionPool.new(order).adjust!\n end",
"def apply_coupon\n if @order.update_attributes(object_params)\n\n if @order.coupon_code.present?\n\n if (Spree::Promotion.exists?(:code => @order.coupon_code))\n if (Spree::Promotion.where(:code => @order.coupon_code).last.eligible?(@order) && Spree::Promotion.where(:code => @order.coupon_code).last.order_activatable?(@order))\n fire_event('spree.checkout.coupon_code_added', :coupon_code => @order.coupon_code)\n # If it doesn't exist, raise an error!\n # Giving them another chance to enter a valid coupon code\n @message = \"Coupon applied\"\n else\n # Check why the coupon cannot be applied (at least a few checks)\n promotion = Spree::Promotion.where(:code => @order.coupon_code).last\n if promotion.expired?\n @message = \"The coupon is expired or not yet active.\"\n elsif promotion.usage_limit_exceeded?(@order)\n @message = \"The coupon cannot be applied, it's usage limit has been exceeded.\"\n elsif promotion.created_at.to_i > @order.created_at.to_i\n @message = \"The coupon cannot be applied because it has been created after the order.\"\n else\n @message = \"The coupon cannot be applied to this order.\"\n end\n end\n else\n @message = t(:promotion_not_found)\n end\n end\n # Need to reload the order, otherwise total is not updated\n @order.reload\n respond_with(@order, @message)\n end\n end",
"def promotion_for(item)\n @promotions ||= []\n @promotions.sort_by(&:priority).reverse.detect{|pr| pr.can_apply_to?(item)}\\\n || Promotion.new( NoPromote.new )\n end",
"def perform(options = {})\n order = options[:order]\n return if order.promotion_credit_exists?(self.promotion)\n\n self.create_adjustment(\"#{I18n.t(:promotion)} (#{promotion.name})\", order, order)\n end",
"def update_adjustments\n # separate into adjustments to keep and adjustements to toss\n obsolete_adjustments = adjustments.not_promotions.select{|adjustment| !adjustment.applicable?}\n obsolete_adjustments.each(&:delete)\n \n # We do not want this to process promotion credits because promotion credits are done in a specific order\n self.adjustments.not_promotions.reload.each(&:update!)\n end",
"def checkout_promotions\n @checkout_promotions ||= Promotion.active.code_based\n end",
"def reapply_to_order(user, order, amount, session)\n p = @data_object\n\n # find the original payment\n pays = Pays.for_order_and_payment(order, self)\n\n difference = amount - pays.pys_amount\n\n # adjust the order\n\n order.adjust_payment(user, self, difference)\n\n common_apply_processing(user, order, difference, session)\n\n # adjust the amount applied. Do it in two steps so we don't leave the object\n # inconsistent\n if p.pay_amount_applied + difference > p.pay_amount\n raise \"Too much applied\"\n end\n p.pay_amount_applied += difference\n\n save\n\n if amount.abs < 0.001\n pays.delete\n else\n pays.pys_amount = amount\n pays.save\n end\n\n end",
"def related_promotions\n @related_promotions ||= Promotions::Relevance.to_product(self)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create [num] SHF applications in the given state return the list of SHF applications created | def create_shf_apps_in_state(num, state, create_date: Time.current)
shf_apps = []
num.times do
shf_apps << create(:shf_application, state: state, created_at: create_date,
updated_at: create_date)
end
shf_apps
end | [
"def create_apps_in_states(create_date: Time.current)\n\n NUM_APPS_IN_STATE.each_pair do |state, number|\n create_shf_apps_in_state(number, state, create_date: create_date)\n end\n\n end",
"def get_recent_shf_apps(start_date, end_date)\n\n @recent_shf_apps = ShfApplication.updated_in_date_range(start_date, end_date)\n\n unless @recent_shf_apps.empty?\n ShfApplication.all_states.each do |app_state|\n recent_app_state_counts[app_state] = @recent_shf_apps.where(state: app_state).count\n end\n end\n\n @recent_shf_apps # return this to make testing easier\n end",
"def make_application(user)\n\n if Random.new.rand(1.0) < 0.3\n # set the state to accepted for about 30% of the applications\n state = MA_ACCEPTED_STATE\n else\n # set a random state for the rest of the applications (except accepted and being destroyed)\n states = ShfApplication.aasm.states.map(&:name) -\n [MA_ACCEPTED_STATE, MA_BEING_DESTROYED_STATE]\n\n state = FFaker.fetch_sample(states)\n end\n\n make_n_save_app(user, state)\n\n end",
"def create_applications\n applications = Array.new\n businesses = create_businesses_and_owners\n\n NUM_LOAN_APPLICATIONS.times do |i|\n business = businesses[rand(businesses.size)]\n application = LoanApplication.create(\n application_date: get_random_date(5),\n requested_principle: (50000 + rand(750000)),\n requested_term_months: get_random_terms_length,\n loan_mini_score: rand(),\n business_id: business.id)\n applications << application\n end\n applications\nend",
"def make_predefined_applicants\n new_applicants = []\n app_states_except_being_destroyed = ShfApplication.aasm.states.map(&:name) - [:being_destroyed]\n app_states_except_being_destroyed.each do |application_state|\n app_state_i18n = ShfApplication.human_attribute_name(\"state.#{application_state}\")\n new_applicants.concat(make_predefined_with(lastname: APPLICANT_LNAME,\n firstname: app_state_i18n.capitalize,\n number: PREDEFINED_NUM_APPLICANTS_EACH_APP_STATE)) do |applicant|\n @shf_application_factory.make_n_save_app(applicant, application_state)\n end\n end\n new_applicants\n end",
"def make_special_apps(start, fin, stat_num, custom)\n special_num = 0\n received = 0\n new_stat = 1\n count = fin - 5\n start.upto(fin) do |n|\n (start - 5).upto(fin - 5) do |p|\n if p == count\n new_stat = stat_num\n count -= 1\n special_num += 1\n else\n received += 1\n end\n application = Application.create(\n status: new_stat,\n property_id: p,\n tenant_id: n,\n description: 'special description'\n )\n application.form.attach(io: File.open('app/assets/images/sample.pdf'), filename: 'sample.pdf')\n application.save\n new_stat = 1\n printf(\"#{special_num}/5 #{custom} Applications, #{received}/20 Received Applications \\r\")\n end\n end\n puts \"\\n\"\nend",
"def list\n if @apps.count < 1\n @put.feedback \"No applications found.\\n\"\n exit\n end\n # Iterate through all apps and print\n len = 15\n bigKey = @apps.keys.max { |a, b| a.length <=> b.length }\n len = bigKey.length\n print \"\\nList of applications\\n--------------------\\n\"\n @apps.each {|key, value|\n printf(\"#{@ncl} [#{@gre}%-#{len}s#{@ncl}] - #{@gre}%02d#{@ncl}p | #{@gre}%4d#{@ncl} | DB: #{@gre}%s#{@ncl} | #{@pur}%s%s%s%s%s%s%s#{@ncl} | #{@gre}%s#{@ncl}\\n\",\n key.to_s,\n value['ports'].count,\n value['ports'][0],\n value['adapter'] == \"sqlite3\" ? \"S3\" :\n value['adapter'] == \"postgresql\" ? \"PG\" : \"--\",\n value[\"repository\"] ? \"R\" : \"-\",\n value[\"thin\"] ? \"T\" : \"-\",\n value[\"available\"] ? \"A\" : \"-\",\n value[\"enabled\"] ? \"E\" : \"-\",\n value[\"db\"] ? \"D\" : \"-\",\n value[\"online\"] ? \"O\" : \"-\",\n !value[\"update\"] ? \"U\" : \"-\",\n value['url']\n )\n }\n print \"\\n\"\n end",
"def request_launch_new_instances(num=1)\n out = []\n num.times {out << launch_new_instance!}\n out\n end",
"def request_launch_new_instances(num=1)\n out = []\n num.times {out << launch_new_instance!(options) }\n out\n end",
"def create_initial_state(app)\n raise 'Invalid app' unless SUPPORTED_APPS[app]\n SUPPORTED_APPS[app].initial_state\nend",
"def make_n_save_app(user, state, co_number = get_company_number, acceptance_date: Date.current)\n # Reset instance vars so AR records will be reloaded when run in TEST\n # (rspec DB tests load tasks but there is no \"reload\" available)\n @files_uploaded = nil\n @upload_later = nil\n @email = nil\n\n # Create a basic application and assign some random business categories\n app = make_app(user)\n\n app.companies = [] # We ensure that this association is present\n\n app.state = state\n app.update(when_approved: acceptance_date) if app.accepted?\n\n app.file_delivery_method = get_delivery_method_for_app(state)\n app.file_delivery_selection_date = acceptance_date\n\n # make a full company object (instance) for the membership application\n app.companies << find_or_make_new_company(co_number)\n\n user.shf_application = app\n user.save!\n\n set_membership_packet_sent user\n user\n end",
"def aps_application_names(start = 0, count = 1)\n a = redis.smembers(:aps_applications)\n return a if count == 0\n a[start..(start + count)] || []\n end",
"def update_app_list\n # Differentiate between a null app_nids params and no app_nids params\n return unless params[:organization].key?(:app_nids) && (desired_nids = Array(params[:organization][:app_nids]))\n\n existing_apps = @organization.app_instances.active\n\n existing_apps.each do |app_instance|\n desired_nids.delete(app_instance.app.nid) || app_instance.terminate\n end\n\n desired_nids.each do |nid|\n begin\n @organization.app_instances.create(product: nid)\n rescue => e\n Rails.logger.error { \"#{e.message} #{e.backtrace.join(\"\\n\")}\" }\n end\n\n end\n\n # Force reload\n existing_apps.reload\n end",
"def request_launch_new_instances(num=1)\n out = []\n num.times {out << request_launch_one_instance_at_a_time}\n out\n end",
"def create_vapps context, n, vdcs_pool = nil\n cloud = CloudController.new context\n \n #Get VDCs\n vdcs = (vdcs_pool || cloud.list_virtual_datacenters())\n \n #Create vApps\n vapps = []\n for i in 1..n.to_i\n vapps << cloud.create_virtual_appliance(vdcs[rand(vdcs.length)], \"vapp_#{i}\")\n end\n vapps\n end",
"def ini_programs\n Scent.all[0..3].each do |scent|\n SmellProgram.create!(user: self, scent: scent, status: 1)\n end\n end",
"def create(args)\r\n raise TypeError unless args.kind_of?(Hash)\r\n \r\n valid_keys = %w/\r\n app_name inherit creation_flags cwd environment startup_info\r\n thread_inherit process_inherit\r\n /\r\n\r\n valid_si_keys = %/\r\n startf_flags desktop title x y x_size y_size x_count_chars\r\n y_count_chars fill_attribute sw_flags stdin stdout stderr\r\n /\r\n\r\n # Set some default values\r\n hash = {\r\n 'inherit' => 0,\r\n 'process_inherit' => 0,\r\n 'thread_inherit' => 0,\r\n 'creation_flags' => 0,\r\n 'cwd' => 0\r\n }\r\n env = 0\r\n \r\n # Validate the keys, and convert symbols and case to lowercase strings. \r\n args.each{ |key, val|\r\n key = key.to_s.downcase\r\n unless valid_keys.include?(key)\r\n raise ProcessError, \"invalid key '#{key}'\"\r\n end\r\n \r\n # Convert true to 1 and nil/false to 0.\r\n case val\r\n when true, false\r\n hash[key] = val == false ? 0 : 1\r\n when nil\r\n hash[key] = 0 # Win32API no likey nil\r\n else\r\n hash[key] = val\r\n end\r\n }\r\n \r\n si_hash = {}\r\n \r\n # If the startup_info key is present, validate its subkeys\r\n if hash['startup_info']\r\n hash['startup_info'].each{ |key, val|\r\n key = key.to_s.downcase\r\n unless valid_si_keys.include?(key)\r\n raise ProcessError, \"invalid startup_info key '#{key}'\"\r\n end\r\n si_hash[key] = val\r\n }\r\n end\r\n \r\n # The +app_name+ key is mandatory\r\n unless hash['app_name']\r\n raise ProcessError, 'app_name must be specified'\r\n end\r\n \r\n # The environment string should be passed as a string of ';' separated\r\n # paths.\r\n if hash['environment'] \r\n env = hash['environment'].split(File::PATH_SEPARATOR) << 0.chr\r\n env = [env.join(\"\\0\")].pack('p*').unpack('L').first\r\n end\r\n \r\n startinfo = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]\r\n startinfo = startinfo.pack('LLLLLLLLLLLLSSLLLL')\r\n procinfo = [0,0,0,0].pack('LLLL')\r\n\r\n # Process SECURITY_ATTRIBUTE structure\r\n process_security = 0\r\n if hash['process_inherit?']\r\n process_security = [0,0,0].pack('LLL')\r\n process_security[0,4] = [12].pack('L') # sizeof(SECURITY_ATTRIBUTE)\r\n process_security[8,4] = [hash['process_inherit?']].pack('L')\r\n end\r\n\r\n # Thread SECURITY_ATTRIBUTE structure\r\n thread_security = 0\r\n if hash['thread_security?']\r\n thread_security = [0,0,0].pack('LLL')\r\n thread_security[0,4] = [12].pack('L') # sizeof(SECURITY_ATTRIBUTE)\r\n thread_security[8,4] = [hash['thread_inherit?']].pack('L')\r\n end\r\n \r\n # The bytes not covered here are reserved (null)\r\n unless si_hash.empty?\r\n startinfo[0,4] = [startinfo.size].pack('L')\r\n startinfo[8,4] = [si_hash['desktop']].pack('p*') if si_hash['desktop']\r\n startinfo[12,4] = [si_hash['title']].pack('p*') if si_hash['title']\r\n startinfo[16,4] = [si_hash['x']].pack('L') if si_hash['x']\r\n startinfo[20,4] = [si_hash['y']].pack('L') if si_hash['y']\r\n startinfo[24,4] = [si_hash['x_size']].pack('L') if si_hash['x_size']\r\n startinfo[28,4] = [si_hash['y_size']].pack('L') if si_hash['y_size']\r\n startinfo[32,4] = [si_hash['x_count_chars']].pack('L') if si_hash['x_count_chars']\r\n startinfo[36,4] = [si_hash['y_count_chars']].pack('L') if si_hash['y_count_chars']\r\n startinfo[40,4] = [si_hash['fill_attribute']].pack('L') if si_hash['fill_attribute']\r\n startinfo[44,4] = [si_hash['startf_flags']].pack('L') if si_hash['startf_flags']\r\n startinfo[48,2] = [si_hash['sw_flags']].pack('S') if si_hash['sw_flags']\r\n startinfo[56,4] = [si_hash['stdin']].pack('L') if si_hash['stdin']\r\n startinfo[60,4] = [si_hash['stdout']].pack('L') if 
si_hash['stdout']\r\n startinfo[64,4] = [si_hash['stderr']].pack('L') if si_hash['stderr'] \r\n end\r\n\r\n rv = CreateProcess(\r\n 0, # App name\r\n hash['app_name'], # Command line\r\n process_security, # Process attributes\r\n thread_security, # Thread attributes\r\n hash['inherit'], # Inherit handles?\r\n hash['creation_flags'], # Creation flags\r\n env, # Environment\r\n hash['cwd'], # Working directory\r\n startinfo, # Startup Info\r\n procinfo # Process Info\r\n )\r\n \r\n if rv == 0\r\n raise ProcessError, \"CreateProcess() failed: %\", get_last_error\r\n end\r\n \r\n ProcessInfo.new(\r\n procinfo[0,4].unpack('L').first, # hProcess\r\n procinfo[4,4].unpack('L').first, # hThread\r\n procinfo[8,4].unpack('L').first, # hProcessId\r\n procinfo[12,4].unpack('L').first # hThreadId\r\n )\r\n end",
"def list_apps\n execute_command \"cf apps\"\n end",
"def apps(compartment_name:)\n fn_management_client\n .list_applications(\n compartment_ocid(compartment_name: compartment_name),\n limit: 50\n ).collect(&:data).flatten\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create applications in all states and set the created_at and updated_at dates to create_date; default: create_date = Time.zone.now | def create_apps_in_states(create_date: Time.current)
NUM_APPS_IN_STATE.each_pair do |state, number|
create_shf_apps_in_state(number, state, create_date: create_date)
end
end | [
"def create_shf_apps_in_state(num, state, create_date: Time.current)\n shf_apps = []\n\n num.times do\n shf_apps << create(:shf_application, state: state, created_at: create_date,\n updated_at: create_date)\n end\n\n shf_apps\n end",
"def apply_create_timestamp\n created_at = Time.now\n updated_at = Time.now\n end",
"def create\n @date_app = DateApp.new(date_app_params)\n\n respond_to do |format|\n if @date_app.save\n format.html { redirect_to @date_app, notice: 'Date app was successfully created.' }\n format.json { render :show, status: :created, location: @date_app }\n else\n format.html { render :new }\n format.json { render json: @date_app.errors, status: :unprocessable_entity }\n end\n end\n end",
"def make_predefined_applicants\n new_applicants = []\n app_states_except_being_destroyed = ShfApplication.aasm.states.map(&:name) - [:being_destroyed]\n app_states_except_being_destroyed.each do |application_state|\n app_state_i18n = ShfApplication.human_attribute_name(\"state.#{application_state}\")\n new_applicants.concat(make_predefined_with(lastname: APPLICANT_LNAME,\n firstname: app_state_i18n.capitalize,\n number: PREDEFINED_NUM_APPLICANTS_EACH_APP_STATE)) do |applicant|\n @shf_application_factory.make_n_save_app(applicant, application_state)\n end\n end\n new_applicants\n end",
"def create_run_environment\n create_run_dirs\n create_db\n insert_states(default_states)\n end",
"def create\n @application_date = ApplicationDate.new(application_date_params)\n\n respond_to do |format|\n if @application_date.save\n format.html { redirect_to admin_home_path, notice: '时间阶段创建成功' }\n format.json { render action: 'show', status: :created, location: @application_date }\n else\n format.html { render action: 'new' }\n format.json { render json: @application_date.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_everything\n create_users\n create_user_keys\n create_comments\n create_filters\n create_columns\n create_organizations\n create_approvals\n create_whitelists\n create_user_key_columns\n create_user_key_organizations\n end",
"def create\n @driver_application = DriverApplication.new(driver_application_params)\n \n # Always get the current time to save\n @driver_application.created_at = DateTime.current\n\n respond_to do |format|\n if @driver_application.save\n format.html { redirect_to @driver_application, notice: 'Driver application was successfully created.' }\n format.json { render :show, status: :created, location: @driver_application }\n else\n format.html { render :new }\n format.json { render json: @driver_application.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_app\n post_app('name' => app_name)\n end",
"def set_created_at\n self.created_at ||= Date.today if new_record?\n end",
"def created\n\t\t\t# Picked a random lender application and temporary set it's status to pending\n\t\tlender_app \t\t\t= LenderApplication.first\n\t\tlender_app.status \t= 'pending'\n\t\tLenderApplicationsMailer.created(lender_app)\n\tend",
"def create_calendar\n @calendar.create_schedule\n end",
"def create_application\n unless self.has_current_application?\n Application.create_for_student self\n end\n end",
"def create_contexts\n create_stores\n create_employees\n create_assignments\n end",
"def set_created_at_time\n self.created_at = Time.now\n end",
"def create_and_activate\n create\n activate\n end",
"def create\n self[:created] = Time.now.to_s\n save\n end",
"def create_initial_state(app)\n raise 'Invalid app' unless SUPPORTED_APPS[app]\n SUPPORTED_APPS[app].initial_state\nend",
"def create_timestamp\n self.created_at = Time.now\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
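The helper in the preceding entry fans a per-state count hash out into repeated factory calls. Below is a minimal, self-contained Ruby sketch of that fan-out; the state names and counts in NUM_APPS_IN_STATE are made up, and a plain-hash stand-in replaces the FactoryBot-based create_shf_apps_in_state helper, so this is illustrative only, not the suite's real implementation.

# Assumed counts per state -- the real NUM_APPS_IN_STATE lives in the test suite.
NUM_APPS_IN_STATE = { new: 3, under_review: 2, accepted: 2, rejected: 1 }

# Plain-hash stand-in for the FactoryBot-backed create_shf_apps_in_state helper.
def create_shf_apps_in_state(num, state, create_date: Time.now)
  Array.new(num) { { state: state, created_at: create_date, updated_at: create_date } }
end

def create_apps_in_states(create_date: Time.now)
  NUM_APPS_IN_STATE.flat_map do |state, number|
    create_shf_apps_in_state(number, state, create_date: create_date)
  end
end

apps = create_apps_in_states(create_date: Time.utc(2020, 1, 1))
p apps.group_by { |a| a[:state] }.transform_values(&:size)
# => {:new=>3, :under_review=>2, :accepted=>2, :rejected=>1}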
add an uploaded file to the SHF application | def add_uploaded_file(shf_app)
shf_app.uploaded_files << create(:uploaded_file, actual_file: File.open(UPLOAD_PNG_FILE))
end | [
"def add(file_path)\n @store.upload(file_path)\n end",
"def add_file(p0) end",
"def add_file(file_path)\n Resource.client.add_file(self, file_path)\n end",
"def upload\n end",
"def add_file(file_path)\n Dropio::Client.instance.add_file(self, file_path)\n end",
"def append_file(key, file)\n filename = file.path\n content_type = MIME::Types.type_for(filename).first\n\n @params << [ key, UploadIO.new(file, content_type, filename) ]\n end",
"def add_from_file\n add_image(params[:listing_id], params[:listing_image], nil)\n end",
"def add_file(file, release_id, package_id, processor=nil)\n page = '/frs/admin/editrelease.php'\n\n userfile = open file, 'rb'\n\n type_id = userfile.path[%r|\\.[^\\./]+$|]\n type_id = FILETYPES[type_id]\n processor_id = PROCESSORS[processor.downcase]\n\n form = {\n \"step2\" => '1',\n \"group_id\" => group_id,\n \"package_id\" => package_id,\n \"release_id\" => release_id,\n \"userfile\" => userfile,\n \"type_id\" => type_id,\n \"processor_id\" => processor_id,\n \"submit\" => \"Add This File\"\n }\n\n boundary = Array::new(8){ \"%2.2d\" % rand(42) }.join('__')\n boundary = \"multipart/form-data; boundary=___#{ boundary }___\"\n\n http_post(page, form, 'content-type' => boundary)\n end",
"def add_file(drop, file_path)\n token = get_default_token(drop)\n asset = nil\n \n File.open(file_path, 'r') do |file|\n uri = URI.parse(Dropio.upload_url)\n req = Net::HTTP::Post.new(uri.path, DEFAULT_HEADER)\n form = create_form( { :drop_name => drop.name, :token => token , :file => file } )\n req.multipart_params = form \n complete_request(req, uri.host) { |body| asset = Mapper.map_assets(drop, body) }\n end\n \n asset\n end",
"def add_file name, file, mime, content\n @params << \n \"Content-Disposition: form-data; name=\\\"#{name}\\\"; filename=\\\"#{file}\\\"\\n\" +\n \"Content-Transfer-Encoding: binary\\n\" +\n \"Content-Type: #{mime}\\n\" + \n \"\\n\" + \n \"#{content}\"\n end",
"def upload\n\t\tfull_path = \"#{Dir.pwd}/uploads/#{params[:file].original_filename}\" \n\t\t`cp #{params[:file].path} #{full_path}` \n\t\t`chmod 777 #{full_path}` # file is owned by root because we are running on port 80.\t\n\t\tSound.create({\n\t\t\t:path => full_path,\n\t\t\t:label => params[:label],\n\t\t\t:filename => params[:file].original_filename\t\n\t\t})\n\t\tredirect_to '/' \n\tend",
"def upload(filename, args={})\n args['index'] = @name\n args['name'] = filename\n path = 'data/inputs/oneshot'\n @service.context.post(path, args)\n end",
"def wf_upload_file(*event_args)\n __debug_items(binding)\n opt = event_args.extract_options!.presence || event_args.first || {}\n opt[:meth] ||= calling_method\n\n # The return from Shrine will be the results of the workflow step.\n stat, _hdrs, body = self.results = upload_file(**opt)\n\n # Update status arrays accordingly.\n data = nil\n if stat.nil?\n self.failures << 'missing env data'\n elsif stat != 200\n self.failures << 'invalid file'\n elsif !record\n self.succeeded << 'no record' # TODO: should this be a failure?\n elsif (data = json_parse(body.first)&.except(:emma_data)&.to_json)\n self.succeeded << record.id\n else\n self.failures << 'invalid file_data'\n end\n\n # Ensure that the record is updated now instead of waiting for the file\n # data to be returned when the form is submitted in case the submission is\n # canceled and the uploaded file needs to be removed from storage.\n record.update_column(record.file_data_column, data) if data\n end",
"def add_file_to_submission(ont, submission)\n filename, tmpfile = file_from_request\n if tmpfile\n if filename.nil?\n error 400, \"Failure to resolve ontology filename from upload file.\"\n end\n # Copy tmpfile to appropriate location\n ont.bring(:acronym) if ont.bring?(:acronym)\n # Ensure the ontology acronym is available\n if ont.acronym.nil?\n error 500, \"Failure to resolve ontology acronym\"\n end\n file_location = OntologySubmission.copy_file_repository(ont.acronym, submission.submissionId, tmpfile, filename)\n submission.uploadFilePath = file_location\n end\n return filename, tmpfile\n end",
"def upload\n secure_silence_logs do\n return bad_request unless params[:file] && params[:title] && current_account\n is_file = params[:file].respond_to?(:path)\n if !is_file && !(URI.parse(params[:file]) rescue nil)\n return bad_request(:error => \"The 'file' parameter must be the contents of a file or a URL.\")\n end\n \n if params[:file_hash] && Document.accessible(current_account, current_organization).exists?(:file_hash=>params[:file_hash])\n return conflict(:error => \"This file is a duplicate of an existing one you have access to.\")\n end\n params[:url] = params[:file] unless is_file\n @response = Document.upload(params, current_account, current_organization).canonical\n render_cross_origin_json\n end\n end",
"def feature_upload(id, file)\r\n dir = \"home_features\"\r\n file.original_filename = id.to_s + (File.extname file.original_filename)\r\n return write(dir, file)\r\n end",
"def upload_file(sid, filename, malicious_file)\r\n # Put our payload in:\r\n # C:\\Program Files\\SolarWinds\\SolarWinds FSMServer\\plugins\\com.lisletech.athena.http.servlets_1.2\\jsp\\\r\n filename = \"../../jsp/#{filename}\"\r\n\r\n mime_data = Rex::MIME::Message.new\r\n mime_data.add_part(malicious_file, 'application/vnd.ms-excel', nil, \"name=\\\"file\\\"; filename=\\\"#{filename}\\\"\")\r\n mime_data.add_part('uploadFile', nil, nil, 'name=\"action\"')\r\n\r\n proto = ssl ? 'https' : 'http'\r\n ref = \"#{proto}://#{rhost}:#{rport}#{normalize_uri(target_uri.path, 'fsm', 'settings-new.jsp')}\"\r\n\r\n send_request_cgi(\r\n 'uri' => normalize_uri(target_uri.path, 'fsm', 'settings-new.jsp'),\r\n 'method' => 'POST',\r\n 'vars_get' => { 'action' => 'uploadFile' },\r\n 'ctype' => \"multipart/form-data; boundary=#{mime_data.bound}\",\r\n 'data' => mime_data.to_s,\r\n 'cookie' => sid,\r\n 'headers' => { 'Referer' => ref }\r\n )\r\n end",
"def upload(filename, args={})\n args['index'] = @name\n args['name'] = filename\n @service.request(:method => :POST,\n :resource => [\"data\", \"inputs\", \"oneshot\"],\n :body => args)\n end",
"def call\n uploaded_file = shrine_class.uploaded_file(storage: upload_storage, id: upload_location)\n uploaded_file.open(**upload_open_options)\n uploaded_file\n rescue Shrine::FileNotFound\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create a member with a membership fee payment and the branding fee paid; return the member | def create_member_with_member_and_branding_payments_expiring(member_pay_expires = Time.zone.today + 1.year,
payment_create_date: Time.zone.now,
membership_status: :current_member)
u = create(:member, last_day: member_pay_expires, membership_status: membership_status)
u.shf_application.update(created_at: payment_create_date, updated_at: payment_create_date)
create(:payment,
user: u,
payment_type: Payment::PAYMENT_TYPE_BRANDING,
status: SUCCESSFUL_PAYMENT,
expire_date: member_pay_expires,
created_at: payment_create_date,
updated_at: payment_create_date)
u.payments.each { |payment| payment.update(created_at: payment_create_date, updated_at: payment_create_date) }
u
end | [
"def create_member_with_payments_on(payment_start_dates = [Date.today])\n # make the member with the first payment start date:\n first_payment_start_date = payment_start_dates.first\n new_member = create(:member, first_day: first_payment_start_date)\n new_member_co = new_member.shf_application.companies.first\n branding_fee_expiry = Company.expire_date_for_start_date(first_payment_start_date)\n\n # The most recent payment is used for calculations, which is based on created_at\n # So we must be sure to set it to the first payment date\n first_membership_payment = new_member.payments.member_fee.first\n first_membership_payment.update(created_at: first_payment_start_date)\n\n travel_to(first_payment_start_date) do\n create(:h_branding_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: first_payment_start_date,\n expire_date: branding_fee_expiry,\n notes: \"branding license starts #{first_payment_start_date.to_date}, expires #{branding_fee_expiry.to_date}\")\n end\n\n # make any other Memberships and payments with the remaining payment start dates given:\n (payment_start_dates - [first_payment_start_date]).each do | payment_start_date |\n travel_to(payment_start_date) do\n new_member.memberships << create(:membership, owner: new_member, first_day: payment_start_date)\n membership_fee_expiry = User.expire_date_for_start_date(payment_start_date)\n create(:membership_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: payment_start_date,\n expire_date: membership_fee_expiry,\n notes: \"membership starts #{payment_start_date.to_date}, expires #{membership_fee_expiry.to_date}\")\n\n branding_fee_expiry = Company.expire_date_for_start_date(payment_start_date)\n create(:h_branding_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: payment_start_date,\n expire_date: branding_fee_expiry,\n notes: \"branding license starts #{payment_start_date.to_date}, expires #{branding_fee_expiry.to_date}\")\n end\n end\n\n new_member\n end",
"def create\n @free_member = FreeMember.new(admin_free_member_params)\n\n respond_to do |format|\n if @free_member.save\n UserNotifier.invite_free_member(@free_member).deliver\n format.html { redirect_to admin_free_members_url,\n notice: I18n.t('flash.free_member.successfully_created') }\n else\n format.html { render :new }\n format.json { render json: @free_member.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @membership_fee = MembershipFee.new\n @membership_fee.member_id=(params[:member_id])\n @member = Member.find(@membership_fee.member_id)\n @membership_fee.payment_method=@member.payment_method\n @membership_fee.fee=@member.beitrag\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @membership_fee }\n end\n end",
"def make_predefined_current_members\n make_members_paid_through(Date.current + 1.day)\n make_members_paid_through(Date.current + 1.month)\n\n earliest_renew_days = Memberships::MembershipsManager.days_can_renew_early\n make_members_paid_through(Date.current + earliest_renew_days)\n make_members_paid_through(Date.current + earliest_renew_days + 1)\n\n can_renew_term_first_day = Date.current + earliest_renew_days - 1\n renew_minus_1_members = make_members_paid_through(can_renew_term_first_day, number: 3)\n\n # add past memberships and payments to this member\n renew_minus_1_member_last = renew_minus_1_members.last\n create_past_memberships_for(renew_minus_1_member_last, 4)\n renew_minus_1_member_last.update(email: \"has-past-#{renew_minus_1_member_last.email}\")\n\n pd_thru_6_months_members = make_members_paid_through(Date.current + 6.months)\n # add past memberships and payments to this member, including pending payments\n last_pd_thru_6_months_member = pd_thru_6_months_members.last\n create_past_memberships_for(last_pd_thru_6_months_member, 6)\n oldest_membership = last_pd_thru_6_months_member.memberships.sort_by(&:first_day).first\n 3.times{ @payments_factory.new_klarna_pending_membership_payment(last_pd_thru_6_months_member,\n oldest_membership.first_day,\n oldest_membership.last_day) }\n\n last_pd_thru_6_months_member.update(email: \"has-past-pending-#{last_pd_thru_6_months_member.email}\")\n\n\n make_members_paid_through(Date.current + 2.years - 1.day, term_first_day: Date.current)\n end",
"def create_membership\n member = Member.new(squad: @squad, user: current_user, membership: 'owner')\n member.save(validate: false)\n end",
"def create\n @membership_fee = MembershipFee.new(params[:membership_fee])\n\n respond_to do |format|\n if @membership_fee.save\n format.html { redirect_to @membership_fee, notice: 'Membership fee was successfully created.' }\n format.json { render json: @membership_fee, status: :created, location: @membership_fee }\n else\n format.html { render action: \"new\" }\n format.json { render json: @membership_fee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_single_staff_member\n print \"---------------------------------------------------------------\n \\nVery well. You will need to provide some information about the new staff member. \"\n staff_member = StaffMember.new(name: request_non_unique(\"staff member\", \"name\"),\n branch_id: request_branch_id_for(\"staff member\"))\n staff_member.email = request_unique(StaffMember, \"email\", validate_email(staff_member.name))\n staff_member.save\n print \"\\n\\nGreat! #{staff_member.name} is in the system! Would you like to create another staff member (yes/no)? \"\nend",
"def create\n @member_payment = MemberPayment.new(params[:member_payment])\n\n respond_to do |format|\n if @member_payment.save\n format.html { redirect_to member_payments_path, notice: 'Member payment was successfully created.' }\n format.json { render json: @member_payment, status: :created, location: @member_payment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @member_payment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_member(member)\n tm = TripMembership.new\n tm.trip = self\n tm.user = member\n tm.save\n\n tm\n end",
"def create_member(name, total)\n @CONNECTION.execute(\"INSERT INTO 'members' (name, total) VALUES ('#{name}', #{total});\")\n end",
"def create\n if params[:member_payments_ids].present?\n MemberPayment.find(params[:member_payments_ids]).each do |mp|\n @cash.cash_member_payments.create!(\n cash_member_payment_params.merge(member_payment: mp, user: current_user)\n )\n end\n else\n @cash_member_payment = @cash.cash_member_payments.new(cash_member_payment_params)\n @cash_member_payment.errors.add :base, t('cash_member_payments.errors.select_payments')\n @member_payments = @cash_member_payment.customer.member_payments.pendings\n end\n\n render 'action'\n end",
"def create_membership_request\n member = Member.new(squad: @squad, user: current_user, membership: 'request')\n member.save(validate: false)\n end",
"def create\n @conference_day = nil\n if params[:payment] and params[:payment][:single_day]\n @conference_day = Schedule.conference_day(params[:payment][:single_day].to_i)\n end\n @fee = Fee.new(current_user, single_day: @conference_day)\n\n @payment = current_user.payments.new(amount: @fee.price_to_pay, single_day: @conference_day)\n @payment.save\n if @payment.error_code\n render :error\n else\n redirect_to @payment.redirect_url\n end\n end",
"def make_membershipcard(user, membership, stripe_customer=nil)\n membershipcard = Membershipcard.where(\n user_id: user.id, \n epicenter_id: self.id, \n ).first_or_create\n membershipcard.membership_id = membership.id\n if stripe_customer\n membershipcard.payment_id = stripe_customer.id\n end\n membershipcard.save\n return membershipcard\n end",
"def make_member(user)\n member_access = self.get_access_point('member')\n self.make_tshirt( user, member_access )\n self.give_fruittree_to( user )\n self.give_fruitbag_to( user )\n self.save\n end",
"def add_fee(factory, claim)\n claim.fees << build(factory, claim:)\n end",
"def create\n @member = @organization.members.build(member_params)\n\n respond_to do |format|\n if @member.save\n logger.info(\"User #{current_user.email} created Member '#{@member.first_name} #{@member.last_name}' on #{@member.updated_at}\")\n format.html { redirect_to organization_path(@organization), notice: 'Member was successfully created.' }\n #format.json { render :show, status: :created, location: @member }\n else\n format.html { render :new }\n format.json { render json: @member.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_fee(options = {})\n post(:fees, fees: [options]).pop\n end",
"def create_finance_fee(collection, student)\n last_receipt_no = FinanceFee.last.receipt_no if FinanceFee.last\n fee = FinanceFee.new\n fee.finance_fee_collection_id = collection\n fee.student_id = student\n fee.is_paid = false\n if last_receipt_no.nil?\n fee.receipt_no = 001\n else\n fee.receipt_no = last_receipt_no.next\n end\n fee.save\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create a paid-up member with a given company number and make a payment with the given expire date. | def create_co_and_payment(company_number, payment_exp_date, member_pay_expires: Time.zone.today + 1.year, payment_create_date: Time.zone.now)
u = create(:member_with_membership_app, company_number: company_number)
u.shf_application.update(created_at: payment_create_date, updated_at: payment_create_date)
co = u.shf_application.companies.first
create(:payment,
user: u,
payment_type: Payment::PAYMENT_TYPE_MEMBER,
status: SUCCESSFUL_PAYMENT,
expire_date: member_pay_expires,
created_at: payment_create_date,
updated_at: payment_create_date)
branding_payment = create(:payment,
user: u,
payment_type: Payment::PAYMENT_TYPE_BRANDING,
status: SUCCESSFUL_PAYMENT,
expire_date: payment_exp_date,
created_at: payment_create_date,
updated_at: payment_create_date)
co.payments << branding_payment
co
end | [
"def create_member_with_member_and_branding_payments_expiring(member_pay_expires = Time.zone.today + 1.year,\n payment_create_date: Time.zone.now,\n membership_status: :current_member)\n u = create(:member, last_day: member_pay_expires, membership_status: membership_status)\n u.shf_application.update(created_at: payment_create_date, updated_at: payment_create_date)\n\n create(:payment,\n user: u,\n payment_type: Payment::PAYMENT_TYPE_BRANDING,\n status: SUCCESSFUL_PAYMENT,\n expire_date: member_pay_expires,\n created_at: payment_create_date,\n updated_at: payment_create_date)\n u.payments.each { |payment| payment.update(created_at: payment_create_date, updated_at: payment_create_date) }\n u\n end",
"def create_member_with_payments_on(payment_start_dates = [Date.today])\n # make the member with the first payment start date:\n first_payment_start_date = payment_start_dates.first\n new_member = create(:member, first_day: first_payment_start_date)\n new_member_co = new_member.shf_application.companies.first\n branding_fee_expiry = Company.expire_date_for_start_date(first_payment_start_date)\n\n # The most recent payment is used for calculations, which is based on created_at\n # So we must be sure to set it to the first payment date\n first_membership_payment = new_member.payments.member_fee.first\n first_membership_payment.update(created_at: first_payment_start_date)\n\n travel_to(first_payment_start_date) do\n create(:h_branding_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: first_payment_start_date,\n expire_date: branding_fee_expiry,\n notes: \"branding license starts #{first_payment_start_date.to_date}, expires #{branding_fee_expiry.to_date}\")\n end\n\n # make any other Memberships and payments with the remaining payment start dates given:\n (payment_start_dates - [first_payment_start_date]).each do | payment_start_date |\n travel_to(payment_start_date) do\n new_member.memberships << create(:membership, owner: new_member, first_day: payment_start_date)\n membership_fee_expiry = User.expire_date_for_start_date(payment_start_date)\n create(:membership_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: payment_start_date,\n expire_date: membership_fee_expiry,\n notes: \"membership starts #{payment_start_date.to_date}, expires #{membership_fee_expiry.to_date}\")\n\n branding_fee_expiry = Company.expire_date_for_start_date(payment_start_date)\n create(:h_branding_fee_payment,\n :successful,\n user: new_member,\n company: new_member_co,\n start_date: payment_start_date,\n expire_date: branding_fee_expiry,\n notes: \"branding license starts #{payment_start_date.to_date}, expires #{branding_fee_expiry.to_date}\")\n end\n end\n\n new_member\n end",
"def make_payment amount, day = 0\n\t\tif (amount <= principal)\n\t\t\t@available_credit += amount\n\t\t\t@principal -= amount\n\t\t\trecord_transaction(-amount, day)\n\t\tend\n\tend",
"def payment\n \tcurrent_user\n \t@owner = Owner.find(@current_user.resource_id)\n \t@plan = SignupPlan.find_by_id(params[:id])\n \t@current_user.resource.signup_plan = @plan\n \t@invoice = Invoice.new\n \t@invoice.signup_plan = @plan\n \t@invoice.user = @current_user\n \t@invoice.amount = @plan.price\n \t@invoice.status = \"Payment due\"\n \t@invoice.save!\n end",
"def create_account\n set_user\n set_payer\n set_user_sport\n save_account\n end",
"def sign_up(gym, cost)\n Membership.new(self, gym, cost)\n\n end",
"def create\n @company_member = User.new(company_member_params)\n @company_member.role = \"serviceprovider\"\n respond_to do |format|\n if @company_member.save\n unless @company_member.company.nil?\n @company_member.company.status = \"pending\"\n @company_member.company.save\n end\n format.html { redirect_to companymembers_path, notice: 'Successfully created.' }\n else\n format.html { render :new }\n format.json { render json: @company_member.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_new(billing_period, accountinvoicetype = \"invoice\", accountinvoicestatus = \"new\")\n account = billing_period.account\n today = Date.today\n ActsAsTenant.with_tenant(account) do\n if account.name\n rec1 = account.name\n rec2 = account.name_addon\n else\n rec1 = \"#{account.firstname} #{account.lastname}\"\n rec2 = nil\n end\n accountinvoice = Accountinvoice.new({\n invoice_date: today,\n email_to: account.email_billing_address,\n accountinvoicetype: Accountinvoicetype.find_by(key: accountinvoicetype),\n invoice_number: get_next_number('invoice'),\n recipient_name_1: rec1,\n recipient_name_2: rec2,\n street: account.street,\n street_no: account.streetno,\n zip: account.zip,\n city: account.city,\n country: account.country,\n accountinvoicestatus: Accountinvoicestatus.find_by(key: accountinvoicestatus),\n paymentmethod: billing_period.paymentmethod,\n paymentdate: billing_period.paymentdate\n })\n accountinvoice.additional_text = I18n.t(:reverse_charge_infotext) if billing_period.account.is_eu_ext?\n accountinvoice.save!\n billing_period.accountinvoice = accountinvoice\n billing_period.save!\n\n Accountinvoiceposition.new.create_new(accountinvoice, billing_period)\n accountinvoice\n end\n end",
"def buy_60_credits\n x_tran_key=\"9Pk82B77ab9nh7Nq\"\n @order=Purchase.new\n baseprice = 75\n amount = baseprice\n amount = amount.to_s() + \".00\"\n @amount = amount\n @order.amount = amount\n @order.credits = 60\n @order.number = number\n @order.state = \"pending\"\n loginid=\"8Ty3NvP8z7z\"\n fp = InsertFP(loginid, x_tran_key, @amount, \"0001\")\n id = session[:company_id]\n @company = Company.find(id)\n @order.company = id\n @order.save\n end",
"def schedule_payment! amount, due_at = Time.now\n scheduled_payments.create :amount => amount, :due_at => due_at\n end",
"def buy_20_credits\n x_tran_key=\"9Pk82B77ab9nh7Nq\"\n @order=Purchase.new\n baseprice = 50\n amount = baseprice\t\n amount = amount.to_s() + \".00\"\n @amount = amount\n @order.amount = amount\n @order.credits = 20\n @order.number = number\n @order.state = \"pending\"\n loginid=\"8Ty3NvP8z7z\"\n fp = InsertFP(loginid, x_tran_key, @amount, \"0001\")\n id = session[:company_id]\n @company = Company.find(id)\n @order.company = id\n @order.save\n end",
"def create_wepay_account\n \t\tif self.has_wepay_access_token? && !self.has_wepay_account?\n \t\tparams = { :name => self.company_name, :description => self.company_description }\t\t\t\n \t\tresponse = Shopketti::Application::WEPAY.call(\"/account/create\", self.wepay_access_token, params)\n\n \tif response[\"account_id\"]\n \tself.wepay_account_id = response[\"account_id\"]\n \treturn self.save\n \telse\n \traise \"Error - \" + response[\"error_description\"]\n \tend\n\n \tend\t\t\n \traise \"Error - cannot create WePay account\"\n end",
"def create_company\n @company = Company.new(company_name: params[:company_name], key: generate_key)\n user = registered_user\n user.company = @company\n user.role = 'company'\n @company.users << user\n\n if @company.save && user.save\n UserNotifier.send_signup_email(user).deliver\n authenticate!\n else\n resource_not_saved\n @company.destroy\n user.destroy\n render 'new_company'\n end\n end",
"def create_first_payment\n make_payment\n end",
"def buy_5_credits\n x_tran_key=\"9Pk82B77ab9nh7Nq\"\n @order=Purchase.new\n baseprice = 25\n amount = baseprice\n amount = amount.to_s() + \".00\"\n @amount = amount\n @order.amount = amount\n @order.credits = 5\n @order.number = number\n @order.state = \"pending\"\n loginid=\"8Ty3NvP8z7z\"\n fp = InsertFP(loginid, x_tran_key, @amount, \"0001\")\n id = session[:company_id]\n @company = Company.find(id)\n @order.company = id\n @order.save\n end",
"def purchase(amount, description, payment_id, coupon_code = self.coupon_code)\n raise \"No Payment Method\" if self.customer_id.blank? && self.stripe_token.blank? && self.paypal_token.blank?\n\n coupon = CouponCode.find_by_code(coupon_code)\n unless coupon.blank?\n amount = process_coupon(coupon, amount)\n end\n\n #Calculate HST tax for Canadian registrants\n if self.country == 'CA'\n hst_tax = amount * 0.13\n amount += hst_tax\n end\n\n charge = self.payments.create(amount: amount, comment: description, stripe_token: payment_id, paypal_token: paypal_token)\n charge.process\n charge.save\n self.save\n\n #kick off the receipt email\n unless paypal_token.present? \n \tStripePayment.business_account_registration_receipt(self).deliver\n end\n end",
"def create_payment(campaign_id, data)\n options = @options\n data.merge!({\n :user_fee_amount => 100,\n :admin_fee_amount => 40\n })\n options.merge!({ \n :body => { \n :payment => data\n }.to_json\n })\n response = self.class.post(\"/campaigns/\"+campaign_id+\"/payments\", options)\n response[\"payment\"] || response\n end",
"def create_payment_profile_if_required\n if self.payment_profile.blank?\n project = self.invoice.project\n \n new_payment_profile = project.payment_profiles.new( \n :name => self.name, \n :expected_payment_date => self.invoice.invoice_date, \n :generate_cost_from_time => false, \n :expected_cost => (self.amount * self.quantity))\n \n \n new_payment_profile.save!\n self.payment_profile_id = new_payment_profile.id\n self.save!\n end\n end",
"def new_bank_donor\n donor = Donor.find_by_any_email(params[\"email\"]).first || Donor.new(\n user_email: params[\"email\"],\n role: \"subscriber\")\n donor.save\n\n donor.paymentid = \"donor#{donor.id}\"\n donor.save\n\n repeating = params[\"repeating\"] == \"1\"\n DonorMailer.bank_donation_instructions(donor, repeating).deliver\n\n render text: donor.paymentid\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /event_registrations/1 DELETE /event_registrations/1.json | def destroy
@registration = Registration.find(params[:id])
@event = @registration.event
@registration.destroy
respond_to do |format|
format.html { redirect_to manage_event_path(@event), notice: 'Event registration was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @event_registration.destroy\n respond_to do |format|\n format.html { redirect_to event_registrations_path}\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_registration = EventRegistration.find(params[:id])\n @event_registration.destroy\n\n respond_to do |format|\n format.html { redirect_to event_registrations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_registration.destroy\n respond_to do |format|\n format.html { redirect_to event_registrations_url, notice: 'Event registration was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_register.destroy\n respond_to do |format|\n format.html { redirect_to event_registers_url, notice: 'Event register was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n \n @event_signup = EventSignup.find(params[:id])\n event_id = @event_signup.event_id\n @event_signup.destroy\n\n respond_to do |format|\n format.html { redirect_to event_url(event_id), :notice => 'Your request has been deleted.' }\n format.json { head :ok }\n end\n end",
"def destroy\n @event_registration_status = EventRegistrationStatus.find(params[:id])\n @event_registration_status.destroy\n\n head :no_content\n end",
"def destroy\n @registration_table.destroy\n respond_to do |format|\n format.html { redirect_to [@event], notice: 'RSVP was removed from table.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cim_reg_event = cim_reg_event.find(params[:id])\n @cim_reg_event.destroy\n\n unless @cim_reg_event.errors.empty?\n flash[:notice] = \"WARNING: Couldn't delete cim_reg_event because:\"\n @cim_reg_event.errors.full_messages.each { |m| flash[:notice] << \"<br/>\" << m }\n end\n\n respond_to do |format|\n format.html { redirect_to(events_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @event_req = EventReq.find(params[:id])\n @event_req.destroy\n\n respond_to do |format|\n format.html { redirect_to event_reqs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notify_event = NotifyEvent.find(params[:id])\n @notify_event.destroy\n\n respond_to do |format|\n format.html { redirect_to notify_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @recurring_event.destroy\n respond_to do |format|\n format.html { redirect_to recurring_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @attend_event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Request successfully rescinded.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @person_event.destroy\n respond_to do |format|\n format.html { redirect_to person_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification_event.destroy\n respond_to do |format|\n format.html { redirect_to notification_events_url, notice: 'Notification event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_reminder.destroy\n respond_to do |format|\n format.html { redirect_to event_reminders_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @member_attending_event_register = MemberAttendingEventRegister.find(params[:id])\n @member_attending_event_register.destroy\n\n respond_to do |format|\n format.html { redirect_to(member_attending_event_registers_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n \t@invite_event.destroy\n \trespond_to do |format|\n \t\tformat.html { redirect_to invite_events_url }\n \t\tformat.json { head :no_content }\n \tend\n \tend",
"def destroy\n @manage_event.destroy\n respond_to do |format|\n format.html { redirect_to manage_events_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Instructions for when license information is missing but the user has specified --disable-api, preventing remote license sources from being used. | def no_remote_instructions(unlicensed)
puts <<-INST
There is no license defined for #{counter(unlicensed)}. You are running with the `--disable-api`
option. If you remove this option, gemterms will attempt to use RubyGems and
other sources for license information.
INST
true
end | [
"def supports_api\n license = License.get\n\n if license and not license.supports_api?\n errors.add :license, \" - this product does not support API access\"\n end\n end",
"def check_license()\n return true\n end",
"def license_evaluation\n response = { message: \"Endpoint deprecated. License metric results are now produced by Foodcritic.\" }\n render json: response,\n status: 410\n end",
"def int_driving_license\n end",
"def opted_out?(api_name)\n if @is_opted_out\n @logger.log(\n LogLevelEnum::INFO,\n 'API_NOT_ENABLED',\n {\n '{file}' => FILE,\n '{api}' => api_name\n }\n )\n end\n @is_opted_out\n end",
"def skip_if_api_down\n unless @data_repo_client.api_available?\n puts '-- skipping due to TDR API being unavailable --' ; skip\n end\n end",
"def show_license_pending\n if object.license&.approval_required\n object.license_status?(current_user, \"pending\")\n else\n false\n end\n end",
"def disable_api!\n self.update_attribute(:api_key, \"\")\n end",
"def autolife_disabled?\n return false\n end",
"def is_evaluation_license\n super\n end",
"def disable_autogrow\n change_autogrow_status_link.click\n autogrow_status_select.select('No')\n submit_autogrow_status_btn.click\n wait_until{ success_messages == \"Autogrow protection disabled.\" }\n end",
"def check_license\n log 'LICENSE not found', 'pFMQMQ' unless check? 'LICENSE*'\n end",
"def disable_auto_api\n @enable_auto_api = false\n end",
"def license_allows_download?(document)\n document[:license_ss] =~ /(Creative Commons|No known restrictions)/\n end",
"def invalid_license\n validate_values('license', :valid_licenses)\n end",
"def detect_license!\n self[\"license\"] = license_key\n self.text = [license_text, *notices].join(\"\\n\" + TEXT_SEPARATOR + \"\\n\").strip\n end",
"def skip_if_api_down\n unless @azul_is_ok\n puts @skip_message; skip\n end\n end",
"def has_license?\n !license.nil?\n end",
"def bypass(&blk); wrap_enabling(false,&blk); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /birds POST /birds.json Add a new bird to the Sanctuary data | def create
@bird = Bird.new(bird_params)
if @bird.save
render json: @bird, status: :created
else
render json: @bird.errors, status: :unprocessable_entity
end
end | [
"def add_bird\n bird_obj = bird_params\n continent = params['continents']\n unless continent.nil?\n continent = continent.split(',').map { |v| v.strip } if continent.is_a?(String)\n if continent.is_a?(Array) && !continent.blank?\n continent = continent[0].split(',').map { |v| v.strip } if continent.size==1\n end\n else\n continent =[]\n end\n bird_obj['continents'] = continent\n bird = ::Bird.new(bird_obj)\n begin\n bird.save!\n bird_obj = ::Bird.data(bird.id)\n render status: HttpCodes::CREATED, json: bird_obj.extend(BirdRepresenter).to_a.as_json\n rescue Mongoid::Errors::Validations\n error = Error.new.extend(ErrorRepresenter)\n error.message = \"#{bird.errors.full_messages.join(\";\")}\"\n error.validation_errors = bird.errors.to_hash\n render status: HttpCodes::BAD_REQUEST, json: error.as_json\n rescue Exception => e\n error = Error.new.extend(ErrorRepresenter)\n error.message = \"#{e.class} #{e.message}\\n #{e.backtrace}\"\n render status: HttpCodes::BAD_REQUEST, json: error.as_json\n end\n end",
"def create\n @bird = Bird.new(bird_params)\n if @bird.save\n render json: { bird: @bird }, status: :created\n else\n render json: @bird.errors, status: :bad_request\n end\n end",
"def create\n @bird = Bird.new(bird_params)\n if @bird.save\n render json: @bird, status: :created\n else\n render json: @bird.errors, status: :unprocessable_entity \n end\n end",
"def create\n @bird = Bird.new(bird_params)\n\n respond_to do |format|\n if @bird.save\n format.html { redirect_to admin_birds_path, notice: 'Bird was successfully created.' }\n format.json { render :show, status: :created, location: admin_bird_path(@bird) }\n else\n format.html { render :new }\n format.json { render json: @bird.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n bird = Bird.new(bird_params)\n if bird.save\n msg = MSG['create']['success']\n opt = { bird: bird, status: :created }\n else\n @success = false\n msg = bird.errors.full_messages\n opt = { status: :bad_request }\n end\n common_response(msg, opt)\n end",
"def create\n @songbird = Songbird.new(params[:songbird])\n\n respond_to do |format|\n if @songbird.save\n format.html { redirect_to @songbird, notice: 'Songbird was successfully created.' }\n format.json { render json: @songbird, status: :created, location: @songbird }\n else\n format.html { render action: \"new\" }\n format.json { render json: @songbird.errors, status: :unprocessable_entity }\n end\n end\n end",
"def bird_specification\n @bird = Bird.find(params[:id])\n if @bird\n @bird = JSON.parse(@bird.to_json)\n @bird[\"id\"]=params[:id]\n render json: @bird.except(\"_id\"), status: :ok\n else\n render :nothing => true, status: :not_found\n end\n end",
"def index\n @birds = Bird.all.to_a\n begin\n respond_to do |format|\n format.json { render json: {items: @birds, description: \"List all visible birds in the registry\", additionalProperties: false, title: \"POST /birds [request]\",:status => OK } }\n end\n rescue => e\n render json: ({:status => INTERNAL_SERVER_ERROR})\n end\n end",
"def create\n @ayu_din = AyurvedaDincharya.new(bike_params)\n\n if @ayu_din.save\n render json: @ayu_din, status: :created #serializer: Web::V1::BikeSerializer\n else\n render json: @ayu_din.errors, status: :unprocessable_entity\n end\n end",
"def add_dog (db, breedname, colors, markings, group_id)\n\tdb.execute(\"INSERT INTO dogs (breedname, colors, markings, group_id) VALUES (?,?,?,?)\", [breedname, colors, markings,group_id])\nend",
"def create\n @soundbite = Soundbite.new(soundbite_params)\n\n if @soundbite.save\n render json: @soundbite, status: :created, location: [:v1, @soundbite]\n else\n render json: @soundbite.errors, status: :unprocessable_entity\n end\n end",
"def create\n @boat = Boat.new(boat_params)\n\n if @boat.save\n render json: @boat, status: :created, location: @boat\n else\n render json: @boat.errors, status: :unprocessable_entity\n end\n end",
"def create\n @brain = Brain.new(params[:brain])\n\n respond_to do |format|\n if @brain.save\n format.html { redirect_to @brain, notice: 'Brain was successfully created.' }\n format.json { render json: @brain, status: :created, location: @brain }\n else\n format.html { render action: \"new\" }\n format.json { render json: @brain.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @boat = Boat.new(boat_params)\n @boat.save!\n render :json => @boat.as_json\n end",
"def create\n @borad = Borad.new(params[:borad])\n\n respond_to do |format|\n if @borad.save\n format.html { redirect_to @borad, :notice => 'Borad was successfully created.' }\n format.json { render :json => @borad, :status => :created, :location => @borad }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @borad.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @brend = Brend.new(params[:brend])\n\n respond_to do |format|\n if @brend.save\n format.html { redirect_to @brend, notice: 'Brend was successfully created.' }\n format.json { render json: @brend, status: :created, location: @brend }\n else\n format.html { render action: \"new\" }\n format.json { render json: @brend.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dibb = Dibb.new(params[:dibb])\n\n respond_to do |format|\n if @dibb.save\n format.html { redirect_to @dibb, notice: 'Dibb was successfully created.' }\n format.json { render json: @dibb, status: :created, location: @dibb }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dibb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @bird.update(bird_params)\n render json: @bird, status: :ok, location: @bird\n else\n render json: @bird.errors, status: :unprocessable_entity\n end\n end",
"def create\n @basin = Basin.new(params[:basin])\n\n respond_to do |format|\n if @basin.save\n format.html { redirect_to @basin, notice: 'Basin was successfully created.' }\n format.json { render json: @basin, status: :created, location: @basin }\n else\n format.html { render action: \"new\" }\n format.json { render json: @basin.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
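A hedged client-side sketch for exercising the create action in the preceding entry with Net::HTTP. The host, port, and payload fields (name, species, visible) are assumptions, as is nesting the attributes under a bird key; the controller accepts whatever its bird_params method actually permits.

require 'net/http'
require 'json'
require 'uri'

uri = URI('http://localhost:3000/birds')   # assumed local server
payload = { bird: { name: 'Eurasian magpie', species: 'Pica pica', visible: true } }

response = Net::HTTP.post(uri, payload.to_json, 'Content-Type' => 'application/json')

case response
when Net::HTTPCreated   # 201 -- render json: @bird, status: :created
  puts "created: #{JSON.parse(response.body)}"
else                    # e.g. 422 -- render json: @bird.errors, status: :unprocessable_entity
  puts "rejected (#{response.code}): #{response.body}"
end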
GET /birds/1 GET /birds/1.json Return the specific bird identified by the given bird id | def bird_specification
@bird = Bird.find(params[:id])
if @bird
@bird = JSON.parse(@bird.to_json)
@bird["id"]=params[:id]
render json: @bird.except("_id"), status: :ok
else
render :nothing => true, status: :not_found
end
end | [
"def show\n @bird = Bird.find(params[:id])\n\n render json: @bird, status: :ok\n end",
"def index\n @birds = Bird.all\n respond_to do |format|\n format.json { render json: get_birds_response(@birds), status: :ok } # Method available in birds_helper.rb\n end\n end",
"def birdspotter\n birdspotter = Api::V1::Birdspotter.find(params[:id])\n render json: {\n status: 200,\n message: \"OK\",\n birdspotter: Api::V1::BirdspotterSerializer.new(birdspotter) \n }\n end",
"def index\n @birds = Bird.where(visible: true)\n render json: @birds, status: 200\n end",
"def index\n @birds = Bird.all.to_a\n begin\n respond_to do |format|\n format.json { render json: {items: @birds, description: \"List all visible birds in the registry\", additionalProperties: false, title: \"POST /birds [request]\",:status => OK } }\n end\n rescue => e\n render json: ({:status => INTERNAL_SERVER_ERROR})\n end\n end",
"def show\n @songbird = Songbird.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @songbird }\n end\n end",
"def birds_list\n birds = ::Bird.all\n render status: HttpCodes::OK,json: birds.extend(BirdRepresenter).to_a.as_json unless birds.empty?\n render status: HttpCodes::OK,json: {} if birds.empty?\n end",
"def showdogid\n @photo_galeries = PhotoGalery.where( dog_id: params[:dog_id] )\n respond_to do |format|\n format.json { render json: @photo_galeries, status:200 }\n end\n end",
"def beer(id)\n options = { :bid => id }\n get('/beer_info', options)\n end",
"def get_beer\n beer_id = params['id'];\n response = HTTP.get('http://api.brewerydb.com/v2/beer/' + beer_id,\n :params=> {\n :key => ENV[\"BREWERYDB_BEERRATER_KEY\"],\n :withBreweries => \"y\"\n }\n )\n\n body = response.parse\n\n # check for success\n if body[\"status\"] == \"success\"\n data = body[\"data\"]\n unless data.nil?\n render json: {\n status: 200,\n message: \"#{params['id']} found\",\n data: data}\n else\n render json: {\n status: 200,\n message: \"#{params['id']} not found\",\n data: []\n }\n end\n else\n render json: { status: 401, message: body[\"errorMessage\"]}\n end\n end",
"def show\n @soundbite = Soundbite.find(params[:id])\n\n render json: @soundbite\n end",
"def get_blast(blast_id)\n self.api_get(:blast, {:blast_id => blast_id.to_s})\n end",
"def update\n @bird = Bird.find(params[:id])\n\n respond_to do |format|\n if @bird.update(bird_params)\n format.html { redirect_to admin_birds_path, notice: 'Bird was successfully updated.' }\n format.json { render :show, status: :ok, location: admin_bird_path(@bird) }\n else\n format.html { render :edit }\n format.json { render json: @bird.errors, status: :unprocessable_entity }\n end\n end\n end",
"def pet_species(id)\r\n BnetApi.make_request(\"/wow/pet/species/#{id}\")\r\n end",
"def create\n @bird = Bird.new(bird_params)\n if @bird.save\n render json: { bird: @bird }, status: :created\n else\n render json: @bird.errors, status: :bad_request\n end\n end",
"def show\n @beer = BreweryDB.beer(params[:id]) \n render json: @beer\n end",
"def add_bird\n bird_obj = bird_params\n continent = params['continents']\n unless continent.nil?\n continent = continent.split(',').map { |v| v.strip } if continent.is_a?(String)\n if continent.is_a?(Array) && !continent.blank?\n continent = continent[0].split(',').map { |v| v.strip } if continent.size==1\n end\n else\n continent =[]\n end\n bird_obj['continents'] = continent\n bird = ::Bird.new(bird_obj)\n begin\n bird.save!\n bird_obj = ::Bird.data(bird.id)\n render status: HttpCodes::CREATED, json: bird_obj.extend(BirdRepresenter).to_a.as_json\n rescue Mongoid::Errors::Validations\n error = Error.new.extend(ErrorRepresenter)\n error.message = \"#{bird.errors.full_messages.join(\";\")}\"\n error.validation_errors = bird.errors.to_hash\n render status: HttpCodes::BAD_REQUEST, json: error.as_json\n rescue Exception => e\n error = Error.new.extend(ErrorRepresenter)\n error.message = \"#{e.class} #{e.message}\\n #{e.backtrace}\"\n render status: HttpCodes::BAD_REQUEST, json: error.as_json\n end\n end",
"def get_cocktail_info_from_cocktail_db(cocktail_db_drink_id)\n response = RestClient.get 'https://www.thecocktaildb.com/api/json/v1/1/lookup.php?i=' + cocktail_db_drink_id\n drink_info = JSON.parse(response)[\"drinks\"][0]\nend",
"def remove_bird\n bird = ::Bird.find(params[:id])\n if bird.nil?\n error = Error.new.extend(ErrorRepresenter)\n error.message = \"Bird does not nound\"\n error.validation_errors = \"Invalid Bird Id \"\n render status: HttpCodes::NOT_FOUND, json: error.as_json\n else\n bird.destroy\n render status: HttpCodes::OK ,json: \"OK\".to_json\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
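The bird_specification action in the preceding entry round-trips the record through JSON, re-inserts a flat "id", and drops the Mongo-style "_id" key before rendering. A self-contained sketch of that reshaping on a plain hash (the field values are made up; Hash#except needs Ruby 3.0+ or ActiveSupport):

require 'json'

# Stand-in for a Mongoid document serialized to JSON: "_id" is the key the action hides.
record = { '_id' => { '$oid' => '64a1f0c2e4b0' }, 'name' => 'Eurasian magpie', 'visible' => true }

bird = JSON.parse(record.to_json)   # same round-trip the controller performs
bird['id'] = '64a1f0c2e4b0'         # expose a flat id, as the action does with params[:id]
puts bird.except('_id').to_json
# => {"name":"Eurasian magpie","visible":true,"id":"64a1f0c2e4b0"}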
end ======================================================================================== Problem 2: Golden Ratio The golden ratio is the ratio between consecutive Fibonacci numbers; calculate the golden ratio to the specified precision and validate it with MiniTest unit tests. module Assignment08 | def golden_ratio(precision)
x = fib(99)/fib(98).to_f
x.round(precision)
end | [
"def fibonacci(n)\n golden_ratio = (1 + 5**0.5) / 2\n ((golden_ratio**n + 1) / 5**0.5).to_i\nend",
"def test_gold_total\n\t\tgold_val = 20.67 * @f.get_gold\n\t\tassert_equal gold_val, @f.calculate_gold_worth\n\tend",
"def fib(n)\n golden_ratio = (1 + 5**0.5) / 2\n ((golden_ratio**n + 1) / 5**0.5).to_i\nend",
"def problem_100\n 2.sqrt_frac do |top,bot|\n if top.odd? && bot.odd?\n x,n = top/2+1,bot/2+1\n puts \"#{x} / #{n}\"\n return n if x > 1_000_000_000_000\n end\n end\nend",
"def test_silver_total\n\t\tsilver_val = 1.31 * @f.get_silver\n\t\tassert_equal silver_val, @f.calculate_silver_worth\n\tend",
"def golden_ratio(a, b)\n if a > b\n x = b\n y = a\n else\n x = a\n y = b\n end\n\n ratio = (y / x).round(5)\n puts \"first ratio = #{ratio}\"\n\n if check_ratio(ratio)\n ratio\n else\n check_ratio(y / (x + y)) ? ratio : golden_ratio(y, x + y)\n end\n ratio\nend",
"def test_monetary_sum_gold_greater_one\n\t\tsilver = 1;\n\t\tgold = 5;\n\t\tres = @test_sim.print_monetary_sum(silver,gold)\n\n\t\texpected = silver * 1.31 + gold * 20.67\n\n\t\tassert_equal expected, res\n\n\tend",
"def test_monetary_sum_both_greater_1\n\t\tsilver = 5;\n\t\tgold = 5;\n\t\tres = @test_sim.print_monetary_sum(silver,gold)\n\n\t\texpected = silver * 1.31 + gold * 20.67\n\n\t\tassert_equal expected, res\n\n\tend",
"def fibs(n) ; PHI**n - PHA**n ; end",
"def solve112(target_proportion)\n bouncy_proportion = 0\n total_bouncy = 0\n num = 0\n while bouncy_proportion < target_proportion\n num += 1\n if bouncy?(num)\n total_bouncy += 1\n bouncy_proportion = 1.0*total_bouncy/num\n end\n end\n puts target_proportion, num\nend",
"def calculate_probability(useful_results, reroll_count)\n return 100.0 * useful_results / ( 6 ** reroll_count )\n end",
"def calc_grade\n @sum = 0\n unit_tests.each_pair do |test_name, info|\n if info[:output] and (info[:output].strip.chomp == info[:expected].strip.chomp)\n info[:points] = default_points_per_test unless info[:points]\n @sum += info[:points].to_f\n else\n @sum += (info[:points] = 0.0)\n end\n end\n @sum \n end",
"def test_monetary_sum_silver_greater_1\n\t\tsilver = 5;\n\t\tgold = 1;\n\t\tres = @test_sim.print_monetary_sum(silver,gold)\n\n\t\texpected = silver * 1.31 + gold * 20.67\n\n\t\tassert_equal expected, res\n\n\tend",
"def test_equiprobability\n winners_1, winners_2 = nil, nil\n\n @unique_advertises_count.times do\n winners_1 = Auctioneer.auction(\n creatives: @creatives,\n number_of_winners: @unique_advertises_count\n )\n winners_2 = Auctioneer.auction(\n creatives: @creatives,\n number_of_winners: @unique_advertises_count\n )\n\n break if winners_1 != winners_2\n end\n\n assert { winners_1 != winners_2 }\n end",
"def test_monetary_sum_both_one\n\t\tsilver = 1;\n\t\tgold = 1;\n\t\tres = @test_sim.print_monetary_sum(silver,gold)\n\n\t\texpected = silver * 1.31 + gold * 20.67\n\n\t\tassert_equal expected, res\n\n\tend",
"def test_Arithmetic_Sample03\n assert_equal(4, 2**2)\n assert_equal(0.5, 2.0**-1)\n assert_equal(0.5, 2**-1.0)\n assert_equal(3.0, 27**(1.0/3.0))\n assert_equal(1, 2**0)\n assert_equal(1, 2**(1/4))\n assert_equal(2.0, 16**(1.0/4.0))\n end",
"def test_Math_InstanceMethods_frexp\n\t\tfraction, exponent = Math.frexp(1234)\n\t\tassert_equal([0.6025390625, 11], [fraction, exponent])\n\t\tassert_equal(1234.0, fraction * 2**exponent)\n\tend",
"def percent_of(expected); end",
"def expected_output(team_elo, opp_elo)\n exponent = (opp_elo - team_elo)/400.00\n expected_output = 1.00/(1.00 + 10.00**exponent)\n\n return expected_output\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
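Since the entry above asks for MiniTest validation, here is a self-contained sketch. The iterative fib below is an assumption standing in for the module's own (unshown) Fibonacci helper; the test checks golden_ratio against phi = (1 + sqrt(5)) / 2 at several precisions.

require 'minitest/autorun'

module Assignment08
  module_function

  # Assumed iterative Fibonacci; Assignment08 defines its own fib elsewhere.
  def fib(n)
    a, b = 0, 1
    n.times { a, b = b, a + b }
    a
  end

  def golden_ratio(precision)
    x = fib(99) / fib(98).to_f
    x.round(precision)
  end
end

class GoldenRatioTest < Minitest::Test
  def test_matches_phi_at_each_requested_precision
    phi = (1 + Math.sqrt(5)) / 2
    (1..10).each do |precision|
      assert_in_delta phi, Assignment08.golden_ratio(precision), 10.0**-precision
    end
  end
end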
Row explanation: [ "language", prs ] Output: [ ["Ruby", 3], ... ["CSS", 1] ] | def language_prs
languages.map do |language|
[language.fetch("language").name, language.fetch("prs")]
end
end | [
"def rows\n text.split(\"\\n\").map do |element|\n mapped = element.split.map do |item|\n item.to_i\n end\n require 'pry'; binding.pry\n end\n end",
"def localize_column(row)\n row\n .map do |key, value|\n next [key, value] unless key.match?(LANGUAGE_SET)\n next unless key.end_with?(language)\n\n [key.sub(/_#{language}$/, ''), value]\n end\n .compact\n .to_h\n end",
"def reformat_languages(languages)\n new_hash = {}\n java_style_array = Array.new\n\n languages.each do |style, language_data| # :oo , :ruby\n language_data.each do |language_name, language_name_value| # :ruby , {type =>}\n if language_name == :javascript\n java_style_array << style\n end\n language_name_value.each do |attribute, value|\n if language_name == :javascript\n new_hash[language_name] = {type: value, style: java_style_array}\n else\n new_hash[language_name] = {type: value, style: [style]}\n end\n\n end\n end\n end\n puts new_hash\n return new_hash\nend",
"def languages\n\n @questions = Question.where(area: \"linguagens, códigos e suas tecnologias\").order(:year, :number)\n return @questions\n\n end",
"def subject_array(subject, columns, lang = :english)\n sub_arr = []\n\n if subject.major?\n sub_arr.unshift({content: title_for(subject, lang).mb_chars.upcase.to_s, font: PdfReportCard::DEFAULT_BOLD_FONT})\n else\n sub_arr.unshift title_for(subject, lang)\n end\n\n (1..3).each do |p|\n if columns > 3\n # sub_arr << (subject.show_score? ? checkmark_replace(get_score(subject, p)) : \"\")\n sub_arr += positional_score(get_score(subject, p))\n sub_arr << (subject.show_effort? ? checkmark_replace(get_effort(subject, p)) : \"\")\n else\n sub_arr << checkmark_replace(get_effort(subject, p))\n end\n end\n\n sub_arr\n end",
"def number_of_programming_languages(text_array)\n resume_as_array = resume_to_array(text_array)\n result = 0\n @programming_languages.each do |p|\n if resume_as_array.include?(p)\n result += 1\n end\n end\n return result\nend",
"def subject_array(subject, columns, lang = :english)\n sub_arr = []\n\n if subject.major?\n sub_arr.unshift({content: title_for(subject, lang).mb_chars.upcase.to_s, font: PdfReportCard::DEFAULT_BOLD_FONT})\n else\n sub_arr.unshift title_for(subject, lang)\n end\n\n (1..3).each do |p|\n if columns > 3\n sub_arr << (subject.show_score? ? checkmark_replace(get_score(subject, p)) : \"\")\n sub_arr << (subject.show_effort? ? checkmark_replace(get_effort(subject, p)) : \"\")\n else\n sub_arr << checkmark_replace(get_effort(subject, p))\n end\n end\n\n sub_arr\n end",
"def print_layout_row_title\n [{ code: :ewc_code },\n { code: :ewc_code, lookup: true, label: false },\n { code: :description },\n { code: :lau_code, lookup: true },\n { code: :fmme_method, lookup: true },\n { code: :from_non_disposal_ind, lookup: true }]\n end",
"def row_cells_to_text_columns(table_row)\n table_row.cells.map do |cell|\n # Join paragraphs into a String using a space delimiter.\n cell_paragraphs(cell).join(' ')\n end\n end",
"def label_descrs(row)\n label = Label.descr_by_name('G_CODE')\n result = [label]\n unless row.nil?\n if self.custom_field.is_a?(CustomFields::OptionCombindedReference) \n source_field = Field.find(self.value[:field_id].to_i)\n actual_row = Field.row_by_field(source_field, row)\n screen_cell = actual_row.cell(source_field.custom_field_id) unless actual_row.nil?\n screen_descrs = CustomFields::ScreenCombindedReference.cell_screen_group_descr(screen_cell.value) unless screen_cell.nil?\n result += screen_descrs.to_s\n end\n end\n result\n end",
"def highlight_by_language(text, language)\n ::CodeRay.scan(text, language).html(:line_numbers => :inline, :wrap => :span)\n end",
"def column_styles\n # [\n # {:name => 'general', :column => 0},\n # {:name => 'general', :column => 1},\n # {:name => 'general', :column => 2}\n #]\n []\n end",
"def get_numpr_prop_from_ast(ast, key)\n values = []\n ast.grep(Sablon::HTMLConverter::Paragraph).each do |para|\n numpr = para.instance_variable_get('@properties')['numPr']\n numpr.each { |val| values.push(val[key]) if val[key] }\n end\n values\n end",
"def get_row_style(row)\n if @row_styles[(row+1).to_s].nil?\n @row_styles[(row+1).to_s] = {}\n @row_styles[(row+1).to_s][:style] = '0'\n @workbook.fonts['0'][:count] += 1\n end\n return @row_styles[(row+1).to_s][:style]\n end",
"def context_type_result_row\n ContextTypeDef.new(\n :result_row,\n [\n /(\\s{3,20}NP|\\s{3,20}SQ|\\s{3,20}RIT|(\\d{1,2}')?\\d\\d.\\d\\d\\s*\\d{3,4}[\\,|\\.]\\d{1,2}\\s*(RI|RE|RM)?)(\\r\\n|\\n|$|\\Z)/i\n ],\n :individual_category_header # parent context\n )\n end",
"def p_to_scores(paragraphs)\n score = []\n paragraphs.each do |para|\n # this is where i add searching librarys to give me the score..(take out elementary words using a elmentary grade library, dictiionary check words.. then add points)\n score << para.length\n end\n score\n end",
"def languages\n documents.collect(&:sections).flatten.collect(&:examples).flatten.map(&:language).compact.uniq\n end",
"def calculate_LI_languages\n linked_in.languages.split(',').count * 30\n end",
"def label_languages(lang_list)\n lang_label = 0\n lang_list.each do |lang|\n lang_label += 1\n lang.label = \"L#{lang_label}\"\n end\n lang_list\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Transforms a SQLiX embedded XML document. Takes a valid XML document as either a string, a URL (including a local filename), or an REXML::Document. Use the seeds hash to feed the references of the initial row element. Returns the resulting XML document as a string, or as an REXML::Document if that was given. sqlix:query elements are in this format: | def transform(xml_source, seeds={})
if xml_source.is_a?(REXML::Document)
xml_document = xml_source
else
src = fetch_xml(xml_source)
xml_document = REXML::Document.new(src)
end
queries = REXML::XPath.match(xml_document.root,'//x:query', {'x' => 'http://www.transami.net/namespace/sqlix'})
queries.each do |element|
listener = SQLiX_Listener.new(@dbi, seeds)
el_str = ''
element.write(el_str)
REXML::Document.parse_stream(el_str, listener)
new_element = listener.build_document
new_element.elements.each do |el|
element.parent.add_element(el)
end
element.remove
end
# return output
if xml_source.is_a?(REXML::Document)
return xml_document
else
xml_string = ""
xml_document.write(xml_string)
xml_string.gsub!(/>\s+</,'><') # clean poor rexml whitespace handling
return xml_string
end
end | [
"def xml_to_solr( text, solr_doc=Solr::Document.new )\n doc = REXML::Document.new( text )\n doc.root.elements.each do |element|\n solr_doc << Solr::Field.new( :\"#{element.name}_t\" => \"#{element.text}\" )\n end\n\n return solr_doc\n end",
"def to_solr_xml(opts={})\n blanks = ['', [], ['']]\n\n atomic_fields_we_want = %w{\n id barcode date_uploaded date_modified physical_number project_label\n date_created earliest_created latest_created sort_title height_in_cm\n width_in_cm page_count rights\n }\n array_fields_we_want = %w{\n contributor creator title alternative_title description publisher\n series category\n }\n #k, sub_k (k = key for field (may point to an array), sub_k = key for hash)\n hash_values_we_want = {\n 'geographic_origin' => %w{ label },\n 'geographic_subject' => %w{ label },\n 'subject' => %w{ label },\n 'genre' => %w{ pul_label },\n 'language' => %w{ label },\n 'box' => %w{ barcode physical_location physical_number },\n 'project' => %w{ label }\n }\n\n export_hashes = self.to_export(opts)\n export_hashes = [ export_hashes ] unless export_hashes.kind_of?(Array)\n\n bob = Nokogiri::XML::Builder.new do |xml|\n xml.send(:add) do\n export_hashes.each do |export_hash|\n xml.send(:doc_) do\n export_hash.each do |k,v|\n lang_code = LANG_LOOKUP[export_hash['language'][0]['code']]\n if blanks.include?(v)\n # skip it. blank? doesn't quite work because of ['']\n elsif atomic_fields_we_want.include?(k)\n if k == 'id'\n xml.send(:field, v.split(':').last, name: k)\n xml.send(:field, v, name: 'pulstore_pid')\n elsif k == 'date_created'\n unless blanks.include?(v)\n xml.send(:field, v, name: 'date_display')\n xml.send(:field, v, name: k)\n end\n elsif k == 'earliest_created'\n display = \"#{v}-#{export_hash['latest_created']}\"\n unless blanks.include?(v)\n xml.send(:field, display, name: 'date_display') \n xml.send(:field, v, name: k)\n end\n else\n xml.send(:field, v, name: k) unless blanks.include?(v)\n end\n elsif array_fields_we_want.include?(k)\n if ['title','alternative_title','publisher','series'].include?(k)\n name = lang_code.nil? ? k : \"#{k}_#{lang_code}\"\n xml.send(:field, v.first, name: name)\n if ['title','publisher'].include?(k)\n xml.send(:field, v.first, name: \"#{k}_display\")\n end\n else\n v.each do |member|\n xml.send(:field, member, name: k) unless blanks.include?(member)\n end\n end\n elsif hash_values_we_want.has_key?(k)\n if v.kind_of?(Array) # array of hashes\n v.each do |hsh|\n hash_values_we_want[k].each do |sub_k|\n xml.send(:field, hsh[sub_k], name: \"#{k}_#{sub_k}\") unless blanks.include?(v)\n end\n end\n else # v is a hash\n hash_values_we_want[k].each do |sub_k|\n xml.send(:field, v[sub_k], name: \"#{k}_#{sub_k}\") unless blanks.include?(v)\n end\n end\n end\n end\n if export_hash['active_fedora_model_ssi'] == 'PulStore::Lae::Folder'\n ttl = to_ttl(data: export_hash)\n xml.send(:field, ttl, name: :ttl)\n manifest = to_manifest(data: export_hash)\n xml.send(:field, manifest, name: :manifest)\n thumb = PulStore::ImageServerUtils.build_iiif_base_uri(export_hash['pages'].first['id'])\n xml.send(:field, thumb, name: :thumbnail_base)\n end\n\n end\n end\n end\n end\n return bob.to_xml\n end",
"def make_sql(xml_file_or_string)\n src = REXML::SourceFactory.create_from(xml_file_or_string) # REXML allows to parse a file or a string\n listener = XiSQL_Listener.new(@dbi) # initialize listener passing the DBConnection object\r\n REXML::Document.parse_stream(src, listener) # parse it!\n listener.build_sql # build the sql statements from the parse\n @sqls = listener.sqls # here's are sql statements array\n return @sqls # pass the sql array back for convienence\n end",
"def query_xml(hash)\n Nokogiri::XML::Builder.new do |x|\n x.union do\n %w{node way}.each do |t|\n x.query(type: t) do\n hash.each do |k,v|\n options = {k: k}\n if v.is_a? String\n options[:v] = v\n end\n x.send(:'has-kv', options)\n end\n end\n end\n x.recurse(type: 'way-node') # include all nodes in a given way\n end\n end.doc.root.to_s\n end",
"def q!(query, db)\n # build bindings from references\n bindings = []\n if query.references\n query.references.each do |reference|\n bindings << @row[reference]\n end\n end\n # query\n qry = query.query.gsub(''',\"'\").gsub('"','\"') # replace single and double quotes that rexml substituted out.\n if bindings.empty?\n rows = db.connection.select_all(qry)\n else\n rows = db.connection.select_all(qry, *bindings)\n end\n result_rows = []\n rows.each do |row|\n result_rows << Row.new(query.name, row, query.attributes)\n end\n result_rows.each do |row|\n self << row\n end\n return result_rows\n end",
"def build_solr_xml(command, ruby_data)\n builder = Nokogiri::XML::Builder.new do |sxml|\n sxml.send(command) do\n ruby_data = [ruby_data] if ruby_data.class != Array\n ruby_data.each do |data|\n sxml.doc_ do\n data.keys.each do |key|\n data[key] = [data[key]] if data[key].class != Array\n data[key].each do |val|\n sxml.field(val, :name => key.to_s)\n end\n end\n end\n end\n end\n end\n builder.to_xml\n end",
"def row_to_solr(doc, headers, row)\n headers.each do |column|\n doc.add_child(\"<field name='#{column}'>#{row[column]}</field>\") if row[column]\n end\n return doc\n end",
"def to_solr_xml\n output = ''\n builder = Builder::XmlMarkup.new(:target => output, :indent => 2)\n\n builder.add do |add|\n add.doc do |doc|\n self.class.solr_fields.each do |field_name|\n field_name = field_name.to_s\n\n if field_name == '_id'\n solr_field_name = 'id'\n else\n solr_field_name = field_name\n end\n\n val = instance_variable_get(\"@#{field_name}\")\n\n if val\n if val.kind_of? Array\n val.each do |child|\n doc.field({:name => solr_field_name}, child)\n end\n else\n doc.field({:name => solr_field_name}, val)\n end\n end\n end\n end\n end\n\n output\n end",
"def build_solr_xml(ruby_data)\n builder = Nokogiri::XML::Builder.new do |sxml|\n sxml.add do \n ruby_data = [ruby_data] if ruby_data.class != Array\n ruby_data.each do |data|\n sxml.doc_ do\n data.keys.each do |key|\n data[key] = [data[key]] if data[key].class != Array\n data[key].each do |val|\n sxml.field(val, :name => key.to_s) \n end\n end\n end\n end\n end\n end\n builder.to_xml\n end",
"def insert_xmldocument(table_name, xml_doc)\n\t\t# prep sql template\n\t\tsql = \"INSERT INTO #{table_name} (docno, sort, depth, path, content, attribute) VALUES (?, ?, ?, ?, ?, ?);\"\n\t\tsth = @dbh.prepare(sql)\n\t\t# translate xml\n\t\tparse_array = parse_xml(xml_doc)\n\t\t# fetch a document no for this xml record\n\t\tdocno = fetch_docno(table_name)\n\t\t# insert each row\n\t\tparse_array.each do |r|\n\t\t\tsort = r['sort']\n\t\t\tdepth = r['depth']\n\t\t\tpath = r['path']\n\t\t\tcontent = r['content']\n\t\t\tattribute = r['attribute']\n\t\t\tsth.execute(docno, sort, depth, path, content, attribute)\n\t\tend\n\t\tsth.finish\n\tend",
"def transform(xml_file, xsl_file,include_words)\n saxon = \"#{Rails.root}/lib/saxon\"\n tmp_file = \"#{Rails.root}/tmp/#{self.id}-#{Time.now.to_i}.xml\"\n \n xsl_param = \"showW='n'\"\n xsl_param = \"showW='y'\"if include_words\n\n saxon_jar = \"#{saxon}/Saxon-HE-9.5.1-1.jar\"\n cmd = \"java -jar #{saxon_jar} #{xml_file} #{xsl_file} #{xsl_param} > #{tmp_file}\"\n Document.do_command(cmd)\n file = File.open(tmp_file)\n out = file.read\n File.delete(tmp_file)\n return out\n end",
"def to_solr\n xml = Builder::XmlMarkup.new\n \n xml.add do\n xml.doc do\n self.class.simple_solr_fields.each do |name, value|\n if value.nil?\n # no value given, get it from the attribute\n xml.field self.send(name), :name => name\n elsif value.is_a?(Symbol)\n # symbol given, use it to get the attribute\n xml.field self.send(value), :name => name\n elsif value.is_a?(Proc)\n # Procs are used to fetch information from the instance\n xml.field value.call(self), :name => name\n else\n # value given, use it directly.\n xml.field value, :name => name\n end\n end\n end\n end\n end",
"def parse_xquery(query)\n begin\n str = @parser.parse_XQuery(query)\n rescue Parsers::ParseException => e\n fail ParseException, 'Parsing error'\n end\n\n Nokogiri.XML(str) do |config|\n config.default_xml.noblanks\n end\n end",
"def sanitize_id_format_raw_xml(original_input)\n return nil if original_input.nil? || original_input.raw_content.blank?\n format_sanitizer = Stix::Stix111::SanitizerValidFormat.new(Setting.XML_PARSING_LIBRARY.to_s.to_sym)\n format_sanitizer.sanitize_xml(original_input.raw_content)\n @warnings = format_sanitizer.warnings\n if format_sanitizer.valid?\n original_input.raw_content = format_sanitizer.xml\n else\n @errors = format_sanitizer.errors\n nil\n end\n end",
"def to_xml\n xml = String.new\n builder = Builder::XmlMarkup.new(:target => xml, :indent => 2)\n \n # Xml instructions (version and charset)\n builder.instruct!\n \n builder.source(:primary => primary_source) do\n builder.id(id, :type => \"integer\")\n builder.uri(uri.to_s)\n end\n \n xml\n end",
"def generate_sqls(xps)\r\n\t\t\t@xps_source = REXML::SourceFactory.create_from(xps)\n\t\t\tlistener = SQLCreator_Listener.new\r\n\t\t\tREXML::Document.parse_stream(@xps_source, listener)\n\t\t\t@sqls = listener.build_sql\n\t\t\treturn @sqls\n\t\tend",
"def setup\n xml_results = <<BEGIN\n <search:response total=\"2\" start=\"1\" page-length=\"10\" xmlns:search=\"http://marklogic.com/appservices/search\">\n <search:result index=\"1\" uri=\"/documents/discoverBook.xml\" path=\"fn:doc('/documents/discoverBook.xml')\" score=\"243\" confidence=\"0.97047\" fitness=\"1\">\n <search:snippet>\n <search:match path=\"fn:doc('/documents/discoverBook.xml')/*:book/*:bookinfo/*:title\">Discoverers <search:highlight>and</search:highlight> Explorers</search:match>\n <search:match path=\"fn:doc('/documents/discoverBook.xml')/*:book/*:chapter[1]/*:chapterinfo/*:biblioentry/*:title\">Discoverers <search:highlight>and</search:highlight> Explorers</search:match>\n </search:snippet>\n </search:result>\n <search:result index=\"2\" uri=\"/documents/a_and_c.xml\" path=\"fn:doc('/documents/a_and_c.xml')\" score=\"234\" confidence=\"0.952329\" fitness=\"1\">\n <search:snippet>\n <search:match path=\"fn:doc('/documents/a_and_c.xml')/PLAY/PERSONAE/PERSONA[10]\">Officers, Soldiers, Messengers, <search:highlight>and</search:highlight> other Attendants.</search:match>\n </search:snippet>\n </search:result>\n <search:qtext>and</search:qtext>\n <search:metrics>\n <search:query-resolution-time>PT0.009197S</search:query-resolution-time>\n <search:facet-resolution-time>PT0.000083S</search:facet-resolution-time>\n <search:snippet-resolution-time>PT0.019534S</search:snippet-resolution-time>\n <search:total-time>PT0.029033S</search:total-time>\n </search:metrics>\n</search:response>\nBEGIN\n\n xml_results_noh = <<BEGIN\n <search:response total=\"2\" start=\"1\" page-length=\"10\" xmlns:search=\"http://marklogic.com/appservices/search\">\n <search:result index=\"1\" uri=\"/documents/discoverBook.xml\" path=\"fn:doc('/documents/discoverBook.xml')\" score=\"243\" confidence=\"0.97047\" fitness=\"1\">\n <search:snippet>\n <search:match path=\"fn:doc('/documents/discoverBook.xml')/*:book/*:bookinfo/*:title\">Discoverers and Explorers</search:match>\n <search:match path=\"fn:doc('/documents/discoverBook.xml')/*:book/*:chapter[1]/*:chapterinfo/*:biblioentry/*:title\">Discoverers and Explorers</search:match>\n </search:snippet>\n </search:result>\n <search:result index=\"2\" uri=\"/documents/a_and_c.xml\" path=\"fn:doc('/documents/a_and_c.xml')\" score=\"234\" confidence=\"0.952329\" fitness=\"1\">\n <search:snippet>\n <search:match path=\"fn:doc('/documents/a_and_c.xml')/PLAY/PERSONAE/PERSONA[10]\">Officers, Soldiers, Messengers, and other Attendants.</search:match>\n </search:snippet>\n </search:result>\n <search:qtext>and</search:qtext>\n <search:metrics>\n <search:query-resolution-time>PT0.009197S</search:query-resolution-time>\n <search:facet-resolution-time>PT0.000083S</search:facet-resolution-time>\n <search:snippet-resolution-time>PT0.019534S</search:snippet-resolution-time>\n <search:total-time>PT0.029033S</search:total-time>\n </search:metrics>\n</search:response>\nBEGIN\n\n results_with_facets = <<-BEGIN\n<search:response total=\"21973\" start=\"1\" page-length=\"10\" xmlns:search=\"http://marklogic.com/appservices/search\">\n <search:result index=\"9\" uri=\"/Users/clarkrichey/Downloads/wits/wits21402.xml\" path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits21402.xml")\" score=\"196\" confidence=\"0.338805\" fitness=\"0.890659\">\n <search:snippet>\n <search:match path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits21402.xml")/*:Incident/*:Subject\">1 newspaper editor injured in letter <search:highlight>bomb</search:highlight> attack by Informal Anarchist Federation in 
Turin, Piemonte, Italy</search:match>\n <search:match path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits21402.xml")/*:Incident/*:EventTypeList\">\n<search:highlight>Bombing</search:highlight>\n</search:match>\n <search:match path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits21402.xml")/*:Incident/*:WeaponTypeList/*:WeaponType\">Letter <search:highlight>Bomb</search:highlight></search:match>\n </search:snippet>\n </search:result>\n <search:result index=\"10\" uri=\"/Users/clarkrichey/Downloads/wits/wits23118.xml\" path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits23118.xml")\" score=\"196\" confidence=\"0.338805\" fitness=\"0.890659\">\n <search:snippet>\n <search:match path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits23118.xml")/*:Incident/*:Subject\">1 government employee killed in <search:highlight>bombing</search:highlight> in Ghazni, Afghanistan</search:match>\n <search:match path=\"fn:doc("/Users/clarkrichey/Downloads/wits/wits23118.xml")/*:Incident/*:EventTypeList\">\n<search:highlight>Bombing</search:highlight>\n</search:match>\n </search:snippet>\n </search:result>\n <search:facet name=\"Region\">\n <search:facet-value name=\"Africa\" count=\"622\">Africa</search:facet-value>\n <search:facet-value name=\"Central and South America\" count=\"1012\">Central and South America</search:facet-value>\n <search:facet-value name=\"East Asia-Pacific\" count=\"1198\">East Asia-Pacific</search:facet-value>\n <search:facet-value name=\"Eurasia\" count=\"761\">Eurasia</search:facet-value>\n <search:facet-value name=\"Europe\" count=\"1057\">Europe</search:facet-value>\n <search:facet-value name=\"Middle East and Persian Gulf\" count=\"10374\">Middle East and Persian Gulf</search:facet-value>\n <search:facet-value name=\"North America and Caribbean\" count=\"16\">North America and Caribbean</search:facet-value>\n <search:facet-value name=\"South Asia\" count=\"6933\">South Asia</search:facet-value>\n </search:facet>\n <search:facet name=\"Country\">\n <search:facet-value name=\"England\" count=\"200\">England</search:facet-value>\n <search:facet-value name=\"Ireland\" count=\"422\">Ireland</search:facet-value>\n <search:facet-value name=\"Brazil\" count=\"10\">Brazil</search:facet-value>\n </search:facet>\n <search:qtext>bomb</search:qtext>\n <search:metrics>\n <search:query-resolution-time>PT0.420016S</search:query-resolution-time>\n <search:facet-resolution-time>PT0.002873S</search:facet-resolution-time>\n <search:snippet-resolution-time>PT0.039998S</search:snippet-resolution-time>\n <search:total-time>PT0.463759S</search:total-time>\n </search:metrics>\n</search:response>\n BEGIN\n @search_results = ActiveDocument::SearchResults.new(xml_results)\n @search_results_noh = ActiveDocument::SearchResults.new(xml_results_noh)\n @faceted_results = ActiveDocument::SearchResults.new(results_with_facets)\n end",
"def wrap_in_xml_record(xml_fragment)\n wrap_in_xml_infobase(%(\n <record class=\"NormalLevel\" fullPath=\"/50000009/50130009/50130229\" recordId=\"50130229\">\n #{ xml_fragment }\n </record>)\n )\nend",
"def result2_data_rows(experiment_id, uri, table, columns)\n data_rows = []\n\n req = Net::HTTP::Post.new(uri.path)\n\n xml = REXML::Document.new\n request = xml.add_element('request', 'id' => 'foo')\n request.add_element('result').add_element('format').add_text('xml')\n query = request.add_element('query')\n query.add_element('repository', 'name' => experiment_id)\n query.add_element('table', 'tname' => table)\n project = query.add_element('project')\n\n columns.each do |m|\n project.add_element('arg').add_element('col', 'name' => m.to_s, 'table' => table)\n end\n\n req.body = xml.to_s\n\n result = Net::HTTP.start(uri.host, uri.port) do |http|\n res = http.request(req)\n res.body\n end\n\n\n result_doc = REXML::Document.new(result)\n REXML::XPath.each(result_doc, '//omf:r', 'omf'=> RESULT2_NAMESPACE).each do |r|\n row = REXML::XPath.each(r, 'omf:c', 'omf'=> RESULT2_NAMESPACE).map {|v| v.text.auto_parse }\n data_rows << row\n end\n\n data_rows\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
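A usage note on the transform record above: its core is a REXML pattern in which the namespaced sqlix:query elements are matched via an XPath prefix map, freshly built elements are spliced into the parent, and the original node is removed. The following is a minimal, self-contained sketch of that pattern; the sample markup and the hard-coded row element are illustrative stand-ins for the database-driven SQLiX_Listener step, not part of the record.

require 'rexml/document'

xml = REXML::Document.new(<<~XML)
  <report xmlns:sqlix="http://www.transami.net/namespace/sqlix">
    <sqlix:query name="users">SELECT * FROM users</sqlix:query>
  </report>
XML

# Bind the prefix 'x' to the sqlix namespace URI, as transform does.
queries = REXML::XPath.match(xml.root, '//x:query',
                             'x' => 'http://www.transami.net/namespace/sqlix')
queries.each do |element|
  # Stand-in for the elements a SQLiX_Listener would build from query results.
  replacement = REXML::Element.new('row')
  replacement.add_element('name').add_text('alice')

  element.parent.add_element(replacement) # splice the result in beside the query element
  element.remove                          # then drop the sqlix:query element itself
end

out = ''
xml.write(out)
puts out.gsub(/>\s+</, '><') # same whitespace clean-up the record applies to its string output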
q! method, she's an odd one: takes a query and returns an array of the Row objects produced, but also inserts those Row objects into this Row's (self) children. It is also necessary to pass the DBConnection object the query will be run against. This actually pisses me off in that the whole point of having this class | def q!(query, db)
# build bindings from references
bindings = []
if query.references
query.references.each do |reference|
bindings << @row[reference]
end
end
# query
qry = query.query.gsub('&apos;',"'").gsub('&quot;','"') # replace single and double quotes that rexml substituted out.
if bindings.empty?
rows = db.connection.select_all(qry)
else
rows = db.connection.select_all(qry, *bindings)
end
result_rows = []
rows.each do |row|
result_rows << Row.new(query.name, row, query.attributes)
end
result_rows.each do |row|
self << row
end
return result_rows
end | [
"def drillthrough(query)\n RowSet.new @connection.create_statement.execute_query(query.to_s)\n end",
"def query_entries(sql, *sqlargs) # :yields: entry\n @db.execute(sql, *sqlargs) do |row|\n yield Entry.new.load_from_database_row(row)\n end\n end",
"def run(rows, query_logger: nil)\n query(rows) { |*args|\n assoc_rows = args[0] ? Query.new(args[0], use: @use, eager_loaders: @eager_loaders, query_logger: query_logger).run : []\n merge! assoc_rows, rows, *args[1..-1]\n }\n end",
"def run(rows, query_logger: nil)\n calc_ids(rows) { |ids|\n assoc = if ids.any?\n binds = @binds.merge({:ids => ids})\n RawQuery.new(@sql, binds, use: @use, eager_loaders: @eager_loaders, query_logger: query_logger).run\n else\n []\n end\n merge! assoc, rows\n }\n end",
"def execute(sql, transaction)\r\n yield(row)\r\n end",
"def new_rows(table)\n fields = fields_from(table)\n pkey, fields = extract_pkey_from(fields)\n my.select_db(@newdb)\n \n query = \"SELECT \"\n query << pkey.collect do |f|\n \"n.#{f[\"Field\"]} #{f[\"Field\"]}\"\n end.join(\",\")\n query << \",\"\n query << fields.collect do |f|\n \"n.#{f[\"Field\"]} n_#{f[\"Field\"]}\"\n end.join(\",\")\n \n query << \" FROM #{@newdb}.#{table} AS n LEFT JOIN #{@olddb}.#{table} AS o ON \"\n query << pkey.collect do |f|\n \"n.#{f[\"Field\"]} = o.#{f[\"Field\"]}\"\n end.join(\" AND \")\n query << \" WHERE \"\n query << pkey.collect do |f|\n \"o.#{f[\"Field\"]} IS NULL\"\n end.join(\" AND \")\n \n result = my.query(query)\n new_rows = []\n while row = result.fetch_hash\n new_rows << row\n end\n new_rows\n end",
"def node_query_mapping_insert\n # binding.pry\n branches.each do |br|\n br.nodes.each do |nd|\n nodeName = nd.name\n\n # columnsArray=nd.columns.map{|c| \"'\"+(c.relalias.nil? ? '' : c.relalias+'.')+c.colname+\"'\"}.join(',')\n columnsArray = nd.columns.map { |c| \"'\" + c.relname + '.' + c.colname + \"'\" }.join(',')\n query = \"INSERT INTO #{@nqTblName} values (#{@test_id} ,'#{br.name}','#{nd.name}', '#{nd.query.gsub(/'/, '\\'\\'')}',#{nd.location}, ARRAY[#{columnsArray}], #{nd.suspicious_score} , '#{@type}' )\"\n # pp query\n DBConn.exec(query)\n end\n end\n end",
"def sql_array\n ::Sequel::SQL::SQLArray.new(self)\n end",
"def build_rows(ids, parent_id, row_id, root, level, rows, table, conn)\n ids.each do |id|\n child_ids = conn.select_values(\"SELECT #{id_field} FROM #{table} WHERE #{parent_id_field} = #{id}\")\n \n row = {\n :parent_id => row_id, \n :child_id => id, \n :num_levels_from_parent => level, \n :is_bottom => (child_ids.empty? ? 1 : 0),\n :is_top => (root ? 1 : 0),\n }\n rows << row\n \n build_rows(child_ids, id, row_id, false, level + 1, rows, table, conn)\n end\n end",
"def initialize(query:, columns:, table:, size:, serializer:, model:, &transaction)\n @query = query\n @serializer = serializer\n @table = table\n qutex = Mutex.new\n\n queue = case\n when activerecord?\n @query.pluck(*@columns)\n when arel?\n ActiveRecord::Base.connection.execute(@query.to_sql).map(&:values)\n when tuple?\n @query.map { |result| result.slice(*columns).values }\n when twodimensional?\n @query\n else\n raise ArgumentError, 'query wasn\\'t recognizable, please use some that looks like a: ActiveRecord::Base, Arel::SelectManager, Array[Hash], Array[Array]'\n end\n\n puts \"Migrating #{queue.count} #{table} records\"\n\n # Spin up a number of threads based on the `maximum` given\n 1.upto(size).map do\n Thread.new do\n loop do\n # Try to get a new queue item\n item = qutex.synchronize { queue.shift }\n\n if item.nil?\n # There is no more work\n break\n else\n # Wait for a free connection\n model.connection_pool.with_connection do\n model.transaction do\n # Execute each statement coming back\n Array[instance_exec(*item, &transaction)].each do |instruction|\n next if instruction.nil?\n model.connection.execute(instruction.to_sql)\n end\n end\n end\n end\n end\n end\n end.map(&:join)\n end",
"def query(&block)\n items = assert_connected(table).query(&block)\n results = []\n items.each { |i| results << new(i) }\n results\n end",
"def insert_row(row)\n rec = insert_into_core(row) || insert_into_extension(row)\n entity.send(add_related, rec)\n end",
"def process(row)\n return row unless row\n rows = []\n target = configuration[:target]\n table = configuration[:table]\n conn = ETL::Engine.connection(target)\n build_rows([row[:id]], row[:id], row[:id], row[:parent_id].nil?, 0, rows, table, conn)\n rows\n end",
"def execute_all( sql ) # :yields: row\n loop do\n stmt = prepare( sql )\n stmt.execute do |result|\n result.each { |row| yield row if block_given? }\n end\n sql = stmt.remainder\n if sql.length > 0\n yield nil if block_given? # notify of new query starting\n else\n break\n end\n end\n end",
"def add_child(query)\n children << query\n end",
"def insert_all; end",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def pg_gem_batch__from psql_db, db_queries\n psql_db = array__from psql_db\n db_queries = array__from db_queries\n pg_gem_conn = pg_gem_conn__from psql_db\n pg_connection = pg_gem_conn[5]\n batch = [pg_connection].product db_queries\n end",
"def _query(q)\n ret = []\n begin\n db = Mysql.real_connect('localhost', @user, @pass, @database)\n if q.size == 1\n ret = db.query(q[0])\n else\n q.each do |query|\n ret.push(db.query(query))\n end\n end\n rescue Mysql::Error => e\n puts \"Error code: #{e.errno}\"\n puts \"Error message: #{e.error}\"\n puts \"Error SQLSTATE: #{e.sqlstate}\" if e.respond_to?(\"sqlstate\")\n ensure\n # disconnect from server\n db.close if db\n end\n ret\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
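A usage note on the q! record above: before executing, it collects bind values from the parent row's columns named in query.references and passes them positionally to select_all. A tiny stand-alone sketch of that binding step follows, with a plain Struct and a stubbed execution in place of the real Query object and DBI connection; the struct, field names, and data are illustrative assumptions.

# Illustrative stand-in for the query object q! receives.
QuerySpec = Struct.new(:name, :query, :references, :attributes)

parent_row = { 'id' => 42, 'region' => 'EU' } # stands in for this Row's column data
spec = QuerySpec.new('order',
                     'SELECT * FROM orders WHERE customer_id = ? AND region = ?',
                     %w[id region], [])

# Build positional bindings from the parent row, as q! does.
bindings = (spec.references || []).map { |ref| parent_row[ref] }
# bindings => [42, "EU"]

# With a live handle this would be: db.connection.select_all(spec.query, *bindings),
# and each returned row would be wrapped in Row.new(spec.name, row, spec.attributes)
# and appended to self.
puts "would execute #{spec.query.inspect} with #{bindings.inspect}"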
this is a mesh builder which returns this Row (self) as xml | def xml
base = REXML::Element.new(@name)
if @row.class == DBI::Row # only if we have a row otherwise return an empty xml node
# prime
context = nil
rowcontext = base
# loop through each column
@row.each_with_name do |val, colpath|
context = rowcontext # start at the top of the row for each column
parents = colpath.split('/') # split on any path dividers, i.e. parent/parent/child
child = parents.pop # get the child off the parents
# loop through all the parents
parents.each do |p|
found = REXML::XPath.first(context, p) # does the element already exist?
if not found # if not...
el = p.gsub(/[[].*[]]$/,'') # remove index if there is one
found = context.add_element(el) # add the element
end
context = found # this parent is now our new context
end
# do the child (the end of the tree branch)
if child =~ /^@(.*)/ # is it labeled an attribute with @?
context.add_attribute($1, val.to_s) # add attribute
elsif @attributes.include?(child) # or is it in the attributes list?
context.add_attribute(child, val.to_s) # add attribute
else
found = REXML::XPath.first(context, child) # does it already exist?
if not found # if not...
el = child.gsub(/[[].*[]]$/,'') # remove index if there is one
found = context.add_element(el) # add the element
end
context = found # the child is now our new context
context.add_text(val.to_s) # insert the text node as val
end
end
end
return base
end | [
"def to_xml(builder, include_dims=true)\n builder.grid do |xml|\n xml.cube\n dims_to_xml(xml) if include_dims\n xml.slices do |xml|\n xml.slice :rows => @row_count, :cols => @col_count do |xml|\n xml.data do |xml|\n xml.range :start => 0, :end => @row_count * @col_count - 1 do\n xml.vals @vals.join('|')\n xml.types @types.join('|')\n end\n end\n end\n end\n end\n end",
"def to_xml\r\n\t\t%{\r\n\t\t\t<a:tbl>\r\n\t\t\t\t<a:tblPr firstRow=\"#{@firstrow}\" bandRow=\"\">\r\n\t\t\t\t\t<a:tableStyleId>{#{@style}}</a:tableStyleId>\r\n\t\t\t\t</a:tblPr>\r\n\t\t\t\t<a:tblGrid>\r\n\t\t\t\t\t#{ \r\n \"<a:gridCol w='#{@width}' />\"*@ncols \r\n }\r\n\t\t\t\t</a:tblGrid>\r\n\t\t\t\t#{\r\n\t\t\t\t\t@matrix.map do |row|\r\n\t\t\t\t\t\t%{\r\n\t\t\t\t\t\t\t<a:tr h=\"#{@height}\">\r\n\t\t\t\t\t\t\t\t#{\r\n\t\t\t\t\t\t\t\t\trow.map do |col|\r\n\t\t\t\t\t\t\t\t\t\t%{\r\n\t\t\t\t\t\t\t\t\t\t\t<a:tc>\r\n\t\t\t\t\t\t\t\t\t\t\t\t<a:txBody>\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t<a:bodyPr />\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t<a:lstStyle />\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t<a:p>\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<a:r>\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<a:rPr lang=\"en-US\" dirty=\"0\" smtClean=\"0\" />\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<a:t>#{col}</a:t>\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</a:r>\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<a:endParaRPr lang=\"en-US\" dirty=\"0\" />\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t</a:p>\r\n\t\t\t\t\t\t\t\t\t\t\t\t</a:txBody>\r\n\t\t\t\t\t\t\t\t\t\t\t\t<a:tcPr />\r\n\t\t\t\t\t\t\t\t\t\t\t</a:tc>\r\n\t\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t\tend.join\r\n\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t</a:tr>\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\tend.join\r\n\t\t\t\t}\r\n\t\t\t</a:tbl>\r\n\t\t}\r\n end",
"def to_xml\n builder.target!\n end",
"def to_s\n @xml ||= Builder::XmlMarkup.new.worksheet(:xmlns => \"http://schemas.openxmlformats.org/spreadsheetml/2006/main\") do |ws|\n unless @column_widths.empty?\n ws.cols do |xcols|\n @column_widths.keys.sort.each do |i|\n width = @column_widths[i]\n params = {:min => i+1, :max => i+1, :bestFit => 1}\n params.merge!({:customWidth => 1, :width => width}) if width\n xcols.col(params)\n end\n end\n end\n ws.sheetData do |sd|\n @cells.keys.sort.each do |row|\n sd.row(:r => row+1) do |xr|\n\n @cells[row].keys.sort.each do |col|\n cell = @cells[row][col]\n cell.output(xr)\n end\n end\n end\n end\n end\n end",
"def builder\n b = Builder::XmlMarkup.new(:indent => 2)\n b.instruct!\n b\n end",
"def dbrow_serialization\n dbrow = super[\"dbrow\"]\n dbrow[:xml] = self.xml\n dbrow.delete(:_xml_entity_created_by)\n dbrow.delete(:_xml_entity_created_at)\n dbrow.delete(:_xml_entity_modified_by)\n dbrow.delete(:_xml_entity_modified_at)\n\n {\n \"dbrow\" => dbrow\n }\n end",
"def build\r\n\t \tbuffer = \"\"\r\n\t\t xml = Builder::XmlMarkup.new(buffer)\r\n\t\t xml.instruct! :xml, :version=>\"1.0\", :encoding=>\"UTF-8\" \r\n\t\t xml.Workbook({\r\n\t\t 'xmlns' => \"urn:schemas-microsoft-com:office:spreadsheet\", \r\n\t\t 'xmlns:o' => \"urn:schemas-microsoft-com:office:office\",\r\n\t\t 'xmlns:x' => \"urn:schemas-microsoft-com:office:excel\", \r\n\t\t 'xmlns:html' => \"http://www.w3.org/TR/REC-html40\",\r\n\t\t 'xmlns:ss' => \"urn:schemas-microsoft-com:office:spreadsheet\" \r\n\t\t }) do\r\n\t \r\n\t\t\t xml.Styles do\r\n\t\t\t xml.Style 'ss:ID' => 'Default', 'ss:Name' => 'Normal' do\r\n\t\t\t xml.Alignment 'ss:Vertical' => 'Bottom'\r\n\t\t\t xml.Borders\r\n\t\t\t xml.Font 'ss:FontName' => 'Arial'\r\n\t\t\t xml.Interior\r\n\t\t\t xml.NumberFormat\r\n\t\t\t xml.Protection\r\n\t\t\t end\r\n\t\t\t xml.Style 'ss:ID' => 's22' do\r\n\t\t\t xml.NumberFormat 'ss:Format' => 'General Date'\r\n\t\t\t end\r\n\t\t\t end\r\n\t\t\t \r\n\t\t\t for object in @worksheets\r\n\t\t \t\t# use the << operator to prevent the < > and & characters from being converted.\r\n\t\t \t\t# this will concat them together.\r\n\t\t \t\tif object[1] =='array'\r\n\t\t \t\t xml << worksheetFromArray(object[0], object[2])\r\n\t\t else\r\n\t\t xml << worksheet(object[0], object[1], object[2])\r\n\t\t end\r\n\t\t\t end # for records\r\n\t\t\t end\r\n\t\t\t \r\n\t return xml.target! \r\n\t end",
"def builder\n builder = Builder::XmlMarkup.new\n builder.instruct!\n builder\n end",
"def to_excel(xml) \n # Header - Prod Name\n xml.Row 'ss:Height'=>'18' do\n xml.Cell('ss:StyleID'=> 's1') { xml.Data name, 'ss:Type' => 'String' }\n end\n xml.Row { xml.Cell }\n root.to_excel(xml) \n end",
"def to_xml_fragment\n Exporters::XML::DDI::Fragment.export_3_2 Exporters::XML::DDI::QuestionGrid, self\n end",
"def to_xml(options = {})\n if self.class == PhysicalObject\n super(options)\n else\n # technical metadata formats\n require 'builder'\n options[:indent] ||= 2\n options[:dasherize] ||= false\n xml = options[:builder] ||= ::Builder::XmlMarkup.new(indent: options[:indent])\n xml.instruct! unless options[:skip_instruct]\n xml.technical_metadata do\n if options[:format]\n xml.format options[:format]\n elsif self.technical_metadatum.physical_object\n xml.format self.technical_metadatum.physical_object.format\n else\n xml.format \"Unknown\"\n end\n xml.files self.master_copies\n self.class.const_get(:SIMPLE_FIELDS).each do |simple_attribute|\n spoofed_attribute_name = simple_attribute\n spoofed_attribute_name = simple_attribute.gsub(\"_\", \"-\") if options[:dasherize]\n xml << \" <#{spoofed_attribute_name}>#{self.attributes[simple_attribute].to_s.encode(xml: :text)}</#{spoofed_attribute_name}>\\n\"\n end\n self.class.const_get(:MULTIVALUED_FIELDSETS).each do |name, fieldset|\n name = name.downcase.gsub(\" \", \"_\")\n name = name.downcase.gsub(\"_\", \"-\") if options[:dasherize]\n section_string = \"\"\n self.class.const_get(fieldset).each do |field|\n spoofed_field_name = field.to_s\n spoofed_field_name = field.to_s.gsub(\"_\", \"-\") if options[:dasherize]\n section_string << \" <#{spoofed_field_name}>true</#{spoofed_field_name}>\\n\" if self.send((field.to_s + \"?\").to_sym)\n end\n if section_string.blank?\n section_string = \" <#{name}/>\\n\"\n else\n section_string = \" <#{name}>\\n\" + section_string + \" </#{name}>\\n\"\n end\n xml << section_string\n end\n end\n end\n end",
"def to_s\r\n assert_exists\r\n r = super({\"rows\" => \"rows.length\",\"columns\" => \"columnLength\", \"cellspacing\" => \"cellspacing\", \"cellpadding\" => \"cellpadding\", \"border\" => \"border\"})\r\n # r += self.column_count.to_s\r\n end",
"def to_xml\n before_validate\n validate\n render File.expand_path('../../haml/block.haml', __FILE__), self\n end",
"def build_xml(builder)\n build_node(builder, :format, @attributes)\n end",
"def to_ArrayedXml()\n # head part\n axml = [[nil, XmlElementTag, ({ :id => @id,\n :x => @pos.x,\n :y => @pos.y,\n :height => @height })]] ;\n ## tag part\n @tagList.each{|tag|\n axml.push([:tag, tag]) ;\n }\n\n ## link part\n @linkList.each{|link|\n axml.push([[nil, :link, { :id => link.id } ]]) ;\n }\n\n return axml ;\n end",
"def build_object(row)\n end",
"def build_xml(p_item)\r\n xml = \"<#{ELE_ITEM}>\\n\"\r\n unless p_item.id.nil?\r\n xml += \"\\t<#{ELE_ID}>#{p_item.id}</#{ELE_ID}>\\n\"\r\n end\r\n xml += \"\\t<#{ELE_TITLE}>#{p_item.title}</#{ELE_TITLE}>\\n\"\r\n xml += \"\\t<#{ELE_CREATED_ON}><![CDATA[#{format_date(p_item.created_on)}]]></#{ELE_CREATED_ON}>\\n\"\r\n xml += \"\\t<#{ELE_LAST_UPDATED_ON}><![CDATA[#{format_date(p_item.last_updated_on)}]]></#{ELE_LAST_UPDATED_ON}>\\n\"\r\n xml += \"\\t<#{ELE_FIELDS}>\\n\"\r\n p_item.get_fields.each do |name, value|\r\n xml += \"\\t\\t<#{ELE_FIELD} #{ATTR_NAME}='#{name}'><![CDATA[#{value}]]></#{ELE_FIELD}>\\n\"\r\n end\r\n xml += \"\\t</#{ELE_FIELDS}>\\n\"\r\n\r\n # add related items\r\n related_items = p_item.get_related_items\r\n unless related_items.nil? and related_items.empty?\r\n xml += \"<#{ELE_RELATED_ITEMS}>\\n\"\r\n related_items.each do |key, item_ids|\r\n xml += \"<#{key}>\\n\"\r\n item_ids.each do |item_id|\r\n xml += \"<#{ELE_ITEM}>\"\r\n xml += item_id.to_s\r\n xml += \"</#{ELE_ITEM}>\\n\"\r\n end\r\n xml += \"</#{key.to_s.downcase}>\\n\"\r\n end\r\n xml += \"</#{ELE_RELATED_ITEMS}>\\n\"\r\n end\r\n\r\n # set target class\r\n unless p_item.target_class.nil?\r\n xml += \"<#{ELE_TARGET_CLASS}>\"\r\n xml += p_item.target_class\r\n xml += \"</#{ELE_TARGET_CLASS}>\"\r\n end\r\n\r\n xml += \"</#{ELE_ITEM}>\\n\"\r\n end",
"def render\n output = \"\"\n xml = Builder::XmlMarkup.new(:target => output, :indent => @options[:indent])\n\n # Output headers unless we specified otherwise\n xml.instruct!\n xml.declare! :DOCTYPE, :svg, :PUBLIC, \"-//W3C//DTD SVG 1.1//EN\", \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"\n\n xml.svg(:viewBox => \"0 0 #{@options[:viewbox][:width]} #{@options[:viewbox][:height]}\",\n :width => @options[:width], :height => @options[:height],\n :xmlns => 'http://www.w3.org/2000/svg', :version => '1.1') do |xml|\n\n xml.g(:fill => 'black', :stroke => 'black', 'stroke-width' => '2',\n 'font-family' => 'Helvetica, Arial, sans-serif', 'font-size' => '10px', 'font-weight' => 'medium') do |xml|\n\n # Outline\n xml.rect(:x => @options[:margin][:left], :y => @options[:margin][:top],\n :width => graph_width,\n :height => graph_height,\n :fill => 'lightgray')\n\n xml.g 'stroke-width' => '1' do |xml|\n\n # Title\n xml.text(@options[:title], 'font-size' => '24px', :x => (@options[:viewbox][:width] / 2.0).round, :y => (@options[:subtitle] ? 24 : 32), 'text-anchor' => 'middle') if @options[:title]\n xml.text(@options[:subtitle], 'font-size' => '18px', :x => (@options[:viewbox][:width] / 2.0).round, :y => 34, 'text-anchor' => 'middle') if @options[:subtitle]\n\n # Lines\n xml.g 'font-size' => '10px' do |xml|\n line_x1 = @options[:margin][:left] + 1\n line_x2 = @options[:viewbox][:width] - @options[:margin][:right] - 1\n\n text_x = @options[:margin][:left] - 5\n\n xml.text 0, :x => text_x, :y => (@options[:viewbox][:height] - @options[:margin][:bottom] + 4), 'stroke-width' => 0.5, 'text-anchor' => 'end'\n\n 1.upto((max / division) - 1) do |line_number|\n y = (@options[:margin][:top] + (line_number * dy)).round\n xml.line :x1 => line_x1, :y1 => y, :x2 => line_x2, :y2 => y, :stroke => '#666666'\n xml.text max - line_number * division, :x => text_x, :y => y + 4, 'stroke-width' => 0.5, 'text-anchor' => 'end'\n\n # Smaller Line\n xml.line(:x1 => line_x1, :y1 => y + (0.5 * dy), :x2 => line_x2, :y2 => y + (0.5 * dy), :stroke => '#999999') if max < 55\n end\n\n xml.text max, :x => text_x, :y => @options[:margin][:top] + 4, 'stroke-width' => 0.5, 'text-anchor' => 'end'\n # Smaller Line\n xml.line(:x1 => line_x1, :y1 => @options[:margin][:top] + (0.5 * dy), :x2 => line_x2, :y2 => @options[:margin][:top] + (0.5 * dy), :stroke => '#999999') if max < 55\n end\n\n # Labels\n xml.g 'text-anchor' => 'end', 'font-size' => '12px', 'stroke-width' => 0.3 do |xml|\n @labels.each_with_index do |label, index|\n x = (@options[:margin][:left] + (dx * index) + (dx / 2.0)).round\n y = @options[:viewbox][:height] - @options[:margin][:bottom] + 15\n xml.text label, :x => x, :y => y, :transform => \"rotate(-45 #{x} #{y})\"\n end\n end\n\n # Bars\n xml.g 'font-size' => '10px', 'stroke-width' => 0.3 do |xml|\n last_spot = []\n\n @data.each_with_index do |data, data_index|\n data = Array(data)\n width = dx - @options[:padding]\n bar_width = (width / Float(data.size)).round\n\n x = (@options[:margin][:left] + (dx * data_index)).round\n\n # Rectangles\n data.each_with_index do |number, number_index|\n color = if @colors.respond_to? 
:call\n @colors.call(data_index, number_index, @data.size, data.size)\n elsif @colors.class == Array\n first = @colors[data_index % (@colors.size)]\n\n if first.class == Array\n first[number_index % (first.size)]\n else\n first\n end\n else\n @colors\n end\n\n height = ((dy / division) * number).round\n\n bar_x = (x + ((dx - width) / 2.0) + (number_index * bar_width)).round\n bar_y = @options[:viewbox][:height] - @options[:margin][:bottom] - height\n\n\n case @options[:type]\n when :bar then\n xml.rect :fill => color, :stroke => color, 'stroke-width' => 0, :x => bar_x, :width => bar_width, :y => bar_y, :height => height - 1\n when :line then\n if last_spot[number_index]\n xml.line(:x1 => last_spot[number_index][:x], :y1 => last_spot[number_index][:y], :x2 => bar_x, :y2 => bar_y,\n :fill => color, :stroke => color, 'stroke-width' => 2.0)\n end\n xml.circle :cx => bar_x, :cy => bar_y, :fill => color, :stroke => color, :r => bar_width * 1.5\n end\n\n last_spot[number_index] = { :x => bar_x, :y => bar_y }\n end\n end\n\n @data.each_with_index do |data, data_index|\n data = Array(data)\n width = dx - @options[:padding]\n bar_width = (width / Float(data.size)).round\n\n x = (@options[:margin][:left] + (dx * data_index)).round\n\n # Text\n if @options[:bar_text] != :none\n last_bar_height = false\n data.each_with_index do |number, number_index|\n percent_total = data.inject(0) { |total, percent_number| total + percent_number }\n\n if number > 0\n height = ((dy / division) * number).round\n\n bar_x = (x + ((dx - width) / 2.0) + (number_index * bar_width)).round\n text_x = (bar_x + (bar_width / 2.0)).round\n\n bar_y = @options[:viewbox][:height] - @options[:margin][:bottom] - height\n text_y = bar_y - 3\n\n if last_bar_height && (last_bar_height - height).abs < 14\n text_y -= (14 - (height - last_bar_height))\n last_bar_height = false\n else\n last_bar_height = height\n end\n\n label = case @options[:bar_text]\n when :number then number\n when :percent then (100.0 * Float(number) / Float(percent_total)).round.to_s + \"%\"\n end\n\n xml.text label, :x => text_x, :y => text_y, 'text-anchor' => 'middle'\n else\n last_bar_height = false\n end\n end\n end\n end\n end\n\n # Legend\n if @legend\n if @options[:legend] == :right\n legend_x = @options[:viewbox][:width] - (3 * @options[:margin][:right])\n else\n legend_x = (@options[:margin][:left] * 1.5).round\n end\n legend_y = (@options[:margin][:top] / 2) + @options[:margin][:top]\n xml.rect :fill => '#ffffff', :stroke => '#000000', 'stroke-width' => 2, :x => legend_x, :y => legend_y, :width => (2.5 * @options[:margin][:right]), :height => (@legend.size * 15) + 16\n\n @legend.sort.each_with_index do |data, index|\n color, label = data\n xml.rect :fill => color, :stroke => color, 'stroke-width' => 0, :x => legend_x + 10, :y => legend_y + 10 + (index * 15), :width => 35, :height => 10\n xml.text label, :x => legend_x + 55, :y => legend_y + 18 + (index * 15), 'text-anchor' => 'left'\n end\n end\n\n # Yield in case they want to do some custom drawing and have a block ready\n yield(xml, @options) if block_given?\n\n end\n end\n end\n\n output\n end",
"def to_xml\n return nil if title.nil? || body.nil?\n\n xml = \"\"\n xml << \"<d:entry id=\\\"#{self.id}\\\" d:title=\\\"#{self.title}\\\">\\n\"\n @index.each do |i|\n xml << \"\\t<d:index d:value=\\\"#{i}\\\" d:title=\\\"#{title}\\\" \"\n xml << \"d:yomi=\\\"#{yomi}\\\"\" if !self.yomi.nil?\n xml << \"/>\\n\"\n end\n xml << \"\\t<div>\\n\"\n xml << \"\\t\\t#{@body}\\n\"\n xml << \"\\t</div>\\n\"\n xml << \"</d:entry>\\n\"\n\n return xml\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
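A usage note on the xml record above: column names act as mini element paths, where slashes nest elements and a leading @ (or membership in the attributes list) turns the value into an attribute on the current element. The sketch below reproduces that mapping with a plain hash standing in for the DBI::Row; the column names and values are made up for illustration.

require 'rexml/document'

row = { '@id' => 7, 'person/name' => 'Ada', 'person/email' => 'ada@example.com' }

base = REXML::Element.new('row')
row.each do |colpath, val|
  context = base
  parts = colpath.split('/')
  child = parts.pop
  # walk or create the intermediate parent elements, as the builder does
  parts.each do |p|
    context = REXML::XPath.first(context, p) || context.add_element(p)
  end
  if child.start_with?('@')
    context.add_attribute(child[1..-1], val.to_s)  # @-prefixed column becomes an attribute
  else
    context.add_element(child).add_text(val.to_s)  # plain column becomes a nested text element
  end
end

out = ''
base.write(out)
puts out # => <row id='7'><person><name>Ada</name><email>ada@example.com</email></person></row>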
watch comment user id list without `ignore` option | def watch_comment_by_user_ids
self.watch_comment_by_user_actions.where("action_option is null or action_option != ?", "ignore").pluck(:user_id)
end | [
"def watch_comment_by_user_ids\n user_ids = watch_comment_by_user_actions.where(\"action_option is null or action_option != ?\", \"ignore\").pluck(:user_id)\n user_ids += repository.watch_by_user_ids\n user_ids.uniq!\n\n user_ids - unwatch_comment_by_user_ids\n end",
"def notify_watchers(comment_id)\n comment = Comment.find comment_id\n\n article_id = comment.article_id\n favs = Favorite.where(favorable_id: article_id)\n uid = comment.user_id\n ids = []\n favs.each do |f|\n i = f.user_id\n next if uid == i or i == 0\n ids << i\n f.updated_at = Time.now()\n f.save!\n end\n end",
"def watch_comment_status_by_user_id(user_id)\n action = watch_comment_by_user_actions.where(\"user_type = 'User' and user_id = ?\", user_id).take\n return action.action_option == \"ignore\" ? \"ignore\" : \"watch\" if action\n\n repo_action = repository.watch_by_user_actions.where(\"user_type = 'User' and user_id = ?\", user_id).take\n return \"watch\" if repo_action\n \"unwatch\"\n end",
"def commented_on(username, *args)\n path = \"user/#{username}/commented\"\n Rdigg.fetch(path, \"comment\", args)\n end",
"def comments_given(user_id)\n comments = Comment.where(user_id: user_id)\n end",
"def watch\n klass = params[:commentable_type].constantize\n @commentable = klass.find(params[:commentable_id])\n\n authorize! :read, @commentable\n\n if request.post?\n User.create_action(:watch_comment, target: @commentable, user: current_user, action_option: \"watch\")\n else\n User.create_action(:watch_comment, target: @commentable, user: current_user, action_option: \"ignore\")\n end\n end",
"def populate_liked_commented_users(opts = {})\n lkd_usr_ids = [] if opts[:only_comments]\n cmt_usr_ids = [] if opts[:only_likes]\n lks_lmt = opts[:likes_limit] || 5\n cmts_lmt = opts[:comments_limit] || 2\n\n lkd_usr_ids ||= self.likes.desc(:created_at).limit(lks_lmt).only(:user_id).collect(&:user_id)\n cur_usr_id = lkd_usr_ids.delete(current_user.id)\n cmt_usr_ids ||= self.comments.desc(:created_at).limit(cmts_lmt).only(:user_id).collect(&:user_id)\n unless (lkd_usr_ids + cmt_usr_ids).empty?\n usrs = User.where(:_id.in => (lkd_usr_ids + cmt_usr_ids)).only(:id, :username).to_a\n usrs = usrs.group_by(&:id)\n self.liked_user = (cur_usr_id.nil? ? '' : 'You||') + lkd_usr_ids.collect { |uid| usrs[uid].first.username }.join('||')\n self.commented_user = cmt_usr_ids.collect { |uid| usrs[uid].first.username }.join('||')\n end\n end",
"def notify_unlikes(_user)\n _user.anonymous_time_verification = :now\n content.trigger_instant_notification('unlike_comment', notification_data(false, _user), {foreground: true})\n end",
"def get_user_commented_entries(nickname = nil, options = nil)\n nickname ||= @nickname\n nickname or require_api_login\n call_api('feed/user/%s/comments' % URI.encode(nickname), options)['entries']\n end",
"def comment (name, comm, user, pw)\n yt = YouTube::Service.new()\n printvideos vids = yt.search(name,1,1)\n yt.comment(vids[0], comm, YouTube::User.new(user, pw))\nend",
"def find_comments(username, *args)\n path = \"user/#{username}/comments\"\n Rdigg.fetch(path, \"comment\", args)\n end",
"def get_user_comments(user_id,start,count)\n numitems = $r.zcard(\"user.comments:#{user_id}\").to_i\n ids = $r.zrevrange(\"user.comments:#{user_id}\",start,start+(count-1))\n comments = []\n ids.each{|id|\n news_id,comment_id = id.split('-')\n comment = Comments.fetch(news_id,comment_id)\n comments << comment if comment\n }\n [comments,numitems]\n end",
"def watcher_user_ids_with_uniq_ids=(user_ids)\n if user_ids.is_a?(Array)\n user_ids = user_ids.uniq\n end\n send :watcher_user_ids_without_uniq_ids=, user_ids\n end",
"def commenters\n users = []\n self.comments.each do |comment|\n unless comment.user_id.nil?\n user = User.find(comment.user_id)\n if !users.include? user\n users.push(user)\n end\n end\n end\n users\n end",
"def comments(user)\n self.design_review_comments.to_ary.find_all { |comment| comment.user == user }\n end",
"def watch_for_me(m, chan, pass, name)\n create(:watches, { \"chan\" => \"##{chan}\", \"chan_pass\" => pass, \"watch_pattern\" => name, \"created_by\" => m.user.nick })\n refresh_watches\n m.reply \"gerrit notification enabled.\"\n end",
"def commented_on_by_ids\n object.available_comments.map(&:created_by_id)\n end",
"def set_user_id\n User.stats.limit(2).each do |user|\n self.user_id = user['id']\n\n # prevents us from setting back to the same user\n if self.changes['user_id'][0] != self.changes['user_id'][0]\n break\n end\n end\n end",
"def like_comment(id = nil, user_id = nil)\n comment = Comment.where(:id => id).first\n if not comment.nil?\n #check to make sure user cant like comment more than once\n #users_who_liked is a string which is a comma-separated list of ids of users who like a comment\n\n users = comment.users_who_liked\n if not users.nil?\n users = comment.users_who_liked.split(\",\")\n end\n\n if not user_id.nil? and (users.nil? or not users.include?(user_id.to_s))\n comment.numlikes += 1\n users = users.join(\",\")\n users += \",\" + user_id.to_s\n comment.users_who_liked = users\n comment.save\n return SUCCESS\n end\n end\n return FAILED\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This action is called on an ajax autocomplete call. It checks if the user has rights to view the data. URL parameters: [table] Table (collection) model name in lower case indicating the table which will be searched. [id] Name of the id key field that will be returned. Default is '_id'. [input] Search data entered in the input field. [search] When passed without a dot it defines the field name on which the search will be performed. When passed with a dot, class_method.method_name is assumed. The method name will be parsed and any class with such a class method can be evaluated. The class method must accept the input parameter and return an array [ [_id, value], ... ] which will be used in the autocomplete field. Return: JSON array [label, value, id] of the first 20 documents that conform to the query. | def autocomplete
# return '' unless session[:edit_mode] > 0 #
return render text: t('drgcms.not_authorized') unless dc_user_can(DcPermission::CAN_VIEW)
# TODO Double check if previous line works as it should.
table = params['table'].classify.constantize
id = params['id'] || '_id'
# call method in class if search parameter has . This is for user defined searches
# result must be returned as array of [id, search_field_value]
a = if params['search'].match(/\./)
name, method = params['search'].split('.')
table.send(method, params['input']).inject([]) do |r,v|
r << { label: v[0], value: v[0], id: v[1].to_s }
end
# simply search which will search and return field_name defined in params['search']
else
table.where(params['search'] => /#{params['input']}/i).limit(20).inject([]) do |r,v|
r << { label: v[params['search']], value: v[params['search']], id: v.id.to_s }
end
end
render inline: a.to_json, formats: 'js'
end | [
"def autocomplete\r\n # table parameter must be defined. If not, get it from search parameter\r\n if params['table'].nil? && params['search'].match(/\\./)\r\n name = params['search'].split('.').first\r\n params['table'] = name.underscore\r\n end\r\n if params['table'].match('_control')\r\n # it must be at least logged on\r\n return render json: { label: t('drgcms.not_authorized') } unless dc_user_can(DcPermission::CAN_VIEW, 'dc_memory')\r\n else\r\n return render json: { label: t('drgcms.not_authorized') } unless dc_user_can(DcPermission::CAN_VIEW)\r\n end\r\n\r\n table = params['table'].classify.constantize\r\n input = params['input'].gsub(/\\(|\\)|\\[|\\]|\\{|\\|\\.|\\,}/, '')\r\n # call method in class if search parameter contains . This is for user defined searches\r\n a = if params['search'].match(/\\./)\r\n #method, additional_params = params['search'].split('.')\r\n #data = additional_params ? table.send(method, input, additional_params, self) : table.send(method, input)\r\n name, method = params['search'].split('.')\r\n data = table.send(method, input)\r\n data.map do |v|\r\n { label: v[0], value: v[0], id: (v[1] || v[0]).to_s }\r\n end\r\n # will search and return field_name defined in params['search']\r\n else\r\n table.where(params['search'] => /#{input}/i).limit(20).map do |v|\r\n { label: v[params['search']], value: v[params['search']], id: v.id.to_s }\r\n end\r\n end\r\n\r\n render json: a\r\nend",
"def autocomplete\n class_name=AUTO_COMPLETION_MAPPING[params[:hash].to_sym][:class_name]\n query =AUTO_COMPLETION_MAPPING[params[:hash].to_sym][:search_query]\n search_field=AUTO_COMPLETION_MAPPING[params[:hash].to_sym][:search_field]\n model_name = class_name.class.to_s == \"String\" ? class_name.constantize : class_name\n result=auto_completion_json(params[:q],query,search_field,model_name,params[:hash])\n render :text => result\n end",
"def search\n authorize Student\n\n respond_to do |format|\n term = params[:term] ? params[:term].strip : nil\n if term\n # TODO: replace with scoped_search?\n students = Student.search(term).order(:first_name, :family_name).limit 15\n format.json { render json: students, only: [:id, :first_name, :family_name, :email] }\n else\n format.json { head :ok } # blank json response\n end\n end\n end",
"def autocomplete_on\n\t\tconditions = if params[:name]\n [\"name LIKE :name\", { :name => \"%#{params['name']}%\"} ]\n else\n {}\n end\n\t\t @objects = params[:model_name].classify.constantize.find(:all, :conditions => conditions)\n\t\t render :text => '<ul>'+ @objects.map{ |e| '<li>' + e.name + '</li>' }.join(' ')+'</ul>'\n\tend",
"def autocomplete\n\t\tquery_params = QueryFormat.autocomplete_format()\n\t\tbegin\n\t\t\tQueryFormat.transform_raw_parameters(params)\n\t\t\tquery = QueryFormat.create_solr_query(query_params, params, request.remote_ip)\n\t\t\tquery['field'] = \"content_auto\" if query['field'].blank?\n\t\t\tis_test = Rails.env == 'test' ? :test : :live\n\t\t\tis_test = :shards if params[:test_index]\n\t\t\tsolr = Solr.factory_create(is_test)\n\t\t\tmax = query['max'].to_i\n\t\t\tquery.delete('max')\n\t\t\twords = solr.auto_complete(query)\n\t\t\twords.sort! { |a,b| b[:count] <=> a[:count] }\n\t\t\twords = words[0..(max-1)]\n\t\t\t@results = words.map { |word|\n\t\t\t\t{ :item => word[:name], :occurrences => word[:count] }\n\t\t\t}\n\n\t\t\trespond_to do |format|\n\t\t\t\tformat.html # index.html.erb\n\t\t\t\tformat.json { render json: { results: @results } }\n\t\t\t\tformat.xml\n\t\t\tend\n\t\trescue ArgumentError => e\n\t\t\trender_error(e.to_s)\n\t\trescue SolrException => e\n\t\t\trender_error(e.to_s, e.status())\n\t\trescue Exception => e\n\t\t\tExceptionNotifier.notify_exception(e, :env => request.env)\n\t\t\trender_error(\"Something unexpected went wrong.\", :internal_server_error)\n\t\tend\n\tend",
"def auto_complete\n @query = params[:auto_complete_query]\n @auto_complete = hook(:auto_complete, self, :query => @query, :user => current_user)\n if @auto_complete.empty?\n @auto_complete = self.controller_name.classify.constantize.my(:user => current_user, :limit => 10).search(@query)\n else\n @auto_complete = @auto_complete.last\n end\n session[:auto_complete] = self.controller_name.to_sym\n render :template => \"admin/common/auto_complete\", :layout => nil\n end",
"def auto_complete\n @query = params[:auto_complete_query]\n @auto_complete = hook(:auto_complete, self, :query => @query, :user => @current_user)\n if @auto_complete.empty?\n @auto_complete = self.controller_name.classify.constantize.my(:user => @current_user, :limit => 10).search(@query)\n else\n @auto_complete = @auto_complete.last\n end\n session[:auto_complete] = self.controller_name.to_sym\n render :template => \"common/auto_complete\", :layout => nil\n end",
"def json_index_searches_user_name_by_user_id_with_limit_and_offset_and_keyword\n\n #respond_to do |format|\n # if params[:keyword]\n #\n # keyword = params[:keyword]\n # keyword.downcase!\n #\n # # limit the length of the string to avoid injection\n # if keyword.length < 12\n #\n # @user = User.\n # select('id,name,image_name').\n # where('lower(name) LIKE ? and id != ?', \"%#{keyword}%\",params[:user_id]).\n # limit(params[:limit]).\n # offset(params[:offset])\n #\n #\n #\n # @search = Hash.new\n # @user.each do |i|\n #\n #\n #\n # @search[i.id] = Hash.new()\n # @search[i.id]['user'] = i\n # @search[i.id]['friend'] = Friend.select('user_id').where(user_id:params[:user_id],user_id_friend: i.id)\n # @search[i.id]['request'] = FriendRequest.select('user_id').where(user_id:params[:user_id],user_id_requested: i.id)\n # @search[i.id]['requested'] = FriendRequest.select('user_id').where(user_id_requested:params[:user_id],user_id: i.id)\n # end\n #\n # format.json { render json: @search\n #\n # }\n #\n # else\n # @user = nil\n # format.json { render json: @user }\n # end\n # else\n # @user = nil\n # format.json { render json: @user }\n # end\n #end\n end",
"def student_auto_complete_field(object, method, tag_options = {}, completion_options = {})\n tag_options = tag_options.merge({:indicator => 'global'})\n \tmodel_auto_completer \"student_search\", \"\", \"#{object}[#{method}]\", 0, \n \t\t\t\t\t\t\t{ :submit_on_return => true, \n \t\t\t\t\t\t\t :append_random_suffix => (tag_options[:append_random_suffix] || false), \n \t\t\t\t\t\t\t :url => auto_complete_for_student_anything_admin_students_url,\n \t\t\t\t\t\t\t :after_update_element => \"function(tf, item, hf, id) { tf.value = tf.value.strip() }\" },\n \t\t\t\t\t\t\t{ :autocomplete => false, :accesskey => 'f' },\n \t\t\t\t\t\t\t{ :skip_style => true, :indicator => tag_options[:indicator] }\n # \n # observe_field('model_auto_completer_hf', :frequency => 0.5, :function => \"$('search-form').submit()\" )\n end",
"def search\n return @search\n end",
"def auto_complete\n\t\tobjects = Lecturer.simple_search(params[:term]).take(2)\n\t\tauto_complete = []\n\t\tauto_complete = auto_complete.concat(objects.map(&:name)) unless objects.nil?\n\t\tobjects = Student.simple_search(params[:term]).take(2)\n\t\tauto_complete = auto_complete.concat(objects.map(&:name)) unless objects.nil?\n\t\tobjects = TeachingAssistant.simple_search(params[:term]).take(2)\n\t\tauto_complete = auto_complete.concat(objects.map(&:name)) unless objects.nil?\n\t\tobjects = Course.simple_search(params[:term]).take(2)\n\t\tauto_complete = auto_complete.concat(objects.map(&:name)) unless objects.nil?\n\t\trender json: auto_complete\n\tend",
"def execute\n @model_class.search(KEYWORD)\n end",
"def roar_auto_complete(fields, options={})\n options = {:limit=>10, :order=>nil}.merge(options)\n roar_action(\"auto_complete\", options) do\n q = model_class.query\n s = \"%#{params[model_symbol]}%\"\n [fields].flatten.inject(q.or) { |query,field| query << q.like(field, s) }\n @records = q.find(:limit=>options[:limit], :order=>options[:order])\n render :roar_partial => \"auto_complete\"\n end\n end",
"def search\n @search = Ransack::Search.new(Student)\n end",
"def search_model\n end",
"def autocomplete\n query_params = QueryFormat.autocomplete_format()\n begin\n QueryFormat.transform_raw_parameters(params)\n query = QueryFormat.create_solr_query(query_params, params, request.remote_ip)\n query['field'] = \"content_auto\"\n is_test = Rails.env == 'test' ? :test : :live\n is_test = :shards if params[:test_index]\n solr = Solr.factory_create(is_test)\n max = query['max'].to_i\n query.delete('max')\n words = solr.auto_complete(query)\n words.sort! { |a,b| b[:count] <=> a[:count] }\n words = words[0..(max-1)]\n @results = words.map { |word|\n { :item => word[:name], :occurrences => word[:count] }\n }\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml\n end\n rescue ArgumentError => e\n render_error(e.to_s)\n rescue SolrException => e\n render_error(e.to_s, e.status())\n rescue Exception => e\n ExceptionNotifier::Notifier.exception_notification(request.env, e).deliver\n render_error(\"Something unexpected went wrong.\", :internal_server_error)\n end\n end",
"def autosuggest(object, name, options={})\n options[:display] ||= name\n options[:limit] ||= 10\n options[:name] = name\n options[:search_in] ||= [name]\n options[:order] ||= \"#{options[:search_in].first} ASC\"\n\n define_method \"autosuggest_#{object}_#{name}\" do\n options.merge!(:query => params[:query], :object => object.to_s.camelize.constantize)\n query = ''\n values = []\n\n for column in options[:search_in]\n query += \"#{column} ILIKE ? OR \"\n values.push(\"#{options[:query]}%\")\n end\n results = options[:object].where(query[0..-4], *values).order(options[:order]).limit(options[:limit])\n render :json => Yajl::Encoder.encode(results.map{|r| {:name => r.send(options[:display]), :value => r.id.to_s}})\n end\n end",
"def ajax_auto_complete\n type = params[:type].to_s\n instr = params[:id].to_s\n letter = ' '\n scientific = false\n user = login_for_ajax\n if user\n scientific = (user.location_format == :scientific)\n end\n @items = []\n if instr.match(/^(\\w)/)\n letter = $1\n case type\n\n when 'location'\n @items = Observation.connection.select_values(%(\n SELECT DISTINCT `where` FROM observations\n WHERE `where` LIKE '#{letter}%' OR\n `where` LIKE '% #{letter}%'\n )) + Location.connection.select_values(%(\n SELECT DISTINCT `name` FROM locations\n WHERE `name` LIKE '#{letter}%' OR\n `name` LIKE '% #{letter}%'\n ))\n if scientific\n @items.map! {|i| Location.reverse_name(i)}\n end\n @items.sort!\n\n when 'name'\n @items = Name.connection.select_values %(\n SELECT text_name FROM names\n WHERE text_name LIKE '#{letter}%'\n AND correct_spelling_id IS NULL\n ORDER BY text_name ASC\n )\n\n when 'name2'\n @items = Name.connection.select_values(%(\n SELECT text_name FROM names\n WHERE text_name LIKE '#{instr}%'\n AND correct_spelling_id IS NULL\n ORDER BY text_name ASC\n )).sort_by {|x| (x.match(' ') ? 'b' : 'a') + x}\n # This sort puts genera and higher on top, everything else on bottom,\n # and sorts alphabetically within each group.\n letter = ''\n\n when 'project'\n @items = Project.connection.select_values %(\n SELECT title FROM projects\n WHERE title LIKE '#{letter}%'\n OR title LIKE '%#{letter}%'\n ORDER BY title ASC\n )\n\n when 'species_list'\n @items = SpeciesList.connection.select_values %(\n SELECT title FROM species_lists\n WHERE title LIKE '#{letter}%'\n OR title LIKE '%#{letter}%'\n ORDER BY title ASC\n )\n\n when 'user'\n @items = User.connection.select_values %(\n SELECT CONCAT(users.login, IF(users.name = \"\", \"\", CONCAT(\" <\", users.name, \">\")))\n FROM users\n WHERE login LIKE '#{letter}%'\n OR name LIKE '#{letter}%'\n OR name LIKE '% #{letter}%'\n ORDER BY login ASC\n )\n end\n end\n\n # Result is the letter requested followed by results, one per line. (It\n # truncates any results that have newlines in them -- that's an error.)\n render(:layout => false, :inline => letter +\n %(<%= @items.uniq.map {|n| h(n.gsub(/[\\r\\n].*/,'')) + \"\\n\"}.join('') %>))\n end",
"def search_emp\n @employee = Employee.search2(params[:advance_search], params[:search])\n authorize! :read, Employee\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Action for restoring document data from a journal document. | def restore_from_journal
# Only administrators can perform this operation
unless dc_user_has_role('admin')
return render inline: { 'msg_info' => (t ('drgcms.not_authorized')) }.to_json, formats: 'js'
end
# selected fields to hash
restore = {}
params[:select].each {|key,value| restore[key] = value if value == '1' }
result = if restore.size == 0
{ 'msg_error' => (t ('drgcms.dc_journal.zero_selected')) }
else
journal_doc = DcJournal.find(params[:id])
# update hash with data to be restored
JSON.parse(journal_doc.diff).each {|k,v| restore[k] = v.first if restore[k] }
# determine tables and document ids
tables = journal_doc.tables.split(';')
ids = (journal_doc.ids.blank? ? [] : journal_doc.ids.split(';') ) << journal_doc.doc_id
# find document
doc = nil
tables.each_index do |i|
doc = if doc.nil?
(tables[i].classify.constantize).find(ids[i])
else
doc.send(tables[i].pluralize).find(ids[i])
end
end
# restore and save values
restore.each { |field,value| doc.send("#{field}=",value) }
doc.save
# TODO Error checking
{ 'msg_info' => (t ('drgcms.dc_journal.restored')) }
end
render inline: result.to_json, formats: 'js'
end | [
"def restore_from_journal\r\n # Only administrators can perform this operation\r\n unless dc_user_has_role('admin')\r\n return render plain: { 'msg_info' => (t ('drgcms.not_authorized')) }.to_json\r\n end\r\n # selected fields to hash\r\n restore = {} \r\n params[:select].each { |key,value| restore[key] = value if value == '1' }\r\n result = if restore.size == 0\r\n { 'msg_error' => (t ('drgcms.dc_journal.zero_selected')) }\r\n else\r\n journal_doc = DcJournal.find(params[:id])\r\n # update hash with data to be restored\r\n JSON.parse(journal_doc.diff).each {|k,v| restore[k] = v.first if restore[k] }\r\n # determine tables and document ids\r\n tables = journal_doc.tables.split(';')\r\n ids = (journal_doc.ids.blank? ? [] : journal_doc.ids.split(';') ) << journal_doc.doc_id\r\n # find document\r\n doc = nil\r\n tables.each_index do |i|\r\n doc = if doc.nil?\r\n (tables[i].classify.constantize).find(ids[i])\r\n else\r\n doc.send(tables[i].pluralize).find(ids[i])\r\n end\r\n end\r\n # restore and save values\r\n restore.each { |field,value| doc.send(\"#{field}=\",value) }\r\n doc.save\r\n # TODO Error checking\r\n { 'msg_info' => (t ('drgcms.dc_journal.restored')) }\r\n end\r\n render plain: result.to_json\r\nend",
"def restore\n\n @document = @library.documents.find(params[:id])\n\n Document.transaction do\n @document.soft_restore(@active_user)\n end\n\n columns = DocumentsHelper.columns_by_doctype(@document)\n\n respond_to do |format|\n format.html { redirect_to @document, notice: 'Document was successfully restored.' }\n format.json { render json: @document.to_json(:only => columns) }\n end\n\n end",
"def restore_document\n @document = Document.find(params[:id])\n if @document.update_attributes(:is_deleted => false)\n redirect_to admin_documents_url, notice: 'Document was successfully restored.' \n \n else \n redirect_to admin_documents_url, notice: 'Document was successfully restored.' \n end\n \n end",
"def record_restore\n self.paper_trail_event = 'restore'\n record_update(true) # NOTE: Future versions of paper_trail call create\n clear_version_instance!\n self.paper_trail_event = nil\n end",
"def restore\n if @@backup.save\n flash[:type] = 'success'\n flash[:title] = t(:successful_restore_todo, :todo => @@backup.title)\n @@backup = nil\n redirect_to todos_path\n else\n flash[:type] = 'error'\n flash[:title] = t(:unsuccessful_restore_todo)\n redirect_to todos_path\n end\n end",
"def restore\n note = current_user.notes.deleted.find(params[:id])\n note.deleted = false\n note.save\n redirect_to trashed_notes_path, notice: \"Note successfully restored.\"\n end",
"def restore\n @auction = Auction.find(params[:id])\n @auction.restore_auction\n\n redirect_to edit_auction_path, notice: 'Auction was successfully restored.'\n end",
"def restore_archive\n end",
"def restore\n @comment.restore!(current_user.id)\n\n flash[:comment] = { status: 'success', content: \"Comment restored.\" }\n redirect_to request.referer || @comment.paper\n end",
"def restore!\n restore\n save!(validate: false)\n end",
"def restore!\n record = self.restore\n record.save!\n self.destroy\n return record\n end",
"def restore\n update(archived_at: nil)\n end",
"def restore\n contact.restore!\n render action: :update\n end",
"def restore(id)\n\t\tend",
"def restore\n milestone.restore!\n render action: :update\n end",
"def restore_archive\n file_id = params[:id]\n file = Archive.find_by(id:file_id)\n file.is_deleted=false\n file.save\n redirect_to archives_show_path\n end",
"def restore\n self.update_attributes(archived: false)\n end",
"def restore\n project.restore!\n render action: :update\n end",
"def revise\n @document.revise!\n\n respond_to do |format|\n format.html { redirect_to @document, notice: t('view.documents.revised') }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Paste data from the clipboard into a text_area and update documents in the destination database. This action is called twice: the first time to display the text_area field, and the second time as an AJAX call to process the data. | def paste_clipboard
# Only administrators can perform this operation
return render(text: t('drgcms.not_authorized') ) unless dc_user_has_role('admin')
result = ''
respond_to do |format|
# just open new window to same url and come back with html request
format.html { return render('paste_clipboard', layout: 'cms') }
format.json {
table, id, ids = nil
params[:data].split("\n").each do |line|
line.chomp!
next if line.size < 5 # empty line. Skip
begin
if line[0] == '[' # id(s)
result << "<br>#{line}"
line = line[/\[(.*?)\]/, 1] # just what is between []
table, id, ids = line.split(',')
elsif line[0] == '{' # document data
result << process_document(line, table, id, ids)
end
rescue Exception => e
result << " Runtime error. #{e.message}\n"
break
end
end
}
end
dc_render_ajax(div: 'result', value: result )
end | [
"def paste_clipboard\r\n # Only administrators can perform this operation\r\n return render(plain: t('drgcms.not_authorized') ) unless dc_user_can(DcPermission::CAN_ADMIN,'dc_site')\r\n\r\n result = ''\r\n respond_to do |format|\r\n # just open new window to same url and come back with html request\r\n format.html { return render('paste_clipboard', layout: 'cms') }\r\n format.json {\r\n table, id, ids = nil\r\n params[:data].split(\"\\n\").each do |line|\r\n line.chomp!\r\n next if line.size < 5 # empty line. Skip\r\n\r\n begin\r\n if line[0] == '[' # id(s)\r\n result << \"<br>#{line}\"\r\n line = line[/\\[(.*?)\\]/, 1] # just what is between []\r\n table, id, ids = line.split(',')\r\n elsif line[0] == '{' # document data\r\n result << process_document(line, table, id, ids)\r\n end\r\n rescue Exception => e \r\n result << \" Runtime error. #{e.message}\\n\"\r\n break\r\n end\r\n end\r\n }\r\n end\r\n dc_render_ajax(div: 'result', value: result )\r\nend",
"def paste\n # PUNDIT_REVIEW_AUTHORIZE\n # PUNDIT_CHOOSE_AUTHORIZE\n # no authorization needed ...\n # authorize Investigation\n # authorize @investigation\n # authorize Investigation, :new_or_create?\n # authorize @investigation, :update_edit_or_destroy?\n if @investigation.changeable?(current_visitor)\n @original = clipboard_object(params)\n if (@original)\n @component = @original.deep_clone :no_duplicates => true, :never_clone => [:uuid, :updated_at,:created_at], :include => {:sections => :pages}\n if (@component)\n # @component.original = @original\n @container = params[:container] || 'investigation_activities_list'\n @component.name = \"copy of #{@component.name}\"\n @component.deep_set_user current_visitor\n @component.investigation = @investigation\n @component.save\n end\n end\n end\n\n render :update do |page|\n page.insert_html :bottom, @container, render(:partial => 'activity_list_item', :locals => {:activity => @component})\n page.sortable :investigation_activities_list, :handle=> 'sort-handle', :dropOnEmpty => true, :url=> {:action => 'sort_activities', :params => {:investigation_id => @investigation.id }}\n page[dom_id_for(@component, :item)].scrollTo()\n page.visual_effect :highlight, dom_id_for(@component, :item)\n end\n end",
"def send_to_clipboard\n\tClipboard.copy(@good_text)\nend",
"def paste\n backend.paste text\n end",
"def copy_clipboard\r\n # Only administrators can perform this operation\r\n return render(plain: t('drgcms.not_authorized') ) unless dc_user_can(DcPermission::CAN_ADMIN,'dc_site')\r\n\r\n respond_to do |format|\r\n # just open new window to same url and come back with html request\r\n format.json { dc_render_ajax(operation: 'window', url: request.url ) }\r\n \r\n format.html do\r\n table = CmsHelper.table_param(params)\r\n doc = dc_find_document(table, params[:id], params[:ids])\r\n text = '<style>body {font-family: monospace;}</style><pre>'\r\n text << \"JSON:<br>[#{table},#{params[:id]},#{params[:ids]}]<br>#{doc.as_document.to_json}<br><br>\"\r\n text << \"YAML:<br>#{doc.as_document.to_hash.to_yaml.gsub(\"\\n\", '<br>')}</pre>\"\r\n render plain: text\r\n end\r\n end \r\nend",
"def do_clipboard_paste\n dat = if Wx::PLATFORM == \"WXMAC\" \n # XXX i feel dirty\n `pbpaste`\n else\n dobj=RawDataObject.new\n Wx::Clipboard.open {|clip| clip.fetch dobj}\n dobj.raw_data\n end\n\n self.gui_set_value(self.cur_pos, dat) if dat and dat.size > 0\n end",
"def copy_to_clipboard\n begin\n @element = Element.find(params[:id])\n session[:clipboard] = {}\n session[:clipboard][:method] = params[:method]\n session[:clipboard][:element_id] = @element.id\n if session[:clipboard][:method] == \"move\"\n @element.page_id = nil\n @element.save!\n end\n rescue\n log_error($!)\n render :update do |page|\n Alchemy::Notice.show_via_ajax(page, _(\"element_%{name}_not_moved_to_clipboard\") % {:name => @element.display_name}, :error)\n end\n end\n end",
"def paste_at_cursor_position()\n\t\tmodel.insert_text_at_cursor(@clipboard) \n\tend",
"def refreshPaste(iSelection)\n lPasteEnabled = false\n lErrors = []\n if (@Controller.Clipboard_CopyMode != nil)\n lLocalSelection = nil\n if (@Controller.Clipboard_CopyID == @Controller.CopiedID)\n lLocalSelection = @Controller.CopiedSelection\n end\n lPasteEnabled, lErrors = isPasteAuthorized?(\n @Controller,\n iSelection,\n @Controller.Clipboard_CopyMode,\n lLocalSelection,\n @Controller.Clipboard_SerializedSelection\n )\n end\n @Controller.setMenuItemGUIEnabled(@EditMenu, Wx::ID_PASTE, lPasteEnabled)\n if (lErrors.empty?)\n @Controller.setMenuItemGUITitle(@EditMenu, Wx::ID_PASTE, nil)\n else\n @Controller.setMenuItemGUITitle(@EditMenu, Wx::ID_PASTE, \"Unable to paste: #{lErrors.join(' & ')}\")\n end\n lButton = @ToolBar.find_by_id(Wx::ID_PASTE)\n if (lButton != nil)\n @Controller.setToolbarButtonGUIEnabled(lButton, Wx::ID_PASTE, lPasteEnabled)\n if (lErrors.empty?)\n @Controller.setToolbarButtonGUITitle(lButton, Wx::ID_PASTE, nil)\n else\n @Controller.setToolbarButtonGUITitle(lButton, Wx::ID_PASTE, \"Unable to paste: #{lErrors.join(' & ')}\")\n end\n end\n end",
"def notifyClipboardContentChanged\n updateCommand(Wx::ID_PASTE) do |ioCommand|\n if (@Clipboard_CopyMode == nil)\n # The clipboard has nothing interesting for us\n # Deactivate the Paste command\n ioCommand[:Enabled] = false\n ioCommand[:Title] = 'Paste'\n # Cancel eventual Copy/Cut pending commands\n notifyCancelCopy\n # Notify everybody\n notifyRegisteredGUIs(:onClipboardContentChanged)\n elsif (@Clipboard_CopyMode == Wx::ID_DELETE)\n # Check that this message is adressed to us for real (if many instances of PBS are running, it is possible that some other instance was cutting things)\n if (@CopiedID == @Clipboard_CopyID)\n # Here we have to take some action:\n # Delete the objects marked as being 'Cut', as we got the acknowledge of pasting it somewhere.\n if (@CopiedMode == Wx::ID_CUT)\n if (!@Clipboard_AlreadyProcessingDelete)\n # Ensure that the loop will not come here again for this item.\n @Clipboard_AlreadyProcessingDelete = true\n executeCommand(Wx::ID_DELETE, {\n :parentWindow => nil,\n :selection => @CopiedSelection,\n :deleteTaggedShortcuts => false,\n :deleteOrphanShortcuts => false\n })\n # Then empty the clipboard.\n Wx::Clipboard.open do |ioClipboard|\n ioClipboard.clear\n end\n # Cancel the Cut pending commands.\n notifyCutPerformed\n notifyRegisteredGUIs(:onClipboardContentChanged)\n @Clipboard_AlreadyProcessingDelete = false\n end\n else\n log_bug 'We have been notified of a clipboard Cut acknowledgement, but no item was marked as to be Cut.'\n end\n end\n # Deactivate the Paste command\n ioCommand[:Enabled] = false\n ioCommand[:Title] = 'Paste'\n else\n lCopyName = nil\n case @Clipboard_CopyMode\n when Wx::ID_CUT\n lCopyName = 'Move'\n when Wx::ID_COPY\n lCopyName = 'Copy'\n else\n log_bug \"Unsupported copy mode from the clipboard: #{@Clipboard_CopyMode}.\"\n end\n if (@Clipboard_CopyID != @CopiedID)\n # Here, we have another application of PBS that has put data in the clipboard. It is not us anymore.\n notifyCancelCopy\n end\n if (lCopyName != nil)\n # Activate the Paste command with a cool title\n ioCommand[:Enabled] = true\n ioCommand[:Title] = \"Paste #{@Clipboard_SerializedSelection.getDescription} (#{lCopyName})\"\n else\n # Deactivate the Paste command, and explain why\n ioCommand[:Enabled] = false\n ioCommand[:Title] = \"Paste (invalid type #{@Clipboard_CopyMode}) - Bug ?\"\n end\n notifyRegisteredGUIs(:onClipboardContentChanged)\n end\n end\n end",
"def paste\n # no authorization applied as the method must always render\n if @section.changeable?(current_visitor)\n @original = clipboard_object(params)\n if @original\n @container = params[:container] || 'section_pages_list'\n if @original.class == Page\n @component = @original.duplicate\n else\n @component = @original.deep_clone :no_duplicates => true, :never_clone => [:uuid, :updated_at,:created_at]\n @component.name = \"copy of #{@original.name}\"\n end\n if (@component)\n # @component.original = @original\n @component.section = @section\n @component.save\n end\n @component.deep_set_user current_visitor\n end\n end\n render :update do |page|\n page.insert_html :bottom, @container, render(:partial => 'page_list_item', :locals => {:page => @component})\n page.sortable :section_pages_list, :handle=> 'sort-handle', :dropOnEmpty => true, :url=> {:action => 'sort_pages', :params => {:section_id => @section.id }}\n page[dom_id_for(@component, :item)].scrollTo()\n page.visual_effect :highlight, dom_id_for(@component, :item)\n end\n end",
"def paste\n buffer_current.paste @clipboard.clip\n end",
"def paste(text)\r\n @options[\"api_paste_code\"] = text\r\n @options[\"api_option\"] = \"paste\"\r\n Net::HTTP.post_form(URI.parse('http://pastebin.com/api/api_post.php'),\r\n @options).body\r\n end",
"def paste\n\t\t\t$ruvim.insert $ruvim.buffers[:copy].data\n\t\t\t$ruvim.editor.redraw\n\t\tend",
"def create\n fail \"no adult\" unless ENV[\"EZII_ADULT_PASSWORD\"] == params[\"ezii_adult_password\"]\n fail if UserTextCopy.last.pasting_started\n @ezii_adult_verify_and_signin = EziiAdultVerifyAndSignin.new(ezii_adult_verify_and_signin_params)\n\n @user_text_copy = UserTextCopy.new(user_text_copy_params)\n\n @user_text_copy.ezii_adult_verify_and_signin = @ezii_adult_verify_and_signin\n respond_to do |format|\n if @user_text_copy.save!\n\n # §\n §(LIMIT_SQL_SELECT_TO_TEN_TOTAL_RECORDS) do\n copies = []\n §⚕(LIMIT_SQL_SELECT_TO_TEN_TOTAL_RECORDS) do\n # copies = @user_text_copy.copies.limit(10)\n ladder\n end\n # byebug \n # ⚕\n ActionCable.server.broadcast(\n \"all\",\n copies: copies,\n pastes: copies.reverse\n )\n end\n format.html { redirect_to @user_text_copy, notice: 'User text copy was successfully created.' }\n format.json { render :show, status: :created, location: @user_text_copy }\n else\n format.html { render :new }\n format.json { render json: @user_text_copy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def paste\n authorize @page, :update?\n @original = clipboard_object(params)\n if (@original)\n # let some embeddables define their own means to save\n if @original.respond_to? :duplicate\n @component = @original.duplicate\n else\n @component = @original.deep_clone :no_duplicates => true, :never_clone => [:uuid, :updated_at,:created_at]\n end\n if (@component)\n @container = params['container'] || dom_id_for(@page, :elements_container)\n @component.name = \"copy of #{@component.name}\"\n @component.user = @page.user\n @component.pages << @page\n @component.save\n @element = @page.element_for(@component)\n @element.user = @component.user\n @element.save\n end\n end\n if @element.nil?\n logger.warn \"Paste failed. original: #{@original} container: #{@container} component: #{@component} element: #{@element}\"\n else\n render :update do |page|\n page.insert_html :bottom, @container, render(:partial => 'element_container', :locals => {:edit => true, :page_element => @element, :component => @component, :page => @page })\n page.sortable 'elements_container', :url=> {:action => 'sort_elements', :params => {:page_id => @page.id }}\n page[dom_id_for(@component, :item)].scrollTo()\n page.visual_effect :highlight, dom_id_for(@component, :item)\n end\n end\n end",
"def update\n respond_to do |format|\n if @copypaste.update(copypaste_params)\n format.html { redirect_to @copypaste, notice: 'Copypaste was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @copypaste.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @paste.update(paste_params)\n format.html { redirect_to @paste, notice: 'Paste was successfully updated.' }\n format.json { render :show, status: :ok, location: @paste }\n else\n format.html { render :edit }\n format.json { render json: @paste.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @paste = Paste.find(params[:id])\n\n respond_to do |format|\n if @paste.update_attributes(params[:paste])\n format.html { redirect_to @paste, :notice => 'Paste was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @paste.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Clears all session data related to login. | def clear_login_data
session[:edit_mode] = 0
session[:user_id] = nil
session[:user_name] = nil
session[:user_roles] = nil
cookies.delete :remember_me
end | [
"def clear_login_data\n self.account = nil\n self.timeout_at = nil\n self.logged_in = false\n end",
"def clear\n @session.delete :uid\n @session.delete :ulogin\n end",
"def clear_sessions \n sessions.each do |key, session|\n logger.info \"Closing: #{key}\"\n session.close\n end \n sessions.clear \n reset_password\n end",
"def clear_session\n session.clear\n end",
"def clear_session\n Mack::SessionStore.expire_all\n end",
"def logout\n @login_session = nil\n end",
"def clear_session\n session[:user_id] = nil\n cookies[:user_id] = nil\n cookies[:password_hash] = nil\n end",
"def clear_session\n session[:int_key] = nil\n session[:email] = nil\n session[:password] = nil\n session[:account_id] = nil\n redirect_to root_url\n end",
"def clear\n @cookies.delete :uid\n @cookies.delete :ulogin\n end",
"def sign_out_all_sessions\r\n\t\tcurrent_user.sessions.each {|s| s.destroy}\r\n\t\tsession[:token] = nil\r\n\tend",
"def clear_session_keys\n ActiveRecord::Base.clear_session_keys\n end",
"def sessions_reset\n self.sessions_flush\n @sessions = {}\n end",
"def clear_session_keys() #:nodoc:\n @@session_keys.clear\n end",
"def clear_authentication(session)\n session[:usergrid_user_id] = nil\n session[:usergrid_auth_token] = nil\n clear_thread_context(session)\n end",
"def sign_out_all_sessions\r\n\t\tcurrent_user.sessions.each {|s| s.destroy}\r\n\t\tsession[:token] = nil\r\n\t\trender json: {}\r\n\tend",
"def clear_login(user, pass)\n @username = user\n @password = pass\n self.login\n end",
"def reset\n @session = nil\n end",
"def log_out\n set_cookies nil\n @modhash = nil\n @userid = nil\n @username = nil\n end",
"def reset_session\n super\n user_session.try(:destroy)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Initializes various variables when a new Crawler object is instantiated | def initialize(site)
puts "Rcrawl Version #{VERSION} initializing..."
@links_to_visit = Array.new
@visited_links = Array.new
@external_links = Array.new
@raw_html = Hash.new
@rules = RobotRules.new('Rcrawl')
@user_agent = "Rcrawl/#{VERSION} (http://rubyforge.org/projects/rcrawl/)"
@sites = Hash.new
@errors = Hash.new
@meta = Hash.new
@site = URI.parse(site) || raise("You didn't give me a site to crawl")
@links_to_visit << site
puts "Ready to crawl #{site}"
end | [
"def initialize (params = {})\t\t\n\t\t@verbose=params.fetch(:verbose, false)\n\t\t@http_timeout=params.fetch(:http_timeout, 5000)\n\t\t@crawl_depth=params.fetch(:crawl_depth, 4)\n\t\t@crawl_page_limit=params.fetch(:crawl_page_limit, 1000)\n\t\t@max_parallel=params.fetch(:max_parallel, 40)\n\t\t# Discovered data store\t\t\n\t\t@discovered_urls_by_crawler=Hash.new\n\t\t@visited_urls_by_crawler=Hash.new\n\t\t@crawl_start=Hash.new\n\t\t@crawl_done=Hash.new\n\t\t@log_file=File.dirname(__FILE__)+\"../../logs/crawler.log\"\n\tend",
"def initialize\n @l = FancyLog.instance\n @resource = $config['crawler']['docpath']\n require \"temporary_storage\"\n @storage=Temporary_Storage.new($config['storage'])\n end",
"def initialize (params = {})\n\t\t@verbose=params.fetch(:verbose, false)\n\t\t@data_dir=params.fetch(:data_dir, File.dirname(__FILE__)+'/../../data/')\n\t\t@http_timeout=params.fetch(:http_timeout, 5000)\n\t\t@crawl_depth=params.fetch(:crawl_depth, 4)\n\t\t@crawl_page_limit=params.fetch(:crawl_page_limit, 1000)\n\t\t@max_parallel=params.fetch(:max_parallel, 40)\n\t\t@user_agent=params.fetch(:user_agent, \"OWASP WMAP Spider\")\n\t\t# Discovered data store\n\t\t@discovered_urls_by_crawler=Hash.new\n\t\t@visited_urls_by_crawler=Hash.new\n\t\t@crawl_start=Hash.new\n\t\t@crawl_done=Hash.new\n\t\tDir.mkdir(@data_dir) unless Dir.exist?(@data_dir)\n\t\t@log_dir=@data_dir + \"/logs/\"\n\t\tDir.mkdir(@log_dir) unless Dir.exist?(@log_dir)\n\t\t@log_file=@log_dir + \"crawler.log\"\n\tend",
"def initialize(url)\n @visited = []\n @broken = []\n links(url)\n end",
"def initialize(urls, &block)\n @urls = [urls].flatten.map{ |url| URI(url) if url.is_a?(String) }\n @urls.each{ |url| url.path = '/' if url.path.empty? }\n \n @tentacles = []\n @pages = PageHash.new\n @on_every_page_blocks = []\n @on_pages_like_blocks = Hash.new { |hash,key| hash[key] = [] }\n @skip_link_patterns = []\n @after_crawl_blocks = []\n \n if Anemone.options.obey_robots_txt\n @robots = Robots.new(Anemone.options.user_agent)\n end\n \n block.call(self) if block\n end",
"def initialize\n raise NotImplementedError, 'need to implement #intialize and set @url'\n end",
"def _add_crawlers urls\n \n crawler = Crawley::BaseCrawler.new @debug \n\n crawler.instance_variable_set :@start_urls, urls \n crawler.instance_variable_set :@max_depth, @max_depth \n crawler.instance_variable_set :@allowed_urls, @allowed_urls \n crawler.instance_variable_set :@black_list, @black_list \n crawler.instance_variable_set :@max_concurrency_level, @max_concurrency_level \n crawler.instance_variable_set :@requests_deviation, @requests_deviation\n crawler.instance_variable_set :@search_all_urls, @search_all_urls\n crawler.instance_variable_set :@login, @login\n crawler.instance_variable_set :@post, @post\n crawler.instance_variable_set :@proxy_host, @proxy_host\n crawler.instance_variable_set :@proxy_user, @proxy_user\n crawler.instance_variable_set :@proxy_pass, @proxy_pass\n crawler.instance_variable_set :@proxy_port, @proxy_port\n\n @crawlers.push crawler\nend",
"def initialize(url = BASE_URL)\n @agent = Mechanize.new\n @current_page = @agent.get url\n end",
"def initialize(url_str_without_query=OAI_URL_BASE_STRING)\n @url_base_str = url_str_without_query\n @query_str_verb = QUERY_STRING_PART1\n @query_str_next_page = QUERY_STRING_PART2_INITIAL\n @is_next_url_str_valid = true\n\n # Vars below correspond to the current page\n @page_count = 0\n @url_str = nil\n @page = nil\n @doc = nil\n end",
"def initialize(urls, opts = {})\n @urls = [urls].flatten.map{ |url| url.is_a?(URI) ? url : URI(url) }\n @urls.each{ |url| url.path = '/' if url.path.empty? }\n\n @tentacles = []\n @on_every_page_blocks = []\n @on_pages_like_blocks = Hash.new { |hash,key| hash[key] = [] }\n @skip_link_patterns = []\n @after_crawl_blocks = []\n @opts = opts\n @focus_crawl_block = nil\n\n\n yield self if block_given?\n end",
"def initialize(root, visit=false)\n initialize_accessors if respond_to?(:initialize_accessors)\n initialize_driver root\n goto if visit && respond_to?(:goto)\n initialize_page if respond_to?(:initialize_page)\n end",
"def initialize\n @broken_links = []\n @first_linked_from = {}\n end",
"def initialize(url, referer_url = nil)\n @url = url.to_s\n @referer_url = referer_url&.to_s\n @urls = [@url, @referer_url].select(&:present?)\n\n @parsed_url = Addressable::URI.heuristic_parse(url) rescue nil\n @parsed_referer = Addressable::URI.heuristic_parse(referer_url) rescue nil\n @parsed_urls = [parsed_url, parsed_referer].select(&:present?)\n end",
"def initialize(urls, options)\n @urls = Array(urls).map do |url|\n url = URI(url) if String === url\n url.path = '/' if url.path.empty?\n url\n end\n\n @options = options.dup\n @tentacles = []\n @pages = PageHash.new\n @on_every_page_blocks = []\n @on_pages_like_blocks = Hash.new { |hash,key| hash[key] = [] }\n @skip_link_patterns = convert_patterns(options[:skip_urls])\n @after_crawl_blocks = []\n @focus_crawl_block = nil\n\n @options[:http] ||= HTTP.new(@options)\n\n yield self if block_given?\n end",
"def run_crawler\n cr = Crawler.new @prms\n cr.crawl\n end",
"def initialize(url)\n @url = HTTParty.get(url)\n @title = @url['Title']\n @director = @url['Director']\n @plot = @url['Plot']\n end",
"def initialize(user_agent, body)\n @robot_id = user_agent\n @found = true\n parse(body) # set @body, @rules and @sitemaps\n end",
"def initialize base_url\n @base_url = base_url\n end",
"def initialize\n @agent = Mechanize.new\n @agent.set_defaults if @agent.respond_to?(:set_defaults)\n @agent.user_agent = @@user_agent ||= \"bahn.rb\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
HTML processing module for raw HTML storage | def process_html(document)
# Add link and raw HTML to a hash as key/value
# for later storage in database
unless @raw_html.has_value?(document)
print "."
@raw_html[@document.base_uri.to_s] = document
end
end | [
"def html_parser; end",
"def handle_html b, html, plain_text\n html\n end",
"def post_process(html)\n html\n end",
"def process(markup)\n markup\n end",
"def content_from(html, url)\n \n def extract_pre_from(html)\n regex = /<pre.*?>.*?<\\/pre>/m\n pre_list = html.scan regex\n html.gsub! regex, 'DUMMY-STRING'\n [pre_list, html]\n end\n\n def add_domain(html, domain)\n html.gsub! /a href=\\\"(\\/.*?\\\")/, \"a href=\\\"#{domain}\\\\1\"\n html.gsub! /img src=\\\"(\\/.*?\\\")/, \"img src=\\\"#{domain}\\\\1\"\n html\n end\n\n def add_pre(html, pre_list)\n pre_list.each do |p|\n html.sub!('DUMMY-STRING', p)\n end\n html\n end\n \n pre_list, replaced = extract_pre_from html\n params = { :tags => %w[div span p a b i pre h1 h2 h3 h4 h5 h6 strong small em\n blockquote ul ol li img],\n :attributes => %w[href src] }\n html = HtmlPress.press Readability::Document.new(replaced, params).content\n domain = domain_of url\n output = add_pre(add_domain(html, domain), pre_list)\n output = sanitize_with_img output\n output.gsub /<img /, \"<img onError=\\\"this.style.display='none';\\\" \"\n \n end",
"def process_html(content)\n\t\t\t\tcontent.output = if content.output.include? ASIDE_START_TAG\n\t\t\t\t\thead, opener, tail = content.output.partition(CLOSING_ASIDE_TAG_REGEX)\n\t\t\t\t\t\t\t\telse\n\t\t\t\t\thead, opener, tail = content.output.partition(POST_CONTENT_CLASS)\n\t\t\t\t\t\t\t\tend\n\t\t\t\tbody_content, *rest = tail.partition(\"</body>\")\n\t\t\t\t\n\t\t\t\tprocessed_markup = process_words(body_content)\n\t\t\t\t\n\t\t\t\tcontent.output = String.new(head) << opener << processed_markup << rest.join\n\t\t\tend",
"def parse_raw_html(el, &block); end",
"def process_text(raw, preserve = false)\n raw.gsub!(/\\s+/, ' ') unless preserve\n src = Kramdown::Utils::StringScanner.new(raw)\n result = []\n until src.eos?\n if (tmp = src.scan_until(/(?=#{HTML_ENTITY_RE})/o))\n result << Element.new(:text, tmp)\n src.scan(HTML_ENTITY_RE)\n val = src[1] || (src[2]&.to_i) || src[3].hex\n result << if %w[lsquo rsquo ldquo rdquo].include?(val)\n Element.new(:smart_quote, val.intern)\n elsif %w[mdash ndash hellip laquo raquo].include?(val)\n Element.new(:typographic_sym, val.intern)\n else\n begin\n Element.new(:entity, entity(val), nil, original: src.matched)\n rescue ::Kramdown::Error\n src.pos -= src.matched_size - 1\n Element.new(:entity, ::Kramdown::Utils::Entities.entity('amp'))\n end\n end\n else\n result << Element.new(:text, src.rest)\n src.terminate\n end\n end\n result\n end",
"def presentable_html(html)\n # sanitize edited, tags: %w(body p span a h1 h2 h3 h4 h5 h6 ul ol li) if work.file_content_html %> -->\n # doc = Nokogiri::HTML(html_junk)\n # body = doc.at_xpath(\"//body\")\n # body.css('*').remove_attr('class')\n # edited = body.to_html\n return raw html\n end",
"def parse_block_html; end",
"def html_postprocess(field,html)\n html\n end",
"def process_markdown\n self.data = self.class.convert_markdown(self.data)\n sanitize_html\n end",
"def handle_raw_html_tag(name); end",
"def prepare_html(content , page_type = 'N')\n #header\n 1.upto 5 do |no| content.gsub! /^(={#{no}}) (.*) (={#{no}})/ ,\"\\nh#{no+1}. \\\\2\\n\" end\n 1.upto 5 do |no| content.gsub! /^(={#{no}}) (.*)/ ,\"\\nh#{no+1}. \\\\2\\n\" end\n\n #list\n 1.upto 5 do |no| content.gsub! /^([ ]{#{no}})(\\*) ?(.*)/ ,\"#{'*'*no} \\\\3\" end\n 1.upto 5 do |no| content.gsub! /^([ ]{#{no}})(#) ?(.*)/ ,\"#{'#'*no} \\\\3\" end\n #content.gsub! /(\\*) v (.*)/ , \"\\\\1 -\\\\2-\"\n \n #block\n content.gsub! /^\\{\\{\\{/ , \"<pre>\" ; content.gsub! /^\\}\\}\\}/ , \"</pre>\"\n content.gsub! /^\\{\\{\\\"/ , \"<blockquote>\" ; content.gsub! /^\\\"\\}\\}/ , \"</blockquote>\"\n content.gsub! /^\\{\\{\\[/ , \"<math>\" ; content.gsub! /^\\]\\}\\}/ , \"</math>\"\n \n #concept & property\n content.gsub! /\\[\\[(.*?):=(.*?)\\]\\]/ , '\\1(\\2)'\n #content.gsub! /\\[\\[(.*?)[<>=].*?\\]\\]/ , \\\"\\\\1\\\":#{APP_ROOT}/page/\\\\1\" \n content.gsub! /\\[\\[(.*?)\\]\\]/ , \"\\\"\\\\1\\\":#{APP_ROOT}/entry/\\\\1\" if defined?(APP_ROOT)\n\n #comment\n content.gsub! PTN_COMMENT , \"\\\\1\"\n content.gsub! PTN_COMMENT_MULTILINE , \"\"\n if defined? SystemConfig\n SystemConfig.site_info.each do |e|\n content.gsub! /(\\s)#{e[1]}:/ , \"\\\\1#{e[2]}\"\n end\n content.gsub! SystemConfig.ptn_url_unnamed , \"\\\\1\\\"\\\\2\\\":\\\\2\"\n content.gsub! \"%ROOT%\" , APP_ROOT\n end\n \n #Process by page_type\n case page_type\n when 'N'\n math_list = content.scan( PTN_MATH ) ; math_list.each do |m|\n #content.gsub! \"$#{m[0]}$\" , latex_render(m[0])\n content.gsub! \"$#{m[0]}$\" , get_math_img(m[0])\n end\n math_block_list = content.scan( PTN_MATH_BLOCK ) ; math_block_list.each do |m|\n #content.gsub! \"#{m[0]}\" , latex_render(m[0])\n content.gsub! \"#{m[0]}\" , get_math_img(m[0])\n end\n when 'S'\n menu_list = content.scan( PTN_MENU ) ; menu_list.each do |m|\n menu_title = m[0] ; menu_target = m[1] ; menu_str = \"M{{#{menu_title}|#{menu_target}}}\"\n #$lgr.info \"#{menu_title} / #{menu_target}\"\n result = link_to_remote(menu_title , :url => { :action => 'menu' , :query => CGI.escape(menu_target) })\n content.gsub! menu_str , result\n end\n end\n #$lgr.info \"[prepare_html] \"+content\n query_list = content.scan( PTN_QUERY ) ; query_list.each do |q|\n query_type = q[0] ; query_content = q[1] ; query_str = \"#{query_type}{{#{query_content}}}\"\n case query_type\n when 'P'\n result = eval(\"find_page :display=>'|@title|@tags|@created_at|' ,\" + query_content )\n result = result.join(\"\\n\") if result.class == Array\n result = \"|_.Title|_.Tag|_.CreatedAt|\\n\"+result if query_content.scan(/:display/).size == 0\n #$lgr.info \"[prepare_html] Query : #{query_str} , #{result}\"\n content.gsub! query_str , result\n end\n end\n #content.gsub! SystemConfig.ptn_url , \"\\\"\\\\0\\\":\\\\0\"\n #???content.gsub!(SystemConfig.ptn_site) \"\\\"#{ApplicationController.SystemConfig(\\\\0)}\\\":\\\\0\"\n content\n end",
"def process_html(content)\n head, opener, tail = content.output.partition(OPENING_BODY_TAG_REGEX)\n body_content, *rest = tail.partition(\"</body>\")\n\n processed_markup = process_anchor_tags(body_content)\n\n content.output = String.new(head) << opener << processed_markup << rest.join\n end",
"def html_markup_org(text); end",
"def transform_HTML(additional_context = nil)\n old_version = self.clone\n new_self = self.clone\n\n # In order to ensure that no chunks are sanitized twice, and that each\n # chunk is sanitized in accordance with the context it appears in its final\n # place of use, roll back any chunks in the current string that have a \n # rollback transformer (see definition of RollbackTransformer for more of \n # an explanation)\n new_self = run_rollback(new_self) \n\n # Call the context sanitization routine for HTML (see context_sanitization.rb\n # for a detailed description of how this process works)\n res = context_sanitize(new_self)\n\n # Match the resulting string with a new RollbackTransformer, so that the\n # new transformations can be reverted if it turns out that the string will\n # be used in yet a larger HTML context\n new_trans = RollbackTransformer.new\n new_trans.backup = old_version \n\n # Return the HTML-transformed version of the string\n res = res.set_taint(new_trans)\n res\n end",
"def html\n return @html if defined? @html\n\n @html = Henkei.read :html, data\n end",
"def scrub_html4_document(string_or_io, method); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns last_name, first_name or just first_name if no last_name exists | def full_name_last_first
return self.first_name unless (self.last_name.length > 0)
return (self.last_name + ", " + self.first_name)
end | [
"def get_first_and_last_name\n if first_name && last_name && !first_name.empty? && !last_name.empty?\n [first_name, last_name]\n elsif description.present?\n [\n description.split(' ')[0],\n description.split(' ')[1]\n ]\n else\n [name[0..4], ''] # Return just the first 5 char from the username, just to increase the chances\n end\n end",
"def last_first_name\n ret = \"\"\n ret += name unless name.blank?\n ret += \", \" unless firstname.blank? or name.blank?\n ret += firstname unless firstname.blank?\n ret\n end",
"def last_name_first\n if name\n stripped_name = name.strip\n\n leading_dots = if name =~ /^([A-Za-z\\s\\.]*)\\s/\n $1\n end\n\n if leading_dots\n stripped_name = stripped_name.gsub(/#{leading_dots}/, \"\")\n return stripped_name + \", \" + leading_dots\n end\n\n name_parts = stripped_name.split(' ')\n if name_parts.count > 1\n [name_parts[1, name_parts.length].join(' '), name_parts[0]].join(', ')\n else\n name\n end\n else\n \"\" # return empty string instead of nil if no name\n end\n end",
"def last_name_first_name(name)\n last = last_name(name)\n first = name.gsub(last, '').strip \n \"#{last}, #{first}\"\n end",
"def last_name_first(options={})\n return (last_name || '') + (first_name || '') if last_name.blank? || first_name.blank?\n options[:short] = false if options[:paren_short] # paren_short overrides the short option\n if options[:short] && !short_name.blank? # use the short form of first name if it's defined\n first = short_name\n else\n first = first_name # || '*Missing First Name*'\n end\n if options[:initial] && !middle_name.blank? # use middle initial rather than whole middle name?\n middle = middle_initial\n else\n middle = middle_name || ''\n end\n if (options[:paren_short]) && !short_name.blank? && short_name != first\n first = first + \" (#{short_name})\"\n end\n s = (last_name) + ', ' + first \n s << (' ' + middle) unless options[:middle] == false || middle.empty?\n return s || ''\n end",
"def first_name_last_initial\n if name.split.count > 1\n first_name + ' ' + last_name[0].upcase + '.'\n else\n first_name\n end\n end",
"def full_name\n first_name.present? && last_name.present? ? \"#{first_name} #{last_name}\" : uid\n end",
"def merge_first_and_last_name_into_name\n self.name = [first_name, last_name].compact.join(' ').strip\n end",
"def full_name(user, last_first=true)\n if last_first\n \"#{user.last_name}, #{user.first_name}\"\n else\n \"#{user.first_name} #{user.last_name}\"\n end\n end",
"def first_name_last_name_initial\n self.first_name + ' ' + self.last_name[0,1]\n end",
"def full_name\n [first_name, surname].join(' ')\n end",
"def fullname\n full = [self.firstname, self.lastname].join(' ').strip\n unless full.blank?\n return full\n else\n return self.username\n end\n end",
"def name\n if first_name && last_name\n name = \"#{first_name} #{last_name}\"\n elsif first_name\n name = first_name\n else\n name = email\n end\n end",
"def fullname_or_name\n ((fullname && !fullname.empty?) ? fullname : name)\n end",
"def names(first_name, last_name)\n\t\"#{last_name.upcase}, #{first_name.capitalize}\"\nend",
"def preferred_last_name() self[:preferred_last_name] || last_name end",
"def full_name\n return \"#{first_name} #{last_name}\"\n end",
"def list_first_and_last_name(first_name, middle_name, last_name)\n puts \"First Name: #{first_name}, Middle Name: #{middle_name}, Last Name: #{last_name}\"\nend",
"def fullname?\n firstname.present? || lastname.present?\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
"Sand", "Water", "Fish", and "Sun" appear without overlapping (regardless of the case). Examples sum_of_a_beach("WAtErSlIde") ==> 1 sum_of_a_beach("GolDeNSanDyWateRyBeaChSuNN") ==> 3 sum_of_a_beach("gOfIshsunesunFiSh") ==> 4 sum_of_a_beach("cItYTowNcARShoW") ==> 0 | def sum_of_a_beach (beach)
(beach.scan(/sand|water|fish|sun/i) || []).length
end | [
"def countApplesAndOranges(start_house_loc, end_house_loc, apple_loc, orange_loc, apples, oranges)\n apples.collect! { |apple| apple_loc + apple }.keep_if { |apple| (start_house_loc..end_house_loc).include? apple }\n oranges.collect! { |orange| orange_loc + orange }.keep_if { |orange| (start_house_loc..end_house_loc).include? orange }\n\n puts apples.count\n puts oranges.count\nend",
"def countApplesAndOranges(s, t, a, b, apples, oranges)\n apple_count, orange_count = 0, 0 \n apples.each do |d|\n if (d >= s-a) && (d <= t-a)\n apple_count += 1\n end\n end\n oranges.select{|num| num < 0}.each do |d|\n if (d >= s-b) && (d <= t-b)\n orange_count += 1\n end\n end\n printf(\"%s\\n%s\", apple_count, orange_count)\nend",
"def countApplesAndOranges(s, t, a, b, apples, oranges)\n apples_on_house = 0\n oranges_on_house = 0\n apples.map! do |apple|\n apple + a\n end\n oranges.map! do |orange|\n orange + b\n end\n apples.each do |apple|\n if apple >= s && apple <= t\n apples_on_house += 1\n end\n end\n oranges.each do |orange|\n if orange >= s && orange <= t\n oranges_on_house += 1\n end\n end\n p apples_on_house\n p oranges_on_house\nend",
"def num_shared_words(words_a,words_b)\n matches = 0\n words_a.each do |word_a|\n words_b.each_with_index do |word_b,i|\n if (word_a.downcase == word_b.downcase)\n matches += 1\n words_b.delete_at(i)\n break\n end\n end\n end\n matches\nend",
"def count_vegetables(s)\n # if there are any non-vegetables mixed in, discard them.\n # if the count of two vegetables is the same, sort in reverse alphabetical order(Z -> A)\n s_array = s.split(' ')\n p s_array\n delete_list = [\"chopsticks\"]\n b = s_array.reject { |x| delete_list.each.include? x } # This deletes all instances of of items in delete_list\n #only_veg = delete_list.each do |del| s_array.delete_at(s_array.index(del))\n p b\n s_array2 = b.group_by { |name| name }\n s_array3 = s_array2.map { |name, names| [names.length, name] }.sort.reverse\n# s.delete_at if !s.include?('cabbage', 'carrot', 'celery', 'cucumber', 'mushroom', 'onion', 'pepper', 'potato', 'tofu', 'turnip')\n# end\n #your code here\n# s2 = s.group_by { |name| name}\n# s2.map { |veg, veg_name| [veg.length, veg_name] }\n\nend",
"def countApplesAndOranges(s, t, a, b, apples, oranges)\n result_a = 0\n apples.each do |d|\n cord = a + d\n if s <= cord && cord <= t\n result_a += 1\n end\n end\n puts result_a\n\n result_o = 0\n oranges.each do |d|\n cord = b + d\n if s <= cord && cord <= t\n result_o += 1\n end\n end\n puts result_o\nend",
"def match(a, b)\n a = a.to_s.downcase\n b = b.to_s.downcase\n result = 0\n if a == b \n return 100\n end\n if (a.include?(b) ==true && (a.length - b.length) ==1 ) || (b.include?(a)==true && (b.length - a.length) ==1) \n return 50\n end\n if (a.length >=3 && a.include?(b) ==true && (a.length - b.length) ==2 ) || (b.length >=3 && b.include?(a)==true && (b.length - a.length) ==2) \n return 25\n end\n if (a.length >=6 && a.include?(b) ==true && (a.length - b.length) == 3 ) || (b.length >=6 && b.include?(a)==true && (b.length - a.length) ==3) \n return 15\n end\n return 0\n end",
"def measure_string_overlap(string_a, string_b)\n overlap = 3\n lcs = 1.0\n min_string_length = [string_a, string_b].map(&:length).min\n prev_sim = 0\n max_sim = 0\n overall_sim = string_a.longest_subsequence_similar(string_b)\n puts \"Overall similarity: #{ overall_sim.round(3) }\"\n similarity_threshold = 0.95\n\n until(\n 1.0 == max_sim ||\n (overlap > 5 && max_sim >= similarity_threshold) ||\n overlap >= min_string_length\n ) do\n puts ''\n string_a_end = string_a[-overlap..-1]\n string_b_start = string_b[0..(overlap-1)]\n sim = string_a_end.longest_subsequence_similar(string_b_start)\n puts [\n ('█' * (sim * 10).round).rjust(10),\n ' ',\n string_a_end.inspect\n ].join\n puts [\n sim.round(3).to_s.rjust(10).color(prev_sim <= sim ? :green : :red),\n ' ',\n string_b_start.inspect\n ].join\n if sim > max_sim\n optimal_overlap = overlap\n end\n max_sim = [max_sim, sim].max\n prev_sim = sim\n overlap += 1\n end\n if max_sim > similarity_threshold\n optimal_overlap\n else\n 0\n end\nend",
"def num_overlapping_chars(full_sequence, ranges, substring)\n #start_positions = aaseq.enum_for(:scan, substring).map { $~.offset(0)[0]}\n if ranges.size == 0\n []\n #full_sequence.enum_for(:scan, substring).map { 0 }\n else\n substring_ranges = []\n pos = 0\n slen = substring.size\n while i=full_sequence.index(substring,pos)\n substring_ranges << Range.new(i, i+slen-1)\n pos = i + slen\n end\n # brute force way\n last_tm_range = ranges.last.last\n to_return = substring_ranges.map do |sb|\n overlap = 0\n # there's got to be a much simpler way to do this, but this does work...\n ranges.each do |tm|\n (frst, lst) = \n if tm.include?( sb.first )\n [tm, sb]\n elsif tm.include?( sb.last )\n [sb, tm]\n else\n nil\n end\n if frst\n if lst.last <= frst.last\n overlap += (frst.last+1 - frst.first) - (lst.first - frst.first) - (frst.last - lst.last)\n else \n overlap += (frst.last+1 - frst.first) - (lst.first - frst.first)\n end\n end\n end\n overlap\n end\n end\n end",
"def commonCharacterCount(s1, s2)\n a1 = s1.split(\"\").uniq\n a2 = s2.split(\"\").uniq\n \n b = a1 - a2\n c = a2 - a1\n \n check_a = a1 - b - c\n \n count = 0\n \n check_a.each do |char|\n count_1 = s1.split(\"\").count(\"#{char}\")\n count_2 = s2.split(\"\").count(\"#{char}\")\n \n if count_1 < count_2\n count += count_1\n else\n count += count_2\n end\n end\n \n count\nend",
"def sherlockAndAnagrams(s)\n\tbucket, count = [], 1\n\t(((s.split(\"\").length) - 1 )).times do\n\t\tbucket << s.split(\"\").combination(count).to_a.map {|x| x.join(\"\")}\n\t\tcount +=1\n\tend\n\tp bucket.map! { |y| y.combination(2).to_a}.flatten(1).uniq.keep_if {|z| z[0].split(\"\").sort == z[1].split(\"\").sort }.count\nend",
"def countApplesAndOranges(s, t, a, b, apples, oranges)\n sam_apples_count = 0\n sam_oranges_count = 0\n apples.each do |apple|\n apple_position = apple + a\n if (apple_position >= s && apple_position <= t) then\n sam_apples_count += 1\n end\n end\n \n oranges.each do |orange|\n orange_position = orange + b\n if (orange_position >= s && orange_position <= t) then\n sam_oranges_count += 1\n end\n end\n \n puts sam_apples_count\n puts sam_oranges_count\nend",
"def bulls(guess)\n guess.split(//).zip(@word.split(//)).inject(0) { | r, (letter_1, letter_2) | letter_1 == letter_2 ? r + 1 : r }\n end",
"def part1(input)\n twos = threes = 0\n\n # For each ID, bucket each char and count their instances within the ID.\n # Originally, I'd tried to sort the chars and use a regex, but getting\n # around the awkwardness of backtracking and detecting the twos that aren't\n # also threes (or more) wasn't worth the effort on this exercise.\n input.each do |id|\n counts = id.chars.group_by(&:to_s).values.map(&:size)\n\n twos += 1 if counts.include?(2)\n threes += 1 if counts.include? (3)\n end\n\n return twos * threes\nend",
"def commonCharacterCount(s1, s2)\n s1_hash = s1.chars.group_by { |x| x }.map { |k,v| [k, v.size]}.to_h # transform to hash : { 'a' => 2, 'b' => 1, 'c' => 2 }\n s2_hash = s2.chars.group_by { |x| x }.map { |k,v| [k, v.size]}.to_h\n counter = 0\n s1_hash.each_pair do |k,v| # compare values of s1_hash and s2_hash and takes the min common nb between the two\n counter += [v, s2_hash[k]].min if !s2_hash[k].nil?\n end\n return counter\nend",
"def solution(s)\n return 0 if s.empty?\n t = 0\n ('A'..'Z').to_a.each do |c|\n t += s.count(c)\n end\n t + 1\nend",
"def four_letters_in_common(word, dictionary)\n\n # Get all four or more consectutive letter combinations i.e. slices\n slices = get_slices(word)\n\n # To store all words that have letters in common with the slices\n matches = Set.new\n\n # Find all the matching words\n dictionary.each do |word|\n \n # Check if the word matches any of the slices\n slices.each do |slice|\n if word.include?(slice)\n matches << word\n end\n\n end\n end\n\n matches\nend",
"def HammingDistance(arr)\n first_str = arr[0].chars\n sec_str = arr[1].chars\n hamm_dis_count = []\n (0..first_str.size).each do |index|\n if first_str[index] != sec_str[index]\n hamm_dis_count << first_str[index]\n end\n end\n hamm_dis_count.size\nend",
"def mix(s1, s2)\n str = []\n letters = []\n common = []\n\n [s1, s2].each_with_index do |input,i|\n str << input.tr(' ', '')\n letters << str[i].chars.uniq.select{|s| str[i].count(s) > 1 && ('a'..'z').include?(s)}\n end\n\n (letters[0] & letters[1]).each do |ele|\n if str[0].count(ele) > str[1].count(ele)\n letters[1].delete(ele)\n elsif str[0].count(ele) < str[1].count(ele)\n letters[0].delete(ele)\n elsif str[0].count(ele) == str[1].count(ele)\n letters[0].delete(ele)\n letters[1].delete(ele)\n common << \"=:#{ele * str[0].count(ele)}\"\n end\n end\n\n (letters[0].map{|ele| \"1:#{ele * str[0].count(ele)}\"} +\n letters[1].map{|ele| \"2:#{ele * str[1].count(ele)}\"} +\n common).sort_by{|s| s}.reverse.sort_by{|s| s.size}.reverse.join('/')\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
2. Get all windmill IDs for a given wind form. | def getmill
formid = Windmill.where(windformid: params[:id])
if formid.present?
render json: formid.as_json(only:[:no])
else
render json: {massage: 'Windform not found'}
end
end | [
"def getform\n\tgetform = Windmill.where(no: params[:no])\n\tif getform.present?\n render json: getform.as_json(only: [:windformid])\n\telse\n\t\trender json: {massage: 'No windfrom available in this id'}\n\tend\nend",
"def all_form_field_ids(form_id)\n fields = []\n field_maps[form_id].each do |name, data|\n if data.is_a?(Hash)\n data.each {|k, v| fields << v }\n elsif data.is_a?(Array)\n fields += data\n else\n fields << data\n end\n end\n return fields.map(&:to_s)\n end",
"def form_ids\n forms.collect{|f| f.id}.sort\n end",
"def form_ids\n forms.collect(&:id).sort\n end",
"def get_listOfddIDs #{{{2\n listOfddIDs = Array.new\n\n design_details = DesignDetail.find_all_by_extraction_form_id(@ef_id)\n design_details.each do |d|\n listOfddIDs.push d.id\n end\n return listOfddIDs\n end",
"def idmap\n mapping = {}\n form.fields.each{|field| mapping[field.id] = field.name}\n mapping\n end",
"def reform_ids\n reform_surveys.pluck(:reform_id)\n end",
"def get_all_window_ids()\n return get_string_array(\"getAllWindowIds\", [])\n end",
"def all_ids\n json = JSON.parse(http_client.get(\"mobiledevices\"))\n json[\"mobile_devices\"].map { |c| c[\"id\"] }\n end",
"def get_forms\r\n Form.where(\"opportunity_id = ?\", opportunity_id).all\r\n end",
"def obligatory_field_ids\n contact_form_fields.obligatory.map(&:id)\n end",
"def form_id\n form_response.form_id\n end",
"def ffcrm_list_ids\n config.mailchimp_list_fields.map{ |f| f.settings['list_id'] }\n end",
"def form_identifiers\n \n ret = @core.forms.map do |f|\n if f.name or f.action\n f.name or f.action\n else\n throw \"couldn\\'t get name or action\"\n end\n end\n \n if ret.length != ret.uniq.length\n throw \"amibiguously named forms--should fix in amigo\"\n end\n ret\n end",
"def form_elements(identifier)\n platform.forms_for(identifier.clone)\n end",
"def get_form_index(id)\n forms.index { |form| form['id'] == id }\n end",
"def form_identifiers\n \n ret = @core.forms.map do |f|\n if f.name or f.action\n f.name or f.action\n else\n throw \"couldn\\'t get name or action\"\n end\n end\n\n if ret.length != ret.uniq.length\n throw \"amibiguously named forms--should fix in amigo\"\n end\n ret\n end",
"def exercise_id_list\n query(\"SELECT id FROM exercises;\").field_values('id')\n end",
"def all_field_ids\n result = []\n form_versions.reverse_each {|form_version|\n form_version.form_fields.reverse_each {|form_field|\n result.insert(0, form_field.field_id) unless result.include?(form_field.field_id)\n }\n }\n return result\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
4. Get a wind form for a given windmill ID. | def getform
getform = Windmill.where(no: params[:no])
if getform.present?
render json: getform.as_json(only: [:windformid])
else
render json: {massage: 'No windfrom available in this id'}
end
end | [
"def getmill\n\tformid = Windmill.where(windformid: params[:id])\n\tif formid.present?\n render json: formid.as_json(only:[:no])\n\telse\n\t\trender json: {massage: 'Windform not found'}\n\tend\nend",
"def get_form(id)\n forms.find { |form| form['id'] == id }\n end",
"def form(form_id)\n if f = get(\"forms/#{form_id}\")['Forms']\n Form.new(f.first['Url'], :party => self, :details => f.first)\n end\n end",
"def retrieve_form(form_id)\n start.uri('/api/form')\n .url_segment(form_id)\n .get()\n .go()\n end",
"def form(id)\n make_json_api_request :get, \"v2/#{account_id}/forms/#{id}\"\n end",
"def form(form_id)\n return unless (f = get(\"forms/#{form_id}\")['Forms'])\n Form.new(f.first['Url'], party: self, details: f.first)\n end",
"def get_form\n @formid = session[:formid]\n if @formid\n @form = Form.find(@formid) # by default search by ID\n else\n #TODO - do the error handling. how come the formid is NIL\n #should we take it to login by calling 'login_required'\n #that will make sense if we are sure that \n end\n end",
"def get_form_by_id(page, id)\n form = page.search(\".//form[@id='#{id}']\")[0]\n form = WWW::Mechanize::Form.new(form, page.mech, page)\n form.action ||= page.uri.to_s\n return form\n end",
"def loc; det.form(:name, 'configDevice').text_field(:id, 'location'); end",
"def form\n @form ||= Form.new(get(\"form/#{all['form_id']}\")).use_api(@api)\n end",
"def get_form_index(id)\n forms.index { |form| form['id'] == id }\n end",
"def get_forms\r\n Form.where(\"opportunity_id = ?\", opportunity_id).all\r\n end",
"def form_elements(identifier)\n platform.forms_for(identifier.clone)\n end",
"def get_form(id=nil)\n id = 0 if not id\n return nil if forms.empty? || !forms[id]\n f = @forms[id]\n action = f.action\n action ||= action\n action ||= cur\n action ||= \"PARSE_ERROR\"\n action = nil if cur.basename == action\n url_action = @urlparse.parse(self.cur,action).to_s\n type = f.type\n FormArray.new do |x|\n x.set_fields(f.fields)\n x.action = url_action\n x.type = type\n end\n end",
"def retrieve_form_field(field_id)\n start.uri('/api/form/field')\n .url_segment(field_id)\n .get()\n .go()\n end",
"def show\n @federalform = Federalform.find(params[:id])\n end",
"def show\n \n @stateform = Stateform.find(params[:id])\n \n \n end",
"def new\n @wic_form = WicForm.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @wic_form }\n end\n end",
"def get_question(form_name, id)\n get_form(form_name)[:questions][id]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Executes the easy_install command with the passed arguments | def ezy_install(*args)
easy_install *args
rescue NoMethodError => e
if pathname = which('easy_install')
self.class.commands :easy_install => pathname
easy_install *args
else
raise e
end
end | [
"def run(*args)\n require 'rubygems'\n optparse(*args)\n build_gems\n end",
"def run_install\n require 'fileutils'\n install_path = ARGV.shift || '.'\n FileUtils.mkdir_p install_path unless File.exists?(install_path)\n install_file \"#{CC_ROOT}/config/config.example.yml\", \"#{install_path}/config.yml\"\n install_file \"#{CC_ROOT}/config/config.example.ru\", \"#{install_path}/config.ru\"\n install_file \"#{CC_ROOT}/config/database.example.yml\", \"#{install_path}/database.yml\"\n install_file \"#{CC_ROOT}/actions\", \"#{install_path}/actions\", true\n end",
"def dist_install_s( *args )\n args = args.dup\n if args.last.is_a?( Hash )\n opts = args.pop\n else\n opts = {}\n end\n\n commands = []\n\n @apt_update_state_lock.synchronize do\n unless @apt_update_state[ target_host ]\n commands << \"apt-get -yq update\"\n @apt_update_state[ target_host ] = true\n end\n end\n\n args = dist_map_packages( args )\n args.unshift \"--no-install-recommends\" if opts[ :minimal ]\n commands << \"apt-get -yq install #{args.join ' '}\"\n\n commands.join( \"\\n\" )\n end",
"def dist_install( *args )\n opts = args.last.is_a?( Hash ) && args.pop || {}\n args.flatten!\n flags = []\n flags << '--no-install-recommends' if opts[ :minimal ]\n chk = opts[ :check_install ]\n chk = check_install? if chk.nil?\n dist_if_not_installed?( args, chk != false, opts ) do\n dist_update( opts )\n sudo( \"apt-get -yq install #{(flags + args).join ' '}\", opts )\n end\n end",
"def install\n cd_and_sh( pkg_dir, install_commands )\n end",
"def run_install\n installation_possible? ? system(@tool_inst_command) : nil\n end",
"def install_command\n if (RUBY_PLATFORM =~ /linux/ or RUBY_PLATFORM =~ /darwin/) and Process.uid != 0\n cmd = \"sudo gem install\"\n $gems_missing.each do |current_gem|\n cmd = cmd + \" #{current_gem}\"\n end\n if $gems_missing_version.length != 0\n $gems_missing_version.each do |current_gem|\n if cmd == \"sudo gem install\"\n cmd = cmd + \" #{current_gem}\"\n else\n cmd = cmd + \" && sudo gem install #{current_gem}\"\n end\n end\n end\n else\n cmd = \"gem install\"\n $gems_missing.each do |current_gem|\n cmd = cmd + \" #{current_gem}\"\n end\n if $gems_missing_version.length != 0\n $gems_missing_version.each do |current_gem|\n if cmd == \"gem install\"\n cmd = cmd + \" #{current_gem}\"\n else\n cmd = cmd + \" & gem install #{current_gem}\"\n end\n end\n end\n end\n cmd = cmd.delete \",\" \"'\"\n cmd = cmd.gsub(\"=\", \"-v\")\n return cmd\nend",
"def ezbake_installsh(host, task = '')\n on host, \"cd #{ezbake_install_dir}; bash install.sh #{task}\"\n end",
"def install_command\n cmd = \"#{top.sudo} bash\"\n cmd += \" -s -- -v #{required_version}\" unless required_version.nil?\n cmd\n end",
"def install_gem; end",
"def install_command\n command = ['helm', 'upgrade', name, chart] +\n install_flag +\n reset_values_flag +\n optional_tls_flags +\n optional_version_flag +\n rbac_create_flag +\n namespace_flag +\n value_flag\n\n command.shelljoin\n end",
"def install_command\n command = \"Install-Module #{@resource[:name]} -Scope AllUsers -Force\"\n command << \" -RequiredVersion #{@resource[:ensure]}\" unless [:present, :latest].include? @resource[:ensure]\n command << \" -Repository #{@resource[:source]}\" if @resource[:source]\n command << \" #{install_options(@resource[:install_options])}\" if @resource[:install_options]\n command\n end",
"def run_setup(arguments)\n wd_path = File.join(File.dirname(__FILE__), '..', 'bin', 'wd')\n lib_path = File.join(File.dirname(__FILE__), '..', 'lib')\n system(\"/usr/bin/env ruby -I #{lib_path} -r whiskey_disk -rubygems #{wd_path} setup #{arguments} > #{integration_log} 2> #{integration_log}\")\nend",
"def gem_install_command\n \"sudo gem install #{gem_name} -s http://gems.github.com\"\n end",
"def install\n system \"cargo\", \"install\", *std_cargo_args\n end",
"def gem(command,*arguments)\n run 'gem', command, *arguments\n end",
"def install uri\n execute(:install, uri)\n end",
"def install_command\n return \"brew install\" if in_path?(:brew)\n return \"sudo apt-get install\" if in_path?(\"apt-get\")\n return \"sudo yum install\" if in_path?(\"yum\")\n end",
"def install_nginx(cmd)\n execute 'install_nginx' do\n command \"#{cmd}\"\n end\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /albums/1 DELETE /albums/1.json | def destroy
@album = @user.albums.find(params[:id])
@album.destroy
respond_to do |format|
format.html { redirect_to albums_url }
format.json { head :no_content }
end
end | [
"def destroy\n @album.destroy\n render json: @album\n end",
"def destroy\n \t@album = Album.find(params[:album_id])\n @photo = @album.photos.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to albums_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @user_album = UserAlbum.find(params[:id])\n @user_album.destroy\n\n respond_to do |format|\n format.html { redirect_to user_albums_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @album = current_account.albums.find(params[:id])\n @album.destroy\n\n respond_to do |format|\n format.html { redirect_to(albums_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @photo_album = PhotoAlbum.find(params[:id])\n @photo_album.destroy\n\n respond_to do |format|\n format.html { redirect_to photo_albums_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @photoalbum = Photoalbum.find(params[:id])\n @photoalbum.destroy\n\n respond_to do |format|\n format.html { redirect_to photoalbums_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @account_album = Account::Album.find(params[:id])\n @account_album.destroy\n\n respond_to do |format|\n format.html { redirect_to account_albums_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @album = Album.find(params[:id])\n @album.destroy\n\n respond_to do |format|\n format.html { redirect_to(albums_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @album = Album.find(params[:id])\n @album.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_albums_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @album = Album.find(params[:id])\n @album.album.destroy\n @album.destroy\n respond_to do |format|\n format.html { redirect_to(albums_path, :notice => 'photo was successfully deleted.') }\n format.xml { head :ok }\n end\n end",
"def destroy\n @private_album.destroy\n respond_to do |format|\n format.html { redirect_to private_albums_url }\n format.json { head :no_content }\n end\n end",
"def delete_album album_id\n delete(\"/albums/#{album_id}\", code: 204)\n end",
"def destroy\n @public_album.destroy\n respond_to do |format|\n format.html { redirect_to public_albums_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @albumone.destroy\n respond_to do |format|\n format.html { redirect_to albumones_url, notice: 'Albumone was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @albumm = Albumm.find(params[:id])\n @albumm.destroy\n\n respond_to do |format|\n format.html { redirect_to albumms_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @galleries_album.destroy\n respond_to do |format|\n format.html { redirect_to galleries_albums_url, notice: 'Album was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @albumsix.destroy\n respond_to do |format|\n format.html { redirect_to albumsixes_url, notice: 'Albumsix was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @album = Album.find(params[:id])\n deleted_files = 0\n deleted_album = @album.title\n @album.destroy\n\n respond_to do |format|\n format.html { redirect_to(albums_url, :notice => \"Deleted #{deleted_album}\") }\n format.xml { head :ok }\n end\n end",
"def destroy\n @photo_album.destroy\n\n respond_to do |format|\n format.html { redirect_to(photo_albums_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns absolute and moving-average values of time to repair (time to complete unplanned cards) since the supplied time. | def time_to_repair(since_time, unit: :second)
total_repair_time = 0
total_repairs = 0
ttr = { point_values: {}, moving_averages: {} }
# Unplanned cards created after since_time, in order of creation.
cards.where('cards.created_at > ?', since_time).where('cards.label = ?', Card::LABEL_UNPLANNED).order('cards.created_at DESC').map do |unplanned_card|
lead_time = unplanned_card.lead_time
if lead_time > 0
total_repairs += 1
total_repair_time += time_in_unit(lead_time, unit)
ttr[:point_values][unplanned_card.created_at] = time_in_unit(lead_time, unit)
ttr[:moving_averages][unplanned_card.created_at] = total_repair_time / total_repairs
end
end
ttr
end | [
"def calculate_local_mean_time_of(h, ra, t)\n h + ra - (0.06571 * t) - 6.622\n end",
"def average_time()\n sum = 0\n if all_rides.length != 0\n all_rides().each { |ride| sum += ride.completion_time() }\n return sum / (all_rides().count)\n else\n return 0\n end \n end",
"def get_overtime\n return 0 if self.start.nil? or self.end.nil?\n\n if self.off_am?\n standard_wktime = Timecard.get_standard_wktime_when_off_am\n elsif self.off_pm?\n standard_wktime = Timecard.get_standard_wktime_when_off_pm\n else\n standard_wktime = Timecard.get_standard_wktime\n end\n\n return 0 if standard_wktime <= 0\n\n return self.get_actual_wktime - standard_wktime\n\n# overtime = self.get_actual_wktime - standard_wktime\n#\n# if overtime >= 0\n# return overtime\n# else\n# return 0\n# end\n end",
"def at_time(time)\n resource = @initial\n\n puts \"[START] initial: #{initial}\" if COPIOUS_DEBUGGING\n puts \"[START] time: #{time}\" if COPIOUS_DEBUGGING\n\n if intervals_at_time(@harvesters, time).size > 1\n generate_pairs(intervals_at_time(@harvesters, time)).each_with_index do |pair, index|\n puts \"[DEBUG] #{index}: STARTING LOOP\" if COPIOUS_DEBUGGING\n start_time = pair[0]\n next_time = pair[1]\n puts \"#{index}: start_time: #{start_time}\" if COPIOUS_DEBUGGING\n\n harvesters = @harvesters[start_time]\n puts \"#{index}: harvesters: #{harvesters}\" if COPIOUS_DEBUGGING\n\n period = next_time - start_time\n puts \"#{index}: period: #{period}\" if COPIOUS_DEBUGGING\n\n harvested = time_to_resource(period, harvesters)\n resource += harvested\n puts \"#{index}: harvested: #{harvested}\" if COPIOUS_DEBUGGING\n\n consumed = consumed_in_interval(start_time, next_time)\n resource -= consumed\n puts \"#{index}: consumed: #{consumed}\" if COPIOUS_DEBUGGING\n\n puts \"#{index}: resource: #{resource}\" if COPIOUS_DEBUGGING\n end\n else\n resource -= consumed_at_time(0)\n puts \"0: resource: #{resource}\" if COPIOUS_DEBUGGING\n end\n\n if resource < 0\n raise(InvalidEconomyError, \"The Starcraft economy does not allow deficit spending (deficit of #{resource} #{@type} at #{time} seconds).\")\n end\n\n puts \"[FINISH] at_time: #{resource}\" if COPIOUS_DEBUGGING\n resource\n end",
"def get_usual_overtime\n\n overtime = self.get_overtime\n midnight_overtime = self.get_midnight_overtime\n\n if overtime <= 0\n return overtime\n elsif overtime <= midnight_overtime\n return 0\n else\n return (overtime - midnight_overtime)\n end\n\n# usual_overtime = self.get_overtime - self.get_midnight_overtime\n#\n# if usual_overtime >= 0\n# return usual_overtime\n# else\n# return 0\n# end\n end",
"def avg_time_lap\n times = []\n self.voltas.each { |lap_num, lap_stat| times << time_in_ms(lap_stat[:tempo])}\n return ms_to_min(get_avg(times))\n end",
"def find_time_asleep\n @total_time_asleep = 0\n @wake_times.each_with_index do |wake_time, index|\n @total_time_asleep += ( wake_time - @sleep_times[index] )\n end\n end",
"def time_between_failures(since_time, unit: :second)\n total_uptime = 0\n total_failures = 0\n tbf = { point_values: {}, moving_averages: {} }\n\n # Unplanned cards created after since_time, in order of creation.\n unplanned_cards = cards.where('cards.created_at > ?', since_time).where('cards.label = ?', Card::LABEL_UNPLANNED).order('cards.created_at DESC')\n\n unplanned_cards.each_with_index do |unplanned_card, index|\n previous_unplanned_card = unplanned_cards[index + 1]\n\n if previous_unplanned_card && previous_unplanned_card.completed_at\n uptime = unplanned_card.created_at - previous_unplanned_card.completed_at\n total_failures += 1\n total_uptime += time_in_unit(uptime, unit)\n\n tbf[:point_values][unplanned_card.created_at] = time_in_unit(uptime, unit)\n tbf[:moving_averages][unplanned_card.created_at] = total_uptime / total_failures\n end\n end\n\n tbf\n end",
"def average_missed_call_duration\n average_duration(missed_contacts, 'arrived', 'call_ended')\n end",
"def closest_tide(time)\n time = time.class == Time ? time : Time.new(time)\n @data.min_by { |x| (DateTime.parse(x['utctime']).hour - time.hour).abs }\n end",
"def get_average_time(start_station, end_station)\n \n end",
"def overtime\n if is_finished\n is_overtime ? overtime_minutes : -overtime_minutes\n else\n 0\n end\n end",
"def average_completion_time(task)\n this_time = average_time(task)\n if task.position == 1\n return time_difference(this_time, task.episode.start_time.to_f)\n else\n previous_time = average_time(Task.find(:first, :conditions => [\" episode_id = ? AND position = ? \", \n task.episode_id, (task.position - 1)]))\n return time_difference(this_time, previous_time)\n end\n end",
"def snap_previous(time)\n delta = self.class.new(\n :hours => time.hour,\n :minutes => time.min,\n :seconds => time.sec\n )\n\n period_types = [:seconds, :minutes, :hours]\n while period_type = period_types.shift\n next unless data.has_key?(period_type) && data[period_type] != 0\n\n delta.data[period_type] = delta.data[period_type] % data[period_type]\n period_types.each {|pt| delta.data[pt] = 0 }\n end\n\n delta.reverse!\n time.advance(delta.data).change(:usec => 0)\n end",
"def calcMeanObliquityOfEcliptic(t)\n seconds = 21.448 - t*(46.8150 + t*(0.00059 - t*(0.001813)))\n 23.0 + (26.0 + (seconds/60.0))/60.0 # in degrees\n end",
"def calc_average_distance(time_period)\n return 0 if time_period.size <= 1 # Need at least 2 cars\n sum = 0\n for i in 0...(time_period.size - 1)\n sum += calc_distance(time_period[0], time_period[1])\n end\n return sum / (time_period.size - 1)\n end",
"def calculate_suns_mean_anomaly(t)\n (0.9856 * t) - 3.289\n end",
"def remaining_time\n distance_of_time_in_words promised_for, Time.now\n end",
"def update_time_worked\n\n time_entries = self.time_card.time_entry\n total_hours = 0\n # Ordering by time so the total hours can be calculated.\n # An odd array will have [start_time1, end_time1, start_time2]\n # and the total_hours_worked = start_time1 - end_time1\n # An even array will have [start_time1, end_time1, start_time2, end_time 2]\n # and the total_hours_worked = (start_time1 - end_time1) + (start_time2 - end_time2)\n\n time_entries = time_entries.order(time: :asc)\n time_entries_count = time_entries.count\n\n i = 0\n while i < time_entries_count and i+1 < time_entries_count\n start_time = time_entries[i].time\n end_time = time_entries[i+1].time\n i = i+2\n total_hours = total_hours + time_diff(start_time, end_time).to_i\n end\n\n if time_entries_count == 1\n self.time_card.update_attribute(:total_hours_worked, 0)\n else\n self.time_card.update(:total_hours_worked => total_hours)\n end\n\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns absolute and moving-average values of time between failures (from completion of the previous unplanned card to creation of the next unplanned card) since the supplied time. | def time_between_failures(since_time, unit: :second)
total_uptime = 0
total_failures = 0
tbf = { point_values: {}, moving_averages: {} }
# Unplanned cards created after since_time, in order of creation.
unplanned_cards = cards.where('cards.created_at > ?', since_time).where('cards.label = ?', Card::LABEL_UNPLANNED).order('cards.created_at DESC')
unplanned_cards.each_with_index do |unplanned_card, index|
previous_unplanned_card = unplanned_cards[index + 1]
if previous_unplanned_card && previous_unplanned_card.completed_at
uptime = unplanned_card.created_at - previous_unplanned_card.completed_at
total_failures += 1
total_uptime += time_in_unit(uptime, unit)
tbf[:point_values][unplanned_card.created_at] = time_in_unit(uptime, unit)
tbf[:moving_averages][unplanned_card.created_at] = total_uptime / total_failures
end
end
tbf
end | [
"def average_execution_time\n successes = Timberline.redis.xmembers(attr(\"success_stats\")).map { |item| Envelope.from_json(item)}\n times = successes.map do |item|\n if item.finished_processing_at\n item.finished_processing_at.to_f - item.started_processing_at.to_f\n elsif item.fatal_error_at\n item.fatal_error_at.to_f - item.started_processing_at.to_f\n else\n nil\n end\n end\n times.reject! { |t| t.nil? }\n if times.size == 0\n 0\n else\n times.inject(0, :+) / times.size.to_f\n end\n end",
"def average_time()\n sum = 0\n if all_rides.length != 0\n all_rides().each { |ride| sum += ride.completion_time() }\n return sum / (all_rides().count)\n else\n return 0\n end \n end",
"def time_to_repair(since_time, unit: :second)\n total_repair_time = 0\n total_repairs = 0\n ttr = { point_values: {}, moving_averages: {} }\n\n # Unplanned cards created after since_time, in order of creation.\n cards.where('cards.created_at > ?', since_time).where('cards.label = ?', Card::LABEL_UNPLANNED).order('cards.created_at DESC').map do |unplanned_card|\n lead_time = unplanned_card.lead_time\n\n if lead_time > 0\n total_repairs += 1\n total_repair_time += time_in_unit(lead_time, unit)\n\n ttr[:point_values][unplanned_card.created_at] = time_in_unit(lead_time, unit)\n ttr[:moving_averages][unplanned_card.created_at] = total_repair_time / total_repairs\n end\n end\n\n ttr\n end",
"def mean_time_to_failure_in_minutes\n return @mean_time_to_failure_in_minutes\n end",
"def average_time_taken\n total = student_tests.map{ |x| x.time_taken }.inject(0, :+)\n total / student_tests.size\n end",
"def calculate_local_mean_time_of(h, ra, t)\n h + ra - (0.06571 * t) - 6.622\n end",
"def calculateAccumulatedAverageTime(task_results)\n accumulatedAverageTime = [0.0]\n task_results.each do |result|\n length = accumulatedAverageTime.length\n accumulatedAverageTime[length] = (accumulatedAverageTime[length-1] + result.time) / length\n end\n return accumulatedAverageTime\n end",
"def average_missed_call_duration\n average_duration(missed_contacts, 'arrived', 'call_ended')\n end",
"def average_completion_time(task)\n this_time = average_time(task)\n if task.position == 1\n return time_difference(this_time, task.episode.start_time.to_f)\n else\n previous_time = average_time(Task.find(:first, :conditions => [\" episode_id = ? AND position = ? \", \n task.episode_id, (task.position - 1)]))\n return time_difference(this_time, previous_time)\n end\n end",
"def average_time_taken\n total = answers.map{ |x| x.time_taken }.inject(0, :+)\n total / answers.size\n end",
"def calculateAverageTime(task_results)\n averagetime = 0.0\n counter = 0.0\n task_results.each do |result|\n counter += 1.0\n averagetime += result.time\n end\n averagetime /= counter\n return averagetime\n end",
"def set_failed_count_and_average\n @failed_count = 0\n sum = count = 0\n @ping_times.each do |t|\n if t == -1\n @failed_count += 1\n else\n sum += t\n count += 1\n end\n end\n\n @average = if count > 0\n sum / count\n else\n 0\n end\n end",
"def get_overtime\n return 0 if self.start.nil? or self.end.nil?\n\n if self.off_am?\n standard_wktime = Timecard.get_standard_wktime_when_off_am\n elsif self.off_pm?\n standard_wktime = Timecard.get_standard_wktime_when_off_pm\n else\n standard_wktime = Timecard.get_standard_wktime\n end\n\n return 0 if standard_wktime <= 0\n\n return self.get_actual_wktime - standard_wktime\n\n# overtime = self.get_actual_wktime - standard_wktime\n#\n# if overtime >= 0\n# return overtime\n# else\n# return 0\n# end\n end",
"def overtime\n if is_finished\n is_overtime ? overtime_minutes : -overtime_minutes\n else\n 0\n end\n end",
"def final_avg\n @wait_time.to_i / @avg_count.to_i # return average\n end",
"def calculate_time(end_time, start_time, action_type)\n action_time = end_time - start_time\n self.verify_performance_goal(action_time, action_type)\n return action_time\n end",
"def average_response_time\n return summary_average[:average_response_time]\n end",
"def at_time(time)\n resource = @initial\n\n puts \"[START] initial: #{initial}\" if COPIOUS_DEBUGGING\n puts \"[START] time: #{time}\" if COPIOUS_DEBUGGING\n\n if intervals_at_time(@harvesters, time).size > 1\n generate_pairs(intervals_at_time(@harvesters, time)).each_with_index do |pair, index|\n puts \"[DEBUG] #{index}: STARTING LOOP\" if COPIOUS_DEBUGGING\n start_time = pair[0]\n next_time = pair[1]\n puts \"#{index}: start_time: #{start_time}\" if COPIOUS_DEBUGGING\n\n harvesters = @harvesters[start_time]\n puts \"#{index}: harvesters: #{harvesters}\" if COPIOUS_DEBUGGING\n\n period = next_time - start_time\n puts \"#{index}: period: #{period}\" if COPIOUS_DEBUGGING\n\n harvested = time_to_resource(period, harvesters)\n resource += harvested\n puts \"#{index}: harvested: #{harvested}\" if COPIOUS_DEBUGGING\n\n consumed = consumed_in_interval(start_time, next_time)\n resource -= consumed\n puts \"#{index}: consumed: #{consumed}\" if COPIOUS_DEBUGGING\n\n puts \"#{index}: resource: #{resource}\" if COPIOUS_DEBUGGING\n end\n else\n resource -= consumed_at_time(0)\n puts \"0: resource: #{resource}\" if COPIOUS_DEBUGGING\n end\n\n if resource < 0\n raise(InvalidEconomyError, \"The Starcraft economy does not allow deficit spending (deficit of #{resource} #{@type} at #{time} seconds).\")\n end\n\n puts \"[FINISH] at_time: #{resource}\" if COPIOUS_DEBUGGING\n resource\n end",
"def get_usual_overtime\n\n overtime = self.get_overtime\n midnight_overtime = self.get_midnight_overtime\n\n if overtime <= 0\n return overtime\n elsif overtime <= midnight_overtime\n return 0\n else\n return (overtime - midnight_overtime)\n end\n\n# usual_overtime = self.get_overtime - self.get_midnight_overtime\n#\n# if usual_overtime >= 0\n# return usual_overtime\n# else\n# return 0\n# end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
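Both time_to_repair and time_between_failures above build their moving averages by keeping a running total and dividing by a running count. A minimal standalone sketch of that bookkeeping in Ruby (the sample durations are invented for illustration):

durations = [120, 300, 60] # seconds, hypothetical gaps between failures
total = 0.0
moving_averages = durations.each_with_index.map do |duration, index|
  total += duration
  total / (index + 1) # running average after each data point
end
moving_averages # => [120.0, 210.0, 160.0]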
Returns a hash containing timestamps and details from ServerLog and Card records | def logs_and_cards_with_timestamp(since_time)
required_logs = server_logs.where('server_logs.created_at > ?', since_time).order('server_logs.created_at DESC')
required_cards = cards.where('cards.created_at > ?', since_time).order('cards.created_at DESC')
formatted_logs_info = required_logs.inject({}) do |specific_log_info, required_log|
specific_log_info[required_log.timestamp] = required_log.log.join(',')
specific_log_info
end if required_logs.count > 0
formatted_cards_info = required_cards.inject({}) do |specific_card_info, required_card|
specific_card_info[required_card.created_at.to_i.to_s] = "<a href=#{required_card.url}>Trello URL</a>"
specific_card_info
end if required_cards.count > 0
{ logs: formatted_logs_info, cards: formatted_cards_info }
end | [
"def hash\n @timestamp.hash\n end",
"def hash\n data = [store.id, user.id, timestamp].join('$')\n Base64.encode64(OpenSSL::HMAC.digest('SHA256', store.private_key, data)).chomp\n end",
"def timestamp\n @packet.timestamp\n end",
"def calculate_checksum\n last_checksum = previous_event&.checksum\n attrs = attributes.except(\"checksum\", \"id\", \"updated_at\").merge(last_checksum: last_checksum)\n cs = Digest::SHA256.hexdigest(attrs.to_s)\n puts \"#{id} calculate_checksum: #{cs} <- #{attrs} \" if Rails.env.development?\n Rails.logger.info(\"#{id} calculate_checksum: #{cs} <- #{attrs} \")\n return cs\n end",
"def hash\n # TODO: Produce a hash (using default hash method) of the credit card's\n # serialized contents.\n # Credit cards with identical information should produce the same hash.\n to_s.hash\n end",
"def message_sha\n Digest::SHA2.hexdigest \"#{message_size}#{message_date}#{message_id}\"\n end",
"def hash\n # TODO: implement this method\n # - Produce a hash (using default hash method) of the credit card's\n # serialized contents.\n # - Credit cards with identical information should produce the same hash\n to_s.hash\n end",
"def h2(msg)\n RbNaCl::Hash.sha256 RbNaCl::Hash.sha256 \"\\0\" * 64 + msg\nend",
"def snapshot_create_time\n data[:snapshot_create_time]\n end",
"def get_server_timestamp\n request :get_server_timestamp\n end",
"def get_fingerprint\n padding = 2\n hostname = Socket.gethostname\n hostid = hostname.split('').inject(hostname.length+BASE) do |a,i|\n a += (i.respond_to? \"ord\") ? i.ord : i[0]\n end\n return format($$, padding) + format(hostid, padding)\n end",
"def analyze_server_log(log_record)\n\tlog_arr = Array.new(5)\n\tlog_arr[SDDR] = log_record.slice!(/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}/)\n\tmtch = log_record.match(/(\\[.*\\])\\s(\\\"[^\"]*\\\")\\s(\\d{3})\\s(\\d*|-)\\s(\\\"[^\"]*\\\")\\s(\\\"[^\"]*\\\")/)\n\tlog_arr[DATETIME] = mtch[1]\n\tlog_arr[PAYLOAD] = mtch[2]\n\tlog_arr[STATUS] = mtch[3]\n\tlog_arr[CONTENT] = mtch[6]\n\treturn log_arr\nend",
"def hash\n @digest = Digest::SHA256.new\n @digest.update @submitter\n @digest.update @period\n @digest.update @random\n return @digest.hexdigest\n end",
"def get_message_formatted(raw, time)\r\n { :data => hex_string_to_numeric_bytes(raw), :timestamp => time }\r\n end",
"def server_info\n {\n server:\n {\n hostname: hostname,\n os_release: os_release,\n collection_time: timestamp\n }\n }\n end",
"def get_tools_version_info\n timestamp, sha1 = `git log -1 --pretty='%at,%h'`.strip.split(',')\n\n [ Time.at(timestamp.to_i), sha1 ]\nend",
"def replyHash\n puts \"#{@hash}-#{@clientIP}/#{@clientPort}#{RSConfig.instance.location}\"\n Digest::MD5.hexdigest \"#{@hash}-#{@clientIP}/#{@clientPort}#{RSConfig.instance.password}\"\n end",
"def get_bp_stamp\n @RESPONSE_HASH['BP_STAMP']\n end",
"def hashver\n major = fingerprint[0...7].to_i(16).to_s\n minor = fingerprint[7...14].to_i(16).to_s\n patch = fingerprint[14...21].to_i(16).to_s\n\n \"#{major}.#{minor}.#{patch}\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parse the command line arguments; example: '-c 23 -f text.csv' => :count = 23 and :path = 'text.csv' | def parse_arguments
OptionParser.new do |parser|
# parser.banner = "Usage: init.rb -c <integer>"
parser.on("-c", "--count COUNT", Integer, "Specify number of uuid's to generate") do |c|
@options[:count] = c
end
parser.on("-f", "--file FILE", "Specify path to save csv file example -f '/path/to/file.csv'") do |path|
@options[:path] = path
end
end.parse!
end | [
"def parse_arg\n url = ARGV[1]\n base_url = url.split('/')[0..-2].join('/')\n channel_name = url.split('/')[-2]\n timestamp = (url.split('/')[-1][1..-1]).to_f / 1_000_000\n count = ARGV[2].to_i || 1000\n {\n token: ARGV[0], base_url: base_url, channel_name: channel_name,\n oldest: timestamp, count: count\n }\nend",
"def process_command_line\r\n\t\twere_inputs_ok = true\r\n\t\t@nominal = ARGV.shift.to_f\r\n\t\tcsv_file_name = ARGV.shift\r\n\r\n\t\tunless File.exists?(csv_file_name) && File.file?(csv_file_name) && File.readable?(csv_file_name)\r\n\t\t\tputs \"FAILURE: csv_file_name #{csv_file_name} does not exist, is not a proper file, or is not readable...\"\r\n\t\t\twere_inputs_ok = false\r\n\t\tend\r\n\r\n\t\t#unless \"csv\".equals(File.extname(csv_file_name))\r\n\t\tunless \".csv\" == File.extname(csv_file_name)\r\n\t\t\tputs \"FAILURE: csv_file_name #{csv_file_name} extension (#{File.extname(csv_file_name)}) is not '.csv'.\"\r\n\t\t\twere_inputs_ok = false\r\n\t\tend\r\n\r\n\t\t# TODO: Allow for cmd line flag [-o <output_filename>] to set output_file_name\r\n\t\t# TODO: Allow for cmd line flag [-a <appending_kml>] to specify a kml file into which\r\n\t\t# to append the output, just before closing </Document> </kml> tags\r\n\t\t# generate default output_file_name based on the csv_file_name base\r\n\t#\tunless output_file_name\r\n\t\t\t@base_name = File.basename(csv_file_name, \".*\")\r\n\t\t\toutput_file_name = File.join(File.dirname(csv_file_name), \"changes.#{@base_name}.kml\")\r\n\t\t\tputs \"INFO: Output KML Tour will be written to file #{output_file_name}\"\r\n\t#\tend\r\n\r\n\t\t# TODO?: Allow for cmd line flag [-n <min_fraction>] to set @fraction_min\r\n\t\t# TODO?: Allow for cmd line flag [-x <max_fraction>] to set @fraction_max\r\n\t\t# TODO: Allow for cmd line flags to set fraction levels as an array of +/- variances of nominal\r\n\t\t#\te.g. (not sure of format): [-l \"3%, 5%, 7%\"]\r\n\t\t#\twould set up 7 bins: ...-7%...-5%...-3%...+3%...+5%...+7%...\r\n\t\t#\t bin bin bin bin bin bin bin\r\n\t\t# TODO: Allow for cmd line flags to set duration between timestamps (@tour_step_duration), i.e. [-s 0.2]\r\n\t\t\r\n\t\tif were_inputs_ok \r\n\t\t\t@csv = File.open(csv_file_name, \"r\")\r\n\t\t\t@kml = File.open(output_file_name, \"a+\")\r\n\t\t\t@summary = File.open(output_file_name, \"a+\")\r\n\t\t\t# TODO: In the case of -a append to kml, I will need to position the\r\n\t\t\t# write/insert pointer into the correct location before proceeding.\r\n\t\tend\r\n\t\twere_inputs_ok \r\n\tend",
"def params\n\n script = ARGV.shift\n method = ARGV.shift\n infile = ARGV.shift\n\n if infile.nil?\n STDERR.puts \"You must provide an input file\"\n exit -1\n elsif !File.exists? infile\n STDERR.puts \"#{infile} does not exist. You must provide a valid input file\"\n exit -1\n end\n\n if ARGV.empty?\n print \"#{method}(#{infile})\"\n else\n print \"#{method}(#{infile}, #{ARGV.join(', ')})\"\n end\n\n puts; print \"Analyzing #{infile}...\"\n\n result = Sycsvpro::Analyzer.new(infile).result\n puts; print \"> #{result.col_count} cols | #{result.row_count} rows\"\n\n [infile, result, ARGV].flatten\n\n end",
"def parse_cmdline_run_parm()\n\n @cfg['run_testcases'] = []\n if @cfg['run_cases'] == []\n return\n end\n run_testcases = []\n\n\n if @cfg['run_cases'].include?(\",\")\n run_testcases = @cfg['run_cases'].gsub(/\\s+/, \"\").strip().split(',')\n elsif @cfg['run_cases'].include?(\"-\") and @cfg['run_cases'].include?(\".\")\n run_testcases = get_idx_using_range()\n elsif @cfg['run_cases'].include?(\"-\")\n run_testcases = get_idx_using_range()\n elsif @cfg['run_cases'].strip.match(/^\\d+$/)\n r = @cfg['run_cases'].strip\n if r == nil\n run_testcases = []\n else\n run_testcases << r\n end\n # error\n else\n raise StandardError.new(\"ERROR: #{@cfg['run_cases']} - run command line contains invalid paramater\")\n end\n\n @cfg['run_testcases'] = run_testcases.uniq\n\n return\n end",
"def input_parm(args)\n testset = \"smoke\"\n @cfg['run_cases']=[]\n\n args.each do |arg|\n case arg\n when \"--help\", \"-h\"\n help =\n%\" -t, --testset smoke (default), regression or integration\n -r, --run an array of indexes of test cases, which will be executed\n\nExamples:\n ruby main.rb --help\n ruby main.rb --testset integration --run 1,2,3\n ruby main.rb --run 1-7\n\"\n puts help\n when \"--testset\", \"-t\"\n idx = args.index(arg)\n raise StandardError.new('Missing --testset value define!') if args[idx+1] == nil\n testset = args[idx+1]\n when \"--run\", \"-r\"\n idx = args.index(arg)\n raise StandardError.new('Missing --run cases index array define!') if args[idx+1] == nil\n value=args[idx+1]\n @cfg['run_cases'] = value.strip\n end\n end\n if @cfg['run_cases']!= []\n parse_cmdline_run_parm()\n end\n\n return testset\n\n end",
"def parse_args cmd\n args = cmd.strip.split(/\\s+/)\n cmd = args.shift\n if cmd !~ %r{#{CMD_FORMAT.keys.map { |c| \"^#{c}$\" }.join '|'}}\n raise InvalidCommandError, \"Invalid command: #{cmd}.\"\n end\n files = args.slice_before { |co| co =~ /^-/ }.to_a.last.delete_if { |co| co =~ /^-/ }\n files.shift while CMD_FORMAT[cmd].max < files.size\n files.keep_if { |file| file =~ /^\\w+(\\.\\w+)+$/ } if cmd == 'view'\n unless CMD_FORMAT[cmd].include? files.size\n raise RequiredFilesMissingError,\n \"Required #{CMD_FORMAT[cmd].join ' or '} file(s), \" <<\n \"#{CMD_FORMAT[cmd][0] - files.size} missing.\"\n end\n files\n end",
"def initialize\n if valid_arguments?\n @directory = ARGV[0]\n @modification_factor = parse_number(ARGV[1])\n @cancellaction_factor = parse_number(ARGV[2])\n else\n raise \"Wrong input! directroy: #{ARGV[0]}, modification_factor: #{ARGV[1]}, cancellaction_factor: #{ARGV[2]}.\n Please pass arguments in such order: directory name (string), modification factor (integer or float),\n cancellaction factor (integer or float), for example: ruby run.rb directory_name 1 0.4\"\n end\n end",
"def argc! count, syntax = nil\n @params = @params_str.split(/\\s/, count)\n\n return true if @params.length >= count\n\n if syntax\n raise \"At least #{count} parameters required: #{syntax}\"\n else\n raise \"At least #{count} parameters required.\"\n end\n end",
"def test_options_parser\n input_short = '-oC:\\test -v -c pass'.split(\" \")\n input_long = '--output-path=C:\\test --verbose --copy-only pass'.split(\" \")\n\n [input_short, input_long].each do |input|\n options = parse_args(input)\n\n assert_equal('C:\\test', options[:output_folder])\n assert_true(options[:verbose])\n assert_true(options[:copy_only])\n assert_equal(['pass'], input)\n end\n end",
"def test_parseargs_more_than_3\n sim = Simulator.new\n refute sim.parseargs(['1','2','3','4'])\n end",
"def file_read_opts=(_arg0); end",
"def machine_count_top\n infile, result, *others = params\n a_filename = \"A-customers-#{others[0] || File.basename(infile, '.*')}.csv\"\n count_filename = \"A-customers-count-#{others[0] || File.basename(infile, '.*')}.csv\"\n age_filename = \"A-customers-age-#{others[0] || File.basename(infile, '.*')}.csv\"\n count = others[1] || 50\n\n puts; print \"Extracting customers with more than #{count} machines\"\n\n Sycsvpro::Extractor.new(infile: infile,\n outfile: a_filename,\n rows: \"0,1,BEGINn5>=#{count}END\").execute\n\n puts; print \"Extract customer name and machine count\"\n\n Sycsvpro::Extractor.new(infile: a_filename,\n outfile: count_filename,\n cols: \"0,5\").execute\n\n puts; print \"Extract customer name, machine count and age older than 7 years\"\n\n Sycsvpro::Extractor.new(infile: a_filename,\n outfile: age_filename,\n cols: \"0,5,6\").execute\n\n puts;\n puts \"You can find the result in '#{a_filename}', '#{count_filename}' \"+\n \"and '#{age_filename}'\"\n \nend",
"def process_arguments\n @args << \"-h\" if(@args.length < 1)\n \n opts_parse = OptionParser.new do |opts|\n opts.on('-f','--file FILE','use the following local file') {|file| @options.file = File.expand_path(file)}\n opts.on('-p','--parse PARSE',\"sets which set of sider files to download #{@@sections.join(\"|\")}\") {|parse| @options.parse = parse}\n opts.on('-d','--download','download the file to be parsed') {@options.download = true}\n opts.on('-o','--output DIR','set the output directory') {|directory| @options.output = File.expand_path(directory)}\n opts.on('-h','--help',\"prints the help\"){puts opts; exit!}\n end\n \n opts_parse.parse!(@args) rescue raise \"There was an error processing command line arguments use -h to see help\"\n end",
"def read_argv(arguments)\n total = 0\n arguments.each do |number|\n if number.is_number\n total += number.to_f\n else\n puts \"Sorry, something goes wrong\"\n exit -1\n end\n end\n return total, arguments.length\nend",
"def parse_args\n # Create slots for conditions and files\n files = {} \n conditions = {}\n options = DEFAULT_OPTIONS\n output = []\n separator = DEFAULT_SEPARATOR\n\n # Parse input arguments\n force_file = false\n default_options = true\n ARGV.each{|a|\n\n # Test if it's an argument or a file\n if %w{- +}.include?(a[0]) and not force_file then\n \n \n # Check for separator\n if a == '--' then\n # Separator, turn force file mode on\n force_file = true\n elsif a[0] == '+' then\n # condition\n a = a[1..-1] # Remove prefixed dash\n value = DEFAULT_RX\n key = a\n if equals = a.index('=') then\n key = a[0..(equals-1)]\n value = a[(equals+1)..-1]\n end\n\n # Assign to list of things to check\n conditions[key] = value\n elsif a[0] == '-' then\n if default_options\n options = [] \n default_options = false\n end\n\n # option\n long_arg = false\n (1..(a.length-1)).each{|i|\n if not long_arg then\n # Single-char options\n o = a[i]\n \n if OPTIONS.keys.include?(o.to_sym)\n\n # Handle output specially\n if o == 'o' then\n options << :o\n # Tell the loop to ignore further things from this group\n long_arg = true\n \n # Ignore leading equals and cut up by space\n fields = a[(i+1)..-1].gsub(/^=/, '').split(',')\n if fields.length > 0 then\n output += fields\n end\n\n elsif o == 's' then\n options << :s\n \n # Tell the loop to ignore further things from this group\n long_arg = true\n \n # Ignore leading equals and cut up by space\n separator = a[(i+1)..-1].gsub(/^=/, '').to_s\n else\n # Flag option\n options << o.to_sym \n end\n else\n $stderr.puts \"Unrecognised option: #{o}\"\n # force help\n options << :h\n end\n end\n }\n end\n\n\n else\n # file\n if not File.readable?(a) then\n $stderr.puts \"Cannot read #{a}\"\n elsif File.directory?(a) then\n $stderr.puts \"#{a} is a directory\"\n else\n files[a] = File.open(a)\n end\n end\n }\n\n # Check that either -r or -o=field is given\n if options.include?(:r) and output.length > 0 then\n $stderr.puts \"Option -r cannot be used with -o.\"\n options << :h\n end\n\n # Default to stdin\n files['<stdin>'] = STDIN if files.length == 0\n\n # List filename by default if file length over 1\n options << :f if not options.include?(:n) and files.length > 1\n options.delete(:f) if options.include?(:F)\n\n # Construct regex according to options\n rx_flags = 0\n rx_flags |= Regexp::IGNORECASE if options.include?(:i)\n rx_flags |= Regexp::EXTENDED if options.include?(:e)\n rx_flags |= Regexp::MULTILINE if options.include?(:m)\n conditions.each{ |k, v|\n conditions[k] = Regexp.new(v, rx_flags)\n }\n\n return files, conditions, options.uniq, output, separator\nend",
"def parse_file_argument(key)\n if @opts[key].nil? and f=@argv.shift\n @opts[key] = do_file_read(f)\n end\n end",
"def test_parse_file_line_valid_all\r\n input = [\"LET\", \"PRINT\", \"1\", \"-1\", \"200\", \"999999999999999999999999999999999999\", \"+\", \"-\", \"/\", \"*\", \"a\", \"Z\"]\r\n val = @arg_checker.parse_file_line(input)\r\n assert_equal val, input\r\n end",
"def parse(args)\n parser.parse!(args)\n options[:dir] = File.expand_path(args[0]) if args.length == 1\n\n nil\n end",
"def initialize\n raise ArgumentError, \"ARGV is empty\" if ARGV.empty?\n\n @flags = ARGV.select { |f| f.start_with?('-') }.map { |f| f.split('=').map(&:to_sym) } || []\n @raw_flags = ARGV.select { |f| f.start_with?('-') } || []\n @controller = ARGV[0].to_sym unless ARGV[0].start_with?('-')\n @command = ARGV[1].to_sym unless ARGV[1].nil?\n\n return unless ARGV.size > 2\n\n @custom = ARGV[2..ARGV.size].reject { |p| p.start_with?('-') }.map(&:to_sym) || []\n @param = ARGV[2]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
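A self-contained sketch of how the OptionParser wiring in parse_arguments above behaves when handed an argv like the one in the query (the argv values here are invented for illustration):

require 'optparse'

options = {}
OptionParser.new do |parser|
  parser.on("-c", "--count COUNT", Integer, "Number of uuids to generate") { |c| options[:count] = c }
  parser.on("-f", "--file FILE", "Path to save the csv file") { |path| options[:path] = path }
end.parse!(["-c", "23", "-f", "text.csv"])

options # => { count: 23, path: "text.csv" }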
Converts account to a Struct::Account and sets it. | def account=(account)
@account = account ? Struct::Account.new(account) : account
end | [
"def set_account\n unless self.role_before_type_cast == 0\n key = Eth::Key.new\n data = SecureRandom.alphanumeric(8)\n unique_id = self.id.to_s\n\n Client.personal_import_raw_key(key.private_hex,data) rescue nil \n self.account_password = Encrypt_me.call(self.id,self.created_at,data) rescue nil\n self.account_address = self.role_before_type_cast == 0 ? Client.personal_list_accounts[\"result\"][0] : key.address rescue nil #get_new_address(data) \n\n self.save rescue nil\n\n end\n end",
"def assign_account(account_id)\n self.account_id = account_id\n end",
"def setaccount(bitcoinaddress, account)\n bitcoin.setaccount bitcoinaddress, account\n end",
"def setaccount( bitcoinaddress, account )\n {\n method: :setaccount,\n params:[bitcoinaddress, account]\n }\n end",
"def setaccount(bitcoinaddress, account)\n @api.request 'setaccount', bitcoinaddress, account\n end",
"def copy_account_to_user(account)\n return account if account.current_user.nil?\n\n account.current_user.phone_number = account.contact_number\n account.current_user.email_address = account.email_address\n account.current_user.email_address_confirmation = account.email_address_confirmation\n account\n end",
"def set\n Function.new do |session, code|\n find_account_and_set_session(session).call(:account, code: code)\n end\n end",
"def account\n @account = current_account_settings\n end",
"def setaccount(namecoin_address, account)\n request :setaccount, namecoin_address, account\n end",
"def update_defaults_from_account(account)\n self.name = account.name if name.blank?\n self.email = account.email if email.blank? && !email_changed?\n end",
"def modify_account(options = {})\n hash = options.dup\n response_object = JSON.parse(patch('/account/', hash).body)\n parse_hash_to_struct(response_object)\n end",
"def user_account=(value)\n @user_account = value\n end",
"def setaccount(dogecoinaddress, account)\n dogecoin.setaccount dogecoinaddress, account\n end",
"def padma_account_setted_correctly\n # refresh_cached_account_if_needed\n return if self.padma_account.nil?\n unless padma_account.is_a?(PadmaAccount)\n raise 'This is not a account!'\n end\n if padma_account.name != self.account_name\n if self.account_name.nil?\n # if they differ because account_id is nil we set it here\n self.account_name = self.padma_account.name\n else\n raise 'This is the wrong account!'\n end\n end\n end",
"def change_account(new_account)\n @current_account = new_account\n end",
"def src_account=(account)\n raise InvalidType.new(MailAccount, account.class) unless account.is_a?(MailAccount)\n @src_account = account\n end",
"def new_account=(account)\n @new_account = self.account_ids.include?(account.id) ? nil : account\n end",
"def consume_account(account)\n account.all_accounts.each do |sub_account|\n sub_account.root_account = self.root_account\n sub_account.save!\n end\n account.parent_account = self\n account.root_account = self.root_account\n account.save!\n account.pseudonyms.each do |pseudonym|\n pseudonym.account = self.root_account\n pseudonym.save!\n end\n end",
"def account\n @account ||= AccountContext.new(self, @domain.client.account_sid)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Converts user to a Struct::User and sets it. | def user=(user)
@user = user ? Struct::User.new(user) : user
end | [
"def user=(value)\n @user = value\n end",
"def user=(value)\n @user = value\n end",
"def set_user\n cu = @current_user\n unless cu.is_a?(User) && cu.persisted? && !cu.disabled\n raise \"Attempting to set user with non user: #{cu.nil? ? 'nil' : cu}\"\n end\n\n write_attribute :user_id, cu.id\n end",
"def set_user\n @form_user = User.new(:email => self.user_email, :first_name => self.user_first_name,\n :last_name => self.user_last_name)\n @user = User.where(:email => self.user_email).first\n @new_user = false\n if @user.nil?\n @new_user = true\n @user = @form_user\n end\n end",
"def set_user(v)\n set_userinfo(v, @password)\n v\n end",
"def user=(user)\n also_save = self.persisted? && !self.changed?\n self.user_uid = user.uid\n @_user = user\n self.save if also_save\n end",
"def user= new_user\n # return unless new_user\n new_user = Twitter::User.adapt(new_user.to_hash) unless new_user.is_a?(Twitter::User)\n self[:user] = new_user\n end",
"def set_user(props={})\n send_people_request(\"$set\", props)\n end",
"def set_User(value)\n set_input(\"User\", value)\n end",
"def rbac_user_set_record_vars(user)\n user.name = @edit[:new][:name]\n user.userid = @edit[:new][:userid]\n user.email = @edit[:new][:email]\n user.password = @edit[:new][:password] if @edit[:new][:password]\n end",
"def user_as_string=(user)\n # reset both either way\n self.user_as_model = self.username = nil\n user.is_a?(::ActiveRecord::Base) ?\n self.user_as_model = user :\n self.username = user\n end",
"def user=(new_user)\n @user = new_user\n return unless @user && @service\n @booking_profile ||= @user.try(:booking_profile_for, @service)\n end",
"def set_wf_user(user = nil)\n role = set_wf_role(user)\n user = role.to_s if user.nil? || user.is_a?(Symbol)\n # noinspection RubyMismatchedReturnType\n @wf_user = user\n end",
"def user_as_string=(user)\n # reset both either way\n self.user_as_model = self.username = nil\n user.is_a?(ActiveRecord::Base) ?\n self.user_as_model = user :\n self.username = user\n end",
"def first_user=(user)\n self.memberships.create(user_id: user.id, admin: true, host: true)\n end",
"def set_User(value)\n set_input(\"User\", value)\n end",
"def update_user_from_auth(auth)\n user = self\n user.username = auth.info.nickname\n user.avatar_url = auth.info.image\n user.location = auth.extra.raw_info.city\n user.country = auth.extra.raw_info.country\n user.name = auth.info.name\n user\n end",
"def user=(value)\n conf['api']['user'] = value\n end",
"def userize #cast_into_user could be a better name\n self.becomes(User)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
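Both account= and user= above assume a Struct subclass that can be built straight from a Hash of attributes. One way to declare such a Struct is sketched below; the member names and the keyword_init choice are assumptions for illustration, not taken from the source:

Account = Struct.new(:id, :name, :email, keyword_init: true)

raw = { id: 1, name: "Acme", email: "ops@example.com" }
account = raw ? Account.new(**raw) : raw
account.name # => "Acme"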
This sets the mode so OpenSSL knows to encrypt or decrypt, etc. | def initialize_cipher_for(mode)
@cipher.send mode
@cipher.key = @config['key']
@cipher.iv = @config['iv']
end | [
"def initialize_cipher_for(mode)\n @cipher.send mode\n @cipher.key = @config[:key]\n @cipher.iv = @config[:iv]\n end",
"def set_ssl(mode = true)\n @fu_ssl = mode\n end",
"def mode\n attributes.fetch(:mode) do\n Ably::Util::Crypto::DEFAULTS.fetch(:mode)\n end.downcase\n end",
"def create_cipher_simple(mode)\n iv,pwd,salt = parse_key(@key)\n\n cipher = OpenSSL::Cipher.new 'AES-128-CBC'\n cipher.send(mode)\n\n cipher.iv = iv\n cipher.key = pwd\n cipher\n end",
"def set_mode(mode)\n case mode\n # when \"+s\"\n # when \"-s\"\n when \"+i\"\n room.lock\n room.locked = true\n client.user_reply :mode, channel,\n current_mode_string, room.membership_limit\n when \"-i\"\n room.unlock\n room.locked = false\n client.user_reply :mode, channel,\n current_mode_string, room.membership_limit\n else\n client.numeric_reply :err_unknownmode,\n \"is unknown mode char to me for #{channel}\"\n end\n end",
"def set_peer_certificate_mode(opts)\n opts = check_params(opts,[:modes])\n super(opts)\n end",
"def verify_mode=(mode)\n unless VERIFY_MODES.include? mode\n raise ArgumentError, \"Invalid SSL verify mode #{mode.inspect}\\n\" +\n \"Please specify one of #{VERIFY_MODES.inspect}\"\n end\n\n @verify_mode = mode\n end",
"def encrypt_with\n log!\n prepare\n\n if mode_options.empty?\n raise Error, \"Encryption could not be performed for mode '#{mode}'\"\n end\n\n yield \"#{utility(:gpg)} #{base_options} #{mode_options}\", \".gpg\"\n ensure\n cleanup\n end",
"def set_mode(m)\n @mode = m\n end",
"def set_ssl_verify_mode(mode)\n @fu_ssl_verify_mode = mode\n end",
"def create_cipher(mode)\n create_cipher_simple(mode)\n end",
"def binary=(newmode); end",
"def virtual_secure_mode=(value)\n @virtual_secure_mode = value\n end",
"def set_mode(mode)\n @modes << mode unless @modes.include?(mode)\n @irc.send \"MODE #{nick} +#{mode}\"\n end",
"def cipher\n aes = OpenSSL::Cipher::Cipher.new(\"aes-#{@key_size}-cbc\")\n\n case @mode\n when :encrypt\n aes.encrypt\n when :decrypt\n aes.decrypt\n else\n raise(InvalidMode,\"invalid mode #{@mode}\")\n end\n\n aes.key = @key\n aes.iv = @iv\n\n yield aes if block_given?\n return aes\n end",
"def encrypt_with\n log!\n yield \"#{utility(:openssl)} #{options}\", \".enc\"\n end",
"def set_ssl_cipher(opts)\n opts = check_params(opts,[:ciphers])\n super(opts)\n end",
"def mode=(mode)\n @mode = mode.to_sym\n end",
"def mode=(mode)\n @mode = mode ? mode.to_sym : nil\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
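The initialize_cipher_for row above only switches an existing cipher into encrypt or decrypt mode and loads its key and iv. A minimal encrypt/decrypt round trip with the same OpenSSL::Cipher API looks like this (the AES-256-CBC algorithm and the payload are assumptions for illustration):

require 'openssl'

cipher = OpenSSL::Cipher.new('aes-256-cbc')
cipher.encrypt                      # equivalent to @cipher.send(:encrypt)
key = cipher.random_key
iv  = cipher.random_iv
ciphertext = cipher.update('secret payload') + cipher.final

decipher = OpenSSL::Cipher.new('aes-256-cbc')
decipher.decrypt
decipher.key = key
decipher.iv  = iv
decipher.update(ciphertext) + decipher.final # => "secret payload"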
set_defaults: self.badge ||= a default placeholder image; we can call the concern here | def set_defaults
self.badge ||= Placeholder.image_generator(height: '150', width: '150')
end | [
"def set_default_values_skill\n self.badge ||= Placeholder.image_generator(height: 250, width: 250) # self.=> is similar to 'this' keyword. referencing this specific skill.\n #if main_image is nil put my default value else put users inputted image value\n #setting default values for images\n end",
"def default_values!\n self.name ||= 'DEFAULT ACHIEVEMENT'\n self.description ||= 'This is a default achievement.'\n end",
"def set_default\n end",
"def badge\n raise NotImplementedError\n end",
"def default!\n self.severity = :DEFAULT\n end",
"def required_defaults; end",
"def set_default_values\n self.status ||= DonationStatus.find(0)\n end",
"def set_option_default(option)\n\t\tvalue = nil\n\t\tcase option\n\t\twhen 'badge'\n\t\t\tvalue = 1\n\t\telse\n\t\t\traise \"Option not supported: #{option_name}\"\n\t\tend\n\t\tdebug_log \"Setting default for option: #{option}\"\n\t\tset_option(option, value)\n\tend",
"def default_values\n self.pmu_type ||= \"uncommitted\"\n self.completed ||= false\n self.max_people ||= 1\n end",
"def set_defaults\n super\n self.seating_capacity ||= 2\n self.vehicle_length ||= 1\n self.gross_vehicle_weight ||= 0\n self.crew_size ||= 2\n self.asset_type ||= AssetType.find_by_class_name(self.name)\n end",
"def badges\n end",
"def set_defaults\n self.balance ||= 0\n end",
"def set_defaults\n self.pet_age = 5 if pet_age.blank?\n self.energy_level = 3 if energy_level.to_i <= 0 || energy_level.to_i > 5\n self.size_id = 1 if size_id.to_i <= 0 || energy_level.to_i > 4\n self\n end",
"def set_defaults\n super\n self.assessed_rating ||= transam_asset ? (transam_asset.condition_updates.last.try(:reported_condition_rating)) : ConditionType.max_rating\n self.asset_event_type ||= AssetEventType.find_by_class_name(self.name)\n end",
"def set_notifiable_class_defaults\n self._notification_targets = {}\n self._notification_group = {}\n self._notification_group_expiry_delay = {}\n self._notifier = {}\n self._notification_parameters = {}\n self._notification_email_allowed = {}\n self._notifiable_action_cable_allowed = {}\n self._notifiable_action_cable_api_allowed = {}\n self._notifiable_path = {}\n self._printable_notifiable_name = {}\n self._optional_targets = {}\n nil\n end",
"def twitter_badge( account, opt = {} )\n\treturn '' unless account\n\t@twitter_badge_setting = {\n\t\t:account => account,\n\t\t:label => (opt[:label] || 'follow-me'),\n\t\t:color => (opt[:color] || '#35ccff'),\n\t\t:side => (opt[:side] || 'right')[0,1],\n\t\t:top => (opt[:top] || 136).to_i,\n\t\t:delay => (opt[:delay] || 5).to_i * 1000,\n\t}\n\t'' # do nothing in this method.\nend",
"def default_values\n #self.clientcontact ||= false #TODO: assign default values from migration \n end",
"def set_default_options\n end",
"def default_options; end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Adds links to notes | def enlaces_notas resumen
mensaje = ""
notas = resumen.notas.order "updated_at DESC"
if notas.count > 0
notas.each_with_index do |nota,i|
mensaje += link_nota nota
mensaje += "-" if i < resumen.notas.count-1
end
end
return mensaje
end | [
"def comisiones_asignadas\n asunto.comisiones if asunto\n end",
"def notificaciones\n end",
"def trg_soma_itens\n self.pedido.gerenciar_acoes\n self.pedido.save\n end",
"def solicitudes_atrasadas\n end",
"def guarda_nombres_comunes_todos\n dame_nombres_comunes_todos\n\n if x_nombre_comun_principal.present?\n a = adicional ? adicional : Adicional.new(especie_id: id)\n a.nombres_comunes = x_nombres_comunes.encode('UTF-8', {invalid: :replace, undef: :replace, replace: ''})\n a.nombre_comun_principal = x_nombre_comun_principal.force_encoding(\"UTF-8\")\n\n if a.changed?\n a.save\n reload\n end\n end\n end",
"def ingresar_menores\n if @pila_aux[:tope]!=nil\n for i in 1..@pila_aux[:size]\n valor = @pila_aux[:tope][:valor]\n nodo = {\n valor:valor,\n siguiente: nil\n }\n tope = @pila[:tope]\n nodo[:siguiente]=tope\n @pila[:tope]=nodo\n siguiente = @pila_aux[:tope][:siguiente]\n @pila_aux[:tope]=siguiente\n @pasos.push(mostrar_pila())#pasos\n end\n end\n end",
"def contando\n tags = Tag.find(:all)\n \n for tag in tags\n tag.ocurrencias = contador tag.descripcion \n tag.enlazados = Enlace.count(:all, :conditions =>['tag_id = ?', tag.id])\n \n tag.save\n end \n end",
"def carta_extra_info(idiom)\n case \n when( 'es' == idiom and !self.importacion_id? )\n \"de las busquedas solicitadas\"\n when( 'es' == idiom and self.importacion_id? )\n \"en la gaceta #{self.importacion.publicacion} de fecha #{ I18n.l self.importacion.publicacion_fecha }\"\n when 'en' == idiom\n if self.importacion_id?\n I18n.locale = :en\n text = \"in gaceta #{self.importacion.publicacion} from date #{ I18n.l self.importacion.publicacion_fecha }\"\n I18n.locale = :es\n else\n text = \"in your requested trademarks search\"\n end\n text\n end\n end",
"def titulo_guia\n titulo = []\n\n t = Especie.find(params[:especie_id])\n a = t.adicional\n\n tipo_region = params[:tipo_region] == \"anp\" ? \"ANP \" : \"Municipio de \"\n \n if a.nombre_comun_principal.present?\n titulo[0] = \"Guía de #{a.nombre_comun_principal}\"\n else\n titulo[0] = \"Guía de #{t.nombre_cientifico}\"\n end\n\n titulo[1] = tipo_region + params[:nombre_region]\n titulo\n end",
"def complemento_remessa\n\t\t\t\t\t\"#{conta.conta_corrente[-1]}#{conta.conta_corrente_dv}\"\n\t\t\t\tend",
"def annonce_perso\n clear\n notice \"=== Annonce sur philippeperret.fr ===\"\n if informations[:annonce_perso]\n yesNo(MSG(:already_annonce_on_perso)) || return\n end\n\n notice <<-EOT\n(la diffusion — publique — de la vidéo a été contrôlée)\n\nJe vais ouvrir le site perso, à la rubrique Scrivener.\nEt je t'indique ensuite la démarche à suivre.\n\n EOT\n Clipboard.copy(titre)\n sleep 2.5\n Clipboard.copy(video_url)\n sleep 2.5\n open_site_perso\n\n\n notice <<-EOT\n\nPour procéder à l'opération :\n\n * passe en édition à l'aide du lien 'connexion' tout\n en bas de page,\n * repère ou crée la rubrique où peut aller ce nouveau\n tutoriel,\n * duplique l'élément qui sert d'interligne entre les\n tutoriels,\n * duplique un tutoriel proche,\n Attention : la duplication n'est pas facile : il faut\n glisser la souris sur l'élément jusqu'à voir apparaitre\n 'Modifier les colonnes', puis cliquer sur ce texte,\n et déplacer à l'aide de la poignée,\n * déplace-le à l'endroit voulu,\n * passe-le en édition,\n * sélectionne la vidéo et change l'url pour avoir la nouvelle\n (que j'ai mise dans le presse-papier),\n * sélectionne le texte et remplace-le par le titre\n “#{titre}”\n que j'ai placé dans PasteBox,\n * supprime le style (gomme) et mets la taille à 28px,\n * lie-le avec le lien :\n #{video_url}\n que j'ai aussi placé dans PasteBox.\n\n EOT\n\n # Marquer l'annonce déposée ?\n if yesNo(\"Dois-je marquer la publication faite sur ton site perso ?\")\n informations.set(annonce_perso: true)\n save_last_logic_step\n end\n\n end",
"def ingresar_detalle_en_venta(detalle_venta,venta)\n if venta[:esta_vacio]\n venta[:tope] = detalle_venta\n venta[:final] = detalle_venta\n venta[:esta_vacio] = false\n venta[:size] +=1\n venta[:max]-=1\n else\n final = venta[:final]\n final[:siguiente] = detalle_venta\n venta[:final] = detalle_venta\n venta[:size]+=1\n venta[:max]-=1\n end\nend",
"def correos_interesados(l_incluir_ciudadano = true)\n correos = []\n\n #usuarios unidad informacion\n usuarios_udip = self.institucion.usuarios.activos.udip\n usuarios_udip.each { |u|\n correos << u.email unless u.email.empty?\n }\n\n #ciudadano si hay correo\n if l_incluir_ciudadano\n correos << self.email unless self.email.empty?\n end\n\n #enlaces\n self.enlaces.each { |e|\n correos << e.email unless e.email.empty?\n }\n\n return correos\n end",
"def create_aminos\n\t\t\tnas = naseq\n\t\t\t@aa_first_pos = nas.translate.to_s.strip\n\t\t\t@aa_second_pos = nas.translate(2).to_s.strip\n\t\t\t@aa_third_pos = nas.translate(3).to_s.strip\n\t\tend",
"def index\n @notificacoes = Notificacao.nao_vista\n end",
"def set_listas\n #@locais = Local.all.map{|l| [l.nome,l.id]}\n @locais = Local.all\n @periodos = ['Manhã','Tarde','Noite']\n @publicos = ['Infantil','Adulto']\n end",
"def proximas_entregas(entregas)\n entregas_finalizadas = []\n entregas.each { |entrega|\n if Time.parse(entrega.fecha_fin) > Time.now\n entregas_finalizadas << entrega\n end\n }\n entregas_finalizadas\n end",
"def mostrar_calificaciones(entregas)\n text = \"Las entregas para *#{@curso.nombre}* actualmente se encuentran en el siguiente estado:\\n\"\n entregas.each_with_index { |entrega, index|\n text += \" #{index}) Entrega: *#{entrega.nombre}*\\n Nota: *#{Estudiante.new(@ultimo_mensaje.usuario.id_telegram).consultar_nota_entrega(entrega)}*\\n\" \n }\n @@bot.api.send_message(chat_id: @ultimo_mensaje.usuario.id_telegram, text: text, parse_mode: 'Markdown')\n end",
"def entregas\n # Se refreca cada vez que se consultan las entregas consultando moodle para evitar almacenar para siempre jamas las entregas que se consultaron por primera vez\n @entregas = []\n datos_curso = @@moodle.api('mod_assign_get_assignments', 'courseids[0]' => @id_curso)\n entregas = datos_curso['courses'][0]['assignments']\n\n entregas.each { |entrega|\n fecha_convertida = Time.at(entrega['duedate'].to_i).strftime('%Y-%m-%d %H:%M:%S')\n @entregas << Entrega.new(entrega['id'], fecha_convertida, entrega['name'])\n if entrega['intro']\n @entregas.last.descripcion = entrega['intro']\n end\n }\n\n @entregas\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Trigger auto approve update rescue task Author: Aniket Date: 06/07/2018 Reviewed By: | def trigger_auto_approve_update_rescue_task(user_extended_details_id)
BgJob.enqueue(
AutoApproveUpdateJob,
{
client_id: @client_id,
reprocess: 1,
user_extended_details_id: user_extended_details_id
}
)
Rails.logger.info("---- enqueue_job AutoApproveUpdateJob for ued_id-#{user_extended_details_id} done")
end | [
"def auto_approve!\n update_attribute(:approved_at,DateTime.now)\n update_attribute(:approved_by, User.APPLICATION)\n write_key!\n end",
"def auto_approve\n if !self.auto_approved && self.approval_status.nil?\n UserMailer.auto_approved_email(self).deliver\n self.auto_approved = true\n self.approval_status = true\n self.save\n end\n end",
"def approve!\n MTurkUtils.approve self.task\n end",
"def auto_approve_type\n 'auto'\n end",
"def approve\n user = User.current\n assigned_was = @issue.assigned_to\n assigned_new = params[:assigned_to_id]\n status_was = @issue.status\n status_next_available = @issue.new_statuses_allowed_to(User.current)\n notes = params[:notes]\n\n @configs = configs('approve')\n # Rails.logger.info \"Rules of Approval: \\n\" + @configs.to_s\n @issue.assigned_to_id\n end",
"def approve_all!\n run_task_job ApproveProcessor\n end",
"def alteration_approve\n UserNotifier.deliver_in_process(self, user)\n end",
"def auto_approve\n user=photo.user or return nil\n if user.admin?\n photo.approver_id=user.id\n photo.status = Photo::STATUS_APPROVED\n end\n end",
"def approve\n associate_or_create_identity\n self.update!(approval_state: ApprovalState::APPROVED)\n send_approval_email\n end",
"def escalate_approval\n if !self.user.manager_id.nil?\n if self.manager.manager_id.nil?\n self.auto_approve()\n else\n UserMailer.escalate_approval_email(self).deliver\n self.is_escalated = true\n self.save\n end\n end\n end",
"def approve!\n self.approved_at = Time.now\n registration.approve\n save!\n end",
"def enable_auto_approve\n add option: \"-auto-approve=true\"\n end",
"def approve_user\n # set the enabled flag to true for the user\n # send out approval notification\n end",
"def approve\n res = submission_details.approve(self)\n update_attribute(:approved, res)\n res\n end",
"def approve task\n return unless task.mturk_hit\n approve_remaining_assignments task\n\n if task.task_response and task.task_response.approved.nil?\n task.task_response.approved = true\n task.task_response.save!\n end\n end",
"def approve\n\t\tif @interest_point.update({approver_id: current_user.id, approved_at: DateTime.now})\n\t\t\tflash[:notice] = \"This interest point is approved!\"\n\t\telse\n\t\t\tflash[:error] = \"Something went wrong! #{get_all_errors}\"\n\t\tend\n\t\t\tredirect_to admin_interest_point_path(@interest_point)\n\tend",
"def autoapprove_check\n if membership_type.autoapprove?\n autoapprove!\n end\n end",
"def update_auto_approve_setting\n service_response = ClientManagement::UpdateAutoApproveSetting.new(params).perform\n render_api_response(service_response)\n end",
"def approve\n @company = Company.find( params[:company_id])\n #@user = @company.users.find( params[:user_id])\n #@timeoff = @user.time_offs.find( params[:id])\n @timeoff = TimeOff.find( params[:id])\n #@user = @user.find( @timeoff.user_id)\n if @timeoff.state == 2\n user_t = User.find( @timeoff.user_id)\n user_t.time_off_days -= @timeoff.total_credits\n user_t.save!(:validate => false)\n end\n @timeoff.update_attribute :state, 1\n @timeoff.update_attribute :color, '#33FF33'\n UserMailer.send_email_time_off_result(@timeoff.user, current_user(@company.slug), \"Approved\", @timeoff.start_at, @timeoff.end_at).deliver\n redirect_to manage_company_user_time_offs_path( @company, User.find( params[:user_id]))\n #redirect_to \n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Check if the function param is defined as an output for the action in the ddl | def contains_output?(output)
@ddl[:output].keys.include?(output)
end | [
"def is_output_file?\n self.data_type == 'outputs' && self.parameter_type.match(/File/).present?\n end",
"def has_output?\n return !@outputs.empty?\n end",
"def action_argument_required?\n !AVAILABLE_ACTIONS.empty?\n end",
"def available_action?(action_name); end",
"def validate_output(env, receiver, value)\n _output = get_type(env, output, receiver)\n unless _output.match(env, value)\n raise MethodInterfaceError.new(:output, name, [_output], [value])\n end\n end",
"def returns_something?; @return_type || !@returned_arguments.empty? end",
"def params_ok?(*) end",
"def parameter_rule?\n false\n end",
"def collect_output?\n collect_output == true\n end",
"def validate_output(obj)\n validate_type(output, obj)\n end",
"def valid_routine?(action)\n !fetch_routine_config(action).nil?\n end",
"def process_hasoutput_output(params)\n process_with_output_tag = params.fetch(:process_with_output_tag, \"thisprocess\") # some one-word name\n output_value = params.fetch(:output_value, nil)\n output_value_column = params.fetch(:output_value_column, nil)\n output_type = params.fetch(:output_type, SIO[\"information-content-entity\"][self.sio_verbose])\n output_type_column = params.fetch(:output_type_column, nil)\n output_type_label = params.fetch(:output_type_label, \"measurement-value\")\n output_type_label_column = params.fetch(:output_type_label_column, nil)\n output_value_datatype = params.fetch(:output_value_datatype, \"xsd:string\")\n output_value_datatype_column = params.fetch(:output_value_datatype_column, nil)\n output_comments_column = params.fetch(:output_comments_column, nil)\n output_start_column = params.fetch(:output_start_column, nil)\n output_end_column = params.fetch(:output_end_column, nil)\n output_timeinstant_column = params.fetch(:output_timeinstant_column, nil)\n output_measured_at_column = params.fetch(:output_measured_at_column, nil)\n output_annotations = params.fetch(:output_annotations, [])\n #output_annotations_columns = params.fetch(:output_annotations_columns, [])\n make_unique_process = params.fetch(:make_unique_process, true)\n\n output_value = output_value_column ? \"$(#{output_value_column})\":output_value\n output_type = output_type_column ? \"$(#{output_type_column})\":output_type\n output_type_label = output_type_label_column ? \"$(#{output_type_label_column})\":output_type_label\n output_value_datatype = output_value_datatype_column ? \"$(#{output_value_datatype_column})\":output_value_datatype\n \n\n root_url = get_root_url(make_unique_process)\n\n #return unless output_value\n \n @mappings << mapping_clause(\n \"#{process_with_output_tag}_process_has_output\",\n [\"#{source_tag}-source\"],\n root_url + \"##{process_with_output_tag}\",\n [[SIO[\"has-output\"][self.sio_verbose], \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\", \"iri\"]]\n )\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_Output_annotation\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[\"rdf:type\",SIO[\"information-content-entity\"][self.sio_verbose], \"iri\"]]\n ) \n \n if output_type\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_Output_type_annotation\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[\"rdf:type\",output_type, \"iri\"]]\n )\n end\n \n if output_type_label\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_Output_type_label_annotation\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[\"rdfs:label\",\"#{output_type_label} Output Type\", \"xsd:string\"]]\n )\n end\n \n if output_value\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_Output_value_annotation\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[SIO[\"has-value\"][self.sio_verbose],output_value, output_value_datatype]]\n )\n end\n \n if output_comments_column\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_Output_value_comments\",\n [\"#{source_tag}-source\"],\n 
\"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[\"rdfs:comment\",\"$(#{output_comments_column})\", \"xsd:string\"]]\n )\n end\n \n \n #if output_timeinstant_column\n # \n # @mappings << mapping_clause(\n # \"#{process_with_output_tag}_output_annotation_timeinstant\",\n # [\"#{source_tag}-source\"],\n # \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n # [\n # [SIO[\"has-value\"][self.sio_verbose], \"$(#{output_timeinstant_column})\", \"xsd:date\"],\n # [\"rdf:type\", SIO[\"time-instant\"][self.sio_verbose], \"iri\"], \n # ]\n # )\n #end\n\n if output_measured_at_column\n \n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_measured_at_timeinstant\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [\n [SIO[\"measured-at\"][self.sio_verbose], \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_measured_at\", \"iri\"], \n ]\n )\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_measured_at_timeinstant_value\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_measured_at\",\n [\n [SIO[\"has-value\"][self.sio_verbose], \"$(#{output_timeinstant_column})\", \"xsd:date\"],\n [\"rdf:type\", SIO[\"time-instant\"][self.sio_verbose], \"iri\"], \n ]\n )\n end\n\n\n if output_start_column # start and end will be attributes of the information object\n \n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_has_start_atribute\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [\n # need to add the start column into this URI so that, if it is empty, the attribute will not be created at all by SDMRDFizer\n [SIO[\"has-attribute\"][self.sio_verbose], \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_start_attribute_$(#{output_start_column})\", \"iri\"], \n ]\n )\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_has_start_attribute_value\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_start_attribute_$(#{output_start_column})\",\n [\n [SIO[\"has-value\"][self.sio_verbose], \"$(#{output_start_column})\", \"xsd:date\"],\n [\"rdf:type\", SIO[\"start-date\"][self.sio_verbose], \"iri\"], \n ]\n )\n end\n\n\n if output_end_column # start and end will be attributes of the information object\n \n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_has_end_atribute\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [\n # need to add the start column into this URI so that, if it is empty, the attribute will not be created at all by SDMRDFizer\n [SIO[\"has-attribute\"][self.sio_verbose], \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_end_attribute_$(#{output_end_column})\", \"iri\"], \n ]\n )\n @mappings << mapping_clause(\n \"#{process_with_output_tag}_output_has_end_attribute_value\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output_end_attribute_$(#{output_end_column})\",\n [\n 
[SIO[\"has-value\"][self.sio_verbose], \"$(#{output_end_column})\", \"xsd:date\"],\n [\"rdf:type\", SIO[\"end-date\"][self.sio_verbose], \"iri\"], \n ]\n )\n end\n\n output_annotations.each do |pred, value, dtype|\n datatype = \"xsd:string\"\n predicate = \"\"\n if dtype and dtype =~ /\\S+\\:\\S+/ # URI or qname\n datatype = dtype\n elsif dtype\n datatype = \"$(#{datatype})\" # make it the column reference if it exists, but isn't a uri\n end\n \n if pred and pred =~ /\\S+\\:\\S+/ # URI or qname\n predicate = pred\n else\n predicate = \"$(#{pred})\" # make it the column reference if it exists, but isn't a uri\n end\n \n next unless predicate and value\n uniqid = get_uniq_id\n \n @mappings << mapping_clause(\n \"#{uniqid}_output_custom_annotation\",\n [\"#{source_tag}-source\"],\n \"this:individual_$(#{@personid_column})_$(#{@uniqueid_column})##{process_with_output_tag}_Output\",\n [[\"$(#{predicate})\", \"$(#{value})\", datatype]]\n )\n \n end\n \n end",
"def action?\n @action_name.any? and @action_name.first != '*'\n end",
"def inherit_outputs?\n\t\t\t\t@outputs == :inherit\n\t\t\tend",
"def validate_output(value)\n @output.match(value.pione_model_type)\n end",
"def exist_output_data?(env, rule_condition, inputs, outputs, data_null_tuples)\n result = false\n rule_condition.outputs.each_with_index do |condition, i|\n _condition = condition.eval(env)\n # remove\n if _condition.operation == :remove\n case _condition.distribution\n when :all\n if not(outputs[i].nil? or outputs[i].select{|data| _condition.match(data.name)}.empty?)\n result = true\n end\n when :each\n if not(outputs[i].nil?) and _condition.match(outputs[i].first.name)\n result = true\n end\n end\n end\n break if result\n end\n return result\n end",
"def file_is_output? file\n belongs_to.respond_to?(:output) && belongs_to.output.to_s == file\n end",
"def can_run?(fn)\n (fn < MAX_EXT_FN && fn <= max_basic_param) || (fn >= MAX_EXT_FN && fn <= max_extended_param)\n end",
"def argument?; end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Similar to `case`, except it uses a `ResultPatternMatch` instead. | def result_case(target, destructure: false, &fn)
Qo::PatternMatchers::ResultPatternMatch
.new(destructure: destructure, &fn)
.call(target)
end | [
"def match_case\n return @match_case\n end",
"def case!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 23 )\n\n type = CASE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 144:8: 'case'\n match( \"case\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 23 )\n\n end",
"def on_case(value, consequent); end",
"def match_mixed(test)\n case test\n when 'hello'\n 'hello'\n in [a, b]\n \"a: #{a}, b: #{b}\"\n end\nend",
"def match_case=(value)\n @match_case = value\n end",
"def case_insensitive_match; end",
"def simple_case_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 165 )\n return_value = SimpleCaseStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n simple_case_statement_start_index = @input.index\n\n root_0 = nil\n string_literal953 = nil\n string_literal955 = nil\n plsql_expression952 = nil\n plsql_expression954 = nil\n seq_of_statements956 = nil\n\n tree_for_string_literal953 = nil\n tree_for_string_literal955 = nil\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 911:4: plsql_expression ( 'WHEN' plsql_expression 'THEN' seq_of_statements )+\n @state.following.push( TOKENS_FOLLOWING_plsql_expression_IN_simple_case_statement_5749 )\n plsql_expression952 = plsql_expression\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, plsql_expression952.tree )\n end\n # at file 911:21: ( 'WHEN' plsql_expression 'THEN' seq_of_statements )+\n match_count_247 = 0\n while true\n alt_247 = 2\n look_247_0 = @input.peek( 1 )\n\n if ( look_247_0 == T__63 )\n alt_247 = 1\n\n end\n case alt_247\n when 1\n # at line 911:23: 'WHEN' plsql_expression 'THEN' seq_of_statements\n string_literal953 = match( T__63, TOKENS_FOLLOWING_T__63_IN_simple_case_statement_5753 )\n if @state.backtracking == 0\n\n tree_for_string_literal953 = @adaptor.create_with_payload( string_literal953 )\n @adaptor.add_child( root_0, tree_for_string_literal953 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_plsql_expression_IN_simple_case_statement_5755 )\n plsql_expression954 = plsql_expression\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, plsql_expression954.tree )\n end\n string_literal955 = match( T__109, TOKENS_FOLLOWING_T__109_IN_simple_case_statement_5757 )\n if @state.backtracking == 0\n\n tree_for_string_literal955 = @adaptor.create_with_payload( string_literal955 )\n @adaptor.add_child( root_0, tree_for_string_literal955 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_seq_of_statements_IN_simple_case_statement_5759 )\n seq_of_statements956 = seq_of_statements\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, seq_of_statements956.tree )\n end\n\n else\n match_count_247 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(247)\n\n\n raise eee\n end\n match_count_247 += 1\n end\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 165 )\n memoize( __method__, simple_case_statement_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end",
"def parse_case\n pos = position\n keyword(:case) or return\n ws\n cond = parse_condition or expected(\"condition for 'case' block\")\n ws\n whens = kleene { parse_when }\n ws\n if keyword(:else)\n ws\n elses = parse_opt_defexp\n end\n ws\n keyword(:end) or expected(\"'end' for open 'case'\")\n return E[pos, :case, cond, whens, elses].compact\n end",
"def keyword\n 'case'\n end",
"def match(input); end",
"def visit_case(node); end",
"def _ops_case(expr, token, ix)\n # ignored expr for now\n # ignored token\n ix += 1\n end",
"def case_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 164 )\n return_value = CaseStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n case_statement_start_index = @input.index\n\n root_0 = nil\n string_literal945 = nil\n string_literal949 = nil\n string_literal950 = nil\n label_name944 = nil\n simple_case_statement946 = nil\n searched_case_statement947 = nil\n else_case_statement948 = nil\n label_name951 = nil\n\n tree_for_string_literal945 = nil\n tree_for_string_literal949 = nil\n tree_for_string_literal950 = nil\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 908:4: ( label_name )? 'CASE' ( simple_case_statement | searched_case_statement ) ( else_case_statement )? 'END' 'CASE' ( label_name )?\n # at line 908:4: ( label_name )?\n alt_243 = 2\n look_243_0 = @input.peek( 1 )\n\n if ( look_243_0.between?( ID, DOUBLEQUOTED_STRING ) )\n alt_243 = 1\n end\n case alt_243\n when 1\n # at line 908:6: label_name\n @state.following.push( TOKENS_FOLLOWING_label_name_IN_case_statement_5705 )\n label_name944 = label_name\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, label_name944.tree )\n end\n\n end\n string_literal945 = match( T__142, TOKENS_FOLLOWING_T__142_IN_case_statement_5710 )\n if @state.backtracking == 0\n\n tree_for_string_literal945 = @adaptor.create_with_payload( string_literal945 )\n root_0 = @adaptor.become_root( tree_for_string_literal945, root_0 )\n\n end\n # at line 908:28: ( simple_case_statement | searched_case_statement )\n alt_244 = 2\n look_244_0 = @input.peek( 1 )\n\n if ( look_244_0 == LPAREN || look_244_0.between?( PLUS, QUOTED_STRING ) || look_244_0.between?( ID, DOUBLEQUOTED_STRING ) || look_244_0.between?( T__57, T__58 ) || look_244_0 == T__100 || look_244_0.between?( T__110, T__111 ) || look_244_0.between?( T__116, T__117 ) || look_244_0 == T__140 || look_244_0 == T__142 )\n alt_244 = 1\n elsif ( look_244_0 == T__63 )\n alt_244 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 244, 0 )\n end\n case alt_244\n when 1\n # at line 908:30: simple_case_statement\n @state.following.push( TOKENS_FOLLOWING_simple_case_statement_IN_case_statement_5715 )\n simple_case_statement946 = simple_case_statement\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, simple_case_statement946.tree )\n end\n\n when 2\n # at line 908:54: searched_case_statement\n @state.following.push( TOKENS_FOLLOWING_searched_case_statement_IN_case_statement_5719 )\n searched_case_statement947 = searched_case_statement\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, searched_case_statement947.tree )\n end\n\n end\n # at line 908:80: ( else_case_statement )?\n alt_245 = 2\n look_245_0 = @input.peek( 1 )\n\n if ( look_245_0 == T__115 )\n alt_245 = 1\n end\n case alt_245\n when 1\n # at line 908:82: else_case_statement\n @state.following.push( TOKENS_FOLLOWING_else_case_statement_IN_case_statement_5725 )\n else_case_statement948 = else_case_statement\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, else_case_statement948.tree )\n end\n\n end\n string_literal949 = match( T__54, 
TOKENS_FOLLOWING_T__54_IN_case_statement_5730 )\n if @state.backtracking == 0\n\n tree_for_string_literal949 = @adaptor.create_with_payload( string_literal949 )\n @adaptor.add_child( root_0, tree_for_string_literal949 )\n\n end\n string_literal950 = match( T__142, TOKENS_FOLLOWING_T__142_IN_case_statement_5732 )\n if @state.backtracking == 0\n\n tree_for_string_literal950 = @adaptor.create_with_payload( string_literal950 )\n @adaptor.add_child( root_0, tree_for_string_literal950 )\n\n end\n # at line 908:118: ( label_name )?\n alt_246 = 2\n look_246_0 = @input.peek( 1 )\n\n if ( look_246_0.between?( ID, DOUBLEQUOTED_STRING ) )\n alt_246 = 1\n end\n case alt_246\n when 1\n # at line 908:120: label_name\n @state.following.push( TOKENS_FOLLOWING_label_name_IN_case_statement_5736 )\n label_name951 = label_name\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, label_name951.tree )\n end\n\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 164 )\n memoize( __method__, case_statement_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end",
"def regex_verify_case_statement\n /(?:.*;)?\\s*case\\s*(?:#+.*\\s*)*(?:\\s:*\\w+(?:\\(:?.*\\))?\\s|'(?:[^']?(?:\\\\')?)*'|\"(?:[^\"]?(?:\\\\\")?)*\")\\s*(?:#+.*\\s*)*\\s*when\\s*(?:#+.*\\s*)*(?:\\s:*\\w+(?:\\(:?.*\\))?\\s|'(?:[^']?(?:\\\\')?)*'|\"(?:[^\"]?(?:\\\\\")?)*\")/\nend",
"def match(pattern)\n matcher.match(pattern)\n end",
"def case_exact\n return @case_exact\n end",
"def switch_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n return_value = SwitchStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n\n _last = _first_0 = nil\n string_literal60 = nil\n expression61 = nil\n case_clause62 = nil\n default_clause63 = nil\n\n tree_for_string_literal60 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 95:5: ^( 'switch' expression ( case_clause )* ( default_clause )? )\n _save_last_1 = _last = @input.look\n _first_1 = nil\n root_1 = @adaptor.create_flat_list\n _last = @input.look\n string_literal60 = match( SWITCH, TOKENS_FOLLOWING_SWITCH_IN_switch_statement_477 )\n\n tree_for_string_literal60 = @adaptor.copy_node( string_literal60 )\n\n root_1 = @adaptor.become_root( tree_for_string_literal60, root_1 )\n\n\n\n match( DOWN, nil )\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_expression_IN_switch_statement_479 )\n expression61 = expression\n @state.following.pop\n\n @adaptor.add_child( root_1, expression61.tree )\n # at line 95:28: ( case_clause )*\n while true # decision 16\n alt_16 = 2\n look_16_0 = @input.peek( 1 )\n\n if ( look_16_0 == CASE )\n alt_16 = 1\n\n end\n case alt_16\n when 1\n # at line 95:28: case_clause\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_case_clause_IN_switch_statement_481 )\n case_clause62 = case_clause\n @state.following.pop\n\n @adaptor.add_child( root_1, case_clause62.tree )\n\n\n else\n break # out of loop for decision 16\n end\n end # loop for decision 16\n # at line 95:41: ( default_clause )?\n alt_17 = 2\n look_17_0 = @input.peek( 1 )\n\n if ( look_17_0 == DEFAULT )\n alt_17 = 1\n end\n case alt_17\n when 1\n # at line 95:41: default_clause\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_default_clause_IN_switch_statement_484 )\n default_clause63 = default_clause\n @state.following.pop\n\n @adaptor.add_child( root_1, default_clause63.tree )\n\n\n end\n\n match( UP, nil )\n @adaptor.add_child( root_0, root_1 )\n _last = _save_last_1\n\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end\n \n return return_value\n end",
"def searched_case_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 166 )\n return_value = SearchedCaseStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n searched_case_statement_start_index = @input.index\n\n root_0 = nil\n string_literal957 = nil\n string_literal959 = nil\n plsql_expression958 = nil\n seq_of_statements960 = nil\n\n tree_for_string_literal957 = nil\n tree_for_string_literal959 = nil\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 914:4: ( 'WHEN' plsql_expression 'THEN' seq_of_statements )+\n # at file 914:4: ( 'WHEN' plsql_expression 'THEN' seq_of_statements )+\n match_count_248 = 0\n while true\n alt_248 = 2\n look_248_0 = @input.peek( 1 )\n\n if ( look_248_0 == T__63 )\n alt_248 = 1\n\n end\n case alt_248\n when 1\n # at line 914:6: 'WHEN' plsql_expression 'THEN' seq_of_statements\n string_literal957 = match( T__63, TOKENS_FOLLOWING_T__63_IN_searched_case_statement_5774 )\n if @state.backtracking == 0\n\n tree_for_string_literal957 = @adaptor.create_with_payload( string_literal957 )\n @adaptor.add_child( root_0, tree_for_string_literal957 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_plsql_expression_IN_searched_case_statement_5776 )\n plsql_expression958 = plsql_expression\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, plsql_expression958.tree )\n end\n string_literal959 = match( T__109, TOKENS_FOLLOWING_T__109_IN_searched_case_statement_5778 )\n if @state.backtracking == 0\n\n tree_for_string_literal959 = @adaptor.create_with_payload( string_literal959 )\n @adaptor.add_child( root_0, tree_for_string_literal959 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_seq_of_statements_IN_searched_case_statement_5780 )\n seq_of_statements960 = seq_of_statements\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, seq_of_statements960.tree )\n end\n\n else\n match_count_248 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(248)\n\n\n raise eee\n end\n match_count_248 += 1\n end\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 166 )\n memoize( __method__, searched_case_statement_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end",
"def casecmp(p0) end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates a new type of pattern matcher from a set of branches | def create_pattern_match(branches:)
Qo::PatternMatchers::PatternMatch.create(branches: branches)
end | [
"def build_matcher(options, whitelist_option, blacklist_option); end",
"def convert_to_branchsets( *patterns )\n\t\tself.log.debug \"Turning %d patterns into branchsets.\" % [ patterns.length ]\n\t\treturn patterns.collect do |pat|\n\t\t\tkey, val = pat.split( /\\s*=\\s*/, 2 )\n\t\t\tself.log.debug \" making a filter out of %p => %p\" % [ key, val ]\n\t\t\t@currbranch.filter( key => val )\n\t\tend\n\tend",
"def build_case_pattern(node, type)\n case node.term\n when :__tuple__\n # Only match against n-tuple types (this type if it's an n-tuple, or any child n-tuple types if this type is a union or nominal)\n tuple_types = type.deunion.select { |t| t.is_a?(Types::Tuple) && t.types.count == node.children.count }\n raise NoMatchError.new(\"Cannot match this #{node.children.count}-tuple pattern with the type '#{type}'\", node) if tuple_types.empty?\n\n # Recursively check the child types of each candidate type with the respective child patterns and determine the variables bound in the children\n # Throw an error if any variable is bound more than once in this pattern\n possible_patterns = tuple_types.map do |tuple_type|\n begin\n child_patterns = node.children.zip(tuple_type.types).map { |child, child_type| build_case_pattern(child, child_type) }\n\n Pattern.new(:tuple, child_patterns, node, Types::Tuple.new(child_patterns.map(&:type)), merge_bindings(child_patterns))\n rescue NoMatchError\n nil\n end\n end.compact\n\n # If no types in the union matched the subpatterns, throw an error\n raise NoMatchError.new(\"Cannot match this pattern with the type '#{type}'\", node) if possible_patterns.empty?\n\n possible_patterns.reduce(&:merge)\n when :__record__\n # Only match against record types (or records types in a union) that contain all of this pattern's fields\n required_fields = node.children.map(&:term)\n record_types = type.deunion.select { |t| t.is_a?(Types::Record) && (required_fields - t.types_hash.keys).empty? }\n raise NoMatchError.new(\"Cannot match this record pattern with the type '#{type}'\", node) if record_types.empty?\n\n common_fields = record_types.map(&:types_hash).map(&:keys).reduce(&:&)\n\n possible_patterns = record_types.map do |record_type|\n begin\n ordered_fields = record_type.types_hash.sort\n built_type = {}\n\n child_patterns =\n ordered_fields.map do |field, child_type|\n # A record pattern only includes args that are common across all record types in the union.\n # A wildcard is used if a field is missing from the pattern node.\n if (child = node.children.find { |c| c.term == field })\n child_pattern = build_case_pattern(child.children.first, child_type)\n built_type[field] = child_pattern.type\n\n child_pattern\n elsif common_fields.include?(field)\n built_type[field] = child_type\n Pattern.new(:wildcard, [], nil, child_type, {})\n else\n built_type[field] = child_type\n\n nil\n end\n end.compact\n\n Pattern.new(:record, child_patterns, node, Types::Record.new(built_type), merge_bindings(child_patterns))\n rescue NoMatchError\n nil\n end\n end.compact\n\n # If no types in the union matched the subpatterns, throw an error\n raise NoMatchError.new(\"Cannot match this pattern with the type '#{type}'\", node) if possible_patterns.empty?\n\n possible_patterns.reduce(&:merge)\n when :__list__\n # TODO: handle non-empty lists\n raise InvalidPatternError.new(\"A case pattern cannot include a non-empty list\") unless node.children.empty?\n\n list_types = type.deunion.select { |t| t.is_a?(Types::Generic) && t.type_atom == :List }\n raise NoMatchError.new(\"Cannot match this list pattern with the type '#{type}'\", node) if list_types.empty?\n\n Pattern.new(:empty, [], node, Types.union(list_types), {})\n when :'::'\n list_types = type.deunion.select { |t| is_a_list?(t) }\n raise NoMatchError.new(\"Cannot match this :: pattern with the type '#{type}'\", node) if list_types.empty?\n\n possible_patterns = list_types.map do |list_type|\n begin\n head_node, tail_node = node.children\n 
head_pattern = build_case_pattern(head_node, list_type.type_parameters.first)\n tail_pattern = build_case_pattern(tail_node, list_type)\n pattern_type = Types::Generic.new(:List, [Types.union([head_pattern.type, tail_pattern.type.type_parameters.first])])\n\n Pattern.new(:cons, [head_pattern, tail_pattern], node, pattern_type, merge_bindings([head_pattern, tail_pattern]))\n rescue NoMatchError\n nil\n end\n end.compact\n\n raise NoMatchError.new(\"Cannot match this pattern with the type '#{type}'\", node) if possible_patterns.empty?\n\n possible_patterns.reduce(&:merge)\n else\n if node.term.is_a?(Symbol)\n if node.children.nil? && !('A'..'Z').include?(node.term[0]) # Pattern is a variable\n bindings =\n if node.term == :_ # Wildcard pattern does no binding\n {}\n else\n { node.term => type }\n end\n\n Pattern.new(:wildcard, [], node, type, bindings)\n elsif ('A'..'Z').include?(node.term[0]) # pattern is a type constructor\n name = node.term\n module_path = node.meta && node.meta[:module_path]\n\n nominals = type.deunion.select do |t|\n t.is_a?(Types::Nominal) &&\n t.type_atom == name && (\n module_path.nil? ||\n module_path.count <= t.module_path.count &&\n t.module_path[-module_path.count..-1] == module_path\n )\n end\n\n if nominals.empty?\n constructor_str = module_path && !module_path.empty? ? \"#{module_path.join('.')}.\" : \"\"\n constructor_str += name.to_s\n raise NoMatchError.new(\"Cannot match the type constructor '#{constructor_str}' with the type '#{type}'\", node)\n end\n\n # This constructor is ambiguous if it can match module paths for multiple nominal types in the union\n if nominals.map(&:module_path).uniq.count > 1\n constructor_str = module_path && !module_path.empty? ? \"#{module_path.join('.')}.\" : \"\"\n constructor_str += name\n raise InvalidPatternError.new(\"Type constructor '#{constructor_str}' is ambiguous in the type '#{type}'\", node)\n end\n\n raise InvalidPatternError.new(\"Type constructors accept only one argument\", node) if node.children && node.children.count > 1 && node.meta && node.meta[:no_paren]\n\n possible_patterns = nominals.map do |nominal_type|\n if node.children.nil? || node.children.empty?\n if nominal_type.underlying_type.nil?\n Pattern.new(:constructor, [], node, nominal_type, {})\n else\n Pattern.new(:constructor, [Pattern.new(:wildcard, [], nil, nominal_type.underlying_type, {})], node, nominal_type, {})\n end\n else\n begin\n child_pattern =\n if node.children.count > 1\n tuple_node = AST.new(:__tuple__, node.children, token: node.token)\n build_case_pattern(tuple_node, nominal_type.underlying_type)\n else\n build_case_pattern(node.children.first, nominal_type.underlying_type)\n end\n\n pattern_type = Types::Nominal.new(nominal_type.type_atom, nominal_type.type_parameters, child_pattern.type, module_path: nominal_type.module_path)\n\n Pattern.new(:constructor, [child_pattern], node, pattern_type, child_pattern.bindings)\n rescue NoMatchError\n nil\n end\n end\n end.compact\n\n raise NoMatchError.new(\"Cannot match this pattern with the type '#{type}'\", node) if possible_patterns.empty?\n\n possible_patterns.reduce(&:merge)\n else\n raise InvalidPatternError.new(\"Invalid pattern\", node)\n end\n elsif !node.type.nil? # pattern is a literal\n raise NoMatchError.new(\"Cannot match a literal '#{node.type}' with the type '#{type}'\", node) if !type.subtype?(node.type)\n\n Pattern.new(:literal, [node.term], node, node.type, {})\n else\n raise InvalidPatternError.new(\"Invalid pattern\", node)\n end\n end\n end",
"def initialize(matcher)\n @string = matcher\n\n crumbs = matcher.split(/\\s+/)\n validate(crumbs)\n\n @crumbs = crumbs.map { |crumb| Crumbs.new(crumb) }\n end",
"def extract_branches blocks\n\n branches = []\n blocks.each do |block|\n block.each do |stmt|\n case stmt\n when GotoStatement\n next if stmt.identifiers.length < 2\n unless stmt.identifiers.length == 2\n fail \"Unexpected goto statement: #{stmt}\"\n end\n\n if annotation = stmt.previous_sibling\n fail \"Expected :branchcond annotation\" unless\n annotation.has_attribute?(:branchcond)\n end\n\n branches.push(stmt)\n end\n end\n end\n return branches\n end",
"def build_matcher(options, whitelist_option, blacklist_option)\n options.assert_exclusive_keys(whitelist_option, blacklist_option)\n\n if options.include?(whitelist_option)\n value = options[whitelist_option]\n value.is_a?(Matcher) ? value : WhitelistMatcher.new(options[whitelist_option])\n elsif options.include?(blacklist_option)\n value = options[blacklist_option]\n raise ArgumentError, \":#{blacklist_option} option cannot use matchers; use :#{whitelist_option} instead\" if value.is_a?(Matcher)\n\n BlacklistMatcher.new(value)\n else\n AllMatcher.instance\n end\n end",
"def override_patterns_to_rules(branch_patterns)\n rules = {}\n branch_patterns.each do |patterns|\n patterns.each_value do |override|\n # deep_merge() and deep_merge!() are different!\n # deep_merge! will merge and overwrite any unmergeables in destination\n # hash\n # deep_merge will merge and skip any unmergeables in destination hash\n # NOTE: it is not clear to me why, but apparently we have unmergables\n # probably however strings are unmergable and as such would either\n # be replaced or not (this is the most mind numbingly dumb behavior\n # attached to foo! that I ever saw, in particular considering the\n # STL uses ! to mean in-place. So deep_merge! is behaviorwise not\n # equal to merge! but deeper...)\n rules = rules.deep_merge(override)\n end\n end\n rules\n end",
"def build_regex\n regex = \"((\"\n days = [:monday, :tuesday, :wednesday, :thursday, :friday, :saturday, :sunday, :weekday, :weekend, :day]\n days.each do |key|\n regex += build_match_pattern(names[key]) + \"|\"\n end\n and_ptn = build_match_pattern(names[:and])\n regex.chop + \")(\\\\s*(,|#{and_ptn})?\\\\s*)?)\"\n end",
"def parse(t, pattern_, labels)\n tokenizer = TreePatternLexer.new(pattern_)\n parser = TreePatternParser.new(tokenizer, self, TreePatternTreeAdaptor.new)\n tpattern = parser.pattern\n # System.out.println(\"t=\"+((Tree)t).toStringTree());\n # System.out.println(\"scant=\"+tpattern.toStringTree());\n matched = __parse(t, tpattern, labels)\n return matched\n end",
"def expand_when_branches(when_branches); end",
"def initialize(*branches)\n if branches.none?\n raise ArgumentError, 'Split requires at least one proc'\n end\n\n super()\n\n # Each DSL is evaluated once, and +handle_result+ changes the source\n # for each value being processed. This is more efficient than creating\n # and evaluating a new DSL for every input.\n @branches = branches.map do |branch|\n dsl = Pipeline.dsl([])\n pump = dsl.source\n\n Branch.new(pump, branch.call(dsl))\n end\n\n # JRuby doesn't support calling +next+ on enum.cycle.with_index.\n @branches_cycle = @branches.zip((0...@branches.length).to_a).cycle\n end",
"def nonstrict_match(tokens)\n matchset = MatchSet.new()\n index = 0;\n tokens.each do |token|\n break unless pattern[index]\n tagger_name = pattern[index].to_s\n klass = constantize(tagger_name)\n match = token.has_tag?(klass) \n if match\n matchset << token.get_tag(klass);\n index += 1; \n next; \n else\n next\n end\n end\n\n return false if matchset.size != pattern.size\n return matchset\n end",
"def apply_conditionals(match)\n cond = replace_conditionals(match)\n re = Oniguruma::ORegexp.new(regexp, options)\n ConditionParser.new(cond).evaluate\n end",
"def base_matcher; end",
"def create_branches_list(branch_type)\n branches = []\n self.expand_branches!.each_by(2) { |pair| branches << branch_type.new(pair[0], pair[1]) }\n branches\n end",
"def matchtask(regex, split = nil)\n tasks = []\n regex.each do |r|\n @message.scan(eval(r['pattern'])).each do |arr|\n if split.nil?\n task = PACTask.new(arr[0])\n task.add_commit(self)\n self.referenced = true\n task.label = r['label']\n tasks << task\n else\n arr[0].split(split).each do |s|\n task = PACTask.new(s)\n task.add_commit(self)\n task.label = r['label']\n self.referenced = true\n tasks << task\n end\n end\n end\n end\n\n tasks\n end",
"def match_maker(*args)\n\tanswer = []\n\targs.each_with_index do |item,i|\n\t\tif i.odd? and args[0]\n\t\t\t((args[i] and args[i+1]) or (not args[i] and not args[i+1])) ? answer.push(false) : answer.push(true)\n\t\telsif i.odd? and not args[0]\n\t\t\t((args[i] and args[i+1]) or (not args[i] and not args[i+1])) ? answer.push(true) : answer.push(false)\n\t\tend\n\tend\n\tanswer\nend",
"def match_maker(*args)\n arr = []\n args.each do |i|\n arr << i\n end\n\n arr = arr.map { |x| x == nil || x == false ? false : true }\n\n result = []\n\n c = arr.size\n\n if arr[0] == true\n arr[1..c].each_slice(2) do |a, b|\n if a == b\n result << false\n else\n result << true\n end\n end\n\n else\n arr[1..c].each_slice(2) do |a, b|\n if a == b\n result << true\n else\n result << false\n end\n end\n end\n return result\nend",
"def all_of(*matchers); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /refunds GET /refunds.json | def index
respond_to do |format|
format.html # index.html.erb
format.json { render json: @refunds }
end
end | [
"def refunds(options = nil)\n request = Request.new(@client)\n path = \"/transactions/\" + CGI.escape(@id) + \"/refunds\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n a = Array.new\n body = response.body\n for v in body['refunds']\n tmp = Refund(@client)\n tmp.fill_with_data(v)\n a.push(tmp)\n end\n\n return_values.push(a)\n \n\n \n return_values[0]\n end",
"def refunds\n @refunds ||= RefundsApi.new config\n end",
"def get_refunds(tid)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds\"\n\n # make the request.\n json_response = Rede::CommonRequest::get(url, @security)\n\n # mapping the result.\n response = Rede::RefundListResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundListResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def refunds\n RefundRepository.new(api).all(token)\n end",
"def refunds\n Refund.all(token)\n end",
"def retrieve(id)\n @client.make_request(:get, \"refunds/#{id}\", MODEL_CLASS)\n end",
"def refunds\n @refunds ||= Services::RefundsService.new(@api_service)\n end",
"def get_all_refunds_for_invoice(id:)\n urlpath = \"invoices/#{id}/refunds\"\n invoice = get_invoice(id: id)\n refunds = get(path: urlpath, token: invoice[\"token\"])\n refunds[\"data\"]\n end",
"def find(transaction_id, refund_id, options = nil)\n request = Request.new(@client)\n path = \"/transactions/\" + CGI.escape(transaction_id) + \"/refunds/\" + CGI.escape(refund_id) + \"\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n body = response.body\n body = body[\"refund\"]\n \n \n obj = Refund.new(@client)\n return_values.push(obj.fill_with_data(body))\n \n\n \n return_values[0]\n end",
"def list(charge_token)\n api_response(api_get(\"#{PATH}/#{charge_token}/refunds\"))\n end",
"def refunds request_options = {}\n request_url = \"/1/charges/#{token}/refunds\"\n response = Charge.get(URI.parse(PinPayment.api_url).tap{|uri| uri.path = request_url}, request_options)\n response.map{|x| Refund.new(x.delete('token'), x) }\n end",
"def get_refund(tid, refund_id)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds/#{refund_id}\"\n # make the request.\n json_response = Rede::CommonRequest::get(url, @security)\n\n # mapping the result.\n response = Rede::RefundResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def refund(tid, refund)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds\"\n\n # make the request.\n json_response = Rede::CommonRequest::post(url, refund, @security)\n\n # mapping the result.\n response = Rede::RefundResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def refund_url\n resource_url + '/refund'\n end",
"def show\n @refund = Refund.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @refund }\n end\n end",
"def index\n @refund_types = RefundType.all\n end",
"def order_partial_refund(order_id, refunds)\n execute 'api/refund',\n build_json({\n id: order_id,\n refunds: refunds\n })\n end",
"def get_refund_details(options = {})\n requires!(options, :amazon_refund_id)\n commit('GetRefundDetails', options)\n end",
"def register\n response_request = Request.post(\"transactions/refunds\", api_version, params)\n Response.new(response_request, self).serialize\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /refunds/1 GET /refunds/1.json | def show
@refund = Refund.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @refund }
end
end | [
"def retrieve(id)\n @client.make_request(:get, \"refunds/#{id}\", MODEL_CLASS)\n end",
"def refunds(options = nil)\n request = Request.new(@client)\n path = \"/transactions/\" + CGI.escape(@id) + \"/refunds\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n a = Array.new\n body = response.body\n for v in body['refunds']\n tmp = Refund(@client)\n tmp.fill_with_data(v)\n a.push(tmp)\n end\n\n return_values.push(a)\n \n\n \n return_values[0]\n end",
"def find(transaction_id, refund_id, options = nil)\n request = Request.new(@client)\n path = \"/transactions/\" + CGI.escape(transaction_id) + \"/refunds/\" + CGI.escape(refund_id) + \"\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n body = response.body\n body = body[\"refund\"]\n \n \n obj = Refund.new(@client)\n return_values.push(obj.fill_with_data(body))\n \n\n \n return_values[0]\n end",
"def refunds\n @refunds ||= RefundsApi.new config\n end",
"def get_refunds(tid)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds\"\n\n # make the request.\n json_response = Rede::CommonRequest::get(url, @security)\n\n # mapping the result.\n response = Rede::RefundListResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundListResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def get_refund(tid, refund_id)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds/#{refund_id}\"\n # make the request.\n json_response = Rede::CommonRequest::get(url, @security)\n\n # mapping the result.\n response = Rede::RefundResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def refunds\n RefundRepository.new(api).all(token)\n end",
"def refund_url\n resource_url + '/refund'\n end",
"def refunds\n Refund.all(token)\n end",
"def index\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @refunds }\n end\n end",
"def refunds\n @refunds ||= Services::RefundsService.new(@api_service)\n end",
"def refund(tid, refund)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds\"\n\n # make the request.\n json_response = Rede::CommonRequest::post(url, refund, @security)\n\n # mapping the result.\n response = Rede::RefundResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def get_all_refunds_for_invoice(id:)\n urlpath = \"invoices/#{id}/refunds\"\n invoice = get_invoice(id: id)\n refunds = get(path: urlpath, token: invoice[\"token\"])\n refunds[\"data\"]\n end",
"def show\n @refund_request = RefundRequest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @refund_request }\n end\n end",
"def list(charge_token)\n api_response(api_get(\"#{PATH}/#{charge_token}/refunds\"))\n end",
"def new\n @refund = Refund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @refund }\n end\n end",
"def show\n @refund = Refund.find(params[:id])\n \n respond_to do |format|\n format.html { render :action => \"show\"}\n format.xml { render :xml => @refund }\n end\n end",
"def refunds request_options = {}\n request_url = \"/1/charges/#{token}/refunds\"\n response = Charge.get(URI.parse(PinPayment.api_url).tap{|uri| uri.path = request_url}, request_options)\n response.map{|x| Refund.new(x.delete('token'), x) }\n end",
"def refund(refund); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /refunds/new GET /refunds/new.json | def new
@refund = Refund.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @refund }
end
end | [
"def new\n @refund = Refund.new\n @refund.sale_item = SaleItem.find(params[:sale_item])\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @refund }\n end\n end",
"def create\n @refund = Refund.new(params[:refund])\n\n respond_to do |format|\n if @refund.save\n format.html { redirect_to @refund, notice: 'Refund was successfully created.' }\n format.json { render json: @refund, status: :created, location: @refund }\n else\n format.html { render action: \"new\" }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new_refund(params={})\n Peddler::Refunds::Item.new(params)\n end",
"def new\n @mftrefund = Mftrefund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @mftrefund }\n end\n end",
"def create\n @refund = Refund.new(refund_params)\n\n respond_to do |format|\n if @refund.save\n format.html { redirect_to @refund, notice: 'Refund was successfully created.' }\n format.json { render :show, status: :created, location: @refund }\n else\n format.html { render :new }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @refund = @team.refunds.build\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @refund }\n end\n end",
"def new_refund()\n Refund.new(self)\n end",
"def new\n @carbontaxrefund = Carbontaxrefund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @carbontaxrefund }\n end\n end",
"def new\n @pre_refund = PreRefund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pre_refund }\n end\n end",
"def create\n @refund = @team.refunds.new(params[:refund])\n\n respond_to do |format|\n if @refund.save\n format.html { redirect_to(team_refunds_path, :notice => 'Refund was successfully created.') }\n format.xml { render :xml => @refund, :status => :created, :location => @refund }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @refund.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @refundmaster = Refundmaster.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @refundmaster }\n end\n end",
"def create\n @refund_type = RefundType.new(refund_type_params)\n\n respond_to do |format|\n if @refund_type.save\n format.html { redirect_to @refund_type, notice: 'Refund type was successfully created.' }\n format.json { render action: 'show', status: :created, location: @refund_type }\n else\n format.html { render action: 'new' }\n format.json { render json: @refund_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @title = t('view.funds.new_title')\n @fund = Fund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fund }\n end\n end",
"def create_refund\n\t\trespond_with @stripe_charge do |format|\n\t\t\tif @stripe_charge.create_refund(stripe_charge_params)\n\t\t\t\tset_flash_message :notice, :created_refund\n\t\t\telse\n\t\t\t\tformat.html { render :show }\n\t\t\tend\n\t\tend\n\tend",
"def new\n @fund = Fund.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fund }\n end\n end",
"def updateNewRefund\n request_refund_from_worldpay\n render nothing: true\n end",
"def new\n @breadcrumb = 'create'\n @insurance = Insurance.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @insurance }\n end\n end",
"def new\n @funding = Funding.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @funding }\n end\n end",
"def new\n @finance = Finance.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finance }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /refunds/1 PUT /refunds/1.json | def update
@refund = Refund.find(params[:id])
respond_to do |format|
if @refund.update_attributes(params[:refund])
format.html { redirect_to @refund, notice: 'Refund was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @refund.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @refund = Refund.find(params[:id])\n\n respond_to do |format|\n if @refund.update_attributes(params[:refund])\n format.html { redirect_to @refund, notice: 'Refund was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def updateNewRefund\n request_refund_from_worldpay\n render nothing: true\n end",
"def update\n respond_to do |format|\n if @refund.update(refund_params)\n format.html { redirect_to @refund, notice: 'Refund was successfully updated.' }\n format.json { render :show, status: :ok, location: @refund }\n else\n format.html { render :edit }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_or_update_refund(options)\n make_json_api_request :post, \"v2/#{account_id}/refunds\", private_generate_resource(\"refunds\", options)\n end",
"def update\n @refund = @team.refunds.find(params[:id])\n\n respond_to do |format|\n if @refund.update_attributes(params[:refund])\n format.html { redirect_to(team_refund_path, :notice => 'Refund was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @refund.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def refund(refund); end",
"def update\n @refund_request = RefundRequest.find(params[:id])\n\n respond_to do |format|\n if @refund_request.update_attributes(params[:refund_request])\n format.html { redirect_to @refund_request, notice: 'Refund request was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @refund_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def refund\n @tour_order.refund\n respond_to do |format|\n if @tour_order.save\n format.html { redirect_to @tour_order, notice: 'Tour order was successfully refunded.' }\n format.json { render :show, status: :ok, location: @tour_order }\n else\n format.html { redirect_to @tour_order }\n format.json { render json: @tour_order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @refund = Refund.find(params[:id])\n @refund.destroy\n\n respond_to do |format|\n format.html { redirect_to refunds_url }\n format.json { head :ok }\n end\n end",
"def create\n @refund = Refund.new(params[:refund])\n\n respond_to do |format|\n if @refund.save\n format.html { redirect_to @refund, notice: 'Refund was successfully created.' }\n format.json { render json: @refund, status: :created, location: @refund }\n else\n format.html { render action: \"new\" }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @refund = Refund.new(refund_params)\n\n respond_to do |format|\n if @refund.save\n format.html { redirect_to @refund, notice: 'Refund was successfully created.' }\n format.json { render :show, status: :created, location: @refund }\n else\n format.html { render :new }\n format.json { render json: @refund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @refund = Refund.find(params[:id])\n @refund.destroy\n\n respond_to do |format|\n format.html { redirect_to refunds_url }\n format.json { head :no_content }\n end\n end",
"def update\n respond_to do |format|\n if @refund_type.update(refund_type_params)\n format.html { redirect_to @refund_type, notice: 'Refund type was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @refund_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @pre_refund = PreRefund.find(params[:id])\n\n respond_to do |format|\n if @pre_refund.update_attributes(params[:pre_refund])\n format.html { redirect_to(@pre_refund, :notice => 'PreRefund was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @pre_refund.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def refund(amount: nil)\n api_request(\"/charges/#{id}/refund\", :post, amount: amount)\n end",
"def refund(tid, refund)\n begin\n #creating url\n url = \"#{@security.environment}/transactions/#{tid}/refunds\"\n\n # make the request.\n json_response = Rede::CommonRequest::post(url, refund, @security)\n\n # mapping the result.\n response = Rede::RefundResponse.map(json_response)\n\n rescue Exception => e\n response = Rede::RefundResponse.new(:return_code => Rede::ReturnCode::UNSUCCESSFUL, :return_message => e.message)\n end\n\n return response\n end",
"def refunds(options = nil)\n request = Request.new(@client)\n path = \"/transactions/\" + CGI.escape(@id) + \"/refunds\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n a = Array.new\n body = response.body\n for v in body['refunds']\n tmp = Refund(@client)\n tmp.fill_with_data(v)\n a.push(tmp)\n end\n\n return_values.push(a)\n \n\n \n return_values[0]\n end",
"def request_refund(options = {})\n response = JSON.parse(@client.patch(\"items/#{send(:id)}/request_refund\", options).body)\n @attributes = response['items']\n true\n end",
"def order_partial_refund(order_id, refunds)\n execute 'api/refund',\n build_json({\n id: order_id,\n refunds: refunds\n })\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /refunds/1 DELETE /refunds/1.json | def destroy
@refund = Refund.find(params[:id])
@refund.destroy
respond_to do |format|
format.html { redirect_to refunds_url }
format.json { head :no_content }
end
end | [
"def destroy\n @refund = Refund.find(params[:id])\n @refund.destroy\n\n respond_to do |format|\n format.html { redirect_to refunds_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @refund_request = RefundRequest.find(params[:id])\n @refund_request.destroy\n\n respond_to do |format|\n format.html { redirect_to refund_requests_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @refund = Refund.find(params[:id])\n @refund.destroy\n\n respond_to do |format|\n format.html { redirect_to(refunds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @refund_type.destroy\n respond_to do |format|\n format.html { redirect_to refund_types_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @carbontaxrefund = Carbontaxrefund.find(params[:id])\n @carbontaxrefund.destroy\n\n respond_to do |format|\n format.html { redirect_to carbontaxrefunds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @mftrefund = Mftrefund.find(params[:id])\n @mftrefund.destroy\n\n respond_to do |format|\n format.html { redirect_to mftrefunds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @pre_refund = PreRefund.find(params[:id])\n @pre_refund.destroy\n\n respond_to do |format|\n format.html { redirect_to(pre_refunds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @refundmaster = Refundmaster.find(params[:id])\n @refundmaster.destroy\n\n respond_to do |format|\n format.html { redirect_to(refundmasters_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @tax_paid_and_refund = TaxPaidAndRefund.find(params[:id])\n @tax_paid_and_refund.destroy\n\n respond_to do |format|\n format.html { redirect_to(tax_paid_and_refunds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @api_v1_reward.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_rewards_url, notice: 'Reward was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fund = Fund.find(params[:id])\n @fund.destroy\n\n respond_to do |format|\n format.html { redirect_to funds_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @fund = Fund.find(params[:id])\n @fund.destroy\n\n respond_to do |format|\n format.html { redirect_to funds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @api_stadium.destroy\n respond_to do |format|\n format.html { redirect_to api_stadia_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @rebalance_ref = RebalanceRef.find(params[:id])\n @rebalance_ref.destroy\n\n respond_to do |format|\n format.html { redirect_to :back }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resturant = Resturant.find(params[:id])\n @resturant.destroy\n self.headers.merge!('Content-Type' => 'application/json' )\n self.response_body = {status: \"Deleted\"}\n end",
"def destroy\n @api_v1_budget.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_budgets_url, notice: 'Budget was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @euro_fund.destroy\n\n respond_to do |format|\n format.html { redirect_to euro_funds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @recharge.destroy\n respond_to do |format|\n format.html { redirect_to recharges_url }\n format.json { head :no_content }\n end\n end",
"def refund(refund); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This will only work for switching languages | def switch_language
language = locale('Español', 'English')
go_to language
end | [
"def language_switching\n translation_options\n\t\tif params[:locale_to_conf] == 'translation_addition'\n\t\t\trender :partial => 'translation_addition', :locals => { :translation_sections => @translation_sections }\n\t\telse\n\t\t\tyaml = YAML.load_file(\"#{RAILS_ROOT}/config/locales/#{params[:locale_to_conf]}.yml\")\n\t\t\t@res = yaml[params[:locale_to_conf].to_s]\n\t\t\t@language = params[:locale_to_conf].to_s\n\t\t\trender :partial => 'translations_tab', :locals => {:res => @res, :translation_sections => @translation_sections }\n\t\tend\n end",
"def apply_locale; end",
"def content_language; end",
"def languageChange()\n setCaption(trUtf8(\"きっかんじ\"))\n @kanji_label.setText( trUtf8(\"textLabel1\") )\n @toggle_button.setText( trUtf8(\"pushButton1\") )\n @notes_label.setText( trUtf8(\"textLabel2\") )\n end",
"def do_change_lang\n language = $document.at_css('#tryruby-lang-select').value\n $document.root['lang'] = language\n set_cookie('tryruby_nl_language', language)\n get_content_from_server(language)\n end",
"def language \n \"language\" \n end",
"def translit_non_latin_lang(lang)\n case lang\n when \"ar\",\"ru\",\"el\"\n self.translit\n else\n self\n end\n end",
"def set_page_language(lang)\n @page_language = lang.to_s.downcase.sub(/-.*$/, '')\n end",
"def do_change_lang(event)\n language = event.target.data[\"change-lang\"]\n\n $document.root['lang'] = language\n set_cookie('tryruby_language', language)\n get_content_from_server(language)\n end",
"def language\n self\n end",
"def lang\n # get current page url hash\n back_hash = Rails.application.routes.recognize_path request.referer\n Rails.logger.debug(\"original back_hash: #{back_hash.inspect}\")\n # change the locale code in the current page url hash\n back_hash[:locale] = @locale_code\n Rails.logger.debug(\"redirect to: #{back_hash.inspect}\")\n # see current page in new locale!\n redirect_to back_hash\n end",
"def hello_world(language)\n if (language == 'es')\n 'Hola Mundo'\n elsif (language == 'de')\n 'Hallo Welt'\n elsif (language == 'swe')\n 'Hej världen'\n else\n 'Hello World'\nend\nend",
"def language_chosen(lang)\n @language = lang\n end",
"def template_language; end",
"def set_current_language(lang)\n GetText.locale, prev_value = sanitize_lang(lang), GetText.locale\n prev_value\n end",
"def set_user_language\n # turn on 'zen' to see localization by adding 'zen=true' to query string, will stay on until a query with 'zen=false'\n session[:zen] = (session[:zen] || params[:zen] == \"true\") && params[:zen] != \"false\"\n I18n.locale = 'en'\n end",
"def switch_to_english\n english_link.click\n wait_for_welsh_link\n end",
"def set_website_locale\n app_locales = %w(es ca)\n I18n.locale = params[:lang] if app_locales.include?(params[:lang])\n end",
"def set_language?\n actions.include?(:set_language)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
set up the session context information, so that it gets logged with the job log lines; also set up a unique tmpdir, which will get removed at the end of the job. | def configure_for_job(job)
previous_tmpdir = ENV.fetch("TMPDIR", nil)
self.class.running_job(job) do
dir = Dir.mktmpdir("job-#{job.id}-#{name.gsub(/[^\w.]/, ".")}-")
begin
ENV["TMPDIR"] = dir
yield
ensure
FileUtils.remove_entry(dir, true)
end
end
ensure
ENV["TMPDIR"] = previous_tmpdir
end | [
"def set_session_context\n @session_context = @app.build_session_context\n end",
"def temp_dir\n @temp_dir ||= Dir.mktmpdir\n end",
"def set_context_session\n if @object.project\n session[:current_project] = @object.project.id\n end\n if @object.context\n session[:current_context] = @object.context.id\n end\n end",
"def tempdir\n temp_dir = Dir.mktmpdir\n yield temp_dir\n ensure\n if @debug\n puts(\"\\nLeaving #{temp_dir} in place\")\n else\n rm_rf temp_dir\n end\n end",
"def tmpdir\n Dir.tmpdir\n end",
"def teardown\n cleanup_token # In case we created an authorization\n FileUtils.remove_entry_secure ENV['HOME'] if ENV['HOME'].start_with? '/tmp'\n FileUtils.remove_entry_secure @pwd if @pwd.start_with? '/tmp'\n ENV['HOME'] = @old_home\n Dir.chdir @old_pwd\n super\n end",
"def context\n @_context ||= {\n :argv => START_CTX[:argv].map { |arg| arg.dup },\n :cwd => START_CTX[:cwd].dup,\n 0 => START_CTX[0].dup,\n }.tap do |ctx|\n rewrite_context(ctx)\n end\n end",
"def generate_tmpdir\n pipeline.generate_tmpdir\n end",
"def use_temporary_directory_for\n require 'fileutils'\n\n dir = File.join(\"/tmp\", \"parallel_tests\")\n new_dir = File.join(dir, \"application\")\n\n begin\n # just in case the temporary dir already exists\n FileUtils.rm_rf(dir) if File.exists?(dir)\n\n # create the temporary directory\n FileUtils.mkdir_p(new_dir)\n\n # chdir changes cwd back to the original one after it is done\n Dir.chdir(new_dir) do\n yield\n end\n ensure\n FileUtils.rm_rf(dir) if File.exists?(dir)\n end\nend",
"def tempdir #:doc:\n Dir.tmpdir\n end",
"def session_log_directory\n config_directory + FileSep + self['SessionLogDirectory']\n end",
"def save_details_to_session(job_name, filename, correlation_id)\n session[:job] = {\n name: job_name,\n filename: filename,\n submission_time: submission_time,\n correlation_id: correlation_id\n }\n end",
"def make_sessions_logs\n sessions_uuids = []\n sessions_info = []\n info = ''\n hist_file = ''\n hist_file_name = ''\n log_list = []\n\n # Create list of sessions with base info\n framework.db.workspace.events.each do |e|\n if not e.info.nil? and e.info[:session_type] =~ /shell/ or e.info[:session_type] =~ /meter/\n if e.info[:command] != 'load stdapi'\n if not sessions_uuids.include?(e.info[:session_uuid])\n sessions_uuids << e.info[:session_uuid]\n sessions_info << {:uuid => e.info[:session_uuid],\n :type => e.info[:session_type],\n :id => e.info[:session_id],\n :info => e.info[:session_info]}\n end\n end\n end\n end\n\n sessions_uuids.each do |su|\n sessions_info.each do |i|\n if su == i[:uuid]\n print_line(\"Exporting Session #{i[:id]} history\")\n hist_file_name = \"#{framework.db.workspace.name}_session_#{i[:id]}_#{::Time.now.strftime('%Y%m%d.%H%M%S')}.log\"\n i.each do |k, v|\n info << \"#{k.to_s}: #{v.to_s} \"\n end\n break\n end\n end\n hist_file << \"# Info: #{info}\\n\"\n info = ''\n framework.db.workspace.events.each do |e|\n if not e.info.nil? and e.info.has_key?(:command) or e.info.has_key?(:output)\n if e.info[:session_uuid] == su\n if e.info.has_key?(:command)\n hist_file << \"#{e.updated_at}\\n\"\n hist_file << \"#{e.info[:command]}\\n\"\n elsif e.info.has_key?(:output)\n hist_file << \"#{e.updated_at}\\n\"\n hist_file << \"#{e.info[:output]}\\n\"\n end\n end\n end\n end\n\n # Set RC file path and file name\n session_hist_path = ::File.join(Msf::Config.log_directory, 'projects', framework.db.workspace.name)\n session_hist_fullpath = ::File.join(session_hist_path, hist_file_name)\n\n # Create folder\n ::FileUtils.mkdir_p(session_hist_path)\n\n print_line(\"Saving log file to #{session_hist_fullpath}\")\n file_write(session_hist_fullpath, hist_file)\n hist_file = ''\n print_line('Log file written')\n log_list << session_hist_fullpath\n end\n\n return log_list\n end",
"def prepare\n tmp.make_dir\n end",
"def tmpdir(id=:default)\n @tmpdirs[id] ||= Dir.mktmpdir\n end",
"def tmpdir\n @tmpdir ||= File.join(Dir.tmpdir, 'sample_file', 'image')\n end",
"def session_id_context=(p0) end",
"def tmpdir(id=:default)\n @tmpdirs[id] ||= Pathname.new(Dir.mktmpdir)\n end",
"def tmpdir(id=:default)\n @tmpdirs[id] ||= Pathname.new(Dir.mktmpdir).realdirpath\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
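The configure_for_job entry above swaps TMPDIR to a fresh per-job directory and restores it in ensure blocks. A minimal standalone sketch of the same pattern, assuming a plain job id instead of a job object (the helper name with_job_tmpdir is illustrative, not taken from the source):

    require "tmpdir"
    require "fileutils"

    # Run the block with TMPDIR pointing at a fresh per-job directory,
    # then remove the directory and restore the previous TMPDIR value.
    def with_job_tmpdir(job_id)
      previous_tmpdir = ENV["TMPDIR"]
      dir = Dir.mktmpdir("job-#{job_id}-")
      begin
        ENV["TMPDIR"] = dir
        yield dir
      ensure
        FileUtils.remove_entry(dir, true)
        ENV["TMPDIR"] = previous_tmpdir
      end
    end

    with_job_tmpdir(42) { |dir| File.write(File.join(dir, "scratch.txt"), "data") }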
Private: Used by insert to add a new node at the specified position. | def insert_at(data, position)
node, head, tail = set_insert_vars(data, position)
# before: head -> position
# after: head -> node -> position
head.tail = node
node.tail = position
position.head = node
node.head = head
@size += 1
end | [
"def insert_at(pos, node) end",
"def insert(idx, node)\n end",
"def add position, data\n node = Node.new position, data\n @insertion_order << position\n\n if @root.nil?\n @root = node\n @depth[:total] = 1\n @depth[:left] = 1\n @depth[:right] = 1\n else\n current = @root\n current_depth = 2\n loop do\n if node.position < current.position\n if current.left.nil?\n node.depth = current_depth\n current.left = node\n\n depth_check current_depth, node.position\n break\n else\n current = current.left\n current_depth += 1\n end\n elsif node.position > current.position\n if current.right.nil?\n node.depth = current_depth\n current.right = node\n\n depth_check current_depth, node.position\n break\n else\n current = current.right\n current_depth += 1\n end\n else\n break\n end\n end\n end\n end",
"def insert_at(position = 1)\n insert_at_position(position)\n end",
"def insert_at(data, index)\n\t\t@current_node = at(index)\n\t\t@insert_node = Node.new(data, @current_node)\n\n\t\t#Handeling the case that user inserts a node at head position (even though prepend exists for that)\n\t\tif @current_node != @head\n\t\t\t@old_link_node = at(index - 1)\n\t\t\t@old_link_node.next_node = @insert_node\n\t\telse\n\t\t\t@head = @insert_node\n\t\tend\n\tend",
"def insert_node(new_node_val)\n new_node = Node.new(new_node_val)\n @nodes << new_node\n @_node_map[new_node_val] = new_node\n new_node\n end",
"def insert_at(position = acts_as_list_top)\n insert_at_position(position)\n end",
"def insert_after(new_node)\n # Interface method\n end",
"def insert_at(index)\n at(index)\n temp = @current_node.next\n blankNode = Node.new('Inserted Node')\n @current_node.next = blankNode\n blankNode.next = temp\n end",
"def insertNodeAtPosition(llist, data, position)\n tracked_node = llist\n new_node = SinglyLinkedListNode.new data\n\n unless llist\n return new_node\n end\n if position == 0\n new_node.next = llist\n return new_node\n end\n\n current_position = 0\n current_node = llist\n while(current_position != position - 1 && llist.next != nil)\n current_node = current_node.next\n current_position += 1\n end\n node_at_position = current_node.next\n current_node.next = new_node\n new_node.next = node_at_position\n\n return tracked_node\n\nend",
"def insert(index, node)\n return if @nodes.include?(node)\n\n @nodes.insert(index, node)\n\n take_ownership(node)\n end",
"def insert_node\n insert_node_helper(@root)\n end",
"def insert(pos, tree)\n if @children.empty?\n self.<<(tree)\n return self\n end\n\n last = @children.length - 1\n @children.each_with_index do |child, i|\n # If the current NUC index is one ahead of the desired position, then we\n # can go ahead and splice the new node into the array\n if child.idx == pos+1 || child.idx == pos+2\n tmp = @children[0 ... i]\n tmp << tree\n tmp += @children[i .. -1]\n @children = tmp\n return self\n # If we're looking at the last element, and we haven't inserted yet,\n # then go ahead and insert\n elsif i == last\n self.<<(tree)\n return self\n end\n end\n\n # TODO: Couldn't we eliminate the final loop comparison and just insert\n # here if we haven't found a place? I don't think the `nil' return value\n # is being checked by any callers.\n\n nil\n end",
"def insert(pos,item)\n end",
"def insert_position(position = nil)\n return @insert_position unless position\n @insert_position = position\n end",
"def insert_me(pos, n)\n raise \"Insertion of non AstNode object\\n\" if !n.is_a?(Node)\n raise \"Insertion of an attached node.\\n\" if @parent!=nil\n case pos\n when \"childof\"\n n.children.push(self)\n @parent = n\n\n when \"before\", \"after\"\n list = n.parent.children\n # find the child\n i = list.index(n)\n # insert \n list.insert(i+1, self) if pos==\"after\"\n list.insert(i, self) if pos==\"before\"\n @parent = n.parent\n else\n raise \"Unsupported insertion type. Only ones allowed are before, after, and childof\"\n end # case\n return nil\n end",
"def insertAfter(node, new_node)\n end",
"def insert(new_entry, new_position = children.size)\n increment_child_positions(new_position)\n new_entry.update(list_position: new_position, parent: self)\n end",
"def insert_at!(position = 1, list_name = nil)\n position = position.to_s.to_i\n if position > 0\n remove_from_list!(list_name)\n if position > last_position(list_name)\n add_to_list!(list_name)\n else\n move_lower_items(:down, position - 1, list_name)\n send(\"#{evaluate_sortable_options(list_name)[:column]}=\".to_sym, position)\n save\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
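The insert_at entry above rewires four pointers to splice a node between head and position. A self-contained sketch of just that rewiring, with a hypothetical Node struct whose head/tail fields mirror the naming used in the entry:

    # `head` points to the previous node, `tail` to the next one.
    Node = Struct.new(:data, :head, :tail)

    # Splice `node` between `before` and `after`: before -> node -> after.
    def splice_between(node, before, after)
      before.tail = node
      node.tail   = after
      after.head  = node
      node.head   = before
      node
    end

    a = Node.new(:a)
    c = Node.new(:c)
    a.tail = c
    c.head = a
    splice_between(Node.new(:b), a, c)
    p [a.data, a.tail.data, a.tail.tail.data]  # => [:a, :b, :c]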
Determine if there is a chapter after the current page | def next_chapter_path
return false unless current_page.is_a? Book::Chapter
current_page.next_chapter
end | [
"def whole_chapter?\n starting_bibleverse.verse.zero?\n end",
"def end_of_chapter\n end",
"def chapter?(node)\n attributes = node.attributes\n attributes && attributes['class'] &&\n attributes['class'].value =~ /chapter/\n end",
"def is_only_chapter?\n self.work.chapters.count == 1\n end",
"def whole_book?\n starting_bibleverse.chapter.zero?\n end",
"def has_next?\n 1 < self.page\n end",
"def chapters?\n !chapters.empty?\n end",
"def new_chapter?(template)\n return false unless Kotoba.config.chapter_on_new_page\n template_dir = File.dirname(template.file)\n is_new_dir = !(@last_dir.nil? || @last_dir == template_dir)\n @last_dir = template_dir\n is_new_dir\n end",
"def has_previous_page\n if last\n object.length > last\n else\n !!after\n end\n end",
"def last?\n event.user.chapters.ascending.last == self\n end",
"def chapter_valid?(chapter)\n chapter.positive? && chapter <= nr_of_chapters\n end",
"def completed?(chapter)\n chapters_completed.include? chapter\n end",
"def first_chapter\n frontmatter? ? chapters[1] : chapters[0]\n end",
"def previous_page?\n page > 1\n end",
"def last_page?\n current_page == pages.end\n end",
"def next_chapter\n currentVerse = Verse.find(session[:currentVerse].to_i)\n book = currentVerse.book\n nextChapter = currentVerse.chapter + 1\n if nextChapter > BOOKCHAPTERS[book] #bump to the next book\n if currentVerse.book_order == 27 #unless we are in Revelation\n nextChapter = BOOKCHAPTERS[book]\n else\n book = BOOKCHAPTERS.keys[currentVerse.book_order]\n nextChapter = 1\n end\n end\n @verse = Verse.where(\"book = ? and chapter = ? and verse = ?\", book, nextChapter, 1).first\n show_verse_ajax (@verse.id)\n end",
"def has_previous_page?\n @current_page != 1\n end",
"def chapter_link(book_id, book, starting, jump)\n min_chapter = book[\"prelude\"] ? 0 : 1\n total_chapters = book[\"chapters\"]\n go_to_chapter = starting.to_i + jump\n is_chapter = (go_to_chapter <= total_chapters) && (go_to_chapter >= min_chapter)\n if is_chapter\n return visual_chapter_path(book_id, go_to_chapter)\n elsif jump > 0\n # if you were headed forward but can't get there just go to last chapter\n return visual_chapter_path(book_id, total_chapters)\n else\n # if you were headed backwards but can't get there just go to the first\n return visual_chapter_path(book_id, 1)\n end\n end",
"def long_chapter?\n if self.content.split(' ').count > 250\n true\n else\n false\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Atomically decrement integer value with key. This is just syntactic sugar for calling increment with a negative value. This method also accepts negative amounts. | def decrement(key, amount = 1, options = {})
increment(key, -amount, options)
end | [
"def decrement(key, amt: 1)\n counts[key.to_sym] -= amt\n end",
"def decrement_value(key, value = 1)\n fail NotImplementedError\n end",
"def decrement_value(key, value = 1)\n @redis.decrby key, value\n end",
"def decrease key, amount=1\n @lock.write_sync do\n return unless @data[key].nil? || Numeric === @data[key]\n @data[key] ||= 0\n @data[key] -= amount\n end\n end",
"def decrement(key)\n msg = 'Trying to reduce count below zero.'\n raise RangeError, msg if @counter[key] == 0\n @counter[key] -= 1\n end",
"def decrement(value=1)\n raise ArgumentError, \"value must respond to :to_i\" unless value.respond_to?(:to_i)\n \n self.value -= value.to_i\n end",
"def decrement\n @value -= 1\n end",
"def decrement!\n @value -= @increment\n \n self\n end",
"def decrement_field(key, field, value = 1)\n @redis.hincrby key, field, -value\n end",
"def decrement(by=1, &block)\n allow_expiration do\n val = redis.decrby(key, by).to_i\n block_given? ? rewindable_block(:increment, by, val, &block) : val\n end\n end",
"def decrease(counter)\r\n counter - 1\r\nend",
"def decrement(decr = 1)\n @count.update { |v| v - decr }\n end",
"def decrease(counter)\n counter -= 1\nend",
"def decrement(field, amount = 1)\n increment(field, -amount)\n end",
"def decrement_field(key, field, value = 1)\n fail NotImplementedError\n end",
"def decrement!(name,amount=1)\n raise ArgumentError, \"Only integer fields can be decremented.\" unless self.class.fields.include?({:name => name.to_s, :type => :integer})\n redis.decr(field_key(name), amount)\n end",
"def decrement(key, value = 1, expires_in = nil, initial = nil)\n puts \"Rails.cache.decrement(#{key}, #{value}, {expires_in: #{get_ttl(expires_in)}, initial: #{initial}, raw: false})\"\n return Rails.cache.decrement(key, value, {expires_in: get_ttl(expires_in), initial: initial, raw: false})\n rescue => exc\n Rails.logger.error { \"MEMCACHE-ERROR: decrement: K: #{key}. M: #{exc.message}, I: #{exc.inspect}\" }\n return nil\n end",
"def decrement(ip)\n mutex.synchronize do\n new_val = hash[ip] - 1\n\n if new_val <= 0\n hash.delete(ip)\n else\n hash[ip] -= 1\n end\n end\n end",
"def decr(key, amt=1, ttl=nil, default=nil)\n raise ArgumentError, \"Positive values only: #{amt}\" if amt < 0\n perform(:decr, key, amt.to_i, ttl_or_default(ttl), default)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
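The decrement entry above delegates to increment with a negated amount, which is why a negative amount passed to decrement ends up incrementing. A toy counter illustrating that behaviour (the Counter class is invented for the example):

    class Counter
      def initialize
        @values = Hash.new(0)
      end

      def increment(key, amount = 1, _options = {})
        @values[key] += amount
      end

      # Same signature as the entry above: negate and delegate.
      def decrement(key, amount = 1, options = {})
        increment(key, -amount, options)
      end
    end

    c = Counter.new
    c.increment(:hits, 5)
    c.decrement(:hits, 2)      # hits => 3
    p c.decrement(:hits, -1)   # prints 4: a negative amount increments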
Display ingredients created or updated after the specified date. | def updated_after
update_selector { @ingredients = policy_scope(Ingredient.updated_after(@date)).order(:name) }
render :index
end | [
"def updated_after\n update_selector { @recipes = policy_scope(Recipe.updated_after(@date)).order(:name) }\n render :index\n end",
"def ingredients\n\n # render json of all ingredients in our DB\n render json: Ingredient.all.to_json(except: [:created_at, :updated_at])\n\n end",
"def display_drinks_on_date(user, date)\n users_drinks_on_date = user.drinks_on_date(date)\n user.display_drinks(users_drinks_on_date)\nend",
"def show_todays_drinks(user)\n display_drinks_on_date(user, drinking_day(Time.now))\nend",
"def updated_after\n update_selector { @recipe_reviews = RecipeReview.sort_by_recipe_and_time(policy_scope(RecipeReview.updated_after(@date))) }\n render :index\n end",
"def show(date)\n # Iterate over LogItems, compare date\n @log_items.each do |log_item|\n if log_item.date == date\n puts log_item.name\n end\n end\n end",
"def command_show(date = Date.today)\n\t\n \tlist = @log.get_entries(date)\n\tif list == nil\n\t\tputs \"no entries for that date\"\n\telse\n\t\tlist.each {|entry|\n\t\t\tputs \"#{entry.name}\"\n\t\t}\n\tend\n end",
"def by_date(date)\n\n date = Time.parse(date.to_s).strftime('%Y-%m-%d')\n\n @context.storage.get_many('history', /!#{date}!/)\n end",
"def list_reading_dates\n system \"clear\"\n user_choice = \"\"\n # while user_choice != \"❌ EXIT ❌\" && user_choice != \"🗑 DELETE READING 🗑\" do\n user_choice = prompt.select(\"🔮 #{self.user.name}, Select a date to view your past reading.\") do |menu|\n self.user.reading_dates.map do |date|\n menu.choice \"#{date}\", -> {self.handle_previous_reading_by_date(date)}\n end\n menu.choice \"⬅️ Back ⬅️\", -> {self.main_menu}\n end\n # end \n end",
"def recent_edits_select\n @entities = [%w[Ingredients ingredients], %w[Recipes recipes], %w[Reviews recipe_reviews]]\n end",
"def by_date\n @page_title = \"Expenses [by Date]\"\n @expenses = @current_user.expenses.find(:all)\n\n # attempt to find expenses by a date range\n if !(params[:start].nil? and params[:finish].nil?)\n begin\n @expenses = @current_user.expenses.find(:all, :conditions => {\n :created_at => params[:start].to_date .. params[:finish].to_date\n })\n # capture invalid or nil date ranges\n rescue\n flash[:error] = \"Invalid dates in date range.\"\n @expenses = nil\n end\n end\n\n respond_to do |format|\n format.html # by_date.html.erb\n format.xml { render :xml => @expenses }\n format.iphone do # by_date.iphone.erb\n @panel_title = @page_title\n render :layout => false\n end\n end\n end",
"def followed_recipes\n #if :search_query\n # @recipes = Recipe.search(params[:search_query])\n # @recipes = @recipes.sort!{ |x, y| x[\"created_at\"] <=> y[\"created_at\"] }.reverse\n #else \n if signed_in?\n current_user.followers.each do |following|\n @recipes = Recipe.find(:all, :user_id => following, :order => 'recipes.created_at').reverse\n end\n end\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @recipes }\n end\n end",
"def index\n @date_of_infections = DateOfInfection.all\n end",
"def index\n @instadates = [current_user.created_instadate].compact\n end",
"def show\n @eat_by_date = EatByDate.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @eat_by_date }\n end\n end",
"def show\n \n rec_id = params[:id].to_i\n recipe = Recipe.find(rec_id)\n # find user name for recipe\n \n username = recipe.user.username;\n\n # get all ingredients from step ingredients ?\n ingredients = []\n\n recipe_steps = recipe.recipe_steps\n # one to one step ingredients to ingredients when coming from recipe-steps\n \n # recipe ingredients\n \n \n \n step_ingredients = recipe_steps.map{ |rs| \n { \n step_num: rs.step_num,\n step_image: rs.image,\n instruction: rs.instruction,\n step_ingredients: rs.step_ingredients.map{ |si| \n {amount: si.amount, ingredient: {name: si.ingredient.name} }\n } \n \n }\n \n }\n\n \n step_ingredients.each do |si|\n # byebug \n ings = si[:step_ingredients]\n ings.each do |ing|\n if ing[:amount] \n ing_total = ing[:amount] + \" \" + ing[:ingredient][:name] \n if !ingredients.include?(ing_total)\n ingredients.push(ing_total) \n end\n else\n ing_total = ing[:ingredient][:name] \n if !ingredients.include?(ing_total)\n ingredients.push(ing_total) \n end\n end\n end\n end\n \n # fix time to string\n \n render json: {username: username, recipe: recipe, ingredients: ingredients, recipe_steps: step_ingredients }, status: :accepted\n end",
"def recent_edits\n formatted_date = \"#{params[:date][:year]}-#{params[:date][:month]}-#{params[:date][:day]}\"\n redirect_to \"/#{params[:entity]}/updated_after/#{formatted_date}\"\n end",
"def by_ingredient\n # redirect_non_users\n \n user = User.find_by(id: params[:user_id])\n \n # If ingredient exists, find recipes that use it\n if Ingredient.exists?(params[:id])\n ingredient = Ingredient.find(params[:id])\n # if is_admin?\n # # Show all recipes from ingredient for admins\n # @recipes = Recipe.recipes_of_ingredient(params[:id])\n # else\n # Only show user's recipes\n recipes = user.recipes.recipes_of_ingredient(params[:id])\n # end\n else\n # flash[:alert] = \"That ingredient wasn't found.\"\n # Else show all users' recipes\n recipes = user.recipes\n end\n\n render json: RecipeSerializer.new(recipes).serialized_json, status: 200\n end",
"def edit\n @pickup = Pickup.find(params[:id])\n @days = Day.where(\"date >= ?\", Date.today).all\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
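The updated_after actions above rely on a same-named scope on the model. Outside of Rails the filtering they express is just a select plus sort; a plain-Ruby stand-in (the Ingredient struct and sample data are assumptions for illustration):

    require "date"

    Ingredient = Struct.new(:name, :updated_at)

    # Keep only records updated strictly after `date`, ordered by name.
    def updated_after(ingredients, date)
      ingredients.select { |i| i.updated_at > date }.sort_by(&:name)
    end

    ingredients = [
      Ingredient.new("salt",  Date.new(2024, 3, 1)),
      Ingredient.new("flour", Date.new(2023, 12, 31))
    ]
    p updated_after(ingredients, Date.new(2024, 1, 1)).map(&:name)  # => ["salt"]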
Creates a new label. | def create_label(project, name, color, options = {})
post("/projects/#{url_encode project}/labels", body: options.merge(name: name, color: color))
end | [
"def create_label(node)\n if node.attributes['id'] and @labels[node.attributes['id']]\n label = @labels[node.attributes['id']]\n else\n label = @factory.new_label\n @labels[node.attributes['id']] = label\n end\n \n # Read all defined data fields\n label.id = node.attributes['id']\n if node.attributes['type']\n label.type = Utils.add_namespace(node.attributes['type'])\n end\n \n label.name = node.elements['name'].text if node.elements['name']\n label.sort_name = node.elements['sort-name'].text if node.elements['sort-name']\n label.code = node.elements['label-code'].text if node.elements['label-code']\n label.disambiguation = node.elements['disambiguation'].text if node.elements['disambiguation']\n label.country = node.elements['country'].text if node.elements['country']\n \n if life_span = node.elements['life-span']\n label.begin_date = read_date_attribute(life_span, 'begin')\n label.end_date = read_date_attribute(life_span, 'end')\n end\n \n # Read the alias list\n read_alias_list(node.elements['alias-list'], label.aliases)\n \n # Read the release list\n read_release_list(node.elements['release-list'], label.releases)\n \n # Read the relation list\n if node.elements['relation-list']\n node.elements.each('relation-list') {|relation_node|\n read_relation_list(relation_node) {|relation|\n label.add_relation relation\n }\n }\n end\n \n # Read the tag list\n read_tag_list(node.elements['tag-list'], label.tags)\n \n return label \n end",
"def create_label(username, lbl)\n msg = GEmailMessage.new\n msg.add_property(\"label\", lbl)\n response = @connection.request(@action[:label], \"/#{username}/label\", msg.to_s)\n return !response.nil?\n end",
"def put_label name\n @instructions << Label.new(name)\n self\n end",
"def create_issue_label(repo, issue_id, label)\n client.add_labels_to_an_issue(repo, issue_id, [label])\n end",
"def get_or_create_label(name)\n require_relative 'label'\n Label.new(@api, @api.do_request(\"POST\", get_base_api_path() + \"/labels\", {'name' => name}))\n end",
"def createLabel(content)\n\t\tlabel = Gtk::Label.new\n\t\tlabel.set_markup(content)\n\t\treturn label\n\tend",
"def create_labels(labels)\n initialize\n labels.each do |l|\n @client.add_label(@targetRepo, l.name, l.color)\n end\n end",
"def create_labels\n @options[:labels].each do |label|\n client.add_label(@options[:repo], label)\n end\n rescue Octokit::UnprocessableEntity => e\n # assume label already exists and do nothing\n end",
"def put_label name\n if @labels.key? name\n raise Errors::LabelAlreadyDefined, \"Label `#{name}` is already defined\"\n end\n @labels[name] = nil\n @instructions << Label.new(name)\n self\n end",
"def create_labels\n @options.labels.each do |label|\n client.add_label(@options.repo, label)\n end\n rescue Octokit::UnprocessableEntity => e\n # assume label already exists and do nothing\n end",
"def international_create(label_options)\n create_label File.join(LABEL_URL, 'international', label_options)\n end",
"def add_label(label)\n @label_by_id[label.id] = label if !@label_by_id.include?(label.id)\n end",
"def define_label(r, label, element, type)\n err = @name_space_checker.check_for_label_type_mismatch(r, label, type)\n if (err)\n return\n end\n r.define_label(label, element, type)\n end",
"def create_label(state, text)\n label = Gtk::Label.new\n label.set_alignment(0, 0)\n if state\n label.set_markup(\"<span foreground=\\\"black\\\"><b>#{text}</b></span>\")\n else\n label.set_markup(\"<span foreground=\\\"gray\\\">#{text}</span>\")\n end\n return label\n end",
"def add_label label\n @labels[label] = true\n\n label\nend",
"def label=(lab) @label = lab end",
"def create_label(field, text = nil, options_label)\n if options_label and options_label[:none]\n label = ''\n else\n label = label field, text, options_label\n end\n end",
"def create_group_label(group, name, color, options = {})\n post(\"/groups/#{url_encode group}/labels\", body: options.merge(name: name, color: color))\n end",
"def add_label!( n, primary=:false )\n save if add_label( n, primary )\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Subscribes the user to a label to receive notifications | def subscribe_to_label(project, name)
post("/projects/#{url_encode project}/labels/#{url_encode name}/subscribe")
end | [
"def subscribe_to(username)\n action(username, 'subscribe')\n end",
"def subscribe_notification(student)\n notification = Notification.new(\"#{name} #{surname} subscribe to you\")\n student.notifications << notification\n end",
"def subscribe_to_group_label(group, name)\n post(\"/groups/#{url_encode group}/labels/#{url_encode name}/subscribe\")\n end",
"def subscribe_to_user(nickname)\n call_subscription_api('user/%s/subscribe' % URI.encode(nickname))['status']\n end",
"def do_subscribe\n subscribe_to(@nodename)\n get_subscriptions\n end",
"def add_label(label)\n @api.do_request(\"PUT\", label.get_base_api_path() + \"/messages/\" + get('id'));\n @label_ids_set[label.id] = true\n end",
"def subscribe_to_channel; end",
"def subscribe\n CampaignMonitorWrapper.subscribe(id: user.id, email: user.email, name: user.name, beta: user.beta?.to_s, billable: user.billable?.to_s)\n end",
"def subscribe tags\n @sub_tracker.subscribe tags\n end",
"def label(user_id, properties = {}, timeout = nil)\n\n raise(RuntimeError, \"user_id must be a string\") if user_id.nil? || user_id.to_s.empty?\n\n path = Sift.current_users_label_api_path(user_id)\n track(\"$label\", properties, timeout, path)\n end",
"def labels= new_labels\n raise ArgumentError, \"Value must be a Hash\" if new_labels.nil?\n update_grpc = Google::Cloud::PubSub::V1::Subscription.new name: name, labels: new_labels\n @grpc = service.update_subscription update_grpc, :labels\n @resource_name = nil\n end",
"def o_ucount_notify\n User.bot.lobby_speak(\"#{linked_name}さんが本日#{today_total_o_ucount}問解きました\")\n end",
"def notify activity\n\n # people who are subscribed to the tags of the question\n t_subscribers = target.subscribers.all\n\n (t_subscribers).each do |subscriber|\n Notification.create :user => subscriber, :activity => activity\n end\n end",
"def on_label(value); end",
"def create_label(username, lbl)\n msg = GEmailMessage.new\n msg.add_property(\"label\", lbl)\n response = @connection.request(@action[:label], \"/#{username}/label\", msg.to_s)\n return !response.nil?\n end",
"def subscribe! # rubocop:disable Lint/UselessMethodDefinition\n super\n end",
"def subscribe(prefix = EVERYTHING)\n ffi_delegate.set_subscribe(prefix)\n end",
"def subscribe_to_room(nickname)\n call_subscription_api('room/%s/subscribe' % URI.encode(nickname))['status']\n end",
"def subscribe(args = {})\n :no_response\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /translated_lines GET /translated_lines.json | def index
@translated_lines = TranslatedLine.all
end | [
"def index\n @translation_lines = TranslationLine.all\n end",
"def show\n @translated_line = TranslatedLine.new\n @translated_lines = TranslatedLine.where(translation_code: @translation_line.translation_code)\n end",
"def create\n @translated_line = TranslatedLine.new(translated_line_params)\n\n respond_to do |format|\n if @translated_line.save\n @translated_lines = TranslatedLine.where(translation_code: @translated_line.translation_code)\n format.html { redirect_to @translated_line, notice: 'Translated line was successfully created.' }\n format.js {}\n format.json { render :show, status: :created, location: @translated_line }\n else\n format.html { render :new }\n format.js {}\n format.json { render json: @translated_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @translated_line.update(translated_line_params)\n format.html { redirect_to @translated_line, notice: 'Translated line was successfully updated.' }\n format.json { render :show, status: :ok, location: @translated_line }\n else\n format.html { render :edit }\n format.json { render json: @translated_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @line_items = @order.line_items\n\n render json: @line_items\n end",
"def create\n @translation_line = TranslationLine.new(translation_line_params)\n\n respond_to do |format|\n if @translation_line.save\n format.html { redirect_to @translation_line, notice: 'Translation line was successfully created.' }\n format.json { render :show, status: :created, location: @translation_line }\n else\n format.html { render :new }\n format.json { render json: @translation_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @translated_line.destroy\n respond_to do |format|\n format.html { redirect_to translated_lines_url, notice: 'Translated line was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def list\n @client.get('/translations/requests')\n end",
"def index\n @line_strings = LineString.all\n end",
"def update\n respond_to do |format|\n if @translation_line.update(translation_line_params)\n format.html { redirect_to @translation_line, notice: 'Translation line was successfully updated.' }\n format.json { render :show, status: :ok, location: @translation_line }\n else\n format.html { render :edit }\n format.json { render json: @translation_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @order_line_items = @order.order_line_items\n\n render json: @order_line_items\n end",
"def index\n @textlines = Textline.all\n end",
"def index\n @transit_lines = TransitLine.all\n end",
"def index\n @lines = Line.all\n end",
"def get_conversations_messaging_integrations_line_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: ConversationsApi.get_conversations_messaging_integrations_line ...\"\n end\n \n \n \n \n \n \n \n \n \n \n \n \n \n # resource path\n local_var_path = \"/api/v2/conversations/messaging/integrations/line\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'pageSize'] = opts[:'page_size'] if opts[:'page_size']\n query_params[:'pageNumber'] = opts[:'page_number'] if opts[:'page_number']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n local_header_accept = ['application/json']\n local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result\n\n # HTTP header 'Content-Type'\n local_header_content_type = ['application/json']\n header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n auth_names = ['PureCloud OAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'LineIntegrationEntityListing')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ConversationsApi#get_conversations_messaging_integrations_line\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def index\n @line = Line.includes(:sublines).find(params[:line_id])\n @budget = @line.budget\n @sublines = @line.sublines\n @subline = Subline.new\n\n respond_with(@budget, @lines)\n end",
"def show\n @line = Line.find(params[:id])\n\n render json: @line\n end",
"def index\n @lines = ReportingForms::LaborLine.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lines }\n end\n end",
"def show\n @item_lines = ItemLine.find(params[:id])\n\n render json: @item_lines\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /translated_lines POST /translated_lines.json | def create
@translated_line = TranslatedLine.new(translated_line_params)
respond_to do |format|
if @translated_line.save
@translated_lines = TranslatedLine.where(translation_code: @translated_line.translation_code)
format.html { redirect_to @translated_line, notice: 'Translated line was successfully created.' }
format.js {}
format.json { render :show, status: :created, location: @translated_line }
else
format.html { render :new }
format.js {}
format.json { render json: @translated_line.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @translation_line = TranslationLine.new(translation_line_params)\n\n respond_to do |format|\n if @translation_line.save\n format.html { redirect_to @translation_line, notice: 'Translation line was successfully created.' }\n format.json { render :show, status: :created, location: @translation_line }\n else\n format.html { render :new }\n format.json { render json: @translation_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @translated_lines = TranslatedLine.all\n end",
"def create\n @textline = Textline.new(textline_params)\n\n respond_to do |format|\n if @textline.save\n format.html { redirect_to @textline, notice: 'Textline was successfully created.' }\n format.json { render :show, status: :created, location: @textline }\n else\n format.html { render :new }\n format.json { render json: @textline.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @translated_line.update(translated_line_params)\n format.html { redirect_to @translated_line, notice: 'Translated line was successfully updated.' }\n format.json { render :show, status: :ok, location: @translated_line }\n else\n format.html { render :edit }\n format.json { render json: @translated_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @line = Line.new(params[:line])\n\n if @line.save\n render json: @line, status: :created, location: @line\n else\n render json: @line.errors, status: :unprocessable_entity\n end\n end",
"def create\n @line = Line.new(line_params)\n\n if @line.save\n render json: @line, status: :created\n else\n render json: @line.errors, status: :unprocessable_entity\n end\n end",
"def index\n @translation_lines = TranslationLine.all\n end",
"def create\n @item_lines = ItemLine.new(item_line_params)\n\n if @item_lines.save\n render json: @item_lines, status: :created, location: @item_lines\n else\n render json: @item_lines.errors, status: :unprocessable_entity\n end\n end",
"def create\n @line_item = @order.line_items.new(line_item_params)\n\n if @line_item.save\n render json: @line_item, status: :created, location: [@order, @line_item]\n else\n render json: @line_item.errors, status: :unprocessable_entity\n end\n end",
"def create\n @line_description = LineDescription.new(params[:line_description])\n\n respond_to do |format|\n if @line_description.save\n format.html { redirect_to @line_description, :notice => 'Line description was successfully created.' }\n format.json { render :json => @line_description, :status => :created, :location => @line_description }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @line_description.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def destroy\n @translated_line.destroy\n respond_to do |format|\n format.html { redirect_to translated_lines_url, notice: 'Translated line was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def create\n @line = Line.new(line_params)\n\n respond_to do |format|\n if @line.save\n format.html { redirect_to @line, notice: \"Line was successfully created.\" }\n format.json { render :show, status: :created, location: @line }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @translation_line.update(translation_line_params)\n format.html { redirect_to @translation_line, notice: 'Translation line was successfully updated.' }\n format.json { render :show, status: :ok, location: @translation_line }\n else\n format.html { render :edit }\n format.json { render json: @translation_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @line = Line.new(line_params)\n\n respond_to do |format|\n if @line.save\n format.html { redirect_to new_line_path, notice: 'Line was successfully created. 5 points are awarded to your score.' }\n format.json { render json: @line, status: :created, location: @line }\n else\n format.html { render action: \"new\" }\n format.json { render json: @line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @line_reply_message = LineReplyMessage.new(line_reply_message_params)\n\n respond_to do |format|\n if @line_reply_message.save\n format.html { redirect_to @line_reply_message, notice: 'Line reply message was successfully created.' }\n format.json { render :show, status: :created, location: @line_reply_message }\n else\n format.html { render :new }\n format.json { render json: @line_reply_message.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @payable = Payable.find(params[:payable_id])\n @payable_line = @payable.payable_lines.build(params[:payable_line])\n\n respond_to do |format|\n if @payable_line.save\n format.html { redirect_to(new_payable_payable_line_path(@payable), :notice => 'Line item was successfully created.') }\n format.json { render :json => @payable_line, :status => :created, :location => [@payable_line.payable, @payable_line] }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @payable_line.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @order_line = OrderLine.new(order_line_params)\n\n respond_to do |format|\n if @order_line.save\n format.html { redirect_to @order_line, notice: \"Order line was successfully created.\" }\n format.json { render :show, status: :created, location: @order_line }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @order_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order_line = OrderLine.new(order_line_params)\n\n respond_to do |format|\n if @order_line.save\n format.html { redirect_to @order_line, notice: 'Order line was successfully created.' }\n format.json { render :show, status: :created, location: @order_line }\n else\n format.html { render :new }\n format.json { render json: @order_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @traduction = Traduction.new(traduction_params)\n @data = JSON.parse(open(URI.escape(\"http://translate.google.com/translate_a/t?client=p&q=\"+@traduction.fr+\"&hl=en&sl=fr&tl=en&ie=UTF-8&oe=UTF-8&multires=0\")).read)\n @tmp = @data['sentences'][0]['trans']\n @traduction.en = @tmp\n respond_to do |format|\n if @traduction.save\n format.html { redirect_to @traduction, notice: 'Traduction was successfully created.' }\n format.json { render action: 'show', status: :created, location: @traduction }\n else\n format.html { render action: 'home' }\n format.json { render json: @traduction.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /translated_lines/1 PATCH/PUT /translated_lines/1.json | def update
respond_to do |format|
if @translated_line.update(translated_line_params)
format.html { redirect_to @translated_line, notice: 'Translated line was successfully updated.' }
format.json { render :show, status: :ok, location: @translated_line }
else
format.html { render :edit }
format.json { render json: @translated_line.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n respond_to do |format|\n if @translation_line.update(translation_line_params)\n format.html { redirect_to @translation_line, notice: 'Translation line was successfully updated.' }\n format.json { render :show, status: :ok, location: @translation_line }\n else\n format.html { render :edit }\n format.json { render json: @translation_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @line = Line.find(params[:id])\n\n respond_to do |format|\n if @line.update_attributes(line_params)\n format.html { redirect_to new_line_path, notice: 'Line was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render verb: \"edit\" }\n format.json { render json: @line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @line = Line.find(params[:id])\n\n if @line.update(params[:line])\n head :no_content\n else\n render json: @line.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @textline.update(textline_params)\n format.html { redirect_to @textline, notice: 'Textline was successfully updated.' }\n format.json { render :show, status: :ok, location: @textline }\n else\n format.html { render :edit }\n format.json { render json: @textline.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @order_line = OrderLine.find(params[:id])\n\n respond_to do |format|\n if @order_line.update_attributes(params[:order_line])\n format.html { redirect_to @order_line, notice: 'Order line was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @line = Line.find(params[:id])\n\n respond_to do |format|\n if @line.update_attributes(params[:line])\n flash[:notice] = 'Line was successfully updated.'\n format.html { redirect_to(lines_path) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @line.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @line = Line.find_by_no(params[:id])\n\n respond_to do |format|\n if @line.update_attributes(params[:line])\n format.html { redirect_to @line, notice: 'Line was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @payable = Payable.find(params[:payable_id])\n @payable_line = @payable.payable_lines.find(params[:id]) \n\n respond_to do |format|\n if @payable_line.update_attributes(params[:payable_line])\n format.html { redirect_to(new_payable_payable_line_path(@payable), :notice => 'Line item was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @payable_line.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @line_description = LineDescription.find(params[:id])\n\n respond_to do |format|\n if @line_description.update_attributes(params[:line_description])\n format.html { redirect_to :action=>'index', :notice => 'Line description was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @line_description.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @order_line.update(order_line_params)\n format.html { redirect_to @order_line, notice: 'Order line was successfully updated.' }\n format.json { render :show, status: :ok, location: @order_line }\n else\n format.html { render :edit }\n format.json { render json: @order_line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @item_lines = ItemLine.find(params[:id])\n\n if @item_lines.update(item_line_params)\n head :no_content\n else\n render json: @item_lines.errors, status: :unprocessable_entity\n end\n end",
"def update\n @line = Line.find(params[:id])\n\n respond_to do |format|\n if @line.update_attributes(params[:line])\n format.html { redirect_to(@line, :notice => 'Line was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @line.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @v1_item_line = V1::ItemLine.find(params[:id])\n\n if @v1_item_line.update(v1_item_line_params)\n head :no_content\n else\n render json: @v1_item_line.errors, status: :unprocessable_entity\n end\n end",
"def update\n @line = Line.find(params[:id])\n @line.update_attributes(params[:line])\n end",
"def update\n @line_item1 = LineItem1.find(params[:id])\n\n respond_to do |format|\n if @line_item1.update_attributes(params[:line_item1])\n format.html { redirect_to @line_item1, :notice => 'Line item1 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @line_item1.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\n\n respond_to do |format|\n if @po_line.update_attributes(po_line_params)\n CommonActions.notification_process(\"PoLine\", @po_line)\n # genarate_pdf\n format.html { redirect_to new_po_header_po_line_path(@po_header), :notice => 'Line item was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @po_line.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @line = ReportingForms::B1Line.find(params[:id])\n\n respond_to do |format|\n if @line.update_attributes(params[:reporting_forms_b1_line])\n format.html { redirect_to @line, notice: 'Line was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @line.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @invoice_line = InvoiceLine.find(params[:id])\n\n if @invoice_line.update(invoice_line_params)\n head :no_content\n else\n render json: @invoice_line.errors, status: :unprocessable_entity\n end\n end",
"def update\n @invoice_line = InvoiceLine.find(params[:id])\n\n respond_to do |format|\n if @invoice_line.update_attributes(params[:invoice_line])\n format.html { redirect_to @invoice_line, notice: 'Invoice line was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @invoice_line.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /translated_lines/1 DELETE /translated_lines/1.json | def destroy
@translated_line.destroy
respond_to do |format|
format.html { redirect_to translated_lines_url, notice: 'Translated line was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @translation_line.destroy\n respond_to do |format|\n format.html { redirect_to translation_lines_url, notice: 'Translation line was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line = Line.find(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @v1_item_line = V1::ItemLine.find(params[:id])\n @v1_item_line.destroy\n\n head :no_content\n end",
"def destroy\n @line = Line.find(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to new_line_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n\n @po_line.so_line.destroy if @po_line.destroy && @po_line.so_line\n\n respond_to do |format|\n format.html { redirect_to new_po_header_po_line_path(@po_header), :notice => 'Line item was successfully deleted.' }\n format.json { head :ok }\n end\n end",
"def destroy\n @textline.destroy\n respond_to do |format|\n format.html { redirect_to textlines_url, notice: 'Textline was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line = Line.find_by_no(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line_description = LineDescription.find(params[:id])\n @line_description.destroy\n\n respond_to do |format|\n format.html { redirect_to line_descriptions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line = ReportingForms::B1Line.find(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to reporting_forms_b1_lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line = Line.find(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to(lines_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @line_item1 = LineItem1.find(params[:id])\n @line_item1.destroy\n\n respond_to do |format|\n format.html { redirect_to line_item1s_url }\n format.json { head :no_content }\n end\n end",
"def delete(id)\n @client.delete(\"/translations/requests/#{id}\")\n end",
"def destroy\n @line1_item.destroy\n respond_to do |format|\n format.html { redirect_to line1_items_url, notice: 'Line1 item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sample_line = SampleLine.find(params[:id])\n @sample_line.destroy\n\n respond_to do |format|\n format.html { redirect_to sample_lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @translator = Translator.find(params[:id])\n @translator.destroy\n\n respond_to do |format|\n format.html { redirect_to translators_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @invoice_line = InvoiceLine.find(params[:id])\n @invoice_line.destroy\n\n respond_to do |format|\n format.html { redirect_to invoice_lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line_string.destroy\n respond_to do |format|\n format.html { redirect_to line_strings_url, notice: 'Line string was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @story_line = StoryLine.find(params[:id])\n @story_line.destroy\n\n respond_to do |format|\n format.html { redirect_to story_lines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line_detail.destroy\n respond_to do |format|\n format.html { redirect_to line_details_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
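Both translated_lines actions above (update and destroy) operate on a @translated_line that is assumed to be loaded before the action runs, and update additionally relies on a translated_line_params helper; neither appears in these rows. A minimal sketch of the conventional Rails plumbing behind them (the attribute names in permit are placeholders, not taken from the rows above):

class TranslatedLinesController < ApplicationController
  before_action :set_translated_line, only: %i[update destroy]

  private

  # Loads the record referenced by /translated_lines/:id for the member actions.
  def set_translated_line
    @translated_line = TranslatedLine.find(params[:id])
  end

  # Whitelists the attributes that update may mass-assign (illustrative names only).
  def translated_line_params
    params.require(:translated_line).permit(:original_text, :translated_text)
  end
end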
POST /mx_assessments POST /mx_assessments.json | def create
@mx_assessment = MxAssessment.new(mx_assessment_params)
respond_to do |format|
if @mx_assessment.save
# format.html { redirect_to @mx_assessment, notice: 'Mx assessment was successfully created.' }
# format.json { render action: 'show', status: :created, location: @mx_assessment }
format.json {head :no_content}
else
# format.html { render action: 'new' }
format.json { render json: @mx_assessment.errors.full_messages, status: :unprocessable_entity }
end
end
end | [
"def create_assessment() \n new_assessment = assessment(\n \"rand1\", \n [assessment_identification_code(\"Other\",\"Some Identification\")],\n 1\n )\n \n url = REST_URL + 'assessments'\n response = RestClient.post url, new_assessment.to_json, headers\n location = response.headers[:location]\n end",
"def create_employees_assessments\n assessmens = Assessment.active.map{|e| {assessment_id: e.id, employee_id: id}}\n # Creates an array like [{employee_id: 1}, {employee_id: 3}]\n EmployeeAssessment.create(assessmens)\n end",
"def create_employees_assessments\n begin\n emplos = Employee.can_be_assessed.map{|e| {employee_id: e.id}}\n # Creates an array like [{employee_id: 1}, {employee_id: 3}]\n EmployeeAssessment.create(emplos) do |f|\n f.assessment_id = id\n end\n rescue\n p $!.bactrace\n end\n end",
"def create\n @assessment = Assessment.new(params[:assessment])\n \n respond_to do |format|\n if @assessment.save\n format.html { redirect_to @assessment, notice: 'Assessment was successfully created.' }\n format.json { render json: @assessment, status: :created, location: @assessment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @assessment = Assessment.new(params[:assessment])\n\n respond_to do |format|\n if @assessment.save\n format.html { redirect_to @assessment, notice: 'Assessment was successfully created.' }\n format.json { render json: @assessment, status: :created, location: @assessment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @assessment = Assessment.new(assessment_params)\n\n respond_to do |format|\n if @assessment.save\n format.html { redirect_to @assessment, notice: 'Assessment was successfully created.' }\n format.json { render :show, status: :created, location: @assessment }\n else\n format.html { render :new }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_assessment\n end",
"def create\n @mobility_assessment = current_business.mobility_assessments.new(mobility_assessment_params)\n @mobility_assessment.author_id = current_user.id\n\n respond_to do |format|\n if @mobility_assessment.save\n format.html { redirect_to app_mobility_assessments_path, notice: 'MobilityAssessment was successfully created.' }\n format.json { render :show, status: :created, location: @mobility_assessment }\n else\n format.html { render :new }\n format.json { render json: @mobility_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @pain_assessment = current_business.pain_assessments.new(pain_assessment_params)\n @pain_assessment.author_id = current_user.id\n\n respond_to do |format|\n if @pain_assessment.save\n format.html { redirect_to app_pain_assessments_path, notice: 'PainAssessment was successfully created.' }\n format.json { render :show, status: :created, location: @pain_assessment }\n else\n format.html { render :new }\n format.json { render json: @pain_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @skill_assessments = SkillAssessment.all\n end",
"def create\n @quizzes_assessment = current_user.assessments.new(quizzes_assessment_params)\n\n respond_to do |format|\n if @quizzes_assessment.save\n format.html { redirect_to edit_quizzes_assessment_path(@quizzes_assessment), notice: 'Good Luck!' }\n format.json { render :show, status: :created, location: @quizzes_assessment }\n else\n current_assessment = current_user.assessments.find_by(quiz_id: @quizzes_assessment.quiz_id)\n format.json { render json: @quizzes_assessment.errors, status: :unprocessable_entity }\n if current_assessment && current_assessment.finished_at?\n format.html { redirect_to quiz_assessment_path(current_assessment.quiz, current_assessment) }\n elsif current_assessment\n format.html { redirect_to edit_quiz_assessment_path(current_assessment.quiz, current_assessment) }\n else\n format.html { render :new }\n end\n end\n end\n end",
"def assessments\n respond_with Assessment.find_all_by_user_id(params[:id],:select => [:name])\n end",
"def index\n @assessments = Assessment.all\n end",
"def create\n #@curriculum_assessment = CurriculumAssessment.create(params[:curriculum_assessments])\n \n @dop = Dop.find(params[:curriculum_assessment][:dop_id])\n @item = Item.find(params[:curriculum_assessment][:item_id])\n @curriculum_assessment = CurriculumAssessment.create(item: @item, dop: @dop)\n\n\n respond_to do |format|\n if @curriculum_assessment.save\n format.html { redirect_to @curriculum_assessment, notice: 'Curriculum assessment was successfully created.' }\n format.json { render json: @curriculum_assessment, status: :created, location: @curriculum_assessment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @curriculum_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @mt_assessment = MtAssessment.new(params[:mt_assessment])\n\n respond_to do |format|\n if @mt_assessment.save\n format.html { redirect_to(@mt_assessment) }\n format.xml { render :xml => @mt_assessment, :status => :created, :location => @mt_assessment }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @mt_assessment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @exam_set = ExamSet.new(exam_set_params)\n\n respond_to do |format|\n if @exam_set.save\n format.html { redirect_to @exam_set, notice: 'Exam set was successfully created.' }\n format.json { render :show, status: :created, location: @exam_set }\n else\n format.html { render :new }\n format.json { render json: @exam_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def list_assessments(inspector)\n inspector.list_assessment_templates({max_results:999})[0].each {|n| inspector.describe_assessment_templates({ assessment_template_arns: [n]})[0].each { |template| puts template.name} }\nend",
"def create\n @risk_assessment = RiskAssessment.new(risk_assessment_params)\n\n respond_to do |format|\n if @risk_assessment.save\n format.html { redirect_to @risk_assessment, notice: 'Risk assessment was successfully created.' }\n format.json { render :show, status: :created, location: @risk_assessment }\n else\n format.html { render :new }\n format.json { render json: @risk_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @fullassessments = Fullassessment.all\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /mx_assessments/1 PATCH/PUT /mx_assessments/1.json | def update
respond_to do |format|
if @mx_assessment.update(mx_assessment_params)
format.html { redirect_to @mx_assessment, notice: 'Mx assessment was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: 'edit' }
format.json { render json: @mx_assessment.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @assessment = Assessment.find(params[:id])\n\n respond_to do |format|\n if @assessment.update_attributes(params[:assessment])\n format.html { redirect_to :back }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @assessment = Assessment.find(params[:id])\n\n respond_to do |format|\n if @assessment.update_attributes(params[:assessment])\n format.html { redirect_to @assessment, notice: 'Assessment was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @assessment.update(assessment_params)\n format.html { redirect_to @assessment, notice: 'Assessment was successfully updated.' }\n format.json { render :show, status: :ok, location: @assessment }\n else\n format.html { render :edit }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @assessment = Assessment.find(params[:id])\n \n respond_to do |format|\n if @assessment.update_attributes(params[:assessment])\n format.html { redirect_to @assessment, notice: 'Assessment was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @skill_assessment.update(skill_assessment_params)\n format.html { redirect_to @skill_assessment, notice: 'Skill assessment was successfully updated.' }\n format.json { render :show, status: :ok, location: @skill_assessment }\n else\n format.html { render :edit }\n format.json { render json: @skill_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @exam_set.update(exam_set_params)\n format.html { redirect_to @exam_set, notice: 'Exam set was successfully updated.' }\n format.json { render :show, status: :ok, location: @exam_set }\n else\n format.html { render :edit }\n format.json { render json: @exam_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @calculated_data_assessment = CalculatedDataAssessment.find(params[:id])\n\n respond_to do |format|\n if @calculated_data_assessment.update_attributes(params[:calculated_data_assessment])\n format.html { redirect_to @calculated_data_assessment, notice: 'Calculated data assessment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @calculated_data_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @calculated_data_learner_assessment = CalculatedDataLearnerAssessment.find(params[:id])\n\n respond_to do |format|\n if @calculated_data_learner_assessment.update_attributes(params[:calculated_data_learner_assessment])\n format.html { redirect_to @calculated_data_learner_assessment, notice: 'Calculated data learner assessment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @calculated_data_learner_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @exam = Exam.find(params[:id])\n\n respond_to do |format|\n if @exam.update_attributes(params[:exam])\n format.html { redirect_to exams_path, notice: 'Exam was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @exam.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @myspec = Myspec.find(params[:id])\n\n respond_to do |format|\n if @myspec.update_attributes(params[:myspec])\n format.html { redirect_to @myspec, notice: 'Myspec was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @myspec.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @exam = Exam.find(params[:id])\n\n respond_to do |format|\n if @exam.update_attributes(params[:exam])\n format.html { redirect_to @exam, notice: 'Exam was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @exam.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @curriculum_assessment = CurriculumAssessment.find(params[:id])\n\n respond_to do |format|\n if @curriculum_assessment.update_attributes(params[:curriculum_assessment])\n format.html { redirect_to @curriculum_assessment, notice: 'Curriculum assessment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @curriculum_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @exam_question = ExamQuestion.find(params[:id])\n\n respond_to do |format|\n if @exam_question.update_attributes(params[:exam_question])\n format.html { redirect_to @exam_question, notice: 'Exam question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @exam_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @test_subject_evaluation = @test_subject.test_subject_evaluations.find(params[:id])\n\n #Check if symptoms and past_medical is empty\n if !params.has_key?(:symptoms)\n @test_subject_evaluation.symptoms = []\n end\n if !params.has_key?(:past_medical)\n @test_subject_evaluation.past_medical = []\n end\n\n respond_to do |format|\n if @test_subject_evaluation.update_attributes(params[:test_subject_evaluation])\n flash[:notice] = \"#{TestSubject.title} Evaluation was successfully updated.\"\n format.html { redirect_to(test_subject_test_subject_evaluation_url(@test_subject, @test_subject_evaluation)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @test_subject_evaluation.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @admission_exam.update(admission_exam_params)\n format.html { redirect_to @admission_exam, notice: 'Admission exam was successfully updated.' }\n format.json { render :show, status: :ok, location: @admission_exam }\n else\n format.html { render :edit }\n format.json { render json: @admission_exam.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @api_v1_outcome.update(api_v1_outcome_params)\n format.html { redirect_to @api_v1_outcome, notice: 'Outcome was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_outcome }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_outcome.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @risk_assessment.update(risk_assessment_params)\n format.html { redirect_to @risk_assessment, notice: 'Risk assessment was successfully updated.' }\n format.json { render :show, status: :ok, location: @risk_assessment }\n else\n format.html { render :edit }\n format.json { render json: @risk_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @value_assessment.update(value_assessment_params)\n format.html { redirect_to @value_assessment, notice: 'Value assessment was successfully updated.' }\n format.json { render :show, status: :ok, location: @value_assessment }\n else\n format.html { render :edit }\n format.json { render json: @value_assessment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @question_set1 = QuestionSet1.find(params[:id])\n\n respond_to do |format|\n if @question_set1.update_attributes(params[:question_set1])\n format.html { redirect_to @question_set1, notice: 'Question set1 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @question_set1.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
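The mx_assessments create and update actions above both call an mx_assessment_params helper that is not included in these rows. A minimal sketch under the usual strong-parameters convention, with placeholder attribute names:

class MxAssessmentsController < ApplicationController
  private

  # Strong-parameters helper assumed by create and update; :score and :notes are illustrative.
  def mx_assessment_params
    params.require(:mx_assessment).permit(:score, :notes)
  end
end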
DELETE /mx_assessments/1 DELETE /mx_assessments/1.json | def destroy
@mx_assessment.destroy
respond_to do |format|
format.html { redirect_to mx_assessments_url }
format.json { head :no_content }
end
end | [
"def destroy\n @assessment.destroy\n respond_to do |format|\n format.html { redirect_to assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @assessment = Assessment.find(params[:id])\n @assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @calculated_data_learner_assessment = CalculatedDataLearnerAssessment.find(params[:id])\n @calculated_data_learner_assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to calculated_data_learner_assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @assessment = Assessment.find(params[:id])\n @assessment.destroy\n \n respond_to do |format|\n format.html { redirect_to(assessments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @mt_assessment = MtAssessment.find(params[:id])\n @mt_assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to(mt_assessments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @mobility_assessment.destroy\n respond_to do |format|\n format.html { redirect_to app_mobility_assessments_path, notice: 'MobilityAssessment was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @calculated_data_assessment = CalculatedDataAssessment.find(params[:id])\n @calculated_data_assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to calculated_data_assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @assessment.destroy\n respond_to do |format|\n format.html { redirect_to assessments_url, notice: 'Assessment was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @curriculum_assessment = CurriculumAssessment.find(params[:id])\n @curriculum_assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to curriculum_assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @chiropractic_assessment = ChiropracticAssessment.find(params[:id])\n @chiropractic_assessment.destroy\n\n respond_to do |format|\n format.html { redirect_to chiropractic_assessments_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @skill_assessment.destroy\n respond_to do |format|\n format.html { redirect_to skill_assessments_url, notice: 'Skill assessment was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exam = Exam.find(params[:id])\n @exam.delete\n respond_to do |format|\n format.html { redirect_to exams_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @my_exam = MyExam.find(params[:id])\n @my_exam.destroy\n\n respond_to do |format|\n format.html { redirect_to my_exams_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exam_level = ExamLevel.find(params[:id])\n @exam_level.destroy\n\n respond_to do |format|\n format.html { redirect_to exam_levels_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exam_set.destroy\n respond_to do |format|\n format.html { redirect_to exam_sets_url, notice: 'Exam set was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @last_exam = LastExam.find(params[:id])\n @last_exam.destroy\n\n respond_to do |format|\n format.html { redirect_to last_exams_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @pkl_assessment.destroy\n respond_to do |format|\n flash[:notice] = \"Record aspek penilaian pkl berhasil dihapus, #{undo_link(@pkl_assessment)}\"\n format.html { redirect_to pkl_assessments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @pain_assessment.destroy\n respond_to do |format|\n format.html { redirect_to app_pain_assessments_path, notice: 'PainAssessment was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @assessment_rule = AssessmentRule.find(params[:id])\n @assessment_rule.destroy\n\n respond_to do |format|\n format.html { redirect_to assessment_rules_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Adds a new plane to the game by adding it to the list of planes | def add_plane()
planes << Plane.new(
gen_location,
0,
width,
height,
gen_speed(),
true
)
end | [
"def add_plane(aPlane, recache = true)\n self.clip_planes << aPlane\n recache_visible_atoms if recache\n aPlane\n end",
"def add_section_plane(plane)\n end",
"def add new_planet\n @planets << new_planet\n end",
"def add_planet (planet)\n @planets << planet\n end",
"def setup_add_plane\n return TSBS.error(@acts[0], 3, @used_sequence) if @acts.size < 3\n file = @acts[1]\n sox = @acts[2] # Scroll X\n soy = @acts[3] # Scroll Y \n z = (@acts[4] ? 400 : 4)\n dur = @acts[5] || 2\n opac = @acts[6] || 255\n get_spriteset.battle_plane.set(file,sox,soy,z,dur,opac)\n end",
"def create\n redirect_if_not_logged_in\n\n @plane = Plane.new(plane_params)\n\n respond_to do |format|\n if @plane.save\n # format.html { redirect_to @plane, notice: 'Plane was successfully created.' }\n format.html { redirect_to '/planes' }\n format.json { render :show, status: :created, location: @plane }\n else\n format.html { render :new }\n format.json { render json: @plane.errors, status: :unprocessable_entity }\n end\n end\n end",
"def land(plane)\n raise 'Weather not suitable for landing' if weather? == false\n raise 'Airport full, no space to land' if full?\n raise 'Plane has already landed at airport' if landed_planes.include?(plane)\n\n @landed_planes.append(plane)\n puts \"#{plane} has landed\"\n end",
"def set_plane(plane)\n end",
"def create\n @plane = Plane.new(params[:plane])\n\n respond_to do |format|\n if @plane.save\n format.html { redirect_to @plane, notice: 'Plane was successfully created.' }\n format.json { render json: @plane, status: :created, location: @plane }\n else\n format.html { render action: \"new\" }\n format.json { render json: @plane.errors, status: :unprocessable_entity }\n end\n end\n end",
"def planet_add(planet)\n @planets.push(planet)\n return @planets\n end",
"def add_planet(new_planet)\n add_new_planet(new_planet) if new_planet.class == Planet\n end",
"def add_new_planet(new_planet)\n @planets.push(new_planet) if !has_planet?(new_planet.name)\n end",
"def create\n # raise params.inspect\n plane_params = params.require(:plane).permit(:name, :design, :description)\n plane = Plane.create(plane_params)\n redirect_to \"/planes/#{plane.id}\"\n end",
"def land(plane)\n fail \"the airport is full\" if full?\n fail \"plane is already grounded\" if plane.where_is_it?\n fail \"cannot land in a storm\" if bad_weather?\n plane.ground?\n @planes << plane\n end",
"def landing (plane)\n\t\tfail 'This plane has already landed' if plane.flying? == false\n\t\tfail 'The airport is full. No landings are possible.' if full?\n\t\tplane = plane.landed?\n\t\t@planes << plane\n\t\tputs 'Your flight has landed'\n\n\tend",
"def draw_plane(plane, lineLoop, pickid)\n\n glPushName(pickid | PICK_ID_PLANE) if self.picking\n \n # if is_picked?(:plane, pickid)\n # Material.black.apply\n # glLineWidth(3.0)\n # glEdgeFlag(GL_TRUE)\n # end\n glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)\n\n Material.new(0.9, 0.9, 0.0, 0.5).apply\n glBegin(GL_TRIANGLE_FAN)\n glNormal3f(plane.a, plane.b, plane.c)\n lineLoop.each{|p| \n glVertex3f(p[0], p[1], p[2])\n }\n glEnd()\n \n glEdgeFlag(GL_FALSE)\n glPopName() if picking\n \n if (is_picked?(:plane, pickid))\n Material.black.apply\n glLineWidth(3.0)\n glBegin(GL_LINE_LOOP)\n lineLoop.each{|p|\n glVertex3f(p[0], p[1], p[2])\n }\n glEnd()\n glLineWidth(1.0)\n end\n \n end",
"def add_planet_list (planet_list)\n @planets += planet_list\n end",
"def acquire_new_plane(aircrafttype)\n home_airport = self.home_airport_game\n param_array = [aircrafttype,self,home_airport]\n new_plane = PlaneCreator.new(param_array).manufacture\n finalize_acquisition_and_save(new_plane)\n end",
"def add_planets(planet_list)\n @planets += planet_list\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
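add_plane above constructs a Plane from six positional arguments: a generated location, 0, width, height, a generated speed, and true. The Plane class itself is not shown, so the following is only a sketch of an initializer compatible with that call, assuming the arguments are starting x, starting y, dimensions, speed, and an active flag:

class Plane
  attr_reader :x, :y, :width, :height, :speed, :active

  # Argument order mirrors the Plane.new call in add_plane.
  def initialize(x, y, width, height, speed, active)
    @x = x
    @y = y
    @width = width
    @height = height
    @speed = speed
    @active = active
  end

  # One possible per-tick update: drift horizontally by the plane's speed.
  def move
    @x += speed
  end
end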
Generates a random speed for a plane | def gen_speed
rand + 15.0
end | [
"def random_turn deg\n rand(deg) - (deg/2)\n end",
"def change_speed\n @statistics[:speed_changes] += 1\n # pick a coordination to change speed for\n @changed_coord = @coordinations.rand\n @previous_speed = @current_speed[@changed_coord]\n @current_speed[@changed_coord] = @previous_speed + SPEED_CHANGE_OPTIONS.rand\n delta_eval(@changed_coord)\n end",
"def generate_target_rotation\n random(-(TWO_PI*20), (TWO_PI*20))\n end",
"def random\n CYCLE * rand\nend",
"def random_angle\n 360 * rand\n end",
"def travel_time_random()\n @error.check_initialized(self)\n total_time{|cell, nextcell| get_random_travel_time(cell, nextcell)}\n end",
"def random_spot(radius)\n angle = rand() * Math::PI * 2\n vec = Rubygame::Ftor.new_am(angle, radius)\n return vec\nend",
"def generate\n #create random number generator\n rng = Random.new\n @points = Array.new\n\n #num_points times, make random value in ranges \n @num_points.times do\n @points << [rng.rand(@x_lims), rng.rand(@y_lims)]\n end\n end",
"def random(min,max) (max - min) * rand + min end",
"def generer_planetes( n )\n (0...n).map do |i|\n Planete.new( \"#{i}\", 0, rand(10E+15), rand_vector, rand_vector )\n end\nend",
"def get_random_angle\n rand * (2 * Math::PI)\n end",
"def swing(base_val, num_steps, swing_step)\n base_val + (rand(num_steps) * swing_step)\nend",
"def random_in_range(property_id)\n\t\tcase property_id\n\t\t\twhen 0 #velocity scale\n\t\t\t\treturn rand / 10.0\n\t\t\twhen 1 #max speed\n\t\t\t\treturn rand(9) + 2\n\t\t\twhen 2 #normal speed\n\t\t\t\treturn rand(9) + 2\n\t\t\twhen 3 #neighborhood radius\n\t\t\t\treturn rand(91) + 10\n\t\t\twhen 4 #separation weight\n\t\t\t\treturn rand(101)\n\t\t\twhen 5 #alignment weight\n\t\t\t\treturn rand\n\t\t\twhen 6 #cohesion weight\n\t\t\t\treturn rand\n\t\t\twhen 7 #pacekeeping weight\n\t\t\t\treturn rand\n\t\t\twhen 8 #random motion probability\n\t\t\t\treturn rand / 2.0\n\t\tend\n\tend",
"def random_restart\n @controllers.each{|sc| @current_offset[sc] = rand(@current_cycle_time || sc.cycle_time)}\n if @change_probability[:speed].nonzero?\n @coordinations.each do |coord|\n @current_speed[coord] = coord.default_speed + SPEED_CHANGE_OPTIONS_WITH_ZERO.rand\n end\n end\n full_evaluation # all coordinations change, must do full reevaluation\n end",
"def deplacerEnnemi(hitboxJoueur) \n directionX = 0\n directionY = 0\n \n random = rand(3)\n puts random\n case random\n when 0\n directionX = (rand(2)==0)? 1000 : -1000\n when 1\n directionY = (rand(2)==0)? 1000 : -1000\n when 2\n directionX = (rand(2)==0)? 1000 : -1000\n directionY = (rand(2)==0)? 1000 : -1000\n end\n \n \n deplacer(@position[\"x\"] + directionX,@position[\"y\"] + directionY) \n \n return Time.now\n end",
"def _randomize(delay)\n (delay * (1.0 + 0.5 * rand)).round\n end",
"def random_modifier\n rand 0..25\n end",
"def generate_random_around(p, r)\n x, y = p\n rr = uniform(r, 2*r)\n rt = uniform(0, 2*Math::PI)\n \n return rr * Math.sin(rt) + x, rr * Math.cos(rt) + y\n end",
"def price_delta(magnitude=20)\n rand(magnitude) * 0.25\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
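In gen_speed above, rand with no argument returns a float in [0, 1), so every generated speed falls in the narrow 15.0 to 16.0 band. If a wider spread were wanted, Ruby's range form of rand is the usual tool; the 15.0..25.0 bounds below are only an example, not taken from the game:

# rand(range) draws uniformly from the range, so this yields speeds between 15.0 and 25.0.
def gen_speed
  rand(15.0..25.0)
end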
Checks whether a plane can be placed at the given x coordinate without colliding horizontally with any existing plane | def can_place(position)
planes.none? do |plane|
plane.wings.in_horizontal_range(BoundingBox.new(position, position + width, 0, 0))
end
end | [
"def intersect_plane?(plane)\n not parallel_to_plane? plane\n end",
"def point_is_on_plane?(point)\n (@x_coefficient * point.x + @y_coefficient * point.y + @z_coefficient * point.z + @free_coefficient).equal?(0)\n end",
"def is_on_plane?(plane)\n plane.substitute(self) == 0\n end",
"def intersecting?(other_plane)\n check_x = @x_coefficient != other_plane.x_coefficient\n check_y = @y_coefficient != other_plane.y_coefficient\n check_z = @z_coefficient != other_plane.z_coefficient\n check_x || check_y || check_z\n end",
"def can_place? x, y\n x >= 0 and x < @max_x and y >= 0 and y < @max_y and !object_at?(x, y)\n end",
"def parallel_to_plane?(plane)\n plane.find_normal_vector.scalar_product(vector) == 0\n end",
"def intersect?(plane)\n not parallel? plane\n end",
"def overlaps_in_x? (other)\n (self.leftmost_plane >= other.leftmost_plane && self.rightmost_plane <= other.rightmost_plane) || \\\n (self.leftmost_plane < other.leftmost_plane && self.rightmost_plane > other.rightmost_plane) || \\\n (self.leftmost_plane > other.leftmost_plane && self.leftmost_plane < other.rightmost_plane) || \\\n (self.rightmost_plane > other.leftmost_plane && self.rightmost_plane < other.rightmost_plane)\n end",
"def coordinate_valid?(x, y)\n (x >= 0) && (y >= 0) && (x < @width) && (y < @height)\n end",
"def valid?\n lower_left_point.x < upper_right_point.x && lower_left_point.y < upper_right_point.y\n end",
"def direction_valid?(x, y)\n # get pixel movement rate\n pix = $BlizzABS.pixel\n # checks if coordinates are valid\n return (x >= 0 && x < width * pix && y >= 0 && y < height * pix)\n end",
"def direction_valid?(x, y)\n # get pixel movement rate\n pix = BlizzABS.pixel\n # checks if coordinates are valid\n return (x >= 0 && x < width * pix && y >= 0 && y < height * pix)\n end",
"def self_valid?(x, y)\n # get pixel movement rate\n pix = $BlizzABS.pixel\n # checks if coordinates are valid\n return (x >= 0 && x < width * pix - pix + 1 && y >= 0 && y < height * pix - pix + 1)\n end",
"def placement_is_valid?(x, y, direction)\n coordinates_are_valid?(x, y) and direction_is_valid?(direction)\n end",
"def self_valid?(x, y)\n # get pixel movement rate\n pix = BlizzABS.pixel\n # checks if coordinates are valid\n return (x >= 0 && x < width*pix-pix+1 && y >= 0 && y < height*pix-pix+1)\n end",
"def contains_point(x,y,z)\n behind = true\n @planes.each{|p|\n behind = (0 >= p.distance_to_point(x,y,z))\n break if not behind\n }\n return behind\n end",
"def on_plane?(plane)\n end",
"def valid_board_coordinates?(x, y)\n x >= 0 && y >= 0 && x < 3 && y < 3\n end",
"def check_grid_coordinates(x, y)\n (x >= 0) && (x < @height) && (y >= 0) && (y < @width)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
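can_place above depends on BoundingBox.new(position, position + width, 0, 0) and on an in_horizontal_range check against each plane's wings, neither of which is shown. A minimal sketch of the horizontal-overlap test that usage implies, assuming the constructor arguments are left, right, top, and bottom:

class BoundingBox
  attr_reader :left, :right, :top, :bottom

  def initialize(left, right, top, bottom)
    @left = left
    @right = right
    @top = top
    @bottom = bottom
  end

  # True when the two boxes overlap on the x axis; the y axis is ignored,
  # which matches how can_place only rules out horizontal collisions.
  def in_horizontal_range(other)
    left <= other.right && other.left <= right
  end
end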
Checks whether the given line type was parsed from the log file for this request | def has_line_type?(line_type)
return true if @lines.length == 1 && @lines[0][:line_type] == line_type.to_sym
@lines.detect { |l| l[:line_type] == line_type.to_sym }
end | [
"def has_line_type?(line_type)\n return true if @lines.length == 1 && @lines[0][:line_type] == line_type.to_sym\n @lines.detect { |l| l[:line_type] == line_type.to_sym }\n end",
"def known_line_type?(line)\n line_key = segment_peek(line)\n @segment_keys.include?(line_key)\n end",
"def check_line(file, line)\n prints << Danger::FileLog.new(file, line.number) if @print && !line.content.match(PRINT_REGEXP).nil?\n nslogs << Danger::FileLog.new(file, line.number) if @nslog && !line.content.match(NSLOG_REGEXP).nil?\n end",
"def line?\n 'line' == self.type\n end",
"def detect_type( line )\n #loop through each format type, trying to split the line data with its delimiter\n #continue until DELIMITER_THRESHOLD is met\n FORMAT_TYPE.index { |x| line.split(x['del']).length >= DELIMITER_THRESHOLD }\n end",
"def potential_line?(filename, lineno); end",
"def valid?(line)\n true\n end",
"def valid_line?(line)\n if line.nil? || line == \"\" || line == \"\\r\\n\" || line.match(/\\[Illustration:/) || line.match(/Appendix/i)\n false\n else\n true\n end\n end",
"def identify_line_type(line)\n blank_regex = /\\A\\s*\\z/\n section_regex = /\\A\\[.+\\]/\n # note: key_regex assumes keys can be multi-word, e.g. \"all key 1 : val1\"\n key_regex = /\\A\\S.*:/\n\n case line\n when blank_regex then :blank\n when section_regex then :section\n when key_regex then :key\n else :value\n end\n end",
"def header_line?(hash)\n file_format.line_definitions[hash[:line_type]].header\n end",
"def hasline?(line)\n\t\tlines.include?(line)\n\tend",
"def check(logline)\n return false if @server and @server != logline[:server]\n return false if @service and @service != logline[:service]\n return false if @type and @type != logline[:type]\n return false if @time_from and logline[:date] < @time_from\n return false if @time_to and logline[:date] > @time_to\n if @except\n @except.each do |key,value|\n return false if logline[key] == value\n end\n end\n @user_params.each_pair do |key,value|\n return false if logline[key] != value\n end\n return true\n end",
"def parse_log_line(log_line)\n end",
"def step_def_line?(line)\n !!(sanitize_line(line) =~ STEP_DEF_LINE_PATTERN)\n end",
"def add_line(line)\n unless @parsing\n if line =~ /^(---|\\+\\+\\+) (.*)$/\n self.file_name = $2\n elsif line =~ /^@@ (\\+|\\-)(\\d+)(,\\d+)? (\\+|\\-)(\\d+)(,\\d+)? @@/\n @line_num_l = $2.to_i\n @line_num_r = $5.to_i\n @parsing = true\n end\n else\n if %r{^[^\\+\\-\\s@\\\\]}.match?(line)\n @parsing = false\n return false\n elsif line =~ /^@@ (\\+|\\-)(\\d+)(,\\d+)? (\\+|\\-)(\\d+)(,\\d+)? @@/\n @line_num_l = $2.to_i\n @line_num_r = $5.to_i\n else\n parse_line(line, @type)\n end\n end\n return true\n end",
"def check_for_the_message (line) \n File.readlines(get_system_log).grep(line).any? \n end",
"def line_matches_criteria(line_to_test, log_level_match, class_name_match,\n method_name_match, log_message_match)\n match_data = LOG_LINE_REGEXP.match(line_to_test)\n # if the match_data value is nil, then the parsing failed and there is no match\n # with this line, so return false\n return false unless match_data\n # check to see if the current line matches our criteria (if one of the criteria\n # is nil, anything is assumed to match that criteria)\n if (!log_level_match || log_level_match.match(match_data[2])) &&\n (!class_name_match || class_name_match.match(match_data[3])) &&\n (!method_name_match || method_name_match.match(match_data[4])) &&\n (!log_message_match || log_message_match.match(match_data[5]))\n return true\n end\n false\n end",
"def process_line(line)\n if @state.include?(:data)\n process_data_line(line)\n elsif @state.include?(:auth_plain_incomplete)\n process_auth_plain_line(line)\n elsif @state.include?(:auth_login_incomplete)\n process_auth_login_line(line)\n else\n process_command_line(line)\n end\n !@state.include?(:quit) # return true unless QUIT is given\n end",
"def class_line?(line)\n line.include?('class') && !line.include?('func') && !line.include?('//') && !line.include?('protocol') && !line.include?('\"')\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if this request does not yet contain any parsed lines. This should only occur during parsing. An empty request should never be sent to the aggregators. | def empty?
@lines.length == 0
end | [
"def is_empty()\n @requests.empty?\n end",
"def empty?\n @source_lines.empty?\n end",
"def empty?\n line.empty?\n end",
"def valid?\n !@requests.empty?\n end",
"def has_lines?\n !@lines.empty?\n end",
"def empty?\n load # make sure we have determined the number of fetched records\n\n @number_of_records.zero?\n end",
"def show_requests?\n !@requests.empty?\n end",
"def empty?\n @headers.empty?\n end",
"def empty?\n response.items.empty?\n rescue StandardError\n false\n end",
"def request_valid?(data)\n ln = 0\n data.each_line {|l|\n ln += 1\n }\n if ln > 1\n return true\n else\n puts \"\\e[1;31mCatch GET's without header information / Other \\e[0m\\ \"\n return false # Due to single GET Requests to no headers \n end\n end",
"def is_requesting?\n @lock.synchronize { !@batch.empty? }\n end",
"def empty?\n @statements.empty?\n end",
"def response_incomplete?\n parser.response_incomplete?\n end",
"def empty?\n @hits.length == 0\n end",
"def empty?\n hits.size == 0\n end",
"def empty?\n global? ? new_section? : start_line == end_line\n end",
"def empty?\n content.stream.filtered_stream == (@tare_content_stream ||= InitialPageContent) && document.page_number > 0\n end",
"def response_incomplete?\n any_is?(parsers, :response_incomplete?)\n end",
"def all_chunks_received?\n start_chunk == nil\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks whether this request is completed. A completed request contains both a parsed header line and a parsed footer line. Note that calling this function in single-line mode will always return false. | def completed?
header_found, footer_found = false, false
@lines.each do |line|
line_def = file_format.line_definitions[line[:line_type]]
header_found = true if line_def.header
footer_found = true if line_def.footer
end
header_found && footer_found
end | [
"def completed?\n header_found, footer_found = false, false\n @lines.each do |line|\n line_def = file_format.line_definitions[line[:line_type]]\n header_found = true if line_def.header\n footer_found = true if line_def.footer\n end\n header_found && footer_found\n end",
"def complete?\n complete = body.length >= header.body_length\n Logger.debug \"response complete? #{complete} (#{body.length}/#{header.body_length})\"\n complete\n end",
"def completed?\n\n return true if self.state == ParseState::Completed\n\n # If the parser state is processing the body and there are an\n # undetermined number of bytes left to read, we just need to say that\n # things are completed as it's hard to tell whether or not they really\n # are.\n if (self.state == ParseState::ProcessingBody and self.body_bytes_left < 0)\n return true\n end\n\n false\n end",
"def finished?\n\t\t\t\t@finished && @body.length >= @headers['CONTENT_LENGTH']\n\t\t\tend",
"def complete?\n completed_at && response\n end",
"def done_headers?\n [:body_identity, :body_chunked, :body_chunked_tail, :done].include?(@state)\n end",
"def capture_complete?\n if( (@headers and not @headers.capture_complete?) or\n (@body and not @body.capture_complete?) )\n return false\n else\n true\n end\n end",
"def header?\n @header_complete\n end",
"def response_incomplete?\n parser.response_incomplete?\n end",
"def multi_response_completed?\n @multi_buffer.nil?\n end",
"def completed_with_errors?\n\t\t\treturn request_status == REQUEST_STATUS['completed_with_errors']\n\t\tend",
"def finished?\n @mutex.synchronize do\n return check_finished_spans\n end\n end",
"def complete?\n if received?(REQUEST_SIZE)\n if put?\n if record_count == 1\n true\n else\n received?(REQUEST_SIZE + (RECORD_SIZE * record_count))\n end\n else\n true\n end\n else\n false\n end\n end",
"def response_incomplete?\n any_is?(parsers, :response_incomplete?)\n end",
"def finished?\n @status.downcase.eql? 'finished'\n end",
"def complete?\n params[\"x_response_code\"] == '1'\n end",
"def done_body?\n done?\n end",
"def complete?\n params[\"x_response_code\"] == '1'\n end",
"def response_incomplete?\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
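completed? above reports true only once the parsed lines include one whose definition is flagged as a header and one flagged as a footer. A small self-contained illustration of that check, with stand-in line definitions built from OpenStruct (none of these names come from the rows above) and the scan rewritten as a free function:

require 'ostruct'

# :started acts as the header line and :completed as the footer line.
LINE_DEFINITIONS = {
  started:   OpenStruct.new(header: true,  footer: false),
  failed:    OpenStruct.new(header: false, footer: false),
  completed: OpenStruct.new(header: false, footer: true)
}.freeze

def completed?(lines)
  header_found = lines.any? { |l| LINE_DEFINITIONS[l[:line_type]].header }
  footer_found = lines.any? { |l| LINE_DEFINITIONS[l[:line_type]].footer }
  header_found && footer_found
end

completed?([{ line_type: :started }])                            # => false
completed?([{ line_type: :started }, { line_type: :completed }]) # => true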
Returns the first timestamp encountered in a request. | def timestamp
first(:timestamp)
end | [
"def timestamp\n @request.timestamp\n end",
"def request_time\n @params[:request_timestamp]\n end",
"def get_request_timestamp\n\t\treturn @transport.get_path(\"meta\",\"datetime\")\n\tend",
"def earliest\n solr_response = @controller.repository.search({:fl => @timestamp_field, :sort => @timestamp_field + ' asc', :rows => 1})\n return Time.parse(get_string_from_hash(solr_response.response[\"docs\"].first, @timestamp_field))\n end",
"def request_time\n @request_time ||= Time.now.utc\n end",
"def get_server_timestamp\n request :get_server_timestamp\n end",
"def timestamp\n GDAX.use_server_time ? get('/time', {}, true)[:epoch] : Time.now.to_i.to_s\n end",
"def time_of_first_event\n (r=@revisions.first) && r.timestamp\n end",
"def begin_timestamp\n @started ? @entries.first.timestamp : nil\n end",
"def timestamp\n @timestamp ||= Spontaneous::Field.timestamp\n end",
"def first_seen_date_time\n return @first_seen_date_time\n end",
"def last_timestamp\n lines.last.first\n rescue\n 0\n end",
"def request_date_time\n return @request_date_time\n end",
"def request_oauth_timestamp\n @oauth_signature_validator && @oauth_signature_validator.request.oauth_timestamp\n end",
"def get_earliest_timestamp\n @timestamp = (@raw_image_files.sort_by { |i| i.timestamp }).first.timestamp\n end",
"def earliest_restorable_time\n data[:earliest_restorable_time]\n end",
"def retrieve_start_timestamp(experiment)\n if timestamp = get(experiment.handle.to_s, 'started_at')\n Time.parse(timestamp)\n end\n end",
"def earliest_time\n Time.now.to_i - @sec\n end",
"def rates_timestamp\n raw = raw_rates_careful\n raw.first.key?('time') ? Time.parse(raw.first['time']) : Time.at(0)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
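timestamp above delegates to first(:timestamp), a helper not shown in these rows. Given the @lines structure visible in has_line_type?, a plausible sketch (an assumption, not the original implementation) scans for the first parsed line that captured the requested field:

# Returns the value of +field+ from the first parsed line that contains it, or nil.
def first(field)
  line = @lines.detect { |l| l.key?(field) }
  line && line[field]
end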
Get 5 random queries across the given object types | def get_random_quick_queries(types)
allTypes = types.collect {|type| type.get_quick_queries}.flatten
prng = Random.new
random_qq = allTypes.sort {|item1, item2| prng.rand(-1 .. 1) }.slice(0, 5)
random_qq.collect {|qq| render_quick_query(qq)}
end | [
"def random_query\n # Use a fake query\n # TODO: better random queries\n 'query ' + (rand*5000).to_i.to_s\nend",
"def find_mru_entity_types(entity_type, login, amount = 10, already_chosen_ids=[])\n klass = Inflector.constantize(entity_type.entity_type.to_s.camelize)\n \n #This is the case where we have no previously chosen objects,e.g. expressions of a manifestation\n if already_chosen_ids.blank?\n @mru_frbr_objects = klass.find(:all, :limit => amount, \n #Not useful due to batch upload user :conditions => [\"updated_by = ?\", login.login_id],\n :order => 'updated_at desc'\n )\n \n #we need to append a not in clause\n else\n thing_id = entity_type.entity_type + \"_id\"\n @mru_frbr_objects = klass.find(:all, :limit => amount, \n :conditions => [\"#{thing_id} not in (?)\", already_chosen_ids],\n :order => 'updated_at desc'\n )\n end\n \n end",
"def generate_objects!\n if custom_query?\n results = eval(custom_query)\n else\n objects = any? ? [] : starting_objects.clone\n objects_to_delete = []\n puts \"Evaluating #{starting_objects.size} objects and #{conditions.size} condition(s)...\"\n for obj in starting_objects\n # print \"\\n #{obj.id}: \"\n for condition in conditions\n # print \" [C#{condition.id}:\"\n if condition.passes?(obj)\n # print \"PASS]\"\n objects << obj and break if any?\n else\n # print \"FAIL]\"\n objects_to_delete << obj and break if all?\n end\n end\n end\n results = objects - objects_to_delete\n unless result_variant.blank?\n results = results.collect{|r| r.instance_eval(result_variant)}.flatten\n end\n end\n update_object_ids(results)\n update_attribute(:objects_generated_at, Time.now)\n update_attribute(:objects_count, self.object_ids.values.flatten.uniq.size)\n results\n end",
"def get_all_sobjects(type)\n case type\n when 'Account'\n records = @client.materialize(type).query(\"Agents__c != ''\")\n when 'ccrmbasic__Email__c'\n records = @client.materialize(type).query(\"ccrmbasic__Contact__c != ''\")\n else\n records = @client.materialize(type).all\n end\n sobjects = records.dup.to_a\n while records.next_page?\n sobjects += records.next_page\n records = records.next_page\n end\n sobjects\n end",
"def object_query_generator(data)\n # query header\n query = \"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX owl: <http://www.w3.org/2002/07/owl#>\n PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n PREFIX : <http://www.semanticweb.org/simon/ontologies/2016/1/medienprojekt#>\n \n SELECT *\n {\"\n \n # combine the parameters with the query\n data.each {\n |hash| if hash.has_key?(:predicate)\n # its possible to have predicate and object parameters \n query = query << \"?name \" << hash[:predicate] << \" :\" << hash[:object] << \" .\\n\"\n else\n # or just an object parameter\n query = query << \"?name ?predicate :\" << hash[:object] << \" .\\n\"\n end\n }\n\n # query footer\n query = query << \" }\\nLIMIT 400\"\n # return the resulting query\n return query\n end",
"def random_hugs\n huglist_sample = []\n 5.times { huglist_sample.push(Huglist.all.sample[:id]) }\n return huglist_sample.uniq\nend",
"def select_bots_for_roster\n rosters\n .includes(:bot)\n .select('DISTINCT ON(rosters.total_stats) rosters.*')\n .order(total_stats: :desc)\n .sample(15)\n end",
"def query_methods(result_type); end",
"def get_all_objects(class_name, count)\n per_page = 1000\n objects = []\n times = [(count/per_page.to_f).ceil, 10].min\n 0.upto(times) do |offset|\n query = Parse::Query.new(class_name)\n query.limit = per_page\n query.skip = offset * per_page\n objects << query.get\n end\n objects.flatten(1)\n end",
"def related_objects_with_methods(num, methods = {})\n related_objects = []\n 6.times do |time|\n obj_methods = methods.merge(:id => time)\n methods.each do |meth, retval|\n obj_methods[meth] = retval.gsub(/%id%/, obj_methods[:id].to_s) \\\n if retval.respond_to?(:gsub!)\n end\n related_objects << mock_ar_with(obj_methods)\n end\n\n yield related_objects if block_given?\n\n related_objects\n end",
"def search_by_type query, type, limit, offset\n results = []\n\n # Search posts\n if type.to_i == SearchResult::Type::POST.to_i\n post_query = \"%#{query}%\"\n results = Post.includes([:user, :organization]).find(:all, :limit => limit, :conditions => [\"text LIKE ?\", post_query], :order => \"created_at DESC\", :offset => offset)\n results.map!{|p| SearchResult::CreatePostResult(p)}\n end\n\n # Search users\n if type.to_i == SearchResult::Type::USER.to_i\n user_query = \"%#{query}%\"\n results = User.find(:all, :limit => limit, :conditions => [\"name LIKE ?\", user_query], :offset => offset)\n results.map!{|u| SearchResult::CreateUserResult(u)}\n end\n\n # Search organizations\n if type.to_i == SearchResult::Type::ORGANIZATION.to_i\n org_query = \"%#{query}%\"\n results = Organization.find(:all, :limit => limit, :conditions => [\"name LIKE ?\", org_query], :offset => offset)\n results.map!{|o| SearchResult::CreateOrganizationResult(o)}\n end\n\n return results\n end",
"def recommend_new_songs\nselected =Song.all.select do |song|\n song.genre == self.favorite_genre\nend\nselected.sample(5)\n\n end",
"def setup_queries\n\t @queries = case @search_type\n when :query_logs\n Log.to_term(\"Search Type: Query Logs\", \"DEBUG\")\n sql = SQL.new\n \t \tsql.populate(@config) # setup function\n \t sql.to_queries\n\t when :synthetic\n\t Log.to_term(\"Search Type: Syntehtic\", \"DEBUG\")\n\t sql = SQL.new\n\t Log.to_term(\"Populating results\", \"DEBUG\")\n \t \tresults = sql.populate(@config) # setup function\n Log.to_term(\"Done populating\", \"DEBUG\")\n \t \n queries = Queries.new\n \n method = ENV[\"SYNTH_FUNC\"].downcase\n times = (ENV[\"SYNTH_TIMES\"] == nil) ? 1 : ENV[\"SYNTH_TIMES\"].to_i\n Log.to_term(\"SYNTH_FUNC: #{method}\", \"DEBUG\")\n Log.to_term(\"SYNTH_TIMES: #{times}\", \"DEBUG\")\n\n size = sql.queries.size\n Log.to_term(\"Queries size: #{size}\", \"DEBUG\")\n i=1\n sql.queries.each do |query|\n Log.to_term(\"#{i}/#{size}\", \"DEBUG\")\n Log.to_term(\"---- query.id:\\t\\t#{query.id}\", \"DEBUG\")\n Log.to_term(\"---- query.solution:\\t\\t#{query.solution}\", \"DEBUG\")\n Log.to_term(\"---- query.misspelled:\\t\\t#{query.misspelled}\", \"DEBUG\")\n \n s = Synthetic.new(query.id, query.solution, query.misspelled)\n queries << s.to_synthetic(method, times).to_query\n i += 1\n end\n \n\n\t\tLog.stats \"-- #{method} #{times} --\"\n Log.to_term(\"-- #{method} #{times} --\", \"DEBUG\")\n \n queries\n end # case\n end",
"def o_shops \n # Fetch random 70 areas from branches table\n obj = VendorsList.where(:business_type => \"online\").order('rand()').limit(20).map{|i| [i.vendor_name,i.vendor_website]}\n @o_shops = obj.each_slice(10).to_a \n end",
"def random_commentable()\n commentables = []\n Gossip.all.each do |gossip|\n commentables.push(gossip)\n end\n Comment.all.each do |comment|\n commentables.push(comment)\n end\n commentables.sample\nend",
"def generate_five_users()\n for i in 0..5\n u == User.new( {email: \"u#{i}\" + '@example.com', password: 'plainText'})\n end\n end",
"def initialize\n @objects = Array.new\n Kernel::rand(25).times { @objects << Object.new }\n end",
"def get_monsters\n base_url = 'http://www.dnd5eapi.co'\n monster_array = get_api_hash(\"#{base_url}/api/monsters\")\n \n count = 0\n secondary_array = []\n monster_array[\"results\"].first(100).map do |monster|\n count += 1\n if count <= 5\n get_individual_monster(\"#{base_url}#{monster[\"url\"]}\")\n else\n secondary_array << \"#{base_url}#{monster[\"url\"]}\"\n end\n end\n\n secondary_array.sample(15).map do |monster_url|\n get_individual_monster(monster_url)\n end\nend",
"def query_objects(subject, predicate)\n query_string = \"SELECT DISTINCT ?object WHERE { <#{subject}> <#{predicate}> ?object }\"\n execute_with_retries do\n return @client.query(query_string).map { |solution| solution[:object] }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /bags or /bags.json | def index
@bags = Bag.all
end | [
"def tagList()\n http, req = initReq(\"tags/\")\n JSON.parse(http.request(req).body)\nend",
"def all_tags\n request(:get,\"/tags\")\n end",
"def index\n @dags = Dag.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @dags }\n end\n end",
"def index\n @tags = (params[:webapp_id]) ? Webapp.find(params[:webapp_id]).tags : Tag.all\n render json: @tags\n end",
"def retrieve_all_tag_based\n hash, url = request('badges/tags')\n Badges.new hash, url\n end",
"def index\n @btags = Btag.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @btags }\n end\n end",
"def index\n params[:page] ||= 1\n #@tags = Op.tag_counts_on(:tags, :order => \"count desc\").paginate(:page => params[:page], :per_page => 24)\n @tags = Tag.popular.paginate(:page => params[:page], :per_page => 24)\n\n respond_to do |format|\n format.html\n format.json { render json: @tags}\n end\n end",
"def show\n params[:page] ||= 1\n @tag = Tag.find_by_name(params[:id]) || raise_404\n @taggings = @tag.taggings.order(:karma).reverse_order.paginate(:page => params[:page], :per_page => 25)\n\n respond_to do |format|\n format.html\n format.json { render json: @taggings}\n end\n end",
"def index\n @pokebags = Pokebag.all\n end",
"def index\n @hastags = Hastag.all\n end",
"def get_tag_list\n # body = {\n # cmd: \"get_tag_list\"\n # }\n\n end",
"def get_tags_by_url\n url = Url.find_by(id: params[:id])\n tags = url.tags\n render json: {code: 200, tags: tags}\n end",
"def show\n @user = User.friendly.find(params[:username].downcase)\n @bucket = Buket.find(params[:bucket_id])\n @tags = @bucket.tags\n # @bucket.goals.order('created_at DESC')\n end",
"def tagged\n @tags = params[:tags]\n @tags = @tags.split(\"+\").collect { |t| t.split(\"-\").join(\" \") }\n @items = Item.tagged_with(@tags, :any => true)\n\n respond_to do |format|\n format.html # tagged.html.erb\n format.json { render json: @items }\n end\n end",
"def get_urls_by_tag\n if (params[:id] == 'all') \n urls = Url.all.order(created_at: :desc)\n else \n tag = Tag.find_by(id: params[:id])\n urls = tag.urls.order(created_at: :desc)\n end\n render json: {code: 200, urls: urls}\n end",
"def tags\n _get(\"/query/image/tags\") { |json| json }\n end",
"def index\n @tagged_items = TaggedItem.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @tagged_items }\n end\n end",
"def get_brandings\n request :get,\n '/v3/brandings.json'\n end",
"def index\n @brags = Brag.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @brags }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /bags/1 or /bags/1.json | def update
respond_to do |format|
if @bag.update(bag_params)
format.html { redirect_to @bag, notice: "Bag was successfully updated." }
format.json { render :show, status: :ok, location: @bag }
else
format.html { render :edit, status: :unprocessable_entity }
format.json { render json: @bag.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @bag = Bag.find(params[:id])\n\n respond_to do |format|\n if @bag.update_attributes(params[:bag])\n format.html { redirect_to bags_path, notice: 'Bag was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @btag = Btag.find(params[:id])\n\n respond_to do |format|\n if @btag.update_attributes(params[:btag])\n flash[:notice] = 'Btag was successfully updated.'\n format.html { redirect_to(@btag) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @btag.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @tag.update(tag_params)\n render json: @tag\n end",
"def modify_tag tag\n data = {\n \"tag\" => params\n }\n temp = data[\"servers\"]\n data[\"servers\"] = { \"server\" => temp }\n\n json = JSON.generate data\n\n response = put \"tag/#{tag}\", json\n return response unless response.code == 200\n\n body = JSON.parse response.body\n body[\"tag\"]\n end",
"def assign_tags_to_arrays(args = {}) \n id = args['id']\n temp_path = \"/tags.json/arrays/\"\n path = temp_path\nargs.keys.each do |key|\n if (key == \"tagId\")\n args.delete(key)\n path = temp_path.gsub(\"{#{key}}\", id)\n end\nend\n puts \" PATH : #{path}\"\n put(path, args)\nend",
"def update\n respond_to do |format|\n if @climb_tag.update(climb_tag_params)\n format.html { redirect_to @climb_tag, notice: 'Climb tag was successfully updated.' }\n format.json { render :show, status: :ok, location: @climb_tag }\n else\n format.html { render :edit }\n format.json { render json: @climb_tag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def tag\n @scenario = Scenario.find(params[:id]);\n tl = @scenario.tag_list\n tl << Tag.find_by_id(params[:tag]).name;\n @scenario.tag_list = tl\n respond_to do |format|\n if @scenario.update_attributes(@scenario)\n format.json { render :json => { :success => true } }\n else\n format.json { render :json => { :success => false} }\n end\n end\n end",
"def update\n @brand_tag = BrandTag.find(params[:id])\n\n respond_to do |format|\n if @brand_tag.update_attributes(params[:brand_tag])\n format.html { redirect_to @brand_tag, notice: 'Brand tag was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @brand_tag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bagtype = Bagtype.find(params[:id])\n\n respond_to do |format|\n if @bagtype.update_attributes(params[:bagtype])\n format.html { redirect_to @bagtype, notice: 'Bagtype was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bagtype.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n @tag_bundle = TagBundle.find(params[:id])\r\n\r\n respond_to do |format|\r\n if @tag_bundle.update_attributes(params[:tag_bundle])\r\n format.html { redirect_to @tag_bundle, notice: 'Tag bundle was successfully updated.' }\r\n format.json { head :ok }\r\n else\r\n format.html { render action: \"edit\" }\r\n format.json { render json: @tag_bundle.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update(options: {}, **data)\n\n refresh_with(parse(client.put(\"/tags/#{gid}\", body: data, options: options)).first)\n end",
"def update\n @sugar_bag = SugarBag.find(params[:id])\n\n respond_to do |format|\n if @sugar_bag.update_attributes(params[:sugar_bag])\n format.html { redirect_to @sugar_bag, notice: 'Sugar bag was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sugar_bag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @tag = Tag.find_by_permalink!(params[:id])\n @object.tags << @tag unless @object.tags.include?(@tag)\n\n respond_to do |format|\n if @object.save\n format.html { redirect_to(@path, :notice => 'Tagging was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @object.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @egg_basket.update(egg_basket_params)\n format.html { redirect_to @egg_basket, notice: 'Egg basket was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @egg_basket.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @bagh.update(bagh_params)\n format.html { redirect_to @bagh, notice: 'Bagh was successfully updated.' }\n format.json { render :show, status: :ok, location: @bagh }\n else\n format.html { render :edit }\n format.json { render json: @bagh.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @basket = Basket.find(params[:id])\n\n respond_to do |format|\n if @basket.update_attributes(params[:basket])\n format.html { redirect_to baskets_url, notice: 'Basket was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @basket.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bilder = Bilder.find(params[:id])\n\n respond_to do |format|\n if @bilder.update_attributes(params[:bilder])\n format.html { redirect_to @bilder, notice: 'Bilder was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bilder.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @tag = Tag.find(params[:id])\n\n respond_to do |format|\n if @tag.update_attributes(params[:tag])\n format.xml { head :ok }\n format.json { head :ok }\n else\n format.xml { render :xml => @tag.errors, :status => :unprocessable_entity }\n format.json { render :json => @tag.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @barrack = Barrack.find(params[:id])\n\n respond_to do |format|\n if @barrack.update_attributes(params[:barrack])\n format.html { redirect_to @barrack, notice: 'Barrack was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @barrack.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /bags/1 or /bags/1.json | def destroy
@bag.destroy
respond_to do |format|
format.html { redirect_to bags_url, notice: "Bag was successfully destroyed." }
format.json { head :no_content }
end
end | [
"def destroy\n @bag = Bag.find(params[:id])\n @bag.destroy\n\n respond_to do |format|\n format.html { redirect_to bags_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bag = Bag.find(params[:id])\n @bag.destroy\n\n respond_to do |format|\n format.html { redirect_to(bags_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @sugar_bag = SugarBag.find(params[:id])\n @sugar_bag.destroy\n\n respond_to do |format|\n format.html { redirect_to sugar_bags_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ms_bag = Ms::Bag.find(params[:id])\n @ms_bag.destroy\n\n respond_to do |format|\n format.html { redirect_to(ms_bags_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @btag = Btag.find(params[:id])\n @btag.destroy\n\n respond_to do |format|\n format.html { redirect_to(btags_url) }\n format.xml { head :ok }\n end\n end",
"def dropletDeleteByTag(tag)\n http, req = initReq(\"droplets?tag_name=#{tag}\", \"DELETE\")\n http.request(req).body\n tagCleanUp()\nend",
"def destroy\n BuzzTag.where(:buzz_id => params[:buzz_id], :tag_id => params[:tag_id]).first.destroy\n end",
"def destroy\n @feed_item = FeedItem.find(params[:tagging][:feed_item_id])\n @tag = Tag(current_user, params[:tagging][:tag])\n \n current_user.taggings.find_by_feed_item(@feed_item, :all, \n :conditions => { :classifier_tagging => false, :tag_id => @tag }).each(&:destroy) \n\n respond_to :json\n end",
"def delete_tag tag\n delete \"tag/#{tag}\"\n end",
"def destroy\n @garbage_bag.destroy\n respond_to do |format|\n format.html { redirect_to garbage_bags_url, notice: 'Garbage bag was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @msshipbag = Msshipbag.find(params[:id])\n @msshipbag.destroy\n\n respond_to do |format|\n format.html { redirect_to(msshipbags_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @brag = Brag.find(params[:id])\n @brag.destroy\n\n respond_to do |format|\n format.html { redirect_to brags_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bagtype = Bagtype.find(params[:id])\n @bagtype.destroy\n\n respond_to do |format|\n format.html { redirect_to bagtypes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @bloom = Bloom.find(params[:id])\n @bloom.destroy\n\n respond_to do |format|\n format.html { redirect_to blooms_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bloom.destroy\n respond_to do |format|\n format.html { redirect_to blooms_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @climb_tag.destroy\n respond_to do |format|\n format.html { redirect_to climb_tags_url, notice: 'Climb tag was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @brand_tag = BrandTag.find(params[:id])\n @brand_tag.destroy\n\n respond_to do |format|\n format.html { redirect_to brand_tags_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @tag_bundle = TagBundle.find(params[:id])\r\n @tag_bundle.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to tag_bundles_url }\r\n format.json { head :ok }\r\n end\r\n end",
"def tag_delete(id, tag)\n wf_source_id?(id)\n wf_string?(tag)\n api.delete([id, 'tag', tag].uri_concat)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set the `sinatra.commonlogger` variable to `true` in the Rack environment before passing the request to lower middlewares and the app. This ensures that any `::Rack::CommonLogger` instances (as well as all `::Sinatra::CommonLogger` instances) in the same middleware stack will become silent and not log anything. This is required so that a single request is not logged multiple times even in the face of the default Rack middleware stack. | def on_request(env)
env['sinatra.commonlogger'] = true
super
end | [
"def log(env, *args)\n unless env['sinatra.commonlogger'.freeze] &&\n env['rackstash.logger'.freeze].is_a?(::Rackstash::Logger)\n super\n end\n end",
"def shush_rack\n ::Rack::CommonLogger.class_eval do\n def call_with_shushed_logs(env)\n if LogShusher.shush?(env)\n @app.call(env)\n else\n call_without_shushed_logs(env)\n end\n end\n\n alias_method_chain :call, :shushed_logs\n end\n end",
"def apply_logger!(rack_env)\n logger = rack_env['captivity.logger']\n ActiveRecord::Base.logger = logger if logger\n end",
"def call env\n env[\"rack.logger\"] = logger\n trace_id = get_trace_id env\n log_name = get_log_name env\n logger.add_request_info trace_id: trace_id, log_name: log_name,\n env: env\n begin\n @app.call env\n ensure\n logger.delete_request_info\n end\n end",
"def install\n Rack::CommonLogger.class_eval do\n\n alias_method :original_log, :log\n\n def log(env, status, header, began_at)\n logger = @logger || env['rack.errors']\n\n blob = {\n :length => header['Content-Length'] || 0,\n :code => status.to_s[0 .. 3],\n :version => env['HTTP_VERSION'],\n :method => env['REQUEST_METHOD'],\n :duration => (Time.now - began_at),\n :query => env[\"QUERY_STRING\"],\n :path => env['PATH_INFO'],\n :remote_addr => env['REMOTE_ADDR'],\n :user => env['REMOTE_USER'],\n :user_agent => env['HTTP_USER_AGENT'],\n :timestamp => Time.now.utc.iso8601\n }\n\n # If there's an X-Forwarded-For header split it up into a\n # list of machine-readable IPs.\n blob[:forwarded_for] = env['HTTP_X_FORWARDED_FOR'].split(',') if env['HTTP_X_FORWARDED_FOR']\n\n if logger\n logger.write({:type => 'request',\n :event => blob}.to_json)\n logger.write(\"\\n\")\n end\n end\n end\n end",
"def enable_logging(logger = Logger.new(STDOUT), log_level: Logger::DEBUG)\n stack.after 'request.factory', 'request.logger', AsyncRequestLogger.new(logger, log_level: log_level)\n stack.before 'response.factory', 'response.logger', AsyncResponseLogger.new(logger, log_level: log_level)\n self\n end",
"def call env\n env[\"rack.logger\"] = logger\n trace_id = extract_trace_id(env)\n logger.add_trace_id trace_id\n\n begin\n @app.call env\n ensure\n logger.delete_trace_id\n end\n end",
"def global_request_logging \n http_request_header_keys = request.headers.keys.select{|header_name| header_name.match(\"^HTTP.*\")}\n http_request_headers = request.headers.select{|header_name, header_value| http_request_header_keys.index(header_name)}\n logger.info \"Received #{request.method.inspect} to #{request.url.inspect} from #{request.remote_ip.inspect}. Processing with headers #{http_request_headers.inspect} and params #{params.inspect}\"\n begin \n yield \n ensure \n logger.info \"Responding with #{response.status.inspect} => #{response.body.inspect}\"\n end \n end",
"def logger\n Mumukit::Env.rack_logger\n end",
"def noisy_logger\n logger.info 'some very noisy logging'\n end",
"def shush_rails\n ::Rails::Rack::Logger.class_eval do\n def call_app_with_shushed_logs(request, env)\n logger_level = Rails.logger.level\n Rails.logger.level = Logger::ERROR if LogShusher.shush?(env)\n call_app_without_shushed_logs(request, env).tap do\n Rails.logger.level = logger_level\n end\n end\n\n alias_method_chain :call_app, :shushed_logs\n end\n end",
"def disable_logging\n stack.remove 'request.logger'\n stack.remove 'response.logger'\n self\n end",
"def additional_logger=(logger)\n @additional_logger = logger || ::Logger.new(nil)\n end",
"def call(env)\n if env['PATH_INFO'] == \"/alive\"\n old_level = Rails.logger.level\n Rails.logger.level = 1234567890 # > 5\n begin\n @app.call(env) # returns [..., ..., ...]\n ensure\n Rails.logger.level = old_level\n end\n else\n super(env) # returns [..., ..., ...]\n end\n end",
"def enable_logging\n initialize_logger\n end",
"def set_logger(logger)\n Engines::LOGGER.set_logger(logger) # TODO: no need for Engines:: part\n end",
"def logger\n if request.respond_to? :logger\n request.logger\n else\n Logger.new(STDOUT)\n end\n end",
"def app_logger(**tags)\n logger_tags =\n { url: request.url,\n ip: request.ip,\n user_id: current_user.id,\n params: params.to_unsafe_h }.merge(tags)\n\n Helli::Logger.new(logger_tags)\n end",
"def logger\n MultiServer.logger\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
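A minimal sketch of the flag-setting side described above, assuming a plain Rack middleware (the class name SilenceDownstreamCommonLoggers is illustrative, not taken from the record):

class SilenceDownstreamCommonLoggers
  def initialize(app)
    @app = app
  end

  def call(env)
    # Mark the request so any flag-aware CommonLogger further down the stack
    # (such as the one in the record above) skips logging it a second time.
    env['sinatra.commonlogger'] = true
    @app.call(env)
  end
end

# In config.ru this would sit above the default Rack::CommonLogger:
#   use SilenceDownstreamCommonLoggers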
Partially order the fixed nodes in a graph, so that each has a sequence number which can be compared. If one node must come before another then it will have a lower sequence number. | def partially_order(graph)
# Create a work list of the fixed nodes.
to_sequence = graph.all_nodes.select(&:fixed?)
# Keep going until the work list is empty.
until to_sequence.empty?
node = to_sequence.shift
# We're only interested in the control inputs to the node.
control_input_nodes = node.inputs.control_edges.from_nodes
# Nodes with no control input (such as start but possibly others) have sequence number zero.
if control_input_nodes.empty?
node.props[:sequence] = 0
next
end
# If all control inputs have been given a sequence, we can give this
# node as at least one greater than all of those.
if control_input_nodes.all? { |i| i.props[:sequence] }
node.props[:sequence] = control_input_nodes.map { |i| i.props[:sequence] }.max + 1
next
end
# If all the control inputs haven't been given a sequence number yet
# then put it back on the work list and come back to it later.
to_sequence.push node
end
end | [
"def order!\n hai \"realizing pass DAG into a concrete order\"\n\n graph = build_graph!\n ordered = []\n node_set = []\n\n # Our initial node set consists of only nodes that don't have a predecessor.\n graph.nodes.each do |node|\n next if graph.edges.any? { |e| e[1] == node }\n\n node_set << node\n end\n\n until node_set.empty?\n node = node_set.shift\n ordered << node\n\n succ_nodes = graph.nodes.select { |s| graph.edges.include?([node, s]) }\n succ_nodes.each do |succ|\n graph.edges.delete [node, succ]\n next if graph.edges.any? { |e| e[1] == succ }\n\n node_set << succ\n end\n end\n\n raise \"pass DAG contains a cycle\" unless graph.edges.empty?\n\n replace ordered\n self\n end",
"def sorted_with_order\n # Identify groups of nodes that can be executed concurrently\n groups = tsort_each.slice_when { |a, b| parents(a) != parents(b) }\n\n # Assign order to each node\n i = 0\n groups.flat_map do |group|\n group_with_order = group.product([i])\n i += group.size\n group_with_order\n end\n end",
"def topological_sort\n\t\t@explored_nodes = Array.new(vertices.count, false)\n\t\t@current_label = vertices.count\n\t\t@topological_order = Array.new(vertices.count, nil)\n\t\tvertices.count.times do |label|\n\t\t\tdfs_topological_order(label) unless @explored_nodes[label-1]\n\t\tend\n\t\ttopological_order\n\tend",
"def sort(array_of_nodes, order); end",
"def order\n nodes.length\n end",
"def sequence_nodes(graph)\n # Note that this algorithm is very wasteful! It allocates two sides of a branch\n # the same sequence numbers. This means that to the linear scan values on both\n # sides of the branch and internal to those branches appear to be live at the\n # same time and they won't use the same registers. I think we're supposed to be\n # sequencing one side of the branch at a time, and starting the right side\n # with the max sequence number of the left side.\n\n # Create a worklist of nodes to sequence.\n\n to_sequence = graph.all_nodes\n\n until to_sequence.empty?\n node = to_sequence.shift\n\n # If all this node's inputs have already been sequenced.\n\n if node.inputs.from_nodes.all? { |i| i.props[:register_sequence] }\n # Give this node an sequence number at least one higher than all others.\n input_sequences = node.inputs.from_nodes.map { |i| i.props[:register_sequence] }\n node.props[:register_sequence] = if input_sequences.empty? then 0 else input_sequences.max + 1 end\n next\n end\n\n # Not all inputs were sequenced - put this node back on the list and try again later.\n\n to_sequence.push node\n end\n end",
"def topological_sort\n\t\tcount = size\n\t\t@explored_nodes = Array.new(count, false)\n\t\t@current_label = count\n\t\t@topological_order = Array.new(count, nil)\n\t\tcount.times do |label|\n\t\t\tdfs_topological_order(label) unless @explored_nodes[label]\n\t\tend\n\t\ttopological_order\n\tend",
"def topological_sort(files)\n # Calculate the incomming edges for each node\n files.each_pair do |item, props|\n deps = props[:deps]\n deps.each do |dep|\n files[dep][:incomming] += 1\n end\n end\n\n sorted_nodes = []\n no_dep_nodes = []\n files.each_pair do |item, props|\n if props[:incomming] == 0\n no_dep_nodes << item\n end\n end\n\n while no_dep_nodes.length > 0\n n = no_dep_nodes.shift\n sorted_nodes << n\n\n deps = files[n][:deps]\n deps.each do |dep|\n files[dep][:incomming] -= 1\n if files[dep][:incomming] == 0\n no_dep_nodes << dep\n end\n end\n end\n\n raise Exception, \"Circular dependency detected!\" if sorted_nodes.length != files.size\n\n sorted_nodes.reverse\n end",
"def first_order_connections(node)\n edges.at_order(1).nodes_connected_to(node)\n end",
"def topological_sort(vertices)\n order = []\n explored = Set.new\n cycle = false\n temp = Set.new\n\n vertices.each do |vertex|\n cycle = dfs!(order, explored, vertex, cycle, temp) unless explored.include?(vertex)\n return [] if cycle\n end\n order\nend",
"def kahn_sort(vertices)\n # debugger\n ordered = []\n zero_in_degree = []\n vertices.each do |vertex|\n zero_in_degree.push(vertex) if vertex.zero_in_degree?\n end\n until zero_in_degree.empty?\n # p (zero_in_degree.map{|el| [el.value, e.in_degree]})\n vertex = zero_in_degree.shift\n # p vertex.value\n ordered.push(vertex)\n vertices.delete(vertex)\n # debugger\n neighbors = vertex.destroy!\n zero_in_degree += neighbors.select{|v| v.zero_in_degree? }\n # debugger\n end\n return [] unless vertices.empty?\n ordered\nend",
"def sort(array_of_nodes, order)\n new_arry = []\n array_of_nodes.each { |node|\n node_idx = []\n np = node.node_type == :attribute ? node.element : node\n while np.parent and np.parent.node_type == :element\n node_idx << np.parent.index( np )\n np = np.parent\n end\n new_arry << [ node_idx.reverse, node ]\n }\n ordered = new_arry.sort_by do |index, node|\n if order == :forward\n index\n else\n -index\n end\n end\n ordered.collect do |_index, node|\n node\n end\n end",
"def sort_by_state(nodes)\n sorted_nodes = Array.new\n nodes.each_with_index do |node, index|\n next if node.nil?\n node[:id] = index\n sorted_nodes.push(node)\n end\n sorted_nodes.sort_by { |node| node[:online_count]}.reverse!\n end",
"def graph_sorted\n pieces.sort do |piece, other_piece|\n if piece.y == other_piece.y\n piece.x <=> other_piece.x\n else\n other_piece.y <=> piece.y\n end\n end\n end",
"def topological_sort(vertices)\n # Khan's algorithm\n sorted_arr = []\n queue = Queue.new\n # in_edges = {}\n vertices.each do |vertex|\n if vertex.in_edges.empty?\n queue.enq(vertex)\n end\n end\n\n # vertices.each do |vertex|\n # in_edge_cost = vertex.in_edges.reduce(0) { |sum, edge| sum += edge.cost }\n # in_edges[vertex] = in_edge_cost\n # queue << vertex if in_edge_cost == 0\n # end\n\n until queue.empty?\n u = queue.deq\n sorted_arr << u\n while u.out_edges.length != 0\n current_edge = u.out_edges[0]\n queue.enq(current_edge.to_vertex) if current_edge.to_vertex.in_edges == [current_edge]\n current_edge.destroy!\n end\n u.out_edges = []\n end\n\n # until queue.empty?\n # current = queue.shift\n #\n # current.out_edges.each do |edge|\n # to_vertex = edge.to_vertex\n # in_edges[to_vertex] -= edge.cost\n # queue << to_vertex if in_edges[to_vertex] == 0\n # end\n #\n # sorted arr << current\n # end\n\n if sorted_arr.length != vertices.length\n return []\n # vertices.length == order.length ? order : []\n end\n\n sorted_arr\n\n # Tarjan's algorithm (without cyce catching)\n # order = []\n # explored = Set.new\n #\n # vertices.each do |vertex|\n # dfs!(order, explored, vertex) unless explored.include?(vertex) #depth-first search\n # end\n #\n # order\n\nend",
"def forward_pass(sorted_nodes)\n sorted_nodes.each(&:forward)\nend",
"def optimize_calculation_order\n copy = DeepClone.clone self #Marshal.load(Marshal.dump(self))\n copy.calculate\n copy.finished_nodes.reverse.each_with_index do |node, index|\n if old = nodes.detect{|c| c.id == node.id}\n nodes.delete(old)\n nodes.unshift(old)\n end\n end\n end",
"def sortFile( filename, numberOfValuesToOutput )\n nodeController = NodeController.new( numberOfValuesToOutput )\n # Load each line into memory and add it to the right place in the nodes \n file = File.open( filename, \"r\" )\n file.each_line do | line |\n nodeController.addValueToNodes( line.to_s.chomp.to_f)\n end\n \n # Write the output nodes to disk bottom-up, but only up to a specified number of values\n outFile = File.new( \"output-#{filename}\", \"w\" )\n node = nodeController.lowestValue\n numberOfValuesToOutput.times do\n # Travel the right nodes upwards from the node with the lowest value found\n outFile.puts node.payload.to_s\n node = node.rightNode\n end\n \nend",
"def topological_sort(vertices)\n # visited = Set.new\n # # visited = Array.new(vertices.length, false)\n # cycle = [false]\n # result = []\n #\n # vertices.each do |vertex|\n # unless visited.include?(vertex)\n # visit(vertex, visited, result, cycle)\n # end\n # end\n #\n # if cycle == [true]\n # return []\n # else\n # return result\n # end\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n queue = []\n result = []\n vertices.each do |vert|\n queue << vert if vert.in_edges.empty?\n end\n # byebug\n count = 0\n until queue.empty?\n vertex = queue.pop\n result << vertex\n to_destroy = []\n for i in (0..(vertex.out_edges.length - 1))\n edge = vertex.out_edges[i]\n if edge.to_vertex.in_edges.length == 1\n queue.push(edge.to_vertex)\n end\n to_destroy << edge\n i += 1\n end\n to_destroy.each do |edge|\n edge.destroy!\n end\n count += 1\n end\n result\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
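A toy worked example of the sequencing rule above, using a plain Struct instead of the real IR node classes (the names and the four-node graph are assumptions for illustration only):

Node = Struct.new(:name, :control_inputs, :sequence)

start_node = Node.new(:start, [])
a          = Node.new(:a, [start_node])
b          = Node.new(:b, [start_node])
merge      = Node.new(:merge, [a, b])

worklist = [merge, a, b, start_node]   # deliberately out of order
until worklist.empty?
  node = worklist.shift
  if node.control_inputs.empty?
    node.sequence = 0                  # e.g. the start node
  elsif node.control_inputs.all?(&:sequence)
    node.sequence = node.control_inputs.map(&:sequence).max + 1
  else
    worklist.push(node)                # inputs not sequenced yet; retry later
  end
end

# start_node=0, a=1, b=1, merge=2 – a node that must come first has a lower number.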
Globally schedule a graph, meaning to anchor all floating nodes to a fixed node. All fixed nodes are part of a basic block, so globally scheduling also puts all floating nodes into a basic block. | def global_schedule(graph)
# Create a work list of the floating nodes.
to_schedule = graph.all_nodes.select {|n| n.floating? && n.op != :immediate }
# Keep going until the work list is empty.
until to_schedule.empty?
node = to_schedule.shift
# Are we ready to schedule this node?
if ready_to_schedule?(node)
# Find candidates to anchor this node to.
candidates = schedule_candidates(graph, node)
# If there aren't any then we're stuck!
raise 'stuck' if candidates.empty?
# Sort the candidates and take the first one to get the best.
best_candidate = sort_candidates(candidates).first
# Add a global schedule edge.
node.output_to :global_schedule, best_candidate
else
# If we aren't ready to schedule this node, try it again later.
to_schedule.push node
end
end
end | [
"def local_schedule(graph)\n # Find all basic blocks and locally schedule them.\n\n graph.all_nodes.each do |node|\n if node.begins_block?\n locally_schedule_block node\n end\n end\n end",
"def schedule_every (freq, schedulable=nil, params=nil, &block)\n\n sschedule_every(freq, nil, schedulable, params, &block)\n end",
"def eval\n @nodes.each do |node| \n node.propagate\n end\n @nodes.each do |node|\n node.update_state \n end\n increment_time \n end",
"def trigger\n\n schedule_next\n\n super\n\n #unschedule if @params[:dont_reschedule]\n # obsolete\n end",
"def schedule(parent: @parent || Task.current)\n @scheduler_task ||=\n parent.async { |task|\n task.annotate(\"scheduling tasks for #{self.class}.\")\n\n while @waiting.any? && !limit_blocking?\n delay = [next_acquire_time - Async::Clock.now, 0].max\n task.sleep(delay) if delay.positive?\n resume_waiting\n end\n\n @scheduler_task = nil\n }\n end",
"def run_automatic_scheduling\n Thread.new {\n loop do\n sleep(0.1)\n execute if automatic_scheduling && running.empty?\n end\n }\n end",
"def globally_scheduled?(node)\n node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:global_schedule)\n end",
"def update\n # center force (pull nodes to center)\n @nodes.each do |n|\n n.apply_momentum(Force.center_mom(PVector.sub(n.loc, @center)))\n # apply field force (push nodes apart)\n @nodes.each do |n2|\n n.apply_momentum(Force.field_mom(PVector.sub(n.loc, n2.loc))) unless n == n2\n end\n end\n \n # edge forces (pull connected nodes to each other)\n @edges.each do |e|\n e.from.apply_momentum(Force.edge_mom(PVector.sub(e.to.loc, e.from.loc), e.strength))\n end\n \n # move the nodes\n @nodes.each { |n| n.update }\n end",
"def schedule(&blk)\n @reactor.next_tick(blk)\n end",
"def schedule_all\n return unless require_done\n\n # select all future occurrences\n occurrences_to_schedule = Event.all\n\n problem_builder = PersonalTimetablingAPI::ProblemDefinition.global_reschedule_problem occurrences_to_schedule\n \n solver_id = PersonalTimetablingAPI::SolverClient.run_solver(\n problem_builder.getDefinition,\n SOLVER_TIMEOUT, \n {:user_id => current_user_id}\n )\n @@user_solvers[current_user_id] = solver_id\n\n respond_new\n end",
"def assign_graph_to_qernel_objects\n nodes.each do |node|\n node.graph = self\n node.slots.each { |s| s.graph = self }\n end\n edges.each do |edge|\n edge.graph = self\n edge.query.graph = self\n end\n carriers.each { |c| c.graph = self }\n end",
"def schedule_every (freq, params={}, &block)\n\n params = prepare_params(params)\n params[:every] = freq\n\n first_at = params[:first_at]\n first_in = params[:first_in]\n\n #params[:delayed] = true if first_at or first_in\n\n first_at = if first_at\n at_to_f(first_at)\n elsif first_in\n Time.now.to_f + Rufus.duration_to_f(first_in)\n else\n Time.now.to_f + Rufus.duration_to_f(freq) # not triggering immediately\n end\n\n do_schedule_at(first_at, params, &block)\n end",
"def reschedule\n # get non-old tuples\n # iterate through them, rescheduling\n end",
"def schedule\n # implemented by subclass\n end",
"def fill_first_task\n first_basis = @first_basis || basis_for_initial_task\n return unless first_basis # no initial basis for current task\n dual_task = Tasks::RestrictedDualSimplex.new_without_plan(\n task.task,\n first_basis,\n task.sign_restrictions\n )\n tasks_list << dual_task # add current task to queue\n end",
"def fruchterman_reingold(max_iters=100, width=@width, height=@height, nodeset=@nodes, edgeset=@edges, adjacency=@adjacency)\n puts \"beginning fruchterman_reingold @ \"+Time.now.to_s\n iterations = max_iters\n area = width*height\n k = nodeset.length > 0 ? Math.sqrt(area/nodeset.length) : 1 #multiply this by .75 to slow it down?\n k2 = k**2\n temperature = width/10\n for i in (1..iterations) do\n nodeset.each_value do |v| #calc repulsive forces\n if !v.static\n v.d = Vector[0.0,0.0]\n nodeset.each_value do |u|\n if u!=v\n dist = v.location - u.location\n distlen = dist.r.to_f\n #v.d += distlen != 0.0 ? (dist/distlen)*(k2/distlen) : Vector[(-0.5+rand())*0.1,(-0.5+rand())*0.1]\n if distlen != 0.0\n v.d += (dist/distlen)*(k2/distlen)\n else #at the same spot, so just splut them apart a little this run\n v.d += Vector[0.01,0]\n u.d += Vector[-0.01,0]\n end\n end\n end\n end\n end\n for e in edgeset.values do #calc attractive forces\n #only changes 1/conn (assuming 1 edge each direction)\n # if e.a.id < e.b.id or adjacency[[e.a.id,e.b.id]]+adjacency[[e.b.id,e.a.id]] < 2\n dist = e.a.location - e.b.location\n distlen = dist.r.to_f\n fa = distlen**2/k\n delta = (dist/distlen)*fa\n e.a.d -= delta if !e.a.static\n e.b.d += delta if !e.b.static\n # end\n end\n #puts nodeset\n nodeset.each_value do |v| #move nodes\n #added in attraction to center\n if !v.static\n dist_center = v.location - Vector[width/2, height/2]\n distlen = dist_center.r.to_f\n fa = distlen**2/k\n v.d -= (dist_center/distlen)*fa\n dlen = v.d.r.to_f\n if dlen > 0.0 #if we have a distance to move\n v.location += (v.d/dlen)*[dlen,temperature].min\n nx = [[v.location[0],0].max, width].min #don't let outside of bounds (50px border)\n ny = [[v.location[1],0].max, height].min\n v.location = Vector[nx,ny]\n end\n end\n end\n temperature *= (1 - i/iterations.to_f) #cooling function from http://goo.gl/xcbXR\n #puts \"finished iter \"+i.to_s+\" @ \"+Time.now.to_s\n end\n puts \"finished fruchterman_reingold @ \"+Time.now.to_s\n end",
"def schedule(spec)\n @task.schedule = Cloud::Cycler::Schedule.parse(spec)\n end",
"def fire_triggers!\n triggers.each { |node| node.trigger! }\n end",
"def schedule(program, name, insertions, deletions)\n\t\t@driver.synchronize do\n\t\t\tif (program == \"runtime\") \n\t\t\t task = Driver::Task.new\n\t\t\t task.insertions = insertions\n\t\t\t task.deletions = deletions\n\t\t\t task.program = program\n\t\t\t task.name = name\n#\t\t puts \"XXX Tasking: #{name}, #{task.insertions.tups[0]}, #{task.deletions.tups[0]}\"\n \t\t\t@driver.task(task)\n\t\t\telse\n\t\t\t\ttuple = Tuple.new(clock.current + 1, program, name, insertions, deletions)\n\t\t print \"XXX Scheduling: #{name}, #{task.insertions.tups[0]}, #{task.deletions.tups[0]}\"\n\t\t\t\tschedule.force(tuple)\n\t\t\tend\n\t\t\t# XXXXXXXXXX\n\t\t\t@driver.cond_var.signal\n\t\tend\n\tend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
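A condensed sketch of the anchoring step in the record above, assuming the candidates are fixed nodes whose :sequence property has already been assigned (the helper name anchor_floating_node is hypothetical):

def anchor_floating_node(node, candidates)
  raise 'stuck' if candidates.empty?
  # Pick the candidate with the highest (latest) sequence number, mirroring
  # sort_candidates(candidates).first in the sort_candidates record later in this file.
  best_candidate = candidates.max_by { |c| c.props[:sequence] }
  node.output_to :global_schedule, best_candidate
end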
A node is ready to be globally scheduled if all outputs have themselves been globally scheduled. | def ready_to_schedule?(node)
# Ready to globally schedule
node.outputs.to_nodes.all? do |i|
globally_scheduled?(i)
end
end | [
"def globally_scheduled?(node)\n node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:global_schedule)\n end",
"def locally_scheduled?(node)\n node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:local_schedule)\n end",
"def ready(bot, scheduler)\n end",
"def all_nodes_ready?\n if current_platform_spec.nodes.nil?\n false\n else\n true\n end\n end",
"def allOn\n if (@nodeActiveInactive == NODE_INACTIVE)\n return\n end\n if ( (readCondition == NODE_NOT_REGISTERED) )\n\t return \n elsif ( readCondition == NODE_POWERED_UP )\n @communicator.constructSendCmd(@myPort, @ipaddr, CM_CMD_RESET, @nodeType)\n updateCondition(NODE_POWERED_UP_PENDING)\n @communicator.constructSendCmd(@myPort, @ipaddr, CM_CMD_IDENTIFY_NODE, @nodeType)\n else\n\t updateCondition(NODE_POWERED_UP_PENDING)\n @communicator.constructSendCmd(@myPort, @ipaddr, CM_CMD_POWER_UP, @nodeType)\n\t sleep(0.01)\n end\n end",
"def ready?\n\t\t# always ready to run\n\t\ttrue\n\tend",
"def report_ready_for_nodes_without_tasks(nodes)\n nodes_without_tasks = filter_nodes_without_tasks(nodes)\n @ctx.reporter.report(nodes_status(nodes_without_tasks, 'ready', {'progress' => 100}))\n end",
"def global_schedule(graph)\n # Create a work list of the floating nodes.\n\n to_schedule = graph.all_nodes.select {|n| n.floating? && n.op != :immediate }\n\n # Keep going until the work list is empty.\n\n until to_schedule.empty?\n node = to_schedule.shift\n\n # Are we ready to schedule this node?\n\n if ready_to_schedule?(node)\n # Find candidates to anchor this node to.\n\n candidates = schedule_candidates(graph, node)\n\n # If there aren't any then we're stuck!\n\n raise 'stuck' if candidates.empty?\n\n # Sort the candidates and take the first one to get the best.\n\n best_candidate = sort_candidates(candidates).first\n\n # Add a global schedule edge.\n\n node.output_to :global_schedule, best_candidate\n else\n # If we aren't ready to schedule this node, try it again later.\n\n to_schedule.push node\n end\n end\n end",
"def is_ready\n if self.tasks.empty? and not self.is_done # no tasks assigned for this\n false\n elsif (self.tasks.find_by is_done: false).nil? # can't find any false => all tasks are done\n self.update_attribute(:is_done, true)\n true\n else\n false\n end\n end",
"def process_node(node)\n debug \"Process node: #{node}\"\n node.poll\n return unless node.online?\n ready_task = node.ready_task\n return unless ready_task\n ready_task.run\n end",
"def scheduled?\n @current_state == Psc::ScheduledActivity::SCHEDULED\n end",
"def ready!\n return if ready?\n @ready = true\n if defined?(@_on_ready)\n instance_exec(&@_on_ready)\n remove_instance_variable :@_on_ready\n end\n freeze\n end",
"def ready\n Souffle::Log.info \"#{@node.log_prefix} Is ready for provisioning...\"\n end",
"def make_node_busy\n 'sleep 5'\n end",
"def update_scheduled_status\n self._scheduled = !self.user_id.blank? && !self.milestone_id.blank? && !self.priority.blank?\n true\n end",
"def ready_while(&block)\n @ready = true\n logger.info('Ready')\n yield.tap { @ready = false }\n end",
"def ready=(_); end",
"def scheduled?\n event.present?\n end",
"def unscheduled?; end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
A node is globally scheduled if it was fixed anyway or we've scheduled it. | def globally_scheduled?(node)
node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:global_schedule)
end | [
"def ready_to_schedule?(node)\n # Ready to globally schedule\n\n node.outputs.to_nodes.all? do |i|\n globally_scheduled?(i)\n end\n end",
"def locally_scheduled?(node)\n node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:local_schedule)\n end",
"def global_schedule(graph)\n # Create a work list of the floating nodes.\n\n to_schedule = graph.all_nodes.select {|n| n.floating? && n.op != :immediate }\n\n # Keep going until the work list is empty.\n\n until to_schedule.empty?\n node = to_schedule.shift\n\n # Are we ready to schedule this node?\n\n if ready_to_schedule?(node)\n # Find candidates to anchor this node to.\n\n candidates = schedule_candidates(graph, node)\n\n # If there aren't any then we're stuck!\n\n raise 'stuck' if candidates.empty?\n\n # Sort the candidates and take the first one to get the best.\n\n best_candidate = sort_candidates(candidates).first\n\n # Add a global schedule edge.\n\n node.output_to :global_schedule, best_candidate\n else\n # If we aren't ready to schedule this node, try it again later.\n\n to_schedule.push node\n end\n end\n end",
"def scheduled?(resource)\n self.ignoreschedules or resource_harness.scheduled?(resource_status(resource), resource)\n end",
"def scheduled?(resource)\n self.ignoreschedules or resource.scheduled?\n end",
"def scheduled?\n @current_state == Psc::ScheduledActivity::SCHEDULED\n end",
"def unscheduled?; end",
"def update_scheduled_status\n self._scheduled = !self.user_id.blank? && !self.milestone_id.blank? && !self.priority.blank?\n true\n end",
"def update_schedule_status\n ### if job is present unschedule it and create new job ###\n jobs = $scheduler.jobs(:tag => self.id)\n $scheduler.unschedule(jobs.first.id) if jobs.present?\n ### schedule job wehn schedule status is true ###\n if schedule_status\n $scheduler.every rate, :tags => \"#{self.id}\" do\n self.send_to_florincoin(true)\n end\n end\n end",
"def trigger\n\n schedule_next\n\n super\n\n #unschedule if @params[:dont_reschedule]\n # obsolete\n end",
"def is_alone?\n self.schedule_father=nil\n end",
"def is_root?\n self.schedule_father=self\n end",
"def scheduled?\n event.present?\n end",
"def schedule_order(order)\n # No scheduled orders yet.\n end",
"def report_scheduler_state(state)\n super if defined? super\n end",
"def schedule_public?\n event && (\n !event.current? || # All past schedules are public\n Settings.show_schedule?) # Current event schedule has explicit go-live flag\n end",
"def scheduled?(schedulable, starts, ends)\n puts \"This #{schedulable.class} is not scheduled between #{starts} and #{ends}\"\n end",
"def got_reschedule_condition?\n Time.zone.now >= change_reschedule_after\n end",
"def unscheduled?\n state == :unscheduled\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sort a list of candidates in decreasing order of sequence number. | def sort_candidates(candidates)
candidates.sort_by { |candidate|
anchor = fixed_anchor(candidate)
sequence = anchor.props[:sequence]
raise unless sequence
sequence
}.reverse
end | [
"def sort_by_rank\n\t\tsorted = @candidates.sort{ |x,y| y.votes <=> x.votes }\n\t\t@candidates\n\tend",
"def stable_sort_by(list); end",
"def sort_candidates(candidates)\n candidates.sort_by do |candidate|\n [candidate[:years_of_experience], candidate[:github_points]]\n end.reverse\nend",
"def sort_questions(questions)\n questions.sort_by(&:seq)\n end",
"def sort!\n @num_cars.sort!{|p1, p2| p1.to_range <=> p2.to_range}\n end",
"def selection_sort\n\n n = @data.length - 1\n\n for i in (n).downto(1) # Start from the end of the array\n\n k = i # k is the biggest element we have seen so far from the remaining unsorted data\n for j in 0..i # Traverse up to, but not including the sorted part of the array\n\n if @data[j] > @data[k]\n\n k = j # This sets the new max if we ever saw one\n end\n\n swap(@data, i, k) # Regardless if k is ever changed, you still swap (Potentially i equals k)\n end\n end\n\n puts(@data)\n end",
"def sort_votes(list)\n list.sort_by! { |option| option.vote } # Sorts the list by the vote attribute of each option\n list.reverse! # Reverses the list because it naturally sorts ascending order\nend",
"def sort!\n @cards.sort_by! { |card| [-card_rank_to_count[card.rank], -card.to_i] }\n end",
"def candidates_below_default\n Array(@ranking[@default_rank+1...@ranking.length]).flatten.sort\n end",
"def candidates_above_default\n Array(@ranking[0...@default_rank]).flatten.sort\n end",
"def ranked_candidates\n Array(@ranking - [UNRANKED_ID]).flatten.sort\n end",
"def gnome_sort(a)\n i, j = 0, 0\n s = a.size - 1\n while i < s\n if a[i] <= a[i + 1]\n j = j + 1\n i = j\n else\n swap(a, i, i + 1)\n i = i - 1\n i = 0 if i < 0\n end\n end\n \n a\n end",
"def prio_sort(elements); end",
"def candidate_events_sorted\n # TODO: rewrite using event states\n candidate_events.sort do |ce1, ce2|\n # in order for this to work due_dates should not be nil.\n # if they are move them to the top so admin can give them\n # one.\n if ce1.due_date.nil?\n if ce2.due_date.nil?\n ce1.event_key <=> ce2.event_key\n else\n -1\n end\n elsif ce2.due_date.nil?\n 1\n elsif ce1.completed_date.nil?\n # due_dates filled in.\n if ce2.completed_date.nil?\n # if neither completed then the first to come\n # due is first\n due_date = ce1.due_date <=> ce2.due_date\n if due_date.zero?\n ce1.event_key <=> ce2.event_key\n else\n due_date\n end\n else\n # non completed goes on top.\n -1\n end\n elsif ce2.completed_date.nil?\n # non completed goes on top.\n 1\n else\n # if both are completed then the first to come\n # due is on top\n due_date = ce1.due_date <=> ce2.due_date\n if due_date.zero?\n ce1.event_key <=> ce2.event_key\n else\n due_date\n end\n end\n end\n end",
"def sort\n @pokers.sort.reverse\n end",
"def natural_sort(list)\n natural_sort_by(list, &:to_s)\n end",
"def sort(from)\n #int i;\n #int bs; /* best score */\n #int bi; /* best i */\n #gen_t g;\n\n bs = -1\n bi = from\n \n i = from\n while (i < @first_move[@ply + 1])\n if (gen_dat[i].score > bs)\n bs = @gen_dat[i].score;\n bi = i;\n end\n i += 1\n end\n \n g = @gen_dat[from];\n @gen_dat[from] = @gen_dat[bi];\n @gen_dat[bi] = g;\n end",
"def sort!\n @events.sort! { |job1, job2| job1.next_run <=> job2.next_run }\n end",
"def selection_sort(random_num)\n min_num_index = 0\n for i in 0..(random_num.length - 2) do\n\n min_num_index = i\n for j in (0 + i)..(random_num.length - 2) do\n if random_num[min_num_index] >= random_num[j + 1]\n min_num_index = (j + 1)\n end\n end\n tmp_num = random_num[i]\n random_num[i] = random_num[min_num_index]\n random_num[min_num_index] = tmp_num\n end\n random_num\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
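A tiny illustration of that ordering, with plain hashes standing in for candidate nodes (the values are made up):

candidates = [
  { name: :a, sequence: 3 },
  { name: :b, sequence: 7 },
  { name: :c, sequence: 5 }
]

ordered = candidates.sort_by { |c| c[:sequence] }.reverse
ordered.map { |c| c[:name] }   # => [:b, :c, :a] – highest sequence number first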
Locally schedule a graph, which means deciding, within each basic block, a single order in which to run the nodes, with no ambiguity left. | def local_schedule(graph)
# Find all basic blocks and locally schedule them.
graph.all_nodes.each do |node|
if node.begins_block?
locally_schedule_block node
end
end
end | [
"def global_schedule(graph)\n # Create a work list of the floating nodes.\n\n to_schedule = graph.all_nodes.select {|n| n.floating? && n.op != :immediate }\n\n # Keep going until the work list is empty.\n\n until to_schedule.empty?\n node = to_schedule.shift\n\n # Are we ready to schedule this node?\n\n if ready_to_schedule?(node)\n # Find candidates to anchor this node to.\n\n candidates = schedule_candidates(graph, node)\n\n # If there aren't any then we're stuck!\n\n raise 'stuck' if candidates.empty?\n\n # Sort the candidates and take the first one to get the best.\n\n best_candidate = sort_candidates(candidates).first\n\n # Add a global schedule edge.\n\n node.output_to :global_schedule, best_candidate\n else\n # If we aren't ready to schedule this node, try it again later.\n\n to_schedule.push node\n end\n end\n end",
"def eval\n @nodes.each do |node| \n node.propagate\n end\n @nodes.each do |node|\n node.update_state \n end\n increment_time \n end",
"def nodes_in_block(first_node)\n # We're going to do a depth-first search of the graph from the first\n # node, following control flow edges out, and global schedule edges in,\n # and stopping when we find a node that ends a basic block such as a\n # branch.\n\n worklist = [first_node]\n block = Set.new\n\n until worklist.empty?\n node = worklist.pop\n\n if block.add?(node)\n # We need to visit nodes that are anchored to this one.\n\n node.inputs.edges.each do |i|\n if i.input_name == :global_schedule\n worklist.push i.from\n end\n end\n\n # If this node isn't a branch, and it's either the first node or it\n # isn't a merge, visit the nodes that follow it in control flow.\n\n if node.op != :branch && (node == first_node || node.op != :merge)\n node.outputs.edges.each do |o|\n if o.control?\n if !(node.op == :start && o.to.op == :finish)\n worklist.push o.to\n end\n end\n end\n end\n end\n end\n\n block.to_a\n end",
"def setup_start_end\n cluster_tasks_set = Set.new each_task\n tasks_to_include = Set.new\n self.subgraphs.each do |subgraph|\n setup_start_end_piece(subgraph, cluster_tasks_set).each do |piece|\n tasks_to_include.add piece\n end\n end\n to_skip_tasks = cluster_tasks_set - tasks_to_include\n to_skip_tasks.each do |task|\n warn \"Skipping task #{task} due to subgraph evaluation\"\n task.skip!\n end\n end",
"def schedule\n routes = Route.all()\n vehicle_ids = []\n\n routes.each do |route|\n Vehicle.all().each do |vehicle|\n driver_allowed_cities = vehicle.driver.allowed_cities.map(&:id)\n route_cities = route.route_cities.map(&:id)\n can_drive = can_drive_route(driver_allowed_cities, route_cities)\n if vehicle_ids.include? vehicle || !can_drive\n break\n elsif route.load_type == vehicle.load_type && route.load_sum <= vehicle.capacity && vehicle.driver.max_stops <= route.stops_amount\n route.create_assignment(route: route, vehicle: vehicle)\n vehicle_ids.push(vehicle)\n break\n end\n end\n end\nend",
"def execute_best_swap graph\n gain,swap = execute_swap graph\n compute_swap\nend",
"def run\n Etsource::Molecules.from_energy_keys.each do |node_key|\n molecule_node = @molecule_graph.node(node_key)\n conversion = molecule_node.from_energy\n energy_node = @energy_graph.node(conversion.source)\n\n molecule_node.demand = Connection.demand(energy_node, conversion)\n end\n\n @molecule_graph.calculate\n @molecule_graph\n end",
"def reschedule\n # get non-old tuples\n # iterate through them, rescheduling\n end",
"def order!\n hai \"realizing pass DAG into a concrete order\"\n\n graph = build_graph!\n ordered = []\n node_set = []\n\n # Our initial node set consists of only nodes that don't have a predecessor.\n graph.nodes.each do |node|\n next if graph.edges.any? { |e| e[1] == node }\n\n node_set << node\n end\n\n until node_set.empty?\n node = node_set.shift\n ordered << node\n\n succ_nodes = graph.nodes.select { |s| graph.edges.include?([node, s]) }\n succ_nodes.each do |succ|\n graph.edges.delete [node, succ]\n next if graph.edges.any? { |e| e[1] == succ }\n\n node_set << succ\n end\n end\n\n raise \"pass DAG contains a cycle\" unless graph.edges.empty?\n\n replace ordered\n self\n end",
"def linearize(graph)\n # The basic blocks.\n blocks = []\n \n # Details of the basic block that contain the finish operation which\n # won't be added to the list of basic blocks until the end.\n first_node_last_block = nil\n last_block = nil\n \n # Two maps that help us map between nodes and the names of the blocks\n # that they go into, and the merge instruction indicies and the blocks\n # they're coming from.\n first_node_to_block_index = {}\n merge_index_to_first_node = {}\n\n # Look at each node that begins a basic block.\n\n basic_block_starters = graph.all_nodes.select { |n| n.begins_block? }\n\n last_basic_block_starter = basic_block_starters.select { |s| nodes_in_block(s).any? { |n| n.op == :finish } }.first\n\n basic_block_starters.delete last_basic_block_starter\n basic_block_starters.push last_basic_block_starter\n\n basic_block_starters.each do |node|\n original_first_node = node\n first_node = first_in_block(original_first_node, nodes_in_block(original_first_node))\n\n # We're going to create an array of operations for this basic\n # block.\n\n block = []\n next_to_last_control = nil\n\n # Follow the local sequence.\n\n node = first_node\n\n begin\n # We don't want to include operations that are just there to form\n # branches or anchor points in the graph such as start and merge.\n\n unless [:start, :merge].include?(node.op)\n op = node.op\n\n # We rename finish to return to match the switch from the\n # declarative style of the graph to the imperative style\n # of the list of operations.\n op = :return if op == :finish\n\n # The instruction begins with the operation.\n insn = [op]\n\n # Then any constant values or similar.\n [:line, :n, :value].each do |p|\n insn.push node.props[p] if node.props.has_key?(p)\n end\n\n # Then any input registers.\n node.inputs.with_input_name(:value).from_nodes.each do |input_values|\n insn.push input_values.props[:register]\n end\n\n # Phi instructions need pairs of source registers with the blocks they came from.\n if node.op == :phi\n node.inputs.edges.each do |input|\n if input.input_name =~ /^value\\((.+)\\)$/\n n = $1.to_i\n insn.push n\n insn.push input.from.props[:register]\n end\n end\n # Elide phi instructions if register allocation has run correctly and values are\n # already in the correct registers.\n insn = nil if insn.drop(2).select.with_index{ |_,i| i.even? }.uniq.size == 1\n end\n\n # Send instructions and lowered equivalents need the arguments.\n if [:send, :call_managed, :int64_add, :int64_sub, :int64_imul, :int64_and, :int64_shift_left, :int64_shift_right].include?(node.op)\n insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]\n\n if node.op == :send\n insn.push node.props[:name]\n elsif node.op == :call_managed\n insn.push node.inputs.with_input_name(:name).from_node.props[:register]\n end\n\n node.props[:argc].times do |n|\n arg = node.inputs.with_input_name(:\"arg(#{n})\").from_node\n\n if arg.op == :immediate\n insn.push arg.props[:value]\n else\n insn.push arg.props[:register]\n end\n end\n end\n\n # Then the target register if the instruction has one.\n insn.push node.props[:register] if insn && (node.produces_value? 
|| node.op == :move)\n\n # If it's a branch then the target basic blocks and the test.\n if node.op == :branch\n insn.push node.inputs.with_input_name(:condition).from_node.props[:register]\n\n [:true, :false].each do |branch|\n target = node.outputs.with_output_name(branch).to_node\n raise unless target\n insn.push target\n end\n\n if node.props[:test]\n insn.push node.props[:test]\n end\n end\n\n # Send instructions and lowered managed calls need the list of live registers.\n if [:send, :call_managed].include?(node.op)\n insn.push node.props[:live_registers]\n end\n \n # Guards are like branches, but only have one side.\n if node.op == :guard\n insn.push node.inputs.with_input_name(:condition).from_node.props[:register]\n \n if node.props[:test]\n insn.push node.props[:test]\n end\n end\n\n # Kind instructions need the kind.\n if node.op == :kind_is?\n insn.push node.props[:kind]\n end\n \n # Frame states need the instructions, the ip, and to know where values are for\n # the receiver, the arguments, and the stack.\n if node.op == :deopt_map\n insn.push node.props[:insns]\n insn.push node.props[:ip]\n insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]\n\n insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('arg(') }.map { |e|\n /arg\\((\\d+)\\)/ =~ e.input_name.to_s\n [$1.to_i, e.from.props[:register]]\n }.sort_by { |pair| pair.first }.map { |pair| pair.last }\n\n insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('stack(') }.map { |e|\n /stack\\((\\d+)\\)/ =~ e.input_name.to_s\n [$1.to_i, e.from.props[:register]]\n }.sort_by { |pair| pair.first }.map { |pair| pair.last }\n end\n\n # Add the instruction to the block.\n block.push insn if insn\n end\n\n next_to_last_control = node if node.has_control_output?\n\n # Follow the local schedule edge to the next node.\n node = node.outputs.with_output_name(:local_schedule).to_nodes.first\n end while node && node.op != :merge\n\n # Empty blocks cause problems elsewhere - it's easier to just have a nop\n # in them. Really, we should remove empty blocks by modifying the\n # instruction that jumps here to jump to wherever this leads to.\n\n if block.empty?\n block.push [:nop]\n end\n\n # If the last node is a merge, we need to remember which merge index this is.\n\n if node && node.op == :merge\n next_to_last_control.outputs.with_output_name(:control).edges.first.input_name =~ /^control\\((.+)\\)$/\n n = $1.to_i\n merge_index_to_first_node[n] = first_node\n end\n\n # Add a jump instruction if this block was going to just flow into the next\n # - we'll remove it later if the block followed it anyway and we can just\n # fallthrough.\n\n unless [:return, :branch].include?(block.last.first)\n begin\n block.push [:jump, next_to_last_control.outputs.with_output_name(:control).to_node]\n rescue\n block.push [:jump, :broken]\n end\n end\n\n first_node_to_block_index[original_first_node] = blocks.size\n first_node_to_block_index[first_node] = blocks.size\n blocks.push block\n end\n\n # Record the number that this basic block has and then add it to the list of basic blocks.\n\n first_node_to_block_index[first_node_last_block] = blocks.size\n\n # Go back through the basic blocks and update some references that were to things that\n # hadn't been decided yet.\n\n blocks.each do |block|\n block.each do |insn|\n insn.map! 
do |e|\n # If part of an instruction references a basic block, turn that into the index of\n # the basic block instead.\n\n if e.is_a?(IR::Node)\n :\"block#{first_node_to_block_index[e]}\"\n else\n e\n end\n end\n end\n end\n\n # Go back through the basic blocks and change how the branch instructions out of them\n # work.\n\n blocks.each_with_index do |block, n|\n next_block = :\"block#{n + 1}\"\n last = block.last\n\n if last == [:jump, next_block]\n # A jump that just goes to the next block can be removed and left to fall through.\n block.pop\n elsif last.first == :branch && last[3] == next_block\n # A branch where the else goes to the next block can branch only when true.\n block.pop\n block.push [:branch_if, last[1], last[2], *last.drop(4)]\n elsif last.first == :branch && last[2] == next_block\n # A branch where the if goes to the next block can branch only unless true.\n block.pop\n test = last.drop(4)\n block.push [:branch_unless, last[1], last[3], *test]\n elsif last.first == :branch\n # A branch that doesn't go to the next block at all can be a branch if true\n # and then fallthrough to a new jump instruction.\n block.pop\n block.push [:branch_if, last[1], last[2], *last.drop(4)]\n block.push [:jump, last[3]]\n end\n end\n\n blocks\n end",
"def prioritize_rules()\n \n #\n # Process all :reorder_specs. The top row of each table form roots of the DAG.\n # We test for cycles as we go -- there can't be any. \n \n root_sets = []\n hierarchy = Util::DirectedAcyclicGraph.new( false )\n @reorder_specs.each do |spec|\n previous_level = []\n spec.reorder_levels.each do |level|\n \n #\n # Collect rule reference TO RULES IN THIS GRAMMAR ONLY. Rule priority is about\n # rule production. A grammar can use rules from other grammars, but can't PRODUCE\n # rules from other grammars, so local rules only need apply. This is only an issue\n # for groups, where we'll just skip anything that isn't a local Rule.\n \n current_level = []\n level.references.each do |name_token|\n name = name_token.text\n if @rule_defs.member?(name) then\n current_level << name\n elsif @group_defs.member?(name) then\n @group_defs[name].member_references.each do |reference|\n if reference.is_a?(RuleReference) then\n unless (reference.symbol_name.grammar.exists? and reference.symbol_name.grammar != @name)\n current_level << reference.symbol_name.name\n end\n end\n end\n end\n end\n \n #\n # Everything in the current_level is linked to everything in the previous_level. If \n # there is no previous_level, then we'll register the names as points.\n \n if previous_level.empty? then\n root_sets << current_level\n current_level.each do |name|\n hierarchy.register_point( name )\n end\n else\n previous_level.each do |parent_name|\n current_level.each do |child_name|\n if hierarchy.would_cycle?(parent_name, child_name) then\n nyi( \"error handling for precedence cycle [#{parent_name}] to [#{child_name}]\" )\n else\n hierarchy.register( parent_name, child_name )\n end\n end\n end\n end\n \n previous_level = current_level\n end\n end\n \n \n #\n # Now, we want to integrate the prioritized rules back into the overall hierarchy, and we\n # want to preserve as much of the original ordering as possible. We do this by looking\n # within the prioritized rules at each layer and picking the highest priority for each\n # subtree, then inserting that layer at that index, shifting all the unprioritized rules \n # down.\n #\n # I think some examples might help explain what I mean.\n #\n # Rules: a, b, c, d, e, f, g, h | Rules: a, b, c, d, e, f\n # Order: 1, 2, 3, 4, 5, 6, 7, 8 | Order: 1, 2, 3, 4, 5, 6\n # |\n # Prec table 1: | Prec table 1:\n # d | b c\n # e g | e f\n # Prec table 2: | \n # h | \n # d | \n # g | \n # |\n # DAG layers and original order: | DAG layers and original order:\n # h 8 | b c 2 3\n # d 4 | e f 5 6\n # e g 5 7 |\n #\n # So, with these two examples, we want to reinsert the DAG elements back into the order\n # so that the DAG's hierarchy is respected, while -- as much as possible -- not disturbing \n # the original order. At each layer of the DAG, we look down the tree and find the highest\n # priority original position, and that is where we insert that layer. So\n #\n # insertion_points: | insertion_points:\n # 4, 4, 5 | 2, 5\n #\n # Now, obviously we can't insert two layers at the same point, so for the left example,\n # we'll need to adjust the second layer down a level, which will then cascade to the third\n # layer. 
And as there is no room between those insertion points, any rules originally at\n # levels 4, 5, or 6 must be shifted down as well.\n #\n # For the right example, notice that rule 4 doesn't need to be disturbed by the\n # the prioritization of either layer, as there is space between insertion points 2 and 5.\n # So we leave it in that position.\n #\n # insertion_points: | insertion_points:\n # 4, 5, 6 | 2, 5\n #\n # Finally, after integrating the default and prioritized rules, we get:\n # 1: a | 1: a\n # 2: b | 2: b c\n # 3: c | 3: d\n # 4: h | 4: e f\n # 5: d |\n # 6: e g |\n # 7: f |\n \n all_rules = []\n default_rules = []\n \n @rule_defs.each do |name, element|\n name = name.to_s\n \n if element.is_a?(Rule) then\n all_rules << name\n default_rules << name unless hierarchy.node?(name)\n end\n end\n\n\n #\n # Next we collect the raw insertion point data for the precedence data. But there's another \n # wrinkle. Up top, we merged all the precedence tables into one DAG, so we could find loops\n # and inter-relationships between the precedence tables. However, if some elements don't link\n # up, we don't want to prioritize all the independent trees to the same level -- we want to\n # preserve as much of the original ordering as possible. So we have to process each tree separately,\n # then interleave the data back together.\n \n insertion_point_sets = []\n insertion_layer_sets = []\n \n hierarchy.independent_trees(root_sets).each do |tree|\n insertion_points = []\n insertion_layers = []\n \n tree.each_layer_reverse do |layer|\n insertion_point = all_rules.length\n layer.each do |name|\n insertion_point = min( insertion_point, all_rules.index(name) )\n end\n \n insertion_points.unshift min(insertion_point, insertion_points.empty? ? insertion_point : insertion_points[0])\n insertion_layers.unshift layer\n end\n \n insertion_point_sets << insertion_points\n insertion_layer_sets << insertion_layers\n end\n \n \n #\n # We interleave the data sets back together. We want to do the interleaving by insertion_point.\n \n insertion_points = []\n insertion_layers = []\n\n until insertion_point_sets.empty? \n tops = insertion_point_sets.collect{|set| set[0]}\n min = tops.inject(all_rules.length){|current, aggregate| min(current, aggregate)}\n index = tops.index( min )\n \n insertion_points << insertion_point_sets[index].shift\n insertion_layers << insertion_layer_sets[index].shift\n \n if insertion_point_sets[index].empty? then\n insertion_point_sets.delete_at(index)\n insertion_layer_sets.delete_at(index)\n end\n end\n \n \n #\n # Next, we need to adjust the insertion points so that every one is unique.\n \n last_insertion_point = -1\n insertion_points.each_index do |index|\n insertion_points[index] = last_insertion_point + 1 if insertion_points[index] <= last_insertion_point\n last_insertion_point = insertion_points[index]\n end\n \n \n #\n # Finally, we have to integrate the two systems by setting the priority on each Rule. \n # We proceed one priority level at a time: if it is in the insertion_points list, we set \n # the priority for all rules on that level to that number; otherwise, we shift a name off\n # the default_rules list and set its priority instead.\n \n (default_rules.length + insertion_layers.length).times do |i|\n if insertion_points.member?(i) then\n insertion_layers[insertion_points.index(i)].each do |name|\n @rule_defs[name].priority = i\n end\n else\n default_rules.shift.each do |rule_name|\n @rule_defs[rule_name].priority = i\n end\n end\n end\n end",
"def each_involved_task(&block)\n return enum_for(__method__) unless block_given?\n\n trace.each_vertex(&block)\n end",
"def run!\n @graph.nodes.each do |node|\n if node.demand.nil?\n # Associated slots and edges will obviously be invalid if the node\n # has no demand, so we don't even bother testing them.\n add_error(node, :missing_demand)\n else\n validate_node(node)\n\n (node.slots.in.to_a + node.slots.out.to_a).each do |slot|\n validate_slot(slot) if slot.edges.any?\n end\n end\n end\n\n self\n end",
"def run!\n run_calculators!\n @graph\n end",
"def create_sub_graph(spec_edges, move_dist = nil)\n # declare locals\n time = Time.now\n m = $game_map\n map_width,map_height = m.width, m.height\n MoveUtils.init_ev_passables\n \n if !move_dist\n dist = spec_edges[:move] ||= TactBattleManager::Defaults::Move\n else\n dist = move_dist\n end\n \n x_low = x_high = y_low = y_high = nil\n if @source\n x_low, x_high = [@source.x-dist, 0].max, [@source.x+dist,map_width-1].min\n y_low, y_high = [@source.y-dist, 0].max, [@source.y+dist,map_height-1].min \n else\n x_low, x_high = 0, [dist,map_width-1].min\n y_low, y_high = 0, [dist,map_height-1].min\n end\n \n @jump_time = 0\n @optim_cache = {}\n x_low.upto(x_high).each do |x|\n y_low.upto(y_high).each do |y|\n expand_graph_adj(v=Vertex.new(x,y, m.terrain_tag(x,y)), spec_edges[:pass])\n add_adjacent_jumpables(v.x,v.y,dist,spec_edges)if spec_edges[:jump_length]>0\n end\n end\n self\n end",
"def each( &block ) # :yields: statement\n\t\treturn self.graph.each( &block )\n\tend",
"def assemble\n paths = []\n\n # Gather a list of nodes to try starting from\n starting_nodes = gather_starting_nodes\n log.info \"Found #{starting_nodes.length} nodes to attempt assembly from\"\n\n seen_nodes = Set.new\n progress = setup_progressbar starting_nodes.length\n\n # For each starting node, start the assembly process\n dummy_trail = Bio::Velvet::Graph::OrientedNodeTrail.new\n starting_nodes.each do |start_node|\n log.debug \"Trying to assemble from #{start_node.node_id}\" if log.debug?\n\n # If we've already covered this node, don't try it again\n if seen_nodes.include?([start_node.node_id, Bio::Velvet::Graph::OrientedNodeTrail::START_IS_FIRST]) or\n seen_nodes.include?([start_node.node_id, Bio::Velvet::Graph::OrientedNodeTrail::END_IS_FIRST])\n log.debug \"Already seen this node, not inspecting further\" if log.debug?\n next\n end\n\n # first attempt to go forward as far as possible, then reverse the path\n # and continue until cannot go farther\n reversed_path_forward = find_beginning_trail_from_node(start_node, seen_nodes)\n if reversed_path_forward.nil?\n log.debug \"Could not find forward path from this node, giving up\" if log.debug?\n next\n end\n # Have we already seen this path before?\n #TODO: add in recoherence logic here\n if seen_last_in_path?(reversed_path_forward, seen_nodes)\n log.debug \"Already seen the last node of the reversed path forward: #{reversed_path_forward.trail[-1].to_shorthand}, giving up\" if log.debug?\n next\n end\n # Assemble ahead again\n log.debug \"reversed_path_forward: #{reversed_path_forward.to_shorthand}\" if log.debug?\n path, just_visited_onodes = assemble_from(reversed_path_forward)\n\n # Remove nodes that have already been seen to prevent duplication\n log.debug \"Before removing already seen nodes the second time, path was #{path.length} nodes long\" if log.debug?\n remove_seen_nodes_from_end_of_path(path, seen_nodes)\n log.debug \"After removing already seen nodes the second time, path was #{path.length} nodes long\" if log.debug?\n\n # Add the now seen nodes to the list\n just_visited_onodes.each do |onode_settable|\n seen_nodes << onode_settable\n end\n\n # Record which nodes have already been visited, so they aren't visited again\n seen_nodes.merge just_visited_onodes\n unless progress.nil?\n if @assembly_options[:min_coverage_of_start_nodes]\n # TODO: this could be better by progress += (starting_nodes_just_visited.length)\n progress.increment\n else\n progress.progress += just_visited_onodes.length\n end\n end\n\n if path.length_in_bp < @assembly_options[:min_contig_size]\n log.debug \"Path length (#{path.length_in_bp}) less than min_contig_size (#{@assembly_options[:min_contig_size] }), not recording it\" if log.debug?\n next\n end\n log.debug \"Found a seemingly legitimate path #{path.to_shorthand}\" if log.debug?\n if block_given?\n yield path\n else\n paths.push path\n end\n end\n progress.finish unless progress.nil?\n\n return paths\n end",
"def test_topological_sort_cycle\n @dgraph = DirectedAcyclicGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n vertex_d = Vertex.new('d')\n\n exception = assert_raises GraphError do\n @dgraph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c).add_vertex(vertex_d)\n @dgraph.add_edge('a', 'b').add_edge('b', 'c').add_edge('c', 'a').add_edge('a', 'd');\n\n @dgraph.perform_topological_sort()\n end\n\n assert_equal('Topological sort could not be performed. Graph has at least one cycle', exception.message)\n end",
"def schedule(dependecny_hash)\n puts \"-------------------\"\n puts \"test case: #{dependecny_hash}\"\n graph = add_independent_nodes(dependecny_hash)\n \n result = []\n\n while !graph.empty? \n if (independent_task = fetch_independent_task(graph)).nil?\n fail CircularDependencyException\n end\n result << independent_task\n remove_dependencies(graph, independent_task)\n end \n\n puts \"returned: #{result}\"\n result\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find all the nodes in a basic block, given the first node. | def nodes_in_block(first_node)
# We're going to do a depth-first search of the graph from the first
# node, following control flow edges out, and global schedule edges in,
# and stopping when we find a node that ends a basic block such as a
# branch.
worklist = [first_node]
block = Set.new
until worklist.empty?
node = worklist.pop
if block.add?(node)
# We need to visit nodes that are anchored to this one.
node.inputs.edges.each do |i|
if i.input_name == :global_schedule
worklist.push i.from
end
end
# If this node isn't a branch, and it's either the first node or it
# isn't a merge, visit the nodes that follow it in control flow.
if node.op != :branch && (node == first_node || node.op != :merge)
node.outputs.edges.each do |o|
if o.control?
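# Skip the direct start -> finish control edge so the finish node isn't
# pulled into the start's block.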
if !(node.op == :start && o.to.op == :finish)
worklist.push o.to
end
end
end
end
end
end
block.to_a
end | [
"def select_nodes(&block); end",
"def select_nodes!(&block); end",
"def find_all_blocks(node, block_name)\n return if node.nil?\n\n blocks = node.each_descendant(:block).select { |block_node| block_name == block_node.method_name }\n return blocks unless block_given?\n\n blocks.each do |block_node|\n offending_node(block_node)\n yield block_node\n end\n end",
"def find_blocks(node, block_name)\n return if node.nil?\n\n node.each_child_node(:block).select { |block_node| block_name == block_node.method_name }\n end",
"def nodes(&block)\n if block_given?\n instance_eval(&block)\n else\n @nodes\n end\n end",
"def find_first_recursive(&block) # :yields: node\n each_recursive {|node|\n return node if block.call(node)\n }\n return nil\n end",
"def find_basic_block_starts\n block_starts = Set.new([0])\n\n insns.each do |index, insn|\n branch_targets = insn.branch_targets\n\n if branch_targets.any?\n branch_targets.each do |branch_target|\n block_starts.add(labels[branch_target])\n end\n\n block_starts.add(index + insn.length) if insn.falls_through?\n end\n end\n\n block_starts.to_a.sort\n end",
"def nodelist\n @blocks\n end",
"def each_node(&block)\n @nodes.each &block\n end",
"def find_nodes(*ops)\n found = []\n visit_nodes do |node|\n if ops.empty? || ops.include?(node.op)\n if !block_given? || yield(node)\n found.push node\n end\n end\n end\n found\n end",
"def by_basic_block(bb, level)\n assert(\"RelationNodeList#by_basic_block: level != :src,:dst\") { [:src,:dst].include?(level) }\n lookup(@basic_block_index[level], bb, \"#{level}-block\", false) || []\n end",
"def traverse_example_group_trees_until(&block); end",
"def visit_block(node); end",
"def all_nodes\n nodes = []\n visit_nodes do |node|\n nodes.push node\n end\n nodes\n end",
"def sources &block\n if block\n @nodes.each { |k, v| block.call k if v[:indegree] == 0 }\n else\n @nodes.select { |_, v| v[:indegree] == 0 }.map &:first\n end\n end",
"def block_nodes(node)\n end_tag_found = false\n tag_node = node\n\n # we have to collect all nodes until the we find the end tag\n nodes = [].tap do |result|\n check_tag_soft_return(node)\n while (node = node.next_sibling)\n if node.content =~ end_tag_re\n end_tag_found = true\n check_tag_soft_return(node)\n node.remove\n break\n end\n node = yield(node) if block_given?\n result << node\n end\n end\n\n no_end_tag_for(tag_node) unless end_tag_found\n\n nodes\n end",
"def find_basic_block_starts; end",
"def find_blocks(node, blocks={})\n if node.respond_to?(:nodelist) && !node.nodelist.nil?\n node.nodelist.inject(blocks) do |b, node|\n if node.is_a?(LiquidBlocks::Block)\n b[node.name] = node\n end\n find_blocks(node, b)\n\n b\n end\n end\n\n blocks\n end",
"def select_nodes(&block)\r\n @map.select(&block)\r\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
A node is locally scheduled if it's fixed or we have locally scheduled it. | def locally_scheduled?(node)
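# Fixed nodes and immediates never need explicit scheduling; anything else
# counts as scheduled once it has a local_schedule output edge.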
node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:local_schedule)
end | [
"def globally_scheduled?(node)\n node.fixed? || node.op == :immediate || node.outputs.output_names.include?(:global_schedule)\n end",
"def ready_to_schedule?(node)\n # Ready to globally schedule\n\n node.outputs.to_nodes.all? do |i|\n globally_scheduled?(i)\n end\n end",
"def scheduled?\n @current_state == Psc::ScheduledActivity::SCHEDULED\n end",
"def scheduled?(resource)\n self.ignoreschedules or resource_harness.scheduled?(resource_status(resource), resource)\n end",
"def scheduled?(resource)\n self.ignoreschedules or resource.scheduled?\n end",
"def update_scheduled_status\n self._scheduled = !self.user_id.blank? && !self.milestone_id.blank? && !self.priority.blank?\n true\n end",
"def got_reschedule_condition?\n Time.zone.now >= change_reschedule_after\n end",
"def global_schedule(graph)\n # Create a work list of the floating nodes.\n\n to_schedule = graph.all_nodes.select {|n| n.floating? && n.op != :immediate }\n\n # Keep going until the work list is empty.\n\n until to_schedule.empty?\n node = to_schedule.shift\n\n # Are we ready to schedule this node?\n\n if ready_to_schedule?(node)\n # Find candidates to anchor this node to.\n\n candidates = schedule_candidates(graph, node)\n\n # If there aren't any then we're stuck!\n\n raise 'stuck' if candidates.empty?\n\n # Sort the candidates and take the first one to get the best.\n\n best_candidate = sort_candidates(candidates).first\n\n # Add a global schedule edge.\n\n node.output_to :global_schedule, best_candidate\n else\n # If we aren't ready to schedule this node, try it again later.\n\n to_schedule.push node\n end\n end\n end",
"def schedule_public?\n event && (\n !event.current? || # All past schedules are public\n Settings.show_schedule?) # Current event schedule has explicit go-live flag\n end",
"def scheduled?\n event.present?\n end",
"def update_schedule_status\n ### if job is present unschedule it and create new job ###\n jobs = $scheduler.jobs(:tag => self.id)\n $scheduler.unschedule(jobs.first.id) if jobs.present?\n ### schedule job wehn schedule status is true ###\n if schedule_status\n $scheduler.every rate, :tags => \"#{self.id}\" do\n self.send_to_florincoin(true)\n end\n end\n end",
"def local_schedule(graph)\n # Find all basic blocks and locally schedule them.\n\n graph.all_nodes.each do |node|\n if node.begins_block?\n locally_schedule_block node\n end\n end\n end",
"def durable?\n @node.time_now >= @time_durable\n end",
"def check_scheduled_at\n self.requeue if self.scheduled_at_changed?\n end",
"def periodic_task?(request)\n !request.headers['X-Aws-Sqsd-Taskname'].nil? && request.headers['X-Aws-Sqsd-Taskname'].present?\n end",
"def in_scheduled_maintenance?\n @redis.exists(\"#{@key}:scheduled_maintenance\")\n end",
"def process_node(node)\n debug \"Process node: #{node}\"\n node.poll\n return unless node.online?\n ready_task = node.ready_task\n return unless ready_task\n ready_task.run\n end",
"def scheduled?(schedulable, starts, ends)\n puts \"This #{schedulable.class} is not scheduled between #{starts} and #{ends}\"\n end",
"def unscheduled?; end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Linearize a graph into a single linear sequence of operations with jumps and branches. | def linearize(graph)
# The basic blocks.
blocks = []
# Details of the basic block that contains the finish operation, which
# won't be added to the list of basic blocks until the end.
first_node_last_block = nil
last_block = nil
# Two maps that help us map between nodes and the names of the blocks
# that they go into, and the merge instruction indices and the blocks
# they're coming from.
first_node_to_block_index = {}
merge_index_to_first_node = {}
# Look at each node that begins a basic block.
basic_block_starters = graph.all_nodes.select { |n| n.begins_block? }
last_basic_block_starter = basic_block_starters.select { |s| nodes_in_block(s).any? { |n| n.op == :finish } }.first
basic_block_starters.delete last_basic_block_starter
basic_block_starters.push last_basic_block_starter
basic_block_starters.each do |node|
original_first_node = node
first_node = first_in_block(original_first_node, nodes_in_block(original_first_node))
# We're going to create an array of operations for this basic
# block.
block = []
next_to_last_control = nil
# Follow the local sequence.
node = first_node
begin
# We don't want to include operations that are just there to form
# branches or anchor points in the graph such as start and merge.
unless [:start, :merge].include?(node.op)
op = node.op
# We rename finish to return to match the switch from the
# declarative style of the graph to the imperative style
# of the list of operations.
op = :return if op == :finish
# The instruction begins with the operation.
insn = [op]
# Then any constant values or similar.
[:line, :n, :value].each do |p|
insn.push node.props[p] if node.props.has_key?(p)
end
# Then any input registers.
node.inputs.with_input_name(:value).from_nodes.each do |input_values|
insn.push input_values.props[:register]
end
# Phi instructions need pairs of source registers with the blocks they came from.
if node.op == :phi
node.inputs.edges.each do |input|
if input.input_name =~ /^value\((.+)\)$/
n = $1.to_i
insn.push n
insn.push input.from.props[:register]
end
end
# Elide phi instructions if register allocation has run correctly and values are
# already in the correct registers.
insn = nil if insn.drop(2).select.with_index{ |_,i| i.even? }.uniq.size == 1
end
# Send instructions and lowered equivalents need the arguments.
if [:send, :call_managed, :int64_add, :int64_sub, :int64_imul, :int64_and, :int64_shift_left, :int64_shift_right].include?(node.op)
insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]
if node.op == :send
insn.push node.props[:name]
elsif node.op == :call_managed
insn.push node.inputs.with_input_name(:name).from_node.props[:register]
end
node.props[:argc].times do |n|
arg = node.inputs.with_input_name(:"arg(#{n})").from_node
if arg.op == :immediate
insn.push arg.props[:value]
else
insn.push arg.props[:register]
end
end
end
# Then the target register if the instruction has one.
insn.push node.props[:register] if insn && (node.produces_value? || node.op == :move)
# If it's a branch then the target basic blocks and the test.
if node.op == :branch
insn.push node.inputs.with_input_name(:condition).from_node.props[:register]
[:true, :false].each do |branch|
target = node.outputs.with_output_name(branch).to_node
raise unless target
insn.push target
end
if node.props[:test]
insn.push node.props[:test]
end
end
# Send instructions and lowered managed calls need the list of live registers.
if [:send, :call_managed].include?(node.op)
insn.push node.props[:live_registers]
end
# Guards are like branches, but only have one side.
if node.op == :guard
insn.push node.inputs.with_input_name(:condition).from_node.props[:register]
if node.props[:test]
insn.push node.props[:test]
end
end
# Kind instructions need the kind.
if node.op == :kind_is?
insn.push node.props[:kind]
end
# Frame states need the instructions, the ip, and to know where values are for
# the receiver, the arguments, and the stack.
if node.op == :deopt_map
insn.push node.props[:insns]
insn.push node.props[:ip]
insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]
insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('arg(') }.map { |e|
/arg\((\d+)\)/ =~ e.input_name.to_s
[$1.to_i, e.from.props[:register]]
}.sort_by { |pair| pair.first }.map { |pair| pair.last }
insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('stack(') }.map { |e|
/stack\((\d+)\)/ =~ e.input_name.to_s
[$1.to_i, e.from.props[:register]]
}.sort_by { |pair| pair.first }.map { |pair| pair.last }
end
# Add the instruction to the block.
block.push insn if insn
end
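# Keep track of the last node in this block with a control output - it tells
# us where control flows next and which merge input this block feeds.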
next_to_last_control = node if node.has_control_output?
# Follow the local schedule edge to the next node.
node = node.outputs.with_output_name(:local_schedule).to_nodes.first
end while node && node.op != :merge
# Empty blocks cause problems elsewhere - it's easier to just have a nop
# in them. Really, we should remove empty blocks by modifying the
# instruction that jumps here to jump to wherever this leads to.
if block.empty?
block.push [:nop]
end
# If the last node is a merge, we need to remember which merge index this is.
if node && node.op == :merge
next_to_last_control.outputs.with_output_name(:control).edges.first.input_name =~ /^control\((.+)\)$/
n = $1.to_i
merge_index_to_first_node[n] = first_node
end
# Add a jump instruction if this block was going to just flow into the next
# one - we'll remove it later if the next block immediately follows anyway
# and we can simply fall through.
unless [:return, :branch].include?(block.last.first)
begin
block.push [:jump, next_to_last_control.outputs.with_output_name(:control).to_node]
rescue
block.push [:jump, :broken]
end
end
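# Remember which block index both the original starter node and the resolved
# first node map to, then store the finished block.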
first_node_to_block_index[original_first_node] = blocks.size
first_node_to_block_index[first_node] = blocks.size
blocks.push block
end
# Record the number that this basic block has and then add it to the list of basic blocks.
first_node_to_block_index[first_node_last_block] = blocks.size
# Go back through the basic blocks and update some references that were to things that
# hadn't been decided yet.
blocks.each do |block|
block.each do |insn|
insn.map! do |e|
# If part of an instruction references a basic block, turn that into the index of
# the basic block instead.
if e.is_a?(IR::Node)
:"block#{first_node_to_block_index[e]}"
else
e
end
end
end
end
# Go back through the basic blocks and change how the branch instructions out of them
# work.
blocks.each_with_index do |block, n|
next_block = :"block#{n + 1}"
last = block.last
if last == [:jump, next_block]
# A jump that just goes to the next block can be removed and left to fall through.
block.pop
elsif last.first == :branch && last[3] == next_block
# A branch where the else goes to the next block can branch only when true.
block.pop
block.push [:branch_if, last[1], last[2], *last.drop(4)]
elsif last.first == :branch && last[2] == next_block
# A branch where the true side goes to the next block can be inverted to
# branch only when the condition is false.
block.pop
test = last.drop(4)
block.push [:branch_unless, last[1], last[3], *test]
elsif last.first == :branch
# A branch that doesn't go to the next block at all can be a branch if true
# and then fallthrough to a new jump instruction.
block.pop
block.push [:branch_if, last[1], last[2], *last.drop(4)]
block.push [:jump, last[3]]
end
end
blocks
end | [
"def sequence_nodes(graph)\n # Note that this algorithm is very wasteful! It allocates two sides of a branch\n # the same sequence numbers. This means that to the linear scan values on both\n # sides of the branch and internal to those branches appear to be live at the\n # same time and they won't use the same registers. I think we're supposed to be\n # sequencing one side of the branch at a time, and starting the right side\n # with the max sequence number of the left side.\n\n # Create a worklist of nodes to sequence.\n\n to_sequence = graph.all_nodes\n\n until to_sequence.empty?\n node = to_sequence.shift\n\n # If all this node's inputs have already been sequenced.\n\n if node.inputs.from_nodes.all? { |i| i.props[:register_sequence] }\n # Give this node an sequence number at least one higher than all others.\n input_sequences = node.inputs.from_nodes.map { |i| i.props[:register_sequence] }\n node.props[:register_sequence] = if input_sequences.empty? then 0 else input_sequences.max + 1 end\n next\n end\n\n # Not all inputs were sequenced - put this node back on the list and try again later.\n\n to_sequence.push node\n end\n end",
"def mipsLowerSimpleBranchOps(list)\n newList = []\n list.each {\n | node |\n if node.is_a? Instruction\n annotation = node.annotation\n case node.opcode\n when /^b(addi|subi|ori|addp)/\n op = $1\n bc = $~.post_match\n branch = \"b\" + bc\n\n case op\n when \"addi\", \"addp\"\n op = \"addi\"\n when \"subi\"\n op = \"subi\"\n when \"ori\"\n op = \"ori\"\n end\n\n if bc == \"o\"\n case op\n when \"addi\"\n # addu $s0, $s1, $s2\n # xor $t0, $s1, $s2\n # blt $t0, $zero, no overflow\n # xor $t0, $s0, $s1\n # blt $t0, $zero, overflow\n # no overflow:\n #\n tr = Tmp.new(node.codeOrigin, :gpr)\n tmp = Tmp.new(node.codeOrigin, :gpr)\n noFlow = LocalLabel.unique(\"noflow\")\n noFlowRef = LocalLabelReference.new(node.codeOrigin, noFlow)\n newList << Instruction.new(node.codeOrigin, op, [node.operands[0], node.operands[1], tr], annotation)\n newList << Instruction.new(node.codeOrigin, \"xori\", [node.operands[0], node.operands[1], tmp])\n newList << Instruction.new(node.codeOrigin, \"bilt\", [tmp, MIPS_ZERO_REG, noFlowRef])\n newList << Instruction.new(node.codeOrigin, \"xori\", [tr, node.operands[0], tmp])\n newList << Instruction.new(node.codeOrigin, \"bilt\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n newList << noFlow\n newList << Instruction.new(node.codeOrigin, \"move\", [tr, node.operands[1]])\n when \"subi\"\n # subu $s0, $s1, $s2\n # xor $t0, $s1, $s2\n # bge $t0, $zero, no overflow\n # xor $t0, $s0, $s1\n # blt $t0, $zero, overflow\n # no overflow:\n #\n tr = Tmp.new(node.codeOrigin, :gpr)\n tmp = Tmp.new(node.codeOrigin, :gpr)\n noFlow = LocalLabel.unique(\"noflow\")\n noFlowRef = LocalLabelReference.new(node.codeOrigin, noFlow)\n newList << Instruction.new(node.codeOrigin, op, [node.operands[1], node.operands[0], tr], annotation)\n newList << Instruction.new(node.codeOrigin, \"xori\", [node.operands[1], node.operands[0], tmp])\n newList << Instruction.new(node.codeOrigin, \"bigteq\", [tmp, MIPS_ZERO_REG, noFlowRef])\n newList << Instruction.new(node.codeOrigin, \"xori\", [tr, node.operands[1], tmp])\n newList << Instruction.new(node.codeOrigin, \"bilt\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n newList << noFlow\n newList << Instruction.new(node.codeOrigin, \"move\", [tr, node.operands[1]])\n when \"ori\"\n # no ovwerflow at ori\n newList << Instruction.new(node.codeOrigin, op, node.operands[0..1], annotation)\n end\n else\n if node.operands[1].is_a? Address\n addr = node.operands[1]\n tr = Tmp.new(node.codeOrigin, :gpr)\n newList << Instruction.new(node.codeOrigin, \"loadp\", [addr, tr], annotation)\n newList << Instruction.new(node.codeOrigin, op, [node.operands[0], tr])\n newList << Instruction.new(node.codeOrigin, \"storep\", [tr, addr])\n else\n tr = node.operands[1]\n newList << Instruction.new(node.codeOrigin, op, node.operands[0..-2], annotation)\n end\n newList << Instruction.new(node.codeOrigin, branch, [tr, MIPS_ZERO_REG, node.operands[-1]])\n end\n when \"bia\", \"bpa\", \"bba\"\n tmp = Tmp.new(node.codeOrigin, :gpr)\n comp = node.opcode[1] == ?b ? \"sltub\" : \"sltu\"\n newList << Instruction.new(node.codeOrigin, comp, [tmp, node.operands[1], node.operands[0]], annotation)\n newList << Instruction.new(node.codeOrigin, \"bnz\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n when \"biaeq\", \"bpaeq\", \"bbaeq\"\n tmp = Tmp.new(node.codeOrigin, :gpr)\n comp = node.opcode[1] == ?b ? 
\"sltub\" : \"sltu\"\n newList << Instruction.new(node.codeOrigin, comp, [tmp, node.operands[0], node.operands[1]], annotation)\n newList << Instruction.new(node.codeOrigin, \"bz\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n when \"bib\", \"bpb\", \"bbb\"\n tmp = Tmp.new(node.codeOrigin, :gpr)\n comp = node.opcode[1] == ?b ? \"sltub\" : \"sltu\"\n newList << Instruction.new(node.codeOrigin, comp, [tmp, node.operands[0], node.operands[1]], annotation)\n newList << Instruction.new(node.codeOrigin, \"bnz\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n when \"bibeq\", \"bpbeq\", \"bbbeq\"\n tmp = Tmp.new(node.codeOrigin, :gpr)\n comp = node.opcode[1] == ?b ? \"sltub\" : \"sltu\"\n newList << Instruction.new(node.codeOrigin, comp, [tmp, node.operands[1], node.operands[0]], annotation)\n newList << Instruction.new(node.codeOrigin, \"bz\", [tmp, MIPS_ZERO_REG, node.operands[2]])\n when /^bt(i|p|b)/\n lowerMIPSCondBranch(newList, \"b\" + $~.post_match + $1, node)\n else\n newList << node\n end\n else\n newList << node\n end\n }\n newList\nend",
"def buildGraph(smile)\n branchArr = []\n iterator = nil\n\n # Loop until end of string, uses a simulated stack to store branches in order and number\n smile.split(\"\").each do |i|\n case i\n # If branch is found \"(\", push branch return pointer and continue\n when \"(\"\n branchArr.push(iterator)\n # If end of branch is found \")\", return to \"popped\" branch return pointer\n when \")\"\n iterator = branchArr.pop\n # If simply another carbon, just add the new vertex to the parent node, increment and assign node value\n when \"C\"\n if iterator != nil\n temp = addVertex(iterator)\n else\n temp = addVertex\n end\n iterator = temp\n end\n end\n\n # Refactor and Reverse (if needed)\n refactorGraph(@head,@tail)\n\n if reverseGraph(@head, @tail)\n iterator = @head\n while iterator.next[0] != nil\n # Flip locant\n iterator.locant = (maxLength - iterator.locant + 1)\n iterator = iterator.next[0]\n end\n end\n\n end",
"def riscLowerHardBranchOps(list)\n newList = []\n list.each {\n | node |\n if node.is_a? Instruction and node.opcode == \"bmulio\"\n tmp1 = Tmp.new(node.codeOrigin, :gpr)\n tmp2 = Tmp.new(node.codeOrigin, :gpr)\n newList << Instruction.new(node.codeOrigin, \"smulli\", [node.operands[0], node.operands[1], node.operands[1], tmp1], node.annotation)\n newList << Instruction.new(node.codeOrigin, \"rshifti\", [node.operands[-2], Immediate.new(node.codeOrigin, 31), tmp2])\n newList << Instruction.new(node.codeOrigin, \"bineq\", [tmp1, tmp2, node.operands[-1]])\n else\n newList << node\n end\n }\n newList\nend",
"def add_bitcode_constraints(machine_function)\n return unless @pml.relation_graphs.has_named?(machine_function.name, :dst)\n rg = @pml.relation_graphs.by_name(machine_function.name, :dst)\n return unless rg.accept?(@options)\n bitcode_function = rg.get_function(:src)\n bitcode_function.blocks.each { |block|\n @bc_model.add_block_constraint(block)\n }\n # Our LCTES 2013 paper describes 5 sets of constraints referenced below\n # map from src/dst edge to set of corresponding relation edges (constraint set (3) and (4))\n rg_edges_of_edge = { :src => {}, :dst => {} }\n # map from progress node to set of outgoing src/dst edges (constraint set (5))\n rg_progress_edges = { }\n each_relation_edge(rg) do |edge|\n rg_level = relation_graph_level(edge.level.to_s)\n source_block = edge.source.get_block(rg_level)\n target_block = (edge.target.type == :exit) ? :exit : (edge.target.get_block(rg_level))\n\n assert(\"Bad RG: #{edge}\") { source_block && target_block }\n # (3),(4)\n (rg_edges_of_edge[rg_level][IPETEdge.new(source_block,target_block,edge.level)] ||=[]).push(edge)\n # (5)\n if edge.source.type == :entry || edge.source.type == :progress\n rg_progress_edges[edge.source] ||= { :src => [], :dst => [] }\n rg_progress_edges[edge.source][rg_level].push(edge)\n end\n end\n # (3),(4)\n rg_edges_of_edge.each do |_level,edgemap|\n edgemap.each do |edge,rg_edges|\n lhs = rg_edges.map { |rge| [rge,1] } + [[edge,-1]]\n @ilp.add_constraint(lhs, \"equal\", 0, \"rg_edge_#{edge.qname}\", :structural)\n end\n end\n # (5)\n rg_progress_edges.each do |progress_node, edges|\n lhs = edges[:src].map { |e| [e,1] } + edges[:dst].map { |e| [e,-1] }\n @ilp.add_constraint(lhs, \"equal\", 0, \"rg_progress_#{progress_node.qname}\", :structural)\n end\n end",
"def exec ops = []\n reset\n @ops = ops\n until @op_pointer == @ops.length\n instruction = @ops[@op_pointer]\n raise \"No such instruction '#{instruction}'\" unless @operators.include? instruction\n @operators[instruction].call\n normalize\n @op_pointer += 1\n end\n raise \"unclosed [\" unless @loop_stack.empty?\n end",
"def dag(nodes)\n dg = RGL::DirectedAdjacencyGraph.new\n nodes.each{|n|\n dg.add_vertex(n)\n group(n).dependencies.each{|m, block|\n dg.add_vertex(m)\n dg.add_edge(m, n)\n }\n }\n\n raise Error, \"Your dependency graph is not acyclic!\" unless dg.acyclic?\n dg\n end",
"def detailed_transition_graph\n @dtgraph ||= each_with_object(Graph::Labeled.new(size)) do |s, graph|\n s.gotos.each do |tok, goto|\n path = if tok.terminal?\n [tok]\n else\n actions_to_reach_reduce(s.ident, tok)\n end\n graph.add_vector(s.ident, goto.to_state.ident, path)\n end\n end.tap { |graph| graph.start = 0 }.freeze\n end",
"def minilang(operations)\n register = 0\n stack = []\n operations.split.each do |op|\n case op\n when 'PUSH' then stack << register\n when 'ADD' then register += stack.pop\n when 'SUB' then register -= stack.pop\n when 'MULT' then register *= stack.pop\n when 'DIV' then register /= stack.pop\n when 'MOD' then register %= stack.pop\n when 'POP' then register = stack.pop\n when 'PRINT' then p register\n else\n register = op.to_i\n end\n end\nend",
"def riscLowerSimpleBranchOps(list)\n newList = []\n list.each {\n | node |\n if node.is_a? Instruction\n annotation = node.annotation\n case node.opcode\n when /^b(addi|subi|ori|addp)/\n op = $1\n branch = \"b\" + $~.post_match\n \n case op\n when \"addi\"\n op = \"addis\"\n when \"addp\"\n op = \"addps\"\n when \"subi\"\n op = \"subis\"\n when \"ori\"\n op = \"oris\"\n end\n \n newList << Instruction.new(node.codeOrigin, op, node.operands[0..-2], annotation)\n newList << Instruction.new(node.codeOrigin, branch, [node.operands[-1]])\n when 'bmulis', 'bmulz', 'bmulnz'\n condition = $~.post_match\n newList << Instruction.new(node.codeOrigin, \"muli\", node.operands[0..-2], annotation)\n newList << Instruction.new(node.codeOrigin, \"bti\" + condition, [node.operands[-2], node.operands[-1]])\n else\n newList << node\n end\n else\n newList << node\n end\n }\n newList\nend",
"def build_jump_references\n $l.debug \"Building jump references and depths\"\n depth = 0\n \n @t.each_index{|i|\n t = @t[i]\n\n t.payload[:token_id] = i\n t.payload[:depth] = depth\n #puts \"#{t}: #{depth} #{t.payload}\"\n \n\n\n # Set token vals\n if(t.type == Instruction::LS) then\n depth += 1\n t.payload[:depth] += 1 #ensure loops are of equal depth\n t.payload[:jump] = scan(1, Instruction::LS, Instruction::LE, i)\n end\n\n if(t.type == Instruction::LE) then\n depth -= 1\n t.payload[:jump] = scan(-1, Instruction::LE, Instruction::LS, i)\n end\n\n # RLE repeatable chars for sum/loop logic\n if([Instruction::IDP, Instruction::DDP, Instruction::IDV, Instruction::DDV, Instruction::OC, Instruction::IC].include? t.type) then\n scan_rle(1, t.type, i)\n end\n }\n raise \"Loop start/end does not match (loop depth at end: #{depth}).\" if depth != 0 \n\n return @t\n end",
"def translate_states_and_rules(states)\n # new rules, one state(set of states) to all character\n rules = states.flat_map { |state| rules_for state }\n # set of states that a state can reach(nfa.next_state)\n # treated as a new state\n more_states = rules.map(&:next_state).to_set\n # if true, then all states rule had been added to rules\n if more_states.subset? states\n [states, rules]\n else\n translate_states_and_rules(states + more_states)\n end\n end",
"def branch_to(label)\n cycle(microcode: \"jump #{label}\")\n end",
"def assemble\n paths = []\n\n # Gather a list of nodes to try starting from\n starting_nodes = gather_starting_nodes\n log.info \"Found #{starting_nodes.length} nodes to attempt assembly from\"\n\n seen_nodes = Set.new\n progress = setup_progressbar starting_nodes.length\n\n # For each starting node, start the assembly process\n dummy_trail = Bio::Velvet::Graph::OrientedNodeTrail.new\n starting_nodes.each do |start_node|\n log.debug \"Trying to assemble from #{start_node.node_id}\" if log.debug?\n\n # If we've already covered this node, don't try it again\n if seen_nodes.include?([start_node.node_id, Bio::Velvet::Graph::OrientedNodeTrail::START_IS_FIRST]) or\n seen_nodes.include?([start_node.node_id, Bio::Velvet::Graph::OrientedNodeTrail::END_IS_FIRST])\n log.debug \"Already seen this node, not inspecting further\" if log.debug?\n next\n end\n\n # first attempt to go forward as far as possible, then reverse the path\n # and continue until cannot go farther\n reversed_path_forward = find_beginning_trail_from_node(start_node, seen_nodes)\n if reversed_path_forward.nil?\n log.debug \"Could not find forward path from this node, giving up\" if log.debug?\n next\n end\n # Have we already seen this path before?\n #TODO: add in recoherence logic here\n if seen_last_in_path?(reversed_path_forward, seen_nodes)\n log.debug \"Already seen the last node of the reversed path forward: #{reversed_path_forward.trail[-1].to_shorthand}, giving up\" if log.debug?\n next\n end\n # Assemble ahead again\n log.debug \"reversed_path_forward: #{reversed_path_forward.to_shorthand}\" if log.debug?\n path, just_visited_onodes = assemble_from(reversed_path_forward)\n\n # Remove nodes that have already been seen to prevent duplication\n log.debug \"Before removing already seen nodes the second time, path was #{path.length} nodes long\" if log.debug?\n remove_seen_nodes_from_end_of_path(path, seen_nodes)\n log.debug \"After removing already seen nodes the second time, path was #{path.length} nodes long\" if log.debug?\n\n # Add the now seen nodes to the list\n just_visited_onodes.each do |onode_settable|\n seen_nodes << onode_settable\n end\n\n # Record which nodes have already been visited, so they aren't visited again\n seen_nodes.merge just_visited_onodes\n unless progress.nil?\n if @assembly_options[:min_coverage_of_start_nodes]\n # TODO: this could be better by progress += (starting_nodes_just_visited.length)\n progress.increment\n else\n progress.progress += just_visited_onodes.length\n end\n end\n\n if path.length_in_bp < @assembly_options[:min_contig_size]\n log.debug \"Path length (#{path.length_in_bp}) less than min_contig_size (#{@assembly_options[:min_contig_size] }), not recording it\" if log.debug?\n next\n end\n log.debug \"Found a seemingly legitimate path #{path.to_shorthand}\" if log.debug?\n if block_given?\n yield path\n else\n paths.push path\n end\n end\n progress.finish unless progress.nil?\n\n return paths\n end",
"def hamiltonian_cycles_dynamic_programming(operational_limit=nil)\n stack = DS::Stack.new\n return [] if @vertices.empty?\n\n origin_vertex = @vertices.to_a[0]\n hamiltonians = []\n num_operations = 0\n\n # This hash keeps track of subproblems that have already been\n # solved. ie is there a path through vertices that ends in the\n # endpoint\n # Hash of [vertex_set,endpoint] => Array of Path objects.\n # If no path is found, then the key is false\n # The endpoint is not stored in the vertex set to make the programming\n # easier.\n dp_cache = {}\n\n # First problem is the whole problem. We get the Hamiltonian paths,\n # and then after reject those paths that are not cycles.\n initial_vertex_set = Set.new(@vertices.reject{|v| v==origin_vertex})\n initial_problem = [initial_vertex_set, origin_vertex]\n stack.push initial_problem\n\n while next_problem = stack.pop\n vertices = next_problem[0]\n destination = next_problem[1]\n\n if dp_cache[next_problem]\n # No need to do anything - problem already solved\n\n elsif vertices.empty?\n # The bottom of the problem. Only return a path\n # if there is an edge between the destination and the origin\n # node\n if edge?(destination, origin_vertex)\n path = Path.new [destination]\n dp_cache[next_problem] = [path]\n else\n # Reached dead end\n dp_cache[next_problem] = false\n end\n\n else\n # This is an unsolved problem and there are at least 2 vertices in the vertex set.\n # Work out which vertices in the set are neighbours\n neighs = Set.new neighbours(destination)\n possibilities = neighs.intersection(vertices)\n if possibilities.length > 0\n # There is still the possibility to go further into this unsolved problem\n subproblems_unsolved = []\n subproblems = []\n\n possibilities.each do |new_destination|\n new_vertex_set = Set.new(vertices.to_a.reject{|v| v==new_destination})\n subproblem = [new_vertex_set, new_destination]\n\n subproblems.push subproblem\n if !dp_cache.key?(subproblem)\n subproblems_unsolved.push subproblem\n end\n end\n\n # if solved all the subproblems, then we can make a decision about this problem\n if subproblems_unsolved.empty?\n answers = []\n subproblems.each do |problem|\n paths = dp_cache[problem]\n if paths == false\n # Nothing to see here\n else\n # Add the found sub-paths to the set of answers\n paths.each do |path|\n answers.push Path.new(path+[destination])\n end\n end\n end\n\n if answers.empty?\n # No paths have been found here\n dp_cache[next_problem] = false\n else\n dp_cache[next_problem] = answers\n end\n else\n # More problems to be solved before a decision can be made\n stack.push next_problem #We have only delayed solving this problem, need to keep going in the future\n subproblems_unsolved.each do |prob|\n unless operational_limit.nil?\n num_operations += 1\n raise OperationalLimitReachedException if num_operations > operational_limit\n end\n stack.push prob\n end\n end\n\n else\n # No neighbours in the set, so reached a dead end, can go no further\n dp_cache[next_problem] = false\n end\n end\n end\n\n if block_given?\n dp_cache[initial_problem].each do |hpath|\n yield hpath\n end\n return\n else\n return dp_cache[initial_problem]\n end\n end",
"def sh4LowerSimpleBranchOps(list)\n newList = []\n list.each {\n | node |\n if node.is_a? Instruction\n annotation = node.annotation\n case node.opcode\n when /^b(addi|subi|ori|addp)/\n op = $1\n bc = $~.post_match\n\n case op\n when \"addi\", \"addp\"\n op = \"addi\"\n when \"subi\", \"subp\"\n op = \"subi\"\n when \"ori\", \"orp\"\n op = \"ori\"\n end\n\n if bc == \"s\"\n raise \"Invalid operands number (#{node.operands.size})\" unless node.operands.size == 3\n if node.operands[1].is_a? RegisterID or node.operands[1].is_a? SpecialRegister\n newList << Instruction.new(node.codeOrigin, op, node.operands[0..1])\n newList << Instruction.new(node.codeOrigin, \"bs\", node.operands[1..2])\n else\n tmpVal = Tmp.new(node.codeOrigin, :gpr)\n tmpPtr = Tmp.new(node.codeOrigin, :gpr)\n addr = Address.new(node.codeOrigin, tmpPtr, Immediate.new(node.codeOrigin, 0))\n newList << Instruction.new(node.codeOrigin, \"leap\", [node.operands[1], tmpPtr])\n newList << Instruction.new(node.codeOrigin, \"loadi\", [addr, tmpVal])\n newList << Instruction.new(node.codeOrigin, op, [node.operands[0], tmpVal])\n newList << Instruction.new(node.codeOrigin, \"storei\", [tmpVal, addr])\n newList << Instruction.new(node.codeOrigin, \"bs\", [tmpVal, node.operands[2]])\n end\n elsif bc == \"nz\"\n raise \"Invalid operands number (#{node.operands.size})\" unless node.operands.size == 3\n newList << Instruction.new(node.codeOrigin, op, node.operands[0..1])\n newList << Instruction.new(node.codeOrigin, \"btinz\", node.operands[1..2])\n else\n newList << node\n end\n when \"bmulio\", \"bmulpo\"\n raise \"Invalid operands number (#{node.operands.size})\" unless node.operands.size == 3\n tmp1 = Tmp.new(node.codeOrigin, :gpr)\n tmp2 = Tmp.new(node.codeOrigin, :gpr)\n newList << Instruction.new(node.codeOrigin, node.opcode, [tmp1, tmp2].concat(node.operands))\n else\n newList << node\n end\n else\n newList << node\n end\n }\n newList\nend",
"def eval\n @nodes.each do |node| \n node.propagate\n end\n @nodes.each do |node|\n node.update_state \n end\n increment_time \n end",
"def build\n GraphUtilities.alternation(@left.build, @right.build)\n end",
"def riscLowerHardBranchOps64(list)\n newList = []\n list.each {\n | node |\n if node.is_a? Instruction and node.opcode == \"bmulio\"\n tmp1 = Tmp.new(node.codeOrigin, :gpr)\n tmp2 = Tmp.new(node.codeOrigin, :gpr)\n newList << Instruction.new(node.codeOrigin, \"smulli\", [node.operands[0], node.operands[1], node.operands[1]])\n newList << Instruction.new(node.codeOrigin, \"rshiftp\", [node.operands[1], Immediate.new(node.codeOrigin, 32), tmp1])\n newList << Instruction.new(node.codeOrigin, \"rshifti\", [node.operands[1], Immediate.new(node.codeOrigin, 31), tmp2])\n newList << Instruction.new(node.codeOrigin, \"zxi2p\", [node.operands[1], node.operands[1]])\n newList << Instruction.new(node.codeOrigin, \"bineq\", [tmp1, tmp2, node.operands[2]])\n else\n newList << node\n end\n }\n newList\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Publish a document object to the web. If content is a StructuredArticle stored as json, and publishDate is not set, then publishDate will be set to the current time. Publishes an object by performing a PUT request to object.url with object.content and then performing a PROPPATCH request to object.url with object.properties. Example: vortex = Vortex::Connection.new(" article = Vortex::StructuredArticle(:title=>"My title") vortex.publish(article) | def publish(object)
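# Write the object's content and properties to the server (a PUT of the
# content followed by a PROPPATCH of the properties).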
write(object)
uri = @uri.merge(object.url)
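# Structured articles without an explicit publish date are stamped with the
# current time via a PROPPATCH of the vrtx publish-date property.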
if(object.is_a? StructuredArticle) then
if(object.publishDate == nil)then
time = Time.now.httpdate.to_s
prop = '<v:publish-date xmlns:v="vrtx">' + time + '</v:publish-date>'
self.proppatch(uri, prop)
end
end
return uri.to_s
end | [
"def publish!\n publish\n save!\n end",
"def publish!\r\n publish\r\n save!\r\n end",
"def publish!\n publish\n save!\n end",
"def publish_impl(publish_target, digital_object)\n digital_object_pid = digital_object_pids(digital_object).first\n return [false, \"Never preserved to Fedora3\"] unless digital_object_pid\n return [false, \"No DOI\"] if digital_object.doi.blank?\n connection = Faraday.new(publish_target.publish_url)\n connection.token_auth(publish_target.api_key)\n resp = connection.put(digital_object_pid)\n [true, [resp.headers['Location']]]\n rescue StandardError => e\n [false, [e.message]]\n end",
"def publish\n\t\t@article.publish!\n\t\tredirect_to @article\n\tend",
"def publish\n ZC.standard_request(:post, @links[:publish], zc_id: @zc_id)\n end",
"def publish(args)\n object = args[:object]\n action = args[:action]\n options = (args[:options] || {}).stringify_keys\n\n data = {\n :key => object.obj_key,\n :action => action,\n :headline => object.to_title,\n :url => object.public_url,\n :admin_url => object.admin_edit_url,\n :status => object.status\n }.merge(options)\n\n $redis.publish(\"scprcontent\", data.to_json)\n end",
"def publish(topic, content)\n # note: do NOT encode the content, this is not decoded on the erlang side!\n # (only strings are allowed anyway)\n # content = @conn.class.encode_value(content)\n result = @conn.call(:publish, [topic, content])\n @conn.class.process_result_publish(result)\n end",
"def publish\n Client.post(\"/kits/#{@id}/publish\")\n end",
"def publish\n post_params = {\n :body => {}.to_json,\n :headers => Logan::Client.headers.merge({'Content-Type' => 'application/json'})\n }\n\n response = Logan::Client.post \"/projects/#{@id}/publish.json\", post_params\n end",
"def publish(work, publish_url)\n response = post(work, publish_url)\n { response: response, work: JSON.parse(response.body) }\n end",
"def publish(niUri, obj, dest, msgId) end",
"def handle_publish(client, data)\n request_id, options, topic_uri, arguments, argument_keywords = data\n\n trigger(:publish, client, request_id, options, topic_uri, arguments, argument_keywords)\n end",
"def publish!\n self.public = true\n self.save\n end",
"def publish\n @article = Article.find params[:id]\n @version = @article.versions.last\n @version.published = true\n if @article.save and @version.save\n append_flash :notice, \"Article was successfully published\", true\n redirect_to admin_article_path @article\n else\n append_flash :error, \"Unable to publish article\", true\n redirect_to edit_admin_article_path @article\n end # if-else\n end",
"def publish\n @page = Page.find(params[:id])\n @page.published_on = Time.now\n @page.save\n\n respond_to do |format|\n format.json {render json: @pages}\n format.xml {render xml: @pages}\n end\n end",
"def publish!\n self.published = true\n if self.respond_to?(:publish_on)\n self.publish_on ||= Date.today\n end\n self.save\n end",
"def publish(object, props={})\n start = Time.now\n marshaled_object = @marshaler.marshal(object)\n adapter_info = @impl.publish(marshaled_object, @marshaler, nil, props)\n return PublishHandle.new(self, adapter_info, start)\n end",
"def publish\n respond_to do |format|\n if @journal.publish!\n format.html { redirect_to @journal, notice: 'Published!' }\n format.json { head :no_content }\n else\n format.html { redirect_to edit_profile_journal_path(@profile, @journal), notice: 'There were missing parts.' }\n format.json { render json: @journal.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Utilities Convert a Norwegian date to a Time object with a forgiving regexp. TODO: Move this somewhere. Examples: t = norwegian_date('1.1.2010') t = norwegian_date('22.01.2010') t = norwegian_date('22.01.2010 12:15') t = norwegian_date('22.01.2010 12:15:20') | def norwegian_date(date)
if /\A\s*
(\d\d?).(\d\d?).(-?\d+)
\s?
(\d\d?)?:?(\d\d?)?:?(\d\d?)?
\s*\z/ix =~ date
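# Capture groups: 1 = day, 2 = month, 3 = year, 4 = hour, 5 = minute, 6 = second.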
year = $3.to_i
mon = $2.to_i
day = $1.to_i
hour = $4.to_i
min = $5.to_i
sec = $6.to_i
# puts "Debug: #{year} #{mon} #{day} #{hour}:#{min}:#{sec}"
usec = 0
usec = $7.to_f * 1000000 if $7
if $8
zone = $8
year, mon, day, hour, min, sec =
apply_offset(year, mon, day, hour, min, sec, zone_offset(zone))
Time.utc(year, mon, day, hour, min, sec, usec)
else
Time.local(year, mon, day, hour, min, sec, usec)
end
else
raise ArgumentError.new("invalid date: #{date.inspect}")
end
end | [
"def convertTime(time)\n time.gsub!(\"P\", \"\")\n\n if (time.match(\"0DT\"))\n time.gsub!(\"0DT\", \"\")\n else\n time.gsub!(\"DT\", \" Days \")\n end\n\n if (time.match(\"0H\"))\n time.gsub!(\"0H\", \"\")\n else\n time.gsub!(\"H\", \"h \")\n end\n\n time.gsub!(\"M\", \"m \")\n time.gsub!(\"S\", \"s \")\n end",
"def time_for_string(time)\n time = time.scan(/\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}/)[0].sub(' ','T')\n Time.xmlschema(time)\n end",
"def convert_date(date_or_timestamp_string)\n if /\\d{4}\\-\\d{1,2}\\-\\d{1,2}/ =~ date_or_timestamp_string\n DateTime.strptime(date_or_timestamp_string, '%Y-%m-%d')\n else\n DateTime.strptime(date_or_timestamp_string, '%s')\n end\nend",
"def parse_date(input, msg = \"Input could not be parsed.\")\n Time.parse(input).localtime\n rescue ArgumentError\n raise Errors::InvalidDateError, \"Invalid date '#{input}': #{msg}\"\n end",
"def format_date(time); end",
"def date_mathable(datestr)#i.e. '2011-02-20'\r\n date_time = datestr.split(\" \")\r\n if date_time.length == 1\r\n arg = datestr.split('-')\r\n arg = arg.length == 1 ? datestr.split('.') : arg\r\n y = Integer( trim_lead(arg[0],'0') )\r\n m = Integer( trim_lead(arg[1],'0') )\r\n d = Integer( trim_lead(arg[2],'0') )\r\n date = Time.local(y,m,d)\r\n return date\r\n elsif date_time.length == 2\r\n arg = date_time[0].split('-')\r\n arg = arg.length == 1 ? date_time[0].split('.') : arg\r\n arg = arg.length == 1 ? date_time[0].split('/') : arg\r\n y = Integer( trim_lead(arg[0],'0') )\r\n m = Integer( trim_lead(arg[1],'0') )\r\n d = Integer( trim_lead(arg[2],'0') )\r\n arg2 = date_time[1].split(':')\r\n hour = Integer( trim_lead(arg2[0],'0') )\r\n min = arg2[1] == '00' ? 0 : Integer( trim_lead(arg2[1],'0') )\r\n sec = arg2[2] == '00' ? 0 : Integer( trim_lead(arg2[2],'0') )\r\n date = y >= 1000 ? Time.local(y,m,d,hour,min,sec) : Time.local(d,y,m,hour,min,sec)\r\n return date\r\n end \r\n end",
"def workOnTimeString(timeStr) \n str = timeStr.dup \n # timeStr e' del tipo: 1h, 5d, etc. \n if (m = str.match /^(?<val>\\d+)(?<unit>[mhd])$/) then \n val = m['val'].to_i\n seconds = {'m' => 60, 'h' => 60*60, 'd' => 60*60*24}\n # tempo corrente come epoch in secondi \n # \n rhsTime = DateTime.now\n lhsTime = rhsTime.to_time.to_i - (m['val'].to_i * seconds[m['unit']])\n lhsTime = lhsTime + DateTime.now.to_time.utc_offset\n lhsTime = DateTime.strptime(lhsTime.to_s, \"%s\")\n # \n elsif (m = str.match /^(?<lhs>.*)__(?<rhs>.*)$/) then\n lhs = m['lhs'].dup.strip\n rhs = m['rhs'].dup.strip\n # puts \"lhs: #{lhs}\"\n # puts \"rhs: #{rhs}\"\n # \n # lhs is always a point in time, that can be expressed as \n # 2018-dec-06, dec-06, 06-10:30:31, 05-10:30\n # if YYY is not set is the current year \n # if MM is not set is the current month\n # if SS is not set is zero \n # if HH:MM is not set it is 00:00 (the first minute of the day)\n begin \n lhsTime = nil\n # full date : 2018-dec-06-10:30\n if lhs.match /^\\d{4}-\\w{3}-\\d{1,2}-\\d{1,2}:\\d{1,2}$/ then \n lhsTime = DateTime.strptime(lhs, '%Y-%b-%d-%H:%M')\n # no year : dec-06-10:30 \n elsif lhs.match /^\\w{3}-\\d{1,2}-\\d{1,2}:\\d{1,2}$/ then \n lhsTime = DateTime.strptime(lhs, '%b-%d-%H:%M')\n # no time : 2018-dec-06 \n elsif lhs.match /^\\d{4}-\\w{3}-\\d{1,2}$/ then \n lhsTime = DateTime.strptime(lhs, '%Y-%b-%d')\n # no year, no time : dec-06\n elsif lhs.match /^\\w{3}-\\d{1,2}$/ then \n lhsTime = DateTime.strptime(lhs, '%b-%d')\n # only time : 15:30\n elsif lhs.match /^\\d{1,2}:\\d{1,2}$/ then \n lhsTime = DateTime.strptime(lhs, '%H:%M')\n else\n STDERR.puts \"Error, time format not recognized.\"\n exit(1)\n end\n # puts \"lhsTime: #{lhsTime}\"\n rescue => ex \n STDERR.puts \"Exception in parsing dates, check e.g. month names.\"\n exit(3)\n end\n\n # \n # rhs can be a point in time, or a delta respect to lhs time.\n # \n # if rhs is a delta it a can be: e.g. +1d, 5h, 20h, -12d ... \n # \n begin \n rhsTime = nil \n if (m = rhs.match(/^(?<sign>[+-])(?<val>\\d+)(?<unit>[mhd])$/)) then\n seconds = {'m' => 60, 'h' => 60*60, 'd' => 60*60*24}\n if m['sign'] == '+' then \n # binding.pry\n rhsTime = lhsTime.to_time.to_i + (m['val'].to_i * seconds[m['unit']])\n rhsTime = DateTime.strptime(rhsTime.to_s, \"%s\")\n else\n rhsTime = lhsTime.to_time.to_i - (m['val'].to_i * seconds[m['unit']]) \n rhsTime = DateTime.strptime(rhsTime.to_s, \"%s\")\n end\n # \n # Now we see the case in which rhs if fully described \n # complete: 2018-dec-06-10:31\n elsif rhs.match /^\\d{4}-\\w{3}-\\d{1,2}-\\d{1,2}:\\d{1,2}$/ then \n rhsTime = DateTime.strptime(rhs, '%Y-%b-%d-%H:%M')\n # no year : dec-06-10:30 \n elsif rhs.match /^\\w{3}-\\d{1,2}-\\d{1,2}:\\d{1,2}$/ then \n rhsTime = DateTime.strptime(rhs, '%b-%d-%H:%M')\n # no time : 2018-dec-06 \n # in this case the time is the last minute of the day.\n elsif rhs.match /^\\d{4}-\\w{3}-\\d{1,2}$/ then \n tmp = DateTime.strptime(rhs, '%Y-%b-%d')\n rhsTime = DateTime.new(tmp.year, tmp.month, tmp.day, 23, 59, 59)\n # no year, no time : dec-06\n elsif rhs.match /^\\w{3}-\\d{1,2}$/ then \n tmp = DateTime.strptime(rhs, '%b-%d')\n rhsTime = DateTime.new(tmp.year, tmp.month, tmp.day, 23, 59, 59)\n # only time : 15:30\n elsif rhs.match /^\\d{1,2}:\\d{1,2}$/ then \n rhsTime = DateTime.strptime(rhs, '%H:%M')\n # \n # Finally the case in which 'rhs' has not a recognizable format \n else \n STDERR.puts \"Error, time format not recognized.\"\n exit(1) \n end\n rescue => ex \n STDERR.puts \"Exception in parsing dates, check e.g. 
month names.\"\n exit(3)\n end\n # puts \"rhsTime: #{rhsTime}\"\n # exit (2)\n else\n STDERR.puts \"Error, the time string '#{timeStr}' for parameter '-t' has an unknown format.\"\n exit(1);\n end\n # \n # Return the two time limits. The first time is always before (in time) respect to the second one.\n # \n lhsTime = dateTimeToPST(lhsTime)\n rhsTime = dateTimeToPST(rhsTime)\n if lhsTime == rhsTime then \n STDERR.puts \"Error. The time interval is empty, the search will be empty.\"\n exit(1)\n elsif lhsTime < rhsTime then\n out = [lhsTime, rhsTime]\n $MYDEBUG_VALS['lhsTime'] = lhsTime;\n $MYDEBUG_VALS['rhsTime'] = rhsTime;\n else\n out = [rhsTime, lhsTime]\n $MYDEBUG_VALS['lhsTime'] = rhsTime;\n $MYDEBUG_VALS['rhsTime'] = lhsTime;\n end\n out\nend",
"def humanDatetoTime(humanDate)\n\t\tvalidwords = [\"second\", \"minute\", \"hour\", \"day\", \"week\", \"month\", \"year\"]\n\n\t\tgroups = humanDate.scan(/([\\d\\.]+ [a-zA-Z]+)+/)\n\t\t!groups.nil? or return nil\n\n\n\t\ttimes = Hash[groups.map { |group| group[0].split(\" \").reverse! }.map {|unit, num| [unit.end_with?(\"s\") ? unit[0..unit.length-2] : unit, num] }]\n\t\tif times.keys.any? {|unit| !validwords.include? unit }\n\t\t\treturn nil\n\t\tend\n\t\t\n\t\tcarry = 0\n\n\t\t# for each unit starting with year, carry the decimal portion if it has one\n\t\tvalidwords.reverse.each do |unit|\n\t\t\tnum = times.fetch(unit, 0).to_s\n\t\t\tif !(num.include? \".\")\n\t\t\t\ttimes[unit] = Integer(num) + carry\n\t\t\telse\n\t\t\t\tthisunit, nextunit = num.split(\".\")\n\t\t\t\ttimes[unit] = Integer(thisunit) + carry\n\t\t\t\tcarry = Integer(nextunit)\n\t\t\tend\n\t\tend\n\t\t# the last carry is ignored because why handle fractions of a second\n\t\t\n\t\treturn formatTime(times.fetch(\"year\", 0), times.fetch(\"month\", 0), times.fetch(\"day\", 0), times.fetch(\"hour\", 0), times.fetch(\"minute\", 0), times.fetch(\"second\", 0))\n\tend",
"def nice_date(date)\n Time.zone = \"Berlin\"\n date.nil? ? \"\" : h(Time.zone.at(date).strftime(\"%d.%m.%Y, %H:%M Uhr %Z\").gsub(\"CET\", \"MEZ\").gsub(\"CEST\", \"MESZ\"))\n end",
"def from_dmy(date)\n date.to_s.gsub(/^(\\d{2})[\\/-](\\d{2})[\\/-](\\d{4})/, '\\3/\\2/\\1').to_date\n end",
"def make_date(time_object, type=\"cle\")\n case(type)\n when \"cle\"\n month = time_object.strftime(\"%b \")\n day = time_object.strftime(\"%d\").to_i\n year = time_object.strftime(\", %Y \")\n mins = time_object.strftime(\":%M %P\")\n hour = time_object.strftime(\"%l\").to_i\n return month + day.to_s + year + hour.to_s + mins\n when \"oae-message\"\n date = time_object.strftime(\"%-m/%-d/%Y \")\n hour = time_object.strftime(\"%l\").to_i\n mins = time_object.strftime(\":%M %p\")\n return date + hour.to_s + mins\n end\n \n end",
"def item_date(item)\r\n re1 = /<span class=\"h5\">(\\d\\d\\.\\d\\d\\.\\d\\d) (\\d\\d:\\d\\d)<\\/span>/m\r\n datetime_format = \"%d.%m.%y %H:%M\"\r\n \r\n date = item.match(re1)\r\n date = DateTime.strptime(\"#{date[1]} #{date[2]}\", datetime_format) unless date.nil?\r\n date.nil? || date.to_s.empty? ? \"\" : date\r\nend",
"def parse_date date\n\t\tunless date =~ /(\\d{4})(\\d{2})(\\d{2})(\\d{2})(\\d{2})(\\d{2})Z/\n\t\t\treturn nil\n\t\tend\n\t\t\n\t\tyear = $1\n\t\tmonth = $2\n\t\tday = $3\n\t\thour = $4\n\t\tmin = $5\n\t\tsec = $6\n\n\t\treturn Time.mktime(year, month, day, hour, min, sec)\n\tend",
"def decode_time(string); end",
"def parse_filtered_time(time = nil)\n case time\n when String then Time.zone.parse(time)\n when nil then Time.zone.now\n when Time, ActiveSupport::TimeWithZone then time.in_time_zone\n else raise \"Invalid time: #{time.inspect}\"\n end\n end",
"def time(str)\n Time.strptime(str, @format_source.time_format)\n rescue\n str\n end",
"def convert_time(time_string)\n parts = time_string.to_s.split('/')\n return parts.length == 3 ? Time.new(parts[2].to_i, parts[0].to_i, parts[1].to_i) : nil\n end",
"def decode_date(string); end",
"def format_date(original_date_str)\n original_date_str.sub(/\\A(\\d{4})-(\\d{2})-(\\d{2})\\z/,'\\3.\\2.\\1')\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
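A minimal usage sketch for the norwegian_date helper documented in the entry above. Nothing here goes beyond the method itself: the expected values follow from the day.month.year capture order and the Time.local call, and the ArgumentError case is the documented failure mode for unparseable input.

t = norwegian_date('1.1.2010')            # => 2010-01-01 00:00:00 (local zone)
t = norwegian_date('22.01.2010')          # => 2010-01-22 00:00:00 (local zone)
t = norwegian_date('22.01.2010 12:15')    # => 2010-01-22 12:15:00 (local zone)
t = norwegian_date('22.01.2010 12:15:20') # => 2010-01-22 12:15:20 (local zone)

begin
  norwegian_date('not a date')
rescue ArgumentError => e
  puts e.message # => invalid date: "not a date"
end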
When changing the response_reset_date, we must go through and update the status_cache for all members of the committee. | def update_member_status_caches!
if response_reset_date_changed?
members.find_in_batches do |member_group|
member_group.each {|member| member.update_status_cache! }
end
end
end | [
"def pending_account_reset_visited\n track_event('Pending account reset visited')\n end",
"def response_lifetime_date\n response_reset_date || CommitteeMember::DEFAULT_RESPONSE_LIFETIME.ago\n end",
"def reset_events_cache\n Event.where(target_id: self.id, target_type: 'Issue').\n order('id DESC').limit(100).\n update_all(updated_at: Time.now)\n end",
"def reassign_team_responses\n @delta[:teams].each do |tobj|\n response = get_response_by_team_id(tobj[:id])\n next if response.blank?\n if tobj[:deleted]\n next\n elsif tobj[:new]\n next\n else\n response.ownerable = get_team_by_id(tobj[:new_id])\n response.save\n end\n end\n end",
"def update_task_completion_status_caches!\n return nil if completion_criteria.blank? && context_object_completion_criteria.blank?\n logger.info { \"Updating task completion status cache for OfferingAdminPhaseTask id #{id}\" }\n for r in relevant_records\n r.update_task_completion_status_cache!(self)\n r.save\n end\n end",
"def expire_reset_tokens\n ResetToken.where(user_id: self.id).each(&:mark_used)\n end",
"def reset_current_stats\n if last_current_update < 2.hours.ago\n self.current_seen = 0\n self.current_correct = 0\n self.last_current_update = Time.now\n self.due_count = self.cards.due.length\n end\n self.save\n end",
"def reset_digests_if_needed(old_status, new_status)\n if self.class.away_idle_or_unavailable?(old_status) && !self.class.away_idle_or_unavailable?(new_status)\n reset_digest_cycle\n end\n end",
"def reset_since_id_counters\n reset!\n reset_since_id\n reset_since_id_reply\n reset_since_id_home_timeline\n reset_since_id_dm\n end",
"def update_cache(previous)\n host.overall_status force_update: true if new_status_different?(previous)\n end",
"def reset_if_possible\n now = DateTime.now\n reset_time = self.reset_on || DateTime.parse(\"1/1/11\")\n if (now - reset_on).to_f > 1\n self.reset_on = now\n self.daily_calls = 0\n self.save\n else\n self.reset_on = reset_time\n self.save\n end\n end",
"def update_status_timestamp\n self.overall_status_modified_at = Time.zone.now\n end",
"def reset_modifications\n @accessed = {}\n @modifications = {}\n end",
"def revalidate_cached_response(not_modified_response)\n logger.info(\" Resource not modified\")\n cached_response.revalidate!(not_modified_response)\n cached_response\n end",
"def after_update(employee)\n expire_cache_for(employee)\n end",
"def rate_limit_reset=(value)\n value ||= (Time.zone.now.end_of_hour).to_i\n Rails.cache.write(\"#{name}/rate_limit_reset\", get_iso8601_from_epoch(value))\n end",
"def update_retried\n # find the latest builds for each name\n latest_statuses = pipeline.statuses.latest\n .group(:name)\n .having('count(*) > 1')\n .pluck(Arel.sql('MAX(id)'), 'name')\n\n # mark builds that are retried\n pipeline.statuses.latest\n .where(name: latest_statuses.map(&:second))\n .where.not(id: latest_statuses.map(&:first))\n .update_all(retried: true) if latest_statuses.any?\n end",
"def leave_reset(emp)\n leave_count = EmployeeLeave.where(employee_id: emp.id)\n leave_count.each do |e|\n leave_type = EmployeeLeaveType.find_by_id(e.employee_leave_type_id)\n default_leave_count = leave_type.max_leave_count\n available_leave = default_leave_count.to_f\n leave_taken = 0\n e.update(leave_taken: leave_taken, leave_count: available_leave,\\\n reset_date: Date.today)\n end\n end",
"def restarted\n @failures << Time.now.to_i\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
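As a hedged illustration of how the method in the entry above would be invoked, the snippet below assumes a Committee model with a members association and a response_reset_date column; only response_reset_date_changed?, find_in_batches, and update_status_cache! come from the entry itself, while the lookup and assignment are illustrative.

committee = Committee.find(committee_id)   # hypothetical lookup
committee.response_reset_date = Time.now   # the change that invalidates member caches
committee.update_member_status_caches!     # response_reset_date_changed? is true before save,
                                           # so every member's status_cache is refreshed in batches
committee.save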
If this Committee has the response_reset_date set, return that. Otherwise, calculate this based on the DEFAULT_RESPONSE_LIFETIME. | def response_lifetime_date
response_reset_date || CommitteeMember::DEFAULT_RESPONSE_LIFETIME.ago
end | [
"def responded_recently?(response_lifetime = DEFAULT_RESPONSE_LIFETIME)\n if committee && committee.response_reset_date\n last_user_response_at > committee.response_reset_date rescue false\n else\n Time.now - last_user_response_at < response_lifetime rescue false\n end\n end",
"def renewed_date_time\n return @renewed_date_time\n end",
"def reminder_date_time\n return @reminder_date_time\n end",
"def password_reset_expired?\n reset_sent_at < 2.hours.ago # reset_sent_at db polje datetime\n end",
"def password_reset_expired?\n reset_sent_at < 2.hours.ago\n end",
"def password_reset_expired?\n reset_sent_at < PASSWORD_EXPIRATION.hours.ago\n end",
"def expiration_date\n @expiration_date ||= extract_warranty_date\n end",
"def password_reset_sent_at\n Time.now\n end",
"def reset_if_possible\n now = DateTime.now\n reset_time = self.reset_on || DateTime.parse(\"1/1/11\")\n if (now - reset_on).to_f > 1\n self.reset_on = now\n self.daily_calls = 0\n self.save\n else\n self.reset_on = reset_time\n self.save\n end\n end",
"def fecha_expiration\n run_at = self.expiration_date\n end",
"def password_reset_expired?\n reset_sent_at < 2.hours.ago # The password reset was sent earlier than two hours ago.\n end",
"def password_reset_expired?\n reset_sent_at < Settings.timeout_reset_password.hours.ago\n end",
"def reassigned_date_time\n return @reassigned_date_time\n end",
"def assessment_or_determination_date\n claim.redeterminations.any? ? last_redetermination_date : claim.assessment.updated_at\n end",
"def retired_at\n @retired_at\n end",
"def confirmation_reset_expired?\n confirmation_sent_at < 48.hours.ago\n end",
"def last_response\n @last_response\n end",
"def expiration_date\n return nil unless success?\n @exdate ||= value_for_xpath('//domain:exDate') && Time.parse(value_for_xpath('//domain:exDate'))\n end",
"def last_response\n @response\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
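To make the fallback concrete, a small hedged sketch of how the cutoff returned above might be consumed. The concrete duration behind DEFAULT_RESPONSE_LIFETIME and the last_user_response_at timestamp are assumptions for illustration, not facts from the entry.

# With no reset date, the cutoff falls back to "DEFAULT_RESPONSE_LIFETIME ago"
# (for example 1.year.ago if the constant is 1.year).
committee.response_reset_date = nil
committee.response_lifetime_date   # => roughly CommitteeMember::DEFAULT_RESPONSE_LIFETIME.ago

# With a reset date, that date wins.
committee.response_reset_date = Time.utc(2011, 9, 1)
committee.response_lifetime_date   # => 2011-09-01 00:00:00 UTC

# Typical consumption: members whose last response predates the cutoff are stale.
cutoff = committee.response_lifetime_date
stale  = committee.members.select { |m| m.last_user_response_at.to_i < cutoff.to_i }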
GET /success_cases GET /success_cases.xml | def index
@success_cases = SuccessCase.all.paginate(:page => params[:page], :per_page => 10)
respond_to do |format|
format.html # index.html.erb
format.xml { render :xml => @success_cases }
end
end | [
"def index\n @cases = Case.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @cases }\n end\n end",
"def show\n @success_case = SuccessCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @success_case }\n end\n end",
"def show\n respond_to do |format|\n format.html { redirect_to :controller => 'testcases', :action => 'index' }\n format.xml { render :xml => @testcase_result }\n end\n end",
"def index\n @testcases = Testcase.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcases }\n end\n end",
"def new\n @success_case = SuccessCase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @success_case }\n end\n end",
"def index\n @contests = Contest.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @contests }\n end\n end",
"def index\n @case_recs = CaseRec.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @case_recs }\n end\n end",
"def show\n @test_case = TestCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @test_case }\n end\n end",
"def index\r\n @contests = Contest.all\r\n\r\n respond_to do |format|\r\n format.html # index.html.erb\r\n format.xml { render :xml => @contests }\r\n end\r\n end",
"def index\n @testcase_results = TestcaseResult.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcase_results }\n end\n end",
"def index\n \n @testcases = Testcase.search(params[:s])\n @testsuite = params[:testsuite]\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcases }\n end\n end",
"def show\n @student_discipline_case = StudentDisciplineCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @student_discipline_case }\n end\n end",
"def show\n @contest = Contest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @contest }\n end\n end",
"def index\n @lab_tests = @test_subject.lab_tests\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render xml: @lab_tests }\n end\n end",
"def show\n @testcase = Testcase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @testcase }\n end\n end",
"def show\n @case = Case.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @case }\n end\n end",
"def index\n @contestant_entries = ContestantEntry.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @contestant_entries }\n end\n end",
"def index\n @lab_tests = @test_subject.lab_tests\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @lab_tests }\n end\n end",
"def index\n @it_cases = ItCase.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @it_cases }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
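The SuccessCase entries from here on are the standard Rails scaffold actions. As a hedged sketch, a single resources declaration in an assumed config/routes.rb (application name hypothetical) is what exposes every HTML and .xml endpoint named in these queries; the XML variants come from the respond_to blocks rather than from extra routes.

# config/routes.rb (assumed Rails 3-era syntax; Rails 2 would use map.resources)
YourApp::Application.routes.draw do
  resources :success_cases
end

# Endpoints generated for the actions in the surrounding entries:
#   GET    /success_cases(.xml)        -> index
#   GET    /success_cases/new(.xml)    -> new
#   POST   /success_cases(.xml)        -> create
#   GET    /success_cases/1(.xml)      -> show
#   PUT    /success_cases/1(.xml)      -> update
#   DELETE /success_cases/1(.xml)      -> destroy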
GET /success_cases/1 GET /success_cases/1.xml | def show
@success_case = SuccessCase.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @success_case }
end
end | [
"def index\n @success_cases = SuccessCase.all.paginate(:page => params[:page], :per_page => 10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @success_cases }\n end\n end",
"def index\n @cases = Case.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @cases }\n end\n end",
"def show\n respond_to do |format|\n format.html { redirect_to :controller => 'testcases', :action => 'index' }\n format.xml { render :xml => @testcase_result }\n end\n end",
"def index\n @testcases = Testcase.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcases }\n end\n end",
"def show\n @test_case = TestCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @test_case }\n end\n end",
"def new\n @success_case = SuccessCase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @success_case }\n end\n end",
"def index\n @case_recs = CaseRec.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @case_recs }\n end\n end",
"def show\n @testcase = Testcase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @testcase }\n end\n end",
"def show\n @student_discipline_case = StudentDisciplineCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @student_discipline_case }\n end\n end",
"def index\n @contests = Contest.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @contests }\n end\n end",
"def show\n @case = Case.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @case }\n end\n end",
"def index\n @testcase_results = TestcaseResult.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcase_results }\n end\n end",
"def show\n @contest = Contest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @contest }\n end\n end",
"def index\n \n @testcases = Testcase.search(params[:s])\n @testsuite = params[:testsuite]\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @testcases }\n end\n end",
"def xml(id)\n http.get(\"/nfse/#{id}/xml\") do |response|\n response.headers.fetch(\"Location\") { \"\" }\n end\n end",
"def show\n @case_study = CaseStudy.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @case_study }\n end\n end",
"def show\n @security_case = SecurityCase.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @security_case }\n end\n end",
"def index\r\n @contests = Contest.all\r\n\r\n respond_to do |format|\r\n format.html # index.html.erb\r\n format.xml { render :xml => @contests }\r\n end\r\n end",
"def show\n @test_case_entry = TestCaseEntry.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @test_case_entry }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /success_cases/new GET /success_cases/new.xml | def new
@success_case = SuccessCase.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @success_case }
end
end | [
"def new\n @test_case = TestCase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @test_case }\n end\n end",
"def new\n @case = Case.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @case }\n end\n end",
"def new\n @testcase = Testcase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @testcase }\n end\n end",
"def new\n @case_rec = CaseRec.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @case_rec }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => new_vurl }\n end\n end",
"def new\n @security_case = SecurityCase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @security_case }\n end\n end",
"def new\n @contest = Contest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @contest }\n end\n end",
"def new\n @case_study = CaseStudy.new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @case_study }\n end\n end",
"def new\n @test_case_entry = TestCaseEntry.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @test_case_entry }\n end\n end",
"def new\n @usercase = Usercase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @usercase }\n end\n end",
"def new\n @test_status = TestStatus.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @test_status }\n end\n end",
"def new\n @trial = Trial.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @trial }\n end\n end",
"def new\n @lab_test = LabTest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render xml: @lab_test }\n end\n end",
"def new\n @actual = Actual.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @actual }\n end\n end",
"def new\n @cup = Cup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cup }\n end\n end",
"def new\n @testing = Testing.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @testing }\n end\n end",
"def new\n @clilab = Clilab.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @clilab }\n end\n end",
"def new\n @project = Project.new\n\n puts \"new\"\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @project }\n end\n end",
"def new\n @suite = Suite.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @suite }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /success_cases POST /success_cases.xml | def create
@success_case = SuccessCase.new(params[:success_case])
respond_to do |format|
if @success_case.save
format.html { redirect_to(@success_case, :notice => 'Success case was successfully created.') }
format.xml { render :xml => @success_case, :status => :created, :location => @success_case }
else
format.html { render :action => "new" }
format.xml { render :xml => @success_case.errors, :status => :unprocessable_entity }
end
end
end | [
"def create\n @testcase = Testcase.new(params[:testcase])\n\n respond_to do |format|\n if @testcase.save\n flash[:notice] = 'Testcase was successfully created.'\n format.html { redirect_to(testcases_url) }\n format.xml { render :xml => \"testcases\", :status => :created, :location => @testcase }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @testcase.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @test_case = TestCase.new(params[:test_case])\n\n respond_to do |format|\n if @test_case.save\n format.html { redirect_to(@test_case, :notice => 'TestCase was successfully created.') }\n format.xml { render :xml => @test_case, :status => :created, :location => @test_case }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @test_case.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def posttestrail(runId, caseId, statusId, versionId, elapsedseconds)\r\n\r\n uri = \"http://testrailgw.jupiter.bbc.co.uk/?action=add_result_for_case&run_id=#{runId}&case_id=#{caseId}&status_id=#{statusId}&version=#{versionId}&elapsed_seconds=#{elapsedseconds}&sharedSecret=thI5iSourSHAREDsecret\"\r\n #uri = \"http://testrailgw.jupiter.bbc.co.uk/?action=add_result_for_case&run_id=110324&case_id=665022&status_id=1&version=Test&elapsed_seconds=12&sharedSecret=thI5iSourSHAREDsecret\"\r\n\r\n uri = uri.gsub(\" \", \"%20\")\r\n xml_data = open(uri).read\r\n if(xml_data.include? '\"test_id\":')\r\n recorded = xml_data.split('\"test_id\":')[1]\r\n testID = recorded.split(',\"status_id\"')[0]\r\n puts \"TestID:\"+testID\r\n else\r\n puts xml_data\r\n fail \"Cannot Post result to Testrail, check Webservice\"\r\n end\r\n\r\n timeStamp = Time.now.strftime (\"posted at %H:%M %d/%m/%Y\")\r\n files = \"//zgbwcfs3005.jupiter.bbc.co.uk/QA/Jenkins/Jupiter/ICETEAresultupdatelog.txt\"\r\n f = File.open(files,'a')\r\n f.write \"#{testID} #{timeStamp}\"\r\n f.close\r\nend",
"def test_should_create_status_post_via_API_XML\r\n get \"/logout\"\r\n post \"/status_posts.xml\", :api_key=>'testapikey',\r\n :status_post => {:body => 'API Status Post 1' }\r\n assert_response :created\r\n end",
"def new\n @success_case = SuccessCase.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @success_case }\n end\n end",
"def create_case(subject, created_at, resolved_at, description, count, external_id, s, e)\n uri = URI('https://yoursite.desk.com/api/v2/cases') # POST URI\n req = Net::HTTP::Post.new(uri.path, {'Content-Type' => 'application/json'}) #set Post object (uri and content type header)\n req.basic_auth '<email>', '<password>' #set Post object (auth)\n\n #set Post object body && convert to json (contents of ticket)\n req.body =\n {\n type: \"email\",\n external_id: \"#{external_id}\",\n subject: \"#{subject}\",\n priority: 4,\n status: \"open\",\n labels: [\"archive\"],\n created_at: \"#{created_at}\",\n resolved_at: \"#{resolved_at}\",\n message: {\n direction: \"in\",\n subject: \"#{subject}\",\n body: \"#{description}\",\n to: \"<email>\",\n from: \"<email>\",\n created_at: \"#{created_at}\"\n }\n }.to_json\n\n #send the request\n res = Net::HTTP.start(uri.hostname, uri.port,\n :use_ssl => uri.scheme == 'https') do |http|\n http.request(req)\n end\n if res.is_a?(Net::HTTPSuccess)\n puts \"Case Created!\"\n #success logging\n s.write(\"#{res.body}\\n\")\n return true\n else\n puts \"Oops! Case not created.\"\n #error logging\n e.write(\"#{res.body}\")\n return false\n end\nend",
"def create\n @case = Case.new(params[:case]) \n respond_to do |format|\n if @case.save\n flash[:notice] = 'Case was successfully created.'\n format.html { redirect_to(@case) }\n format.xml { render :xml => @case, :status => :created, :location => @case }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @case.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @test_case_entry = TestCaseEntry.new(params[:test_case_entry])\n\n respond_to do |format|\n if @test_case_entry.save\n format.html { redirect_to(@test_case_entry, :notice => 'Test case entry was successfully created.') }\n format.xml { render :xml => @test_case_entry, :status => :created, :location => @test_case_entry }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @test_case_entry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @test_case = TestCase.new(params[:test_case])\n\n respond_to do |format|\n if @test_case.save\n format.html { redirect_to @test_case, notice: 'Test case was successfully created.' }\n format.json { render json: @test_case, status: :created, location: @test_case }\n else\n format.html { render action: \"new\" }\n format.json { render json: @test_case.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @usercase = Usercase.new(params[:usercase])\n\n respond_to do |format|\n if @usercase.save\n flash[:notice] = 'Usercase was successfully created.'\n format.html { redirect_to(@usercase) }\n format.xml { render :xml => @usercase, :status => :created, :location => @usercase }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @usercase.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @case_test = CaseTest.new(case_test_params)\n\n respond_to do |format|\n if @case_test.save\n format.html { redirect_to @case_test, notice: 'Case test was successfully created.' }\n format.json { render :show, status: :created, location: @case_test }\n else\n format.html { render :new }\n format.json { render json: @case_test.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n attributes = params[:testcase_result]\n attributes[:testcase_id] = params[:testcase_id]\n attributes[:testsuite_id] = params[:testsuite_id]\n @testcase_result = TestcaseResult.new(attributes)\n\n respond_to do |format|\n if @testcase_result.save\n flash[:notice] = 'TestcaseResult was successfully created.'\n format.html { redirect_to(Testsuite.find(params[:testsuite_id])) }\n format.xml { render :xml => @testcase_result, :status => :created, :location => @testcase_result }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @testcase_result.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def post_new_tcase(project_id)\n self.class.params\n options = {:body =>{:t_case=>\n {:t_case_id => @@params[\"id\"],\n :title => @@params[\"title\"],\n :scenario => @@params[\"scenario\"],\n :status => @@params[\"status\"],\n :project_id => project_id\n }}}\n self.class.post(\"/projects/#{project_id}/t_cases\", options)\n end",
"def create\n @case_rec = CaseRec.new(params[:case_rec])\n\n respond_to do |format|\n if @case_rec.save\n flash[:notice] = 'CaseRec was successfully created.'\n format.html { redirect_to(@case_rec) }\n format.xml { render :xml => @case_rec, :status => :created, :location => @case_rec }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @case_rec.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\r\n @contest = Contest.new(params[:contest])\r\n\r\n respond_to do |format|\r\n if @contest.save\r\n format.html { redirect_to(@contest, :notice => 'Contest was successfully created.') }\r\n format.xml { render :xml => @contest, :status => :created, :location => @contest }\r\n else\r\n format.html { render :action => \"new\" }\r\n format.xml { render :xml => @contest.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n params.permit!\n @security_case = SecurityCase.new(params[:security_case])\n\n respond_to do |format|\n if @security_case.save\n format.html { redirect_to(@security_case, :notice => 'Security case was successfully created.') }\n format.xml { render :xml => @security_case, :status => :created, :location => @security_case }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @security_case.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def POST; end",
"def create\n @test_case = TestCase.new(test_case_params)\n @test_suite = TestSuite.find(params[:test_suite_id])\n\n #Make sure the tc is linked to the ts\n @test_case.test_suite_id=@test_suite.id\n\n respond_to do |format|\n if @test_case.save\n format.html { redirect_to test_suite_test_case_path(@test_suite,@test_case), notice: 'Test case was successfully created.' }\n format.json { render :show, status: :created, location: @test_case }\n else\n format.html { render :new }\n format.json { render json: @test_case.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @contest = Contest.new(params[:contest])\n\n respond_to do |format|\n if @contest.save\n format.html { redirect_to(@contest, :notice => 'Contest was successfully created.') }\n format.xml { render :xml => @contest, :status => :created, :location => @contest }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @contest.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
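A hedged end-to-end example of exercising the create action above without a browser, written with Ruby's standard Net::HTTP. The host, port, and the title attribute are assumptions; the success_case wrapping and the 201/Location and 422 behaviours follow from the controller code.

require 'net/http'
require 'uri'

# Hypothetical local server; <title> is an assumed attribute of SuccessCase.
uri = URI.parse('http://localhost:3000/success_cases.xml')
xml = <<-XML
<success_case>
  <title>Example case</title>
</success_case>
XML

response = Net::HTTP.start(uri.host, uri.port) do |http|
  request = Net::HTTP::Post.new(uri.path, 'Content-Type' => 'application/xml')
  request.body = xml
  http.request(request)
end

puts response.code        # "201" when the record saves, "422" with validation errors
puts response['Location'] # URL of the new success case on a 201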
PUT /success_cases/1 PUT /success_cases/1.xml | def update
@success_case = SuccessCase.find(params[:id])
respond_to do |format|
if @success_case.update_attributes(params[:success_case])
format.html { redirect_to(@success_case, :notice => 'Success case was successfully updated.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @success_case.errors, :status => :unprocessable_entity }
end
end
end | [
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post opts.fetch(:path, update_path), opts\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end",
"def update\n @test_case = TestCase.find(params[:id])\n\n respond_to do |format|\n if @test_case.update_attributes(params[:test_case])\n format.html { redirect_to @test_case, notice: 'Test case was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @test_case.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @courtstation = Courtstation.find(params[:courtstation_id])\n @casefile = @courtstation.casefiles.find(params[:id])\n\n respond_to do |format|\n if @casefile.update_attributes(params[:casefile])\n format.html { redirect_to([@casefile.courtstation, @casefile], :notice => 'Casefile was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @casefile.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @test_case_entry = TestCaseEntry.find(params[:id])\n\n respond_to do |format|\n if @test_case_entry.update_attributes(params[:test_case_entry])\n format.html { redirect_to(@test_case_entry, :notice => 'Test case entry was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @test_case_entry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @usercase = Usercase.find(params[:id])\n\n respond_to do |format|\n if @usercase.update_attributes(params[:usercase])\n flash[:notice] = 'Usercase was successfully updated.'\n format.html { redirect_to(@usercase) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @usercase.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @tc = TestCase.find(params[:id])\n\n respond_to do |format|\n if @tc.update_attributes(params[:test_case])\n format.html { redirect_to(project_test_case_path(@tc.project, @tc)) }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @tc.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def test_put_existing\n request = Http::Request.new('PUT', '/file1', {}, 'bar')\n\n response = self.request(request)\n\n assert_equal(204, response.status)\n\n assert_equal(\n 'bar',\n @server.tree.node_for_path('file1').get\n )\n\n assert_equal(\n {\n 'X-Sabre-Version' => [Version::VERSION],\n 'Content-Length' => ['0'],\n 'ETag' => [\"\\\"#{Digest::MD5.hexdigest('bar')}\\\"\"]\n },\n response.headers\n )\n end",
"def update\n @xml_sample = XmlSample.find(params[:id])\n \n respond_to do |format|\n format.html do\n @xml_sample = XmlSample.update_attributes(params[:xml_sample])\n if @xml_sample.save\n return redirect_to @xml_sample, notice: 'Xml sample was successfully updated.'\n else\n return render action: \"new\"\n end\n end\n format.xml do\n rexml = REXML::Document.new(params[\"xml\"])\n attr = Hash.from_xml(rexml.to_s)\n if @xml_sample.update_attributes(attr[\"xmlsample\"])\n xml = {msg: \"complete\", status: \"OK\"}.to_xml\n else\n xml = {msg: \"failed\", status: \"NG\"}.to_xml\n end\n return render xml: xml\n end\n end\n end",
"def update\n\n prefix = params[:kase][:case_no_prefix]\n while prefix.size < 3 do prefix = '0' + prefix end\n suffix = params[:kase][:case_no]\n params[:kase][:case_no] = prefix + '-' + suffix\n @kase = Kase.find(params[:id])\n\n respond_to do |format|\n if @kase.update_attributes(params[:kase])\n save_case_note(@kase,params[:note])\n format.html { redirect_to(@kase, :notice => 'Case was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @kase.errors, :status => :unprocessable_entity }\n end\n end\n\n end",
"def update\n @incident = Incident.find(params[:id])\n\n respond_to do |format|\n if @incident.update_attributes(params[:incident])\n format.html { redirect_to(@incident) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @incident.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n params.permit!\n @security_case = SecurityCase.find(params[:id])\n\n respond_to do |format|\n if @security_case.update_attributes(params[:security_case])\n format.html { redirect_to(@security_case, :notice => 'Security case was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @security_case.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @testcase = Testcase.find(params[:id])\n\n respond_to do |format|\n if @testcase.update_attributes(params[:testcase])\n format.html { redirect_to @testcase, :notice => 'Test was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @testcase.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n url = 'https://casa-core.herokuapp.com/api/units/' + params[:id]\n query = {\n 'name' => params[:name]\n }\n response = HTTParty.put(url, :query => query, :headers => { \"Authorization\" => AUTH, \"Host\" => HOST})\n\n if response.code == 200\n redirect_to unit_path(params[:id]), notice: 'Unit was successfully updated.'\n else\n redirect_to unit_path(params[:id]), notice: 'Sheesh! Minor hiccup...run that again!'\n end\n end",
"def update\n @case_file = CaseFile.find(params[:id])\n\n respond_to do |format|\n if @case_file.update_attributes(params[:case_file])\n format.html { redirect_to @case_file, notice: 'Case file was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @case_file.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @casefile = Casefile.find(params[:id])\n\n respond_to do |format|\n if @casefile.update_attributes(params[:casefile])\n format.html { redirect_to @casefile, notice: 'Casefile was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @casefile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @testcase_result.update_attributes(params[:testcase_result])\n flash[:notice] = 'TestcaseResult was successfully updated.'\n format.html { redirect_to(@testcase_result) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @testcase_result.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @old_test_case = OldTestCase.find(params[:id])\n\n respond_to do |format|\n if @old_test_case.update_attributes(params[:old_test_case])\n format.html { redirect_to @old_test_case, notice: 'Old test case was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @old_test_case.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @contest = Contest.find(params[:id])\n\n respond_to do |format|\n if @contest.update_attributes(params[:contest])\n flash[:notice] = 'Contest was successfully updated.'\n format.html { redirect_to(@contest) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @contest.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /success_cases/1 DELETE /success_cases/1.xml | def destroy
@success_case = SuccessCase.find(params[:id])
@success_case.destroy
respond_to do |format|
format.html { redirect_to(success_cases_url) }
format.xml { head :ok }
end
end | [
"def destroy\n @test_case = TestCase.find(params[:id])\n @test_case.destroy\n\n respond_to do |format|\n format.html { redirect_to(test_cases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @testcase.destroy\n\n respond_to do |format|\n format.html { redirect_to(testcases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @testcase = Testcase.find(params[:id])\n TestcaseXref.delete_all(\"from_testcase_id=\"+params[:id]+\" || to_testcase_id=\"+params[:id])\n UserTestcaseXref.delete_all(\"testcase_id=\"+params[:id])\n TestcaseBugXref.delete_all(\"testcase_id=\"+params[:id])\n @testcase.destroy\n\n respond_to do |format|\n format.html { redirect_to(testcases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @cases = Case.find(params[:id])\n @cases.destroy\n\n respond_to do |format|\n format.html { redirect_to(cases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @testcase = Testcase.find(params[:id])\n @testcase.destroy\n\n respond_to do |format|\n format.html { redirect_to(testcases_url) }\n format.xml { head :ok }\n end\n end",
"def del\n @status1 = Status1.find(params[:id])\n @status1.destroy\n\n respond_to do |format|\n format.html { redirect_to(status1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @scrap_xml = ScrapXml.find(params[:id])\n @scrap_xml.destroy\n\n respond_to do |format|\n format.html { redirect_to(scrap_xmls_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @case_rec = CaseRec.find(params[:id])\n @case_rec.destroy\n\n respond_to do |format|\n format.html { redirect_to(case_recs_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @case = Case.find(params[:id])\n @case.destroy\n\n respond_to do |format|\n format.html { redirect_to(cases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @usercase = Usercase.find(params[:id])\n @usercase.destroy\n\n respond_to do |format|\n format.html { redirect_to(usercases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @user_testcase_xref = UserTestcaseXref.find(params[:id])\n @user_testcase_xref.destroy\n\n respond_to do |format|\n format.html { redirect_to(user_testcase_xrefs_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @assertion = Assertion.find(params[:id])\n @assertion.destroy\n\n respond_to do |format|\n format.html { redirect_to(assertions_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @security_case = SecurityCase.find(params[:id])\n @security_case.destroy\n\n respond_to do |format|\n format.html { redirect_to(security_cases_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @xml_sample = XmlSample.find(params[:id])\n @xml_sample.destroy\n\n respond_to do |format|\n format.html { redirect_to xml_samples_url }\n format.xml do\n xml = {msg: \"complete\", status: \"OK\"}.to_xml\n return render xml: xml\n end\n end\n end",
"def destroy\n @test_case_entry = TestCaseEntry.find(params[:id])\n @test_case_entry.destroy\n\n respond_to do |format|\n format.html { redirect_to(test_case_entries_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @testcase_category = TestcaseCategory.find(params[:id])\n @testcase_category.destroy\n\n respond_to do |format|\n format.html { redirect_to(testc_categories_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @tst1 = Tst1.find(params[:id])\n @tst1.destroy\n\n respond_to do |format|\n format.html { redirect_to(tst1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @transaction_xml = Transaction::Xml.find(params[:id])\n @transaction_xml.destroy\n\n respond_to do |format|\n format.html { redirect_to(transaction_xmls_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @colonoscopytest = Colonoscopytest.find(params[:id])\n @colonoscopytest.destroy\n\n respond_to do |format|\n format.html { redirect_to(colonoscopytests_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |