query (string, length 7–9.5k) | document (string, length 10–1.07M) | negatives (sequence, length 19) | metadata (dict) |
---|---|---|---|
Determines if the given body matches the signature. | def verify_content(body, signature)
hmac = HMAC::SHA1.hexdigest(@secret, body)
check = "sha1=" + hmac
check == signature
end | [
"def matches_signature(signature)\n @signature == signature\n end",
"def validate_body(body, request_headers)\n signature = get_header_value(request_headers, HEADER_SIGNATURE)\n key_id = get_header_value(request_headers, HEADER_KEY_ID)\n secret_key = @secret_key_store.get_secret_key(key_id)\n digest = OpenSSL::Digest.new(HMAC_SCHEME)\n hmac = OpenSSL::HMAC.digest(digest, secret_key, body)\n expected_signature = Base64.strict_encode64(hmac).strip\n\n unless equal_signatures?(signature, expected_signature)\n msg = \"failed to validate signature '#{signature}'\"\n raise SignatureValidationException.new(message: msg)\n end\n end",
"def matches_signature(signature)\n @name == signature.name\n end",
"def signatures_match?\n expected_signature == api_signature\n end",
"def verify_signature(params, given_signature)\n given_signature == signature_from(params)\n end",
"def check_signature(body)\n received_signature = request.env['HTTP_X_HUB_SIGNATURE'] || ''\n signature = 'sha1=' + hmac_sha1(settings.github_secret, body)\n\n if !Rack::Utils.secure_compare(signature, received_signature)\n build_failed('signature mismatch')\n end\nend",
"def check_body_matches( response )\n\t\t@expected_bodies.each do |pattern|\n\t\t\tunless pattern_matches_response_body?( pattern, response )\n\t\t\t\tself.failure_description = describe_body_match_failure( pattern, response )\n\t\t\t\treturn false\n\t\t\tend\n\t\tend\n\n\t\treturn true\n\tend",
"def valid_body?(response, service)\n return true unless service[:body_match]\n\n response.body =~ service[:body_match]\n end",
"def valid_body?(body)\n @log.debug \"Null test for body\"\n return true\n end",
"def valid?(signature, payload)\n Sandal::Util.jwt_strings_equal?(sign(payload), signature)\n end",
"def check_signature(secret)\n digest = OpenSSL::Digest::SHA256.new\n expected = OpenSSL::HMAC.hexdigest(digest, secret, @body)\n if @signature == expected\n return true\n else\n Pusher.logger.warn \"Received WebHook with invalid signature: got #{@signature}, expected #{expected}\"\n return false\n end\n end",
"def hash_body?\n BODY_HASH_METHODS.key?(signature_method) && !form_encoded? &&\n (@attributes.key?('hash_body?') ? @attributes['hash_body?'] : true)\n end",
"def valid_signature?(signature, data)\n generate_signature(data) == signature\n end",
"def has_valid_signature?\n Adyen::HPP::Signature.verify(params, shared_secret)\n end",
"def valid_payload?(payload)\n signature = Base64.decode64(payload[:user][:signature])\n\n signature == expected_signature(payload)\n end",
"def valid?\n buffer = body.dup\n buffer << api_key\n expected = Digest::SHA1.hexdigest(buffer)\n expected == signature\n end",
"def valid?\n # If payload does not contain the sha_sign definitely return false.\n return false unless payload.sha_sign\n\n signature == payload.sha_sign\n end",
"def signature_line?(line)\n line =~ SIGNATURE_REGEX || line_is_signature_name?(line)\n end",
"def signature?\n SIGNATURES.include?(@simplified.downcase)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gives the content of a challenge response given the challenge body. | def challenge_response(challenge_code)
{
:body => challenge_code,
:status => 200
}
end | [
"def challenge\n decode_challenge\n respond\n end",
"def challenge\n @challenge = decode_challenge\n respond\n end",
"def respond_to_challenge(request, response)\n authenticate_header = response['www-authenticate'].downcase\n authenticate_header.sub!(/^digest /, '')\n\n @authentication_params = authenticate_header.split(\", \").inject({}) { |h, field|\n key, value = field.split(\"=\")\n h[key] = value.gsub(/^\"|\"$/, '') # strip quotes.\n \n h\n }\n add_cookies!(request)\n authenticate_request!(request)\n request.each{|k,v| puts \"#{k}: #{v}\" }\n # Resend the request\n @client.request(request)\n end",
"def getchallenge\n query = {\n operation: :getchallenge,\n username: @username\n }\n res = get_request query\n res['result']['token']\n end",
"def extract_content\n success? ? response.body : ''\n end",
"def handle_reply(header, content)\n end",
"def parse_response output\n has_meta = false\n headers = {'content-type' => 'text/plain'}\n status = 200\n headers, status, meta_size = Oaf::Util.parse_http_meta output\n lines = output.split(\"\\n\")\n body = lines.take(lines.length - meta_size).join(\"\\n\")+\"\\n\"\n [headers, status, body]\n end",
"def get_response(request)\n socket = TCPSocket.open(@host, @port)\n socket.print(request)\n response = socket.read\n \n headers, body = response.split(\"\\r\\n\\r\\n\", 2)\n response_code, response_message = headers.split(\"\\r\\n\")[0].split(\" \")[1], headers.split(\"\\r\\n\")[0].split(\" \")[2..-1].join(\" \")\n if response_code =~ /^2\\d\\d/\n print body # And display it\n else\n puts \"#{response_code} #{response_message}\"\n end\n end",
"def get_response(key, challenge)\n return false if !key or !challenge\n if $SAFE >= 1 && (key.tainted? || challenge.tainted?) then\n raise SecurityError\n end\n\n encstr = RSA::encrypt(key, RSA::PRIVATE_KEY, challenge)\n return [encstr].pack('m').gsub(\"\\n\", \"\")\n end",
"def response(label)\n ensure_response(label)\n response_body_for(label)\n end",
"def get_challenge(challenge_type, client_ip=nil, client_ua=nil)\n params = get_params(client_ip, client_ua)\n params['challenge_type'] = challenge_type\n post(params, 'get_challenge')\n end",
"def challenge; end",
"def response_example(sample_name)\n content = response_example_path(sample_name).read\n content.stub(:body).and_return(content)\n content\n end",
"def create_get_otp_authentication_challenge(body)\n\n # prepare query url\n _query_builder = Configuration.base_uri.dup\n _query_builder << '/rest/tag/auth/otp/request'\n _query_url = APIHelper.clean_url _query_builder\n\n # prepare headers\n _headers = {\n 'accept' => 'application/json',\n 'content-type' => 'application/json; charset=utf-8'\n }\n\n # prepare and execute HttpRequest\n _request = @http_client.post _query_url, headers: _headers, parameters: body.to_json\n BasicAuth.apply(_request)\n _context = execute_request(_request)\n\n # validate response against endpoint and global error codes\n if _context.response.status_code == 400\n raise APIException.new 'Unexpected error in API call. See HTTP response body for details.', _context\n elsif _context.response.status_code == 401\n raise APIException.new '', _context\n elsif _context.response.status_code == 404\n raise APIException.new 'Unexpected error in API call. See HTTP response body for details.', _context\n end\n validate_response(_context)\n\n # return appropriate response type\n decoded = APIHelper.json_deserialize(_context.response.raw_body)\n return GetOTPAuthenticationChallengeResponseModel.from_hash(decoded)\n end",
"def challenge_reply(challenge2)\n vprint_status('Sending challenge reply (ciphertext)')\n h = Digest::SHA1.hexdigest(challenge2)\n msg = \"3 #{h.upcase}\\n\"\n @encryption_queue.push(msg)\n handle_write\n end",
"def content_from_response(response)\n body = JSON.parse(response.body)\n \n case body['code']\n when 'success' then body['content']\n when 'validation_error' then raise( ValidationError, body['content'] )\n else; false # System error on SnapSearch; Nothing we can do # TODO: Raise exception?\n end\n end",
"def response(env)\n response = Colloquy::Response.new\n\n begin\n parameters = {}\n parameters = validate_request(env)\n parameters = sanitize_parameters(parameters)\n logger.debug \"REQUEST flow: #{parameters[:flow_name]}, msisdn: #{parameters[:msisdn]}, \\\n session_id: #{parameters[:session_id]}, input: #{parameters[:input]}, other: #{parameters[:params].inspect}\"\n rescue Exception => e\n logger.error \"Exception #{e.inspect} when trying to validate request flow: #{parameters[:flow_name]}, \\\n msisdn: #{parameters[:msisdn]}, session_id: #{parameters[:session_id]}, input: #{parameters[:input]}\"\n logger.debug \"#{e.backtrace.inspect}\"\n logger.info 'Responding with default error message'\n\n response = Colloquy::Response.new(Colloquy::Renderer::DEFAULT_ERROR_MESSAGE)\n response.flow_state = :notify\n end\n\n response = @renderer.apply(parameters[:flow_name], parameters[:msisdn], parameters[:session_id], parameters[:input], parameters[:params]) if response.empty?\n\n body = case parameters[:params][:accept]\n when 'text/plain'\n response.to_s\n else\n Yajl.dump({ response: response, flow_state: response.flow_state })\n end\n\n [200, {}, body]\n end",
"def response\n response_body = body\n response_status = status\n lambda do\n status response_status\n json response_body\n end\n end",
"def raw_body\n @response.body\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /article_comments/1 GET /article_comments/1.xml | def show
@article_comment = ArticleComment.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @article_comment }
end
end | [
"def show\n @article_comments = Admin::ArticleComments.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @article_comments }\n end\n end",
"def index\n params[:article_id].present? ? @comments = Article.find(params[:article_id]).comments : @comments = Comment.all\n\n respond_to do |format|\n format.json { render json: @comments }\n format.xml { render xml: @comments }\n end\n end",
"def comments\n @article = Article.find(params[:id])\n @comments = @article.comments\n\n respond_to do |format|\n format.html \n format.json { render json: @comments, status: :ok }\n end\n end",
"def show\n @article_comment_reply = ArticleCommentReply.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @article_comment_reply }\n end\n end",
"def index\n \n @comments = @network.comments.find(:all, :order => 'created_at DESC')\n @comment_pages = @comments.paginate :page => params[:page], :per_page => 15\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @comments }\n end\n end",
"def show\n @ccomment = Ccomment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @ccomment }\n end\n end",
"def new\n @article_comments = Admin::ArticleComments.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article_comments }\n end\n end",
"def show\n @newcomment = Newcomment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @newcomment }\n end\n end",
"def new\n @article_comment = ArticleComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article_comment }\n end\n end",
"def index\n @question_comments = QuestionComment.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @question_comments }\n end\n end",
"def show\n @comment = @activity.comments.find(params[:id])\n respond_to do |format|\n format.html\n format.xml { render :xml => @comment.to_xml(:include => :user, :except => NB_CONFIG['api_exclude_fields']) }\n format.json { render :json => @comment.to_json(:include => :user, :except => NB_CONFIG['api_exclude_fields']) }\n end\n end",
"def show\n @question_comment = QuestionComment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @question_comment }\n end\n end",
"def show\n challenge = Challenge.find(params[:challenge_id])\n @comment = challenge.comments.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @comment }\n end\n end",
"def index\n @comments = Comment.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @hiring_comments }\n end\n end",
"def show\n @entry_comment = EntryComment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @entry_comment }\n end\n end",
"def comments tag\n path = \"/comment/#{tag}/show.xml\"\n doc = http_get path\n \n comments = []\n xpath(doc, \"//comment\").each do |entry|\n comments << Comment.new(:xml => entry)\n end\n comments\n end",
"def show\n @action_item_comment = ActionItemComment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @action_item_comment }\n end\n end",
"def index\n @action_item_comments = ActionItemComment.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @action_item_comments }\n end\n end",
"def index\n @postsale_comments = PostsaleComment.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @postsale_comments }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /article_comments/new GET /article_comments/new.xml | def new
@article_comment = ArticleComment.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @article_comment }
end
end | [
"def new\n @newcomment = Newcomment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @newcomment }\n end\n end",
"def new\n @comment = @story.comments.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @comment }\n end\n end",
"def new\n @article_comments = Admin::ArticleComments.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article_comments }\n end\n end",
"def new\n @comment = Comment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @comment }\n end\n end",
"def new\n @comment_node = CommentNode.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @comment_node }\n end\n end",
"def new\n @ccomment = Ccomment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @ccomment }\n end\n end",
"def new\n @topic_comment = TopicComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @topic_comment }\n end\n end",
"def new\n @entry_comment = EntryComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @entry_comment }\n end\n end",
"def new\n @article_comment = ArticleComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @article_comment }\n end\n end",
"def new\n @scomment = Scomment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @scomment }\n end\n end",
"def new\n @blog_comment = BlogComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @blog_comment }\n end\n end",
"def new\n @question_comment = QuestionComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @question_comment }\n end\n end",
"def new\n @commentary = Commentary.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @commentary }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @comment_type }\n end\n end",
"def new\n @idea_comment = IdeaComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @idea_comment }\n end\n end",
"def new\n @node_comment = NodeComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @node_comment }\n end\n end",
"def new\n @commentaire = Commentaire.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @commentaire }\n end\n end",
"def new\n @article = Article.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article }\n end\n end",
"def new\n @article = Article.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /article_comments POST /article_comments.xml | def create
@article = Article.find(params[:article_id])
@article_comment = @article.article_comments.build()
@article_comment.content = params[:replyContent]
respond_to do |format|
if @article_comment.save
#format.html { redirect_to @article, :anchor => 'comment' }
format.html { redirect_to(article_path(@article, :anchor => "comment", :page=>1)) }
format.xml { render :xml => @article_comment, :status => :created, :location => @article_comment }
else
format.html { redirect_to(@article) }
format.xml { render :xml => @article_comment.errors, :status => :unprocessable_entity }
end
end
end | [
"def create\n @article_comments = Admin::ArticleComments.new(params[:article_comments])\n\n respond_to do |format|\n if @article_comments.save\n flash[:notice] = 'Admin::ArticleComments was successfully created.'\n format.html { redirect_to(@article_comments) }\n format.xml { render :xml => @article_comments, :status => :created, :location => @article_comments }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @article_comments.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n\t\t@article = Article.find(params[:article_id])\n\t\t@comment = @article.comments.create(comment_params)\n\t\tredirect_to article_path(@article)\n\tend",
"def create\n @articlecomment = Articlecomment.new(articlecomment_params)\n\n respond_to do |format|\n if @articlecomment.save\n format.html do\n redirect_to @articlecomment,\n notice: 'Comment was successfully created.'\n end\n format.json { render :show, status: :created, location: @article }\n else\n format.html { render :new }\n format.json do\n render json: @articlecomment.errors, status: :unprocessable_entity\n end\n end\n end\n end",
"def create\n @comment_node = CommentNode.new(params[:comment_node])\n\n respond_to do |format|\n if @comment_node.save\n format.html { redirect_to @comment_node, :notice => 'Comment node was successfully created.' }\n format.json { render :json => @comment_node, :status => :created, :location => @comment_node }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @comment_node.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @article = Article.find(params[:article_id])\n\n @comment = @article.comments.create(comment_params)\n @comment.user = current_user\n @comment.save\n redirect_to article_path(@article)\n end",
"def create\n @node_comment = NodeComment.new(params[:node_comment])\n\n respond_to do |format|\n if @node_comment.save\n format.html { redirect_to @node_comment, notice: 'Node comment was successfully created.' }\n format.json { render json: @node_comment, status: :created, location: @node_comment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @node_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def comments\n @article = Article.find(params[:id])\n @comments = @article.comments\n\n respond_to do |format|\n format.html \n format.json { render json: @comments, status: :ok }\n end\n end",
"def new\n @article_comment = ArticleComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article_comment }\n end\n end",
"def create\n @article_comment = ArticleComment.find(params[:comment_id])\n @article_comment_reply = @article_comment.article_comment_replies.build()\n @article_comment_reply.content = params[:commentReplyContent]\n\n respond_to do |format|\n if @article_comment_reply.save\n # format.html { redirect_to(@article_comment_reply, :notice => 'Article comment reply was successfully created.') }\n format.html { redirect_to(article_path(@article_comment.article, :anchor => \"comment_\"+@article_comment.id.to_s)) }\n format.js # 指向一个AJAX\n format.xml { render :xml => @article_comment_reply, :status => :created, :location => @article_comment_reply }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @article_comment_reply.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def comment\n @article = Article.find( params[:article_id] )\n @user = User.find( params[:my_id] )\n\n comment = @article.comments.create\n comment.user_id = params[:my_id]\n comment.comment = params[:comment_body]\n comment.save\n\n culc_point\n\n render json: { result: true }\n end",
"def new\n @article_comments = Admin::ArticleComments.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @article_comments }\n end\n end",
"def create(params)\n\nxml =<<XML\n<entry xmlns=\"http://purl.org/atom/ns#\">\n <title>#{params[:title]}</title>\n <link rel=\"related\" type=\"text/html\" href=\"#{params[:url]}\" />\n <summary type=\"text/plain\">#{params[:comment]}</summary>\n</entry>\nXML\n\n post('/post', xml)\n end",
"def create\n @postsale_comment = PostsaleComment.new(params[:postsale_comment])\n\n respond_to do |format|\n if @postsale_comment.save\n format.html { redirect_to([@postsale_comment], :notice => 'Postsale comment was successfully created.') }\n format.xml { render :xml => @postsale_comment, :status => :created, :location => @postsale_comment }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @postsale_comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def post_comment tag, comment\n tag_params = (tag.is_a? String) ? { :the_tag => tag } : {:tag_id => (tag.is_a? Tag) ? tag.id : tag }\n tag_params.merge! :the_comment => comment\n path = \"/comment/create.xml\"\n doc = http_post path, tag_params\n Comment.new(:xml => doc)\n end",
"def create\n \tnew_comment = params.require(:comment).permit(:body)\n \tpost = Post.find(params[:post_id])\n \tcomment = post.comments.create(new_comment)\n\n \tredirect_to post_comment_path(post, comment)\n end",
"def create\n @comment = Comment.new(params[:comment])\n\n respond_to do |format|\n if @comment.save\n format.html { redirect_to admins_comment_path(@comment, :notice => 'Comment was successfully created.') }\n format.xml { render :xml => @comment, :status => :created, :location => @comment }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @comment = @story.comments.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @comment }\n end\n end",
"def create\n comment = Comment.new(params[:comment])\n @entry.comments << comment if comment.valid?\n respond_with(comment, location: redirect_to_index)\n end",
"def create\n @postsale_comment = PostsaleComment.new(params[:postsale_comment])\n\n respond_to do |format|\n if @postsale_comment.save\n format.html { redirect_to([:admin,@postsale_comment], :notice => 'Postsale comment was successfully created.') }\n format.xml { render :xml => @postsale_comment, :status => :created, :location => @postsale_comment }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @postsale_comment.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /article_comments/1 PUT /article_comments/1.xml | def update
@article_comment = ArticleComment.find(params[:id])
respond_to do |format|
if @article_comment.update_attributes(params[:article_comment])
format.html { redirect_to(@article_comment, :notice => 'Article comment was successfully updated.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @article_comment.errors, :status => :unprocessable_entity }
end
end
end | [
"def update\n @article_comments = Admin::ArticleComments.find(params[:id])\n\n respond_to do |format|\n if @article_comments.update_attributes(params[:article_comments])\n flash[:notice] = 'Admin::ArticleComments was successfully updated.'\n format.html { redirect_to(@article_comments) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @article_comments.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = Comment.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n format.xml { head :ok }\n format.json { head :ok } \n else\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n format.json { render :json => @comment.errors, :status => :unprocessable_entity } \n end\n end\n end",
"def update\n @article_comment = ArticleComment.find(params[:id])\n\n respond_to do |format|\n if @article_comment.update_attributes(params[:article_comment])\n format.html { redirect_to @article_comment, notice: 'Article comment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @article_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = @post.comments.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n format.html { redirect_to(@post, @comment) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @comment_node = CommentNode.find(params[:id])\n\n respond_to do |format|\n if @comment_node.update_attributes(params[:comment_node])\n format.html { redirect_to @comment_node, :notice => 'Comment node was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @comment_node.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = Comment.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n format.html { redirect_to post_path(@comment.post, :anchor => \"comment_#{@comment.id}\"), :notice => t(\"messages.comments.updated\") }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @node_comment = NodeComment.find(params[:id])\n\n respond_to do |format|\n if @node_comment.update_attributes(params[:node_comment])\n format.html { redirect_to @node_comment, notice: 'Node comment was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @node_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = Comment.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n flash[:notice] = 'Comment was successfully updated.'\n format.html { redirect_to(@story) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = @item.comments.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n flash[:notice] = 'Comment was successfully updated.'\n format.html { redirect_to(@comment) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @article = Article.find(params[:id])\n \n respond_to do |format|\n if @article.update_attributes(params[:article])\n format.html { redirect_to article_url(@article) }\n format.xml { render :nothing => true }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @article.errors.to_xml } \n end\n end\n end",
"def update\n @api_v1_comment = @post.comments.find(params[:id])\n params[:comment].delete :created_at\n params[:comment].delete :updated_at\n respond_to do |format|\n if @api_v1_comment.update_attributes(params[:comment])\n format.html { redirect_to @api_v1_comment, notice: 'Comment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @api_v1_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @comment = Comment.find(params[:id])\n\n respond_to do |format|\n if @comment.update_attributes(params[:comment])\n flash[:notice] = 'comment was successfully updated.'\n format.html { redirect_to(@comment) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @article_comment_reply = ArticleCommentReply.find(params[:id])\n\n respond_to do |format|\n if @article_comment_reply.update_attributes(params[:article_comment_reply])\n format.html { redirect_to(@article_comment_reply, :notice => 'Article comment reply was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @article_comment_reply.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @question_comment = QuestionComment.find(params[:id])\n\n respond_to do |format|\n if @question_comment.update_attributes(params[:question_comment])\n format.html { redirect_to(@question_comment, :notice => 'Question comment was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @question_comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @idea_comment = IdeaComment.find(params[:id])\n\n respond_to do |format|\n if @idea_comment.update_attributes(params[:idea_comment])\n flash[:notice] = 'IdeaComment was successfully updated.'\n format.html { redirect_to(@idea_comment) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @idea_comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def test_update_comment\n remote_file_name = 'TestUpdateComment.docx'\n\n upload_file File.join(local_test_folder, local_file), remote_data_folder + '/' + remote_file_name\n\n request_comment_range_start = NewDocumentPosition.new({:NodeId => '0.3.0', :Offset => 0})\n request_comment_range_end = NewDocumentPosition.new({:NodeId => '0.3.0', :Offset => 0})\n request_comment = CommentUpdate.new({:RangeStart => request_comment_range_start, :RangeEnd => request_comment_range_end, :Initial => 'IA', :Author => 'Imran Anwar', :Text => 'A new Comment'})\n request = UpdateCommentRequest.new(name: remote_file_name, comment_index: 0, comment: request_comment, folder: remote_data_folder)\n\n result = @words_api.update_comment(request)\n assert_equal false, result.nil?\n end",
"def update\n @comment = Comment.find(params[:id])\n @comment.user_id = params[:user_id]\n @comment.announcement_id = params[:announcement_id]\n @comment.description = params[:description]\n @comment.save\n render json:@comment\n end",
"def update\n @action_item_comment = ActionItemComment.find(params[:id])\n\n respond_to do |format|\n if @action_item_comment.update_attributes(params[:action_item_comment])\n format.html { redirect_to(@action_item_comment, :notice => 'Action item comment was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @action_item_comment.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_comment\n @artifact_answer = ArtifactAnswer.find(params[:artifact_answer][:id])\n @artifact_answer.update_attributes(comment_params)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /article_comments/1 DELETE /article_comments/1.xml | def destroy
@article_comment = ArticleComment.find(params[:id])
@article_comment.destroy
respond_to do |format|
format.html { redirect_to(article_comments_url) }
format.xml { head :ok }
end
end | [
"def destroy\n Comment.delete_all(\"article_id = #{params[:id]}\")\n Article.find(params[:id]).destroy\n\n respond_to do |format|\n format.html { redirect_to(root_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @article_comments = Admin::ArticleComments.find(params[:id])\n @article_comments.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_article_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @article_comment = ArticleComment.find(params[:id])\n @article_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to article_comments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @comment = Comment.find(params[:id])\n @comment.destroy\n \n respond_to do |format|\n format.xml { head :ok }\n format.json { head :ok } \n end\n end",
"def destroy\n @postsale_comment = PostsaleComment.find(params[:id])\n @postsale_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(postsale_comments_url) }\n format.xml { head :ok }\n end\n end",
"def test_delete_comment\n\n comments = Comment.getAll()\n\n for comment in comments\n\n result = Comment.deleteComment(comment.commentId)\n\n assert_equal true, result\n \n end\n\n end",
"def destroy\n @article_comment_reply = ArticleCommentReply.find(params[:id])\n @article_comment_reply.destroy\n\n respond_to do |format|\n format.html { redirect_to(article_comment_replies_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @action_item_comment = ActionItemComment.find(params[:id])\n @action_item_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(action_item_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @postsale_comment = PostsaleComment.find(params[:id])\n @postsale_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_postsale_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @commentaire = Commentaire.find(params[:id])\n @commentaire.destroy\n\n respond_to do |format|\n format.html { redirect_to(commentaires_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @blog_comments = BlogComment.find(params[:id])\n @blog_comments.destroy\n\n respond_to do |format|\n format.html { redirect_to(blog_comments_url) }\n format.xml { head :ok }\n end\n end",
"def delete_comment(article_id, id)\n delete \"articles/#{article_id}/comments/#{id}\"\n end",
"def destroy\n @comment = Comment.find(params[:id])\n @comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @ccomment = Ccomment.find(params[:id])\n @ccomment.destroy\n\n respond_to do |format|\n format.html { redirect_to(ccomments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @comment_node = CommentNode.find(params[:id])\n @comment_node.destroy\n\n respond_to do |format|\n format.html { redirect_to comment_nodes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @node_comment = NodeComment.find(params[:id])\n @node_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to node_comments_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @question_comment = QuestionComment.find(params[:id])\n @question_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(question_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @idea_comment = IdeaComment.find(params[:id])\n @idea_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to(idea_comments_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @crawler_article = CrawlerArticle.find(params[:id])\n @crawler_article.destroy\n\n respond_to do |format|\n format.html { redirect_to(crawler_articles_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
add_student(db, "Bob") method to add a subject | def add_subject(database, name)
database.execute("INSERT INTO subjects (name) VALUES (?)", [name])
end | [
"def add_student_to_course(student_id, name, email)\n if Student.find_by_email(email)\n student = Student.find_by_email(email)\n else\n student = Student.new\n student.email = email\n student.student_id = student_id\n student.name = name\n student.university = Faculty.find(faculty_id).university\n end\n\n if self.surveys.count > 0\n add_student_to_course_surveys(student)\n end\n\n self.students << student\nend",
"def add_teacher(first, last, campus, veracross, email)\n puts(\"Adding #{first} #{last} #{campus} #{veracross} #{email}\")\n Student.create(\n first_name: first,\n last_name: last,\n campus: campus,\n veracross_id: veracross,\n email: email,\n teacher: true\n )\nend",
"def add_student(person)\n c = CoursePerson.new type: \"student\"\n c.person = person\n self.course_person << c\n self.save \n end",
"def add_subject\n \t@subject = Subject.find_by_name(params[:name])\n \tunless current_user.presenter.subjects.include?(@subject)\n current_user.presenter.subjects << @subject \n end\n \tredirect_to presenter_edit_subjects_path\n end",
"def add_grade(database, student_id, subject_id, grade)\r\n\tdatabase.execute(\"INSERT INTO grades (grade, student_id, subject_id) VALUES (?, ?, ?)\", [grade, student_id, subject_id])\r\nend",
"def add_single_student_to_course\n begin\n student = Student.find_by_email(params[:course][:student][:email])\n\n if student.nil?\n student = Student.new\n student.email = params[:course][:student][:email]\n\n student.student_id = params[:course][:student][:student_id]\n student.name = params[:course][:student][:name]\n student.university = Faculty.find(@course.faculty_id).university\n end\n\n if student.invalid?\n raise StandardError\n end\n\n @course.surveys.each do |survey|\n survey.students << student\n end\n\n @course.students << student\n\n rescue\n @course.errors.add(:student, 'is invalid!')\n return\n end\n end",
"def add_student\n course_id = params[:course_id]\n requires({'role' => ['admin','faculty'],'course_id'=>course_id})\n first = params[:first]\n last = params[:last]\n email = params[:email]\n role = params[:role]\n \n \n course = Course.find(course_id)\n\n student = User.find_by_email(email)\n unless student\n student = create_and_invite_user(first, last, email, 'student')\n end\n\n unless student\n render :text => 'Error adding student.'\n else\n if role == 'student'\n StudentInCourse.create(:user_id => student.id, :course_id => course.id)\n render :text => 'Student added successfully.'\n elsif role == 'ta'\n TaForCourse.create(:user_id => student.id, :course_id => course.id)\n render :text => 'TA added successfully.'\n end\n end\n end",
"def add_student\n name = get_entry(\"Enter name: \")\n mat_nr = get_entry(\"Enter matriculation number: \").to_i\n\n s = Person::Student.new(name, mat_nr)\n Menu.data_handler.add_person(s)\n puts \"Student with id #{s.id} added successfully.\".green\n end",
"def add_subject\n add_subject_button.click\n end",
"def add_student(new_student)\n @students << new_student\n end",
"def add_subject\n @sample = Sample.find(params[:sample_id])\n @sample.subject_id = params[:subject_id]\n \n respond_to do |format|\n if @sample.save\n format.html { redirect_to project_sample_set_sample_path(params[:project_id],params[:sample_set_id],@sample), notice: 'Subject was successfully added to sample.' }\n format.json { head :no_content }\n else\n flash[:error] = 'Could not add subject to sample' \n format.html { redirect_to sample_path(@sample)}\n format.json { render json: @sample.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_student(db, name, address, age, program)\n\tdb.execute(\"INSERT INTO school (name, address, age, program) VALUES (?, ?, ?, ?)\", [name, address, age, program])\nend",
"def add_new_student(school, name)\n\tschool[:students].push({:name => name})\nend",
"def add_student(school, new_student_name, new_student_grade, new_student_semester)\n school[:students].push(:name => new_student_name, \n :grade => new_student_grade, :semester => new_student_semester)\nend",
"def insert_subjects\r\n client = self.connect\r\n \r\n subjects = [\"english\", \"math\", \"history\", \"gelogy\", \"chemistry\",\r\n \"biology\", \"physics\", \"PE\", \"psychology\", \"economics\",\r\n \"under water basket weaving.\"]\r\n \r\n subjects.each do |subject|\r\n db_str = \"INSERT INTO subject (name) VALUES (\\\"\" + subject + \"\\\");\"\r\n result = client.query(db_str)\r\n end\r\n \r\n # close connection\r\n client.close\r\n \r\n return true\r\n end",
"def add_student=(student_id)\n return unless student_id.present?\n students << Student.find(student_id)\n end",
"def add_student(student)\r\n @students.push(student)\r\n end",
"def add_author(db,author)\r\n db.execute('INSERT INTO authors (author_name) VALUES (?)', author)\r\nend",
"def add_student(new_student, new_grade, new_semester, school_name)\n\tschool_name[:students].push({:name => new_student, :grade => new_grade, :semester => new_semester})\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
add_subject(db, "Math") add_subject(db, "English") add_subject(db, "Physics") add_subject(db, "Art") add_subject(db, "Chemistry") method to add a grade | def add_grade(database, student_id, subject_id, grade)
database.execute("INSERT INTO grades (grade, student_id, subject_id) VALUES (?, ?, ?)", [grade, student_id, subject_id])
end | [
"def add_grade(subject, score)\n grade = Grade.new(subject, score)\n if @grades[grade.subject]\n raise \"Sorry, you cannot add or change #{grade.subject} because it was already entered.\"\n end\n @grades[grade.subject] = grade.score\n end",
"def update_subject_grade(all_grades,cid)\n currentMark = mark_per(all_grades.sum(\"worth\"), all_grades.sum(\"courseMark\"))\n Subject.update(cid, :currentMark => currentMark)\n end",
"def add_subject(database, name)\r\n\tdatabase.execute(\"INSERT INTO subjects (name) VALUES (?)\", [name])\r\nend",
"def update_grade(database, student, subject, grade)\r\n\tstudent_id = get_student_id(database, student)\r\n\tsubject_id = get_subject_id(database, subject)\r\n\tdatabase.execute(\"UPDATE grades SET grade=? WHERE student_id=? AND subject_id=?\",[grade, student_id, subject_id])\r\nend",
"def create\n @subject_grade = SubjectGrade.new(subject_grade_params)\n\n respond_to do |format|\n if @subject_grade.save\n format.html { redirect_to @subject_grade, notice: 'Subject grade was successfully created.' }\n format.json { render :show, status: :created, location: @subject_grade }\n else\n format.html { render :new }\n format.json { render json: @subject_grade.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_student(name, grade, semester)\n\t\t @students << {:name => name, :grade => grade, :semester => semester}\n\t\tend",
"def add_subject\n add_subject_button.click\n end",
"def add_student(name, grade, semester)\n students << {:name => name, :grade => grade, :semester => semester}\n end",
"def add_grade_wise_student(students)\n system(\"clear\")\n grade = \"\"\n Student.display_grade\n loop do\n print \"Please Enter Grade :\"\n grade = gets.chomp.to_s\n break if check_valid_grade?(grade)\n end\n get_student_details(grade, students)\n end",
"def add_subject\n \t@subject = Subject.find_by_name(params[:name])\n \tunless current_user.presenter.subjects.include?(@subject)\n current_user.presenter.subjects << @subject \n end\n \tredirect_to presenter_edit_subjects_path\n end",
"def add_student(name, grade, semester)\n\t\tstudents.push( {:name => name, :grade => grade, :semester => semester} )\n\tend",
"def add_student(new_student, new_grade, new_semester, school_name)\n\tschool_name[:students].push({:name => new_student, :grade => new_grade, :semester => new_semester})\nend",
"def add_expert_grades(body)\n flash[:additional_info] = 'add expert grades'\n case params[:assignment_id]\n when '754' # expert grades of Wiki contribution (754)\n body.prepend('\"expert_grades\": {\"submission25030\":95,\"submission25031\":92,\"submission25033\":88,\"submission25034\":98,\"submission25035\":100,\"submission25037\":95,\"submission25038\":95,\"submission25039\":93,\"submission25040\":96,\"submission25041\":90,\"submission25042\":100,\"submission25046\":95,\"submission25049\":90,\"submission25050\":88,\"submission25053\":91,\"submission25054\":96,\"submission25055\":94,\"submission25059\":96,\"submission25071\":85,\"submission25082\":100,\"submission25086\":95,\"submission25097\":90,\"submission25098\":85,\"submission25102\":97,\"submission25103\":94,\"submission25105\":98,\"submission25114\":95,\"submission25115\":94},')\n end\n end",
"def add_course(db, add_course_cmd, course_title, programming_language, source,completion_status)\n db.execute(add_course_cmd, [course_title, programming_language, source,completion_status])\nend",
"def add_student(school, new_student_name, new_student_grade, new_student_semester)\n school[:students].push(:name => new_student_name, \n :grade => new_student_grade, :semester => new_student_semester)\nend",
"def insert_subjects\r\n client = self.connect\r\n \r\n subjects = [\"english\", \"math\", \"history\", \"gelogy\", \"chemistry\",\r\n \"biology\", \"physics\", \"PE\", \"psychology\", \"economics\",\r\n \"under water basket weaving.\"]\r\n \r\n subjects.each do |subject|\r\n db_str = \"INSERT INTO subject (name) VALUES (\\\"\" + subject + \"\\\");\"\r\n result = client.query(db_str)\r\n end\r\n \r\n # close connection\r\n client.close\r\n \r\n return true\r\n end",
"def addGrades(grade)\n @grades.push(grade)\n end",
"def add_grade(student, grade)\n if enrolled?(student)\n @grades[student] << grade\n return true\n else\n return false\n end\n end",
"def grade(student_name)\n frm.table(:class=>\"listHier lines nolines\").row(:text=>/#{Regexp.escape(student_name)}/).link(:text=>\"Grade\").click\n frm.frame(:id, \"grade_submission_feedback_comment___Frame\").td(:id, \"xEditingArea\").frame(:index=>0).wait_until_present\n AssignmentSubmission.new(@browser)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
add_grade(db, 1, 1, 80) method to update a grade | def update_grade(database, student, subject, grade)
student_id = get_student_id(database, student)
subject_id = get_subject_id(database, subject)
database.execute("UPDATE grades SET grade=? WHERE student_id=? AND subject_id=?",[grade, student_id, subject_id])
end | [
"def add_grade(database, student_id, subject_id, grade)\r\n\tdatabase.execute(\"INSERT INTO grades (grade, student_id, subject_id) VALUES (?, ?, ?)\", [grade, student_id, subject_id])\r\nend",
"def enter_grade(entry,grade)\n edit_uri=''\n entry.add_namespace('http://www.w3.org/2005/Atom')\n entry.add_namespace('gd','http://schemas.google.com/g/2005')\n entry.add_namespace('gs','http://schemas.google.com/spreadsheets/2006')\n entry.add_namespace('gsx','http://schemas.google.com/spreadsheets/2006/extended')\n entry.elements.each('link') do |link|\n edit_uri = entry.elements[\"link[@rel='edit']\"].attributes['href']\n end\n entry.elements.each(\"gsx:*[#{@column_id}]\") do |col|\n col.text=grade\n end\n @client.sps_client.put(edit_uri,entry.to_s)\n\t Gradebook::Utility::Logger.log(\"grades\",\"#{Time.now}\", \"ColumnID: #{@column_id}\", \"LastName:#{entry.elements['gsx:lastname'].text}\",\"FirstName:#{entry.elements['gsx:firstname'].text}\",\"Grade: #{grade}\",\"\\n\")\n end",
"def update_grade\n grade_entry_form = record\n grade_entry_student =\n grade_entry_form.grade_entry_students.find(params[:student_id])\n grade =\n grade_entry_student.grades.find_or_create_by(grade_entry_item_id: params[:grade_entry_item_id])\n\n grade.update(grade: params[:updated_grade])\n grade_entry_student.save # Refresh total grade\n grade_entry_student.reload\n render plain: grade_entry_student.get_total_grade\n end",
"def addGrades(grade)\n @grades.push(grade)\n end",
"def update_grade(user_id, course_id, gradebook_item_id, grade)\r\n relative_url = Path::USERS_COURSES_GRADEBOOK_GRADEBOOKITEMS_GRADE % [user_id, course_id, gradebook_item_id]\r\n put(relative_url, grade)\r\n end",
"def grade(student_id, project_name, section_id, grade)\n project = ProjectGrade.retrieve(student_id, project_name, section_id)\n if project == nil\n raise ProjectGrade.no_project_error\n else\n ActiveRecord::Base.connection.execute(\"\n INSERT INTO project_grades\n (student_id, project_name, section_id, grade, instructor_id)\n VALUES\n (#{student_id}, '#{project_name}', #{section_id}, #{grade}, #{instructor_id})\n ON DUPLICATE KEY UPDATE grade=#{grade}, instructor_id=#{instructor_id};\")\n return true\n end\n end",
"def enter_score(gb, assignment, score, name)\r\n \tgb.execute(\"UPDATE #{@course} \r\n \t SET #{assignment} = '#{score}' \r\n \t WHERE name = '#{name}'\")\r\n end",
"def add_grade(subject, score)\n grade = Grade.new(subject, score)\n if @grades[grade.subject]\n raise \"Sorry, you cannot add or change #{grade.subject} because it was already entered.\"\n end\n @grades[grade.subject] = grade.score\n end",
"def grade=(value)\n @grade = value\n end",
"def set_grade\n\t\t@grade = Grade.find(params[:id])\n\n\tend",
"def update\n sql = \"UPDATE students SET name = ?, grade = ? WHERE id = ?\"\n DB[:conn].execute(sql, self.name, self.grade, self.id)\n end",
"def update\n @grade = Grade.find(params[:id])\n\n if @grade.update(grade_params)\n head :no_content\n else\n render json: @grade.errors, status: :unprocessable_entity\n end\n end",
"def add_grade(student, grade)\n if enrolled?(student)\n @grades[student] << grade\n return true\n else\n return false\n end\n end",
"def store_grades gpa, gpa_percent\n ActiveRecord::Base.connection_pool.with_connection do \n gpa_percent = gpa_percent.merge({'gpa'=>gpa})\n gpa_percent = gpa_percent.to_json\n @repo.update(gpa: gpa_percent)\n end\n end",
"def update\n sql = <<-SQL\n UPDATE students SET name = ?, grade = ? WHERE id = ?\n SQL\n\n DB[:conn].execute(sql, self.name, self.grade, self.id)\n end",
"def commit_grades\n return unless will_commit_grades?\n\n grades = self.grades_for_scores scores\n Analysis.transaction do\n if submission.selected_for_grading?\n grades.each(&:save)\n end\n end\n end",
"def grade(*args)\n process_using(*args.unshift(:grade))\n end",
"def add_grades(new_grades)\n new_grades.each do |grade|\n grade_found = grades.detect do |stored_grade|\n stored_grade.with_indifferent_access[:sis_user_id] ==\n grade.with_indifferent_access[:sis_user_id]\n end\n grades << grade if grade_found.nil?\n end\n save! if grades_changed?\n end",
"def update\n sql = \"UPDATE students SET name = ?, grade = ? WHERE id = ?\"\n DB[:conn].execute(sql, self.name, self.grade, self.id)\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
p get_student_id(db, "Bob") method to get subject id | def get_subject_id(database, name)
subject = database.execute("SELECT * FROM subjects WHERE name=?",[name])
subject_id = subject[0][0].to_i
end | [
"def get_ID\n @studentID\n end",
"def student_id\n @net_ldap_entry[:berkeleyedustuid].first\n end",
"def student_id\r\n\t\t\treturn 51875531\r\n\t\tend",
"def subject_id\n raise ArgumentError, \"#subject_id not implemented for #{self.class}\"\n end",
"def student_id\n current_user.student.id \n end",
"def user_id(subject)\n client.user(subject).id\n end",
"def student_id\r\n\t\t\t \t51771125.51772509\r\n\t\t end",
"def school_id\n student = Student.find( self.session_student)\n student.school_id\n end",
"def detect_id(subject)\n case\n when Symbol === subject\n subject\n when subject.respond_to?(:to_param)\n subject.to_param\n when subject.respond_to?(:id)\n subject.id.to_s\n else\n \"#{subject}\"\n end\n end",
"def find_subject\n @subject = Subject.find(params[:id])\n end",
"def subject_id\n sso_session ? sso_session.name_id : nil\n end",
"def get_id(course_name, term, year)\n course = Course.find_by_name(course_name)\n semester = Semester.where(:term => term, :year => year).first\n if course and semester\n coursem = Coursem.where(:course_id => course.id, :semester_id => semester.id).first\n if coursem\n return coursem.id\n end\n end\n return nil\n\n end",
"def find_student\n Student.find(params[:id])\n end",
"def cur_student\n student\n end",
"def getTicketID(iSQL, iSubject)\n rTicketID = nil\n\n iSQL.query(\n \"select id\n from issues\n where\n subject = '#{iSubject}'\").each do |iRow|\n rTicketID = iRow[0]\n end\n # If the Ticket does not exist, error\n if (rTicketID == nil)\n log_err 'Ticket WEACE_Toolkit_Log does not exist.'\n raise RuntimeError, 'Ticket WEACE_Toolkit_Log does not exist.'\n end\n\n return rTicketID\n end",
"def get_student\n @student = Student.find(params[:student_id])\n end",
"def access_student\n student_id = self.student_user_id\n student = User.find_by_id(student_id)\n return student\n end",
"def get_student\n @student = Student.find(params[:student_id])\nend",
"def plain_text_student_id\n @student_id.to_s + \"|\" + utc_date_time;\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
display_students(db) method to display all subjects | def display_subjects(database)
subjects = database.execute("SELECT * FROM subjects")
subjects.each do |subject|
puts "ID: #{subject[0]} Subject: #{subject[1]}"
end
end | [
"def display_students\n Student.all.each do |student|\n puts \"#{student.name.upcase}\".colorize(:blue)\n puts \" location:\".colorize(:light_blue) + \" #{student.location}\"\n puts \" profile quote:\".colorize(:light_blue) + \" #{student.profile_quote}\"\n puts \" bio:\".colorize(:light_blue) + \" #{student.bio}\"\n puts \" twitter:\".colorize(:light_blue) + \" #{student.twitter}\"\n puts \" linkedin:\".colorize(:light_blue) + \" #{student.linkedin}\"\n puts \" github:\".colorize(:light_blue) + \" #{student.github}\"\n puts \" blog:\".colorize(:light_blue) + \" #{student.blog}\"\n puts \"----------------------\".colorize(:green)\n end\n end",
"def list_subjects\r\n #TODO only list subjects from my own schools \r\n #because then people can still it \r\n school = School.find(params[:nid]) rescue render_return\r\n render_return if !logged_in_user.see_course?(school)\r\n render_p 'course_display/course_selection',{'caller'=>params[:caller],'school'=>school}\r\n end",
"def show_students\n\tif @students.length <= 0\n\t\tputs \"No students found in student list\"\n\telse \n\t\tprint_header\n\t\tprint_students_list\n\t\tprint_footer\n\tend\nend",
"def student_displayer\n print_header\n print_students_list\n print_footer\nend",
"def print_students\r\n @students.each {|student| puts student}\r\n end",
"def index\n @user_subjects = UserSubject.all\n end",
"def index\n @university_subjects = UniversitySubject.all\n end",
"def index\n @course = Course.find(params[:course_id])\n @course_subjects = @course.subjects\n end",
"def subject_wise_students_report\n @subject = Subject.shod(params[:id])\n @batch = @subject.batch\n @exam_groups ||= @batch.result_published\n @students ||= @batch.students\n @general_setting = GeneralSetting.first\n render 'subject_wise_students_report', layout: false\n end",
"def print_student_list()\n puts \"Students:\"\n puts \"----------------\"\n @students.each do |student|\n puts \"#{student.name}, Cohort: #{student.cohort.to_s}\"\n end\n puts \"Overall, we have #{@students.count} students\"\n end",
"def display_students_db\n all_students = @conn.exec(\"SELECT * FROM students_db;\")\n #usp[e exec to pretty much do everything\n all_students.each do |student|\n student.each do |k,v|\n puts \"#{k}: #{v}\"\n puts all_students\n end\n end\nend",
"def index\n @class_subjects = ClassSubject.all\n end",
"def show_subjects\n @subject = Subject.find(params[:id])\n end",
"def index\n @subjects_users = SubjectsUser.all\n end",
"def index\n @program_subjects = ProgramSubject.all\n end",
"def index\n @students = Student.all\n render :index\n end",
"def index\n @pub_art_has_students = PubArtHasStudent.all\n end",
"def index\n @timetable_subjects = TimetableSubject.all\n end",
"def students\n Rollcall::Student.find_all_by_school_id schools\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
make sure the complete key is built using the options such as scope and count | def prepare_key(key, options)
complete_key = key
# if a scope is passed in options then build the full key
complete_key = options[:scope].present? ? "#{options[:scope].to_s}.#{complete_key}" : complete_key
# add the correct count suffix
if options[:count].present? && options[:count] == 1
complete_key = "#{complete_key}.one"
elsif options[:count].present? && options[:count] != 1
complete_key = "#{complete_key}.other"
end
complete_key
end | [
"def require_master_key=(_arg0); end",
"def reserve_key(key); end",
"def require_master_key; end",
"def build_key(options = {})\n options[:base] = options[:base]||self.class.to_s.downcase\n if (options[:key_pluralize_instances] == true ) || (options[:key_pluralize_instances] != false && self.class.key_pluralize_instances.present?)\n options[:base] = options[:key_plural]||self.class.key_plural||options[:base].pluralize\n end\n unique = self.send \"#{options[:key_unique]||self.class.key_unique}\".to_sym\n options[:unique] = unique unless unique == object_id\n self.class.build_key(options)\n end",
"def flexible_key; end",
"def idempotency_key=(_arg0); end",
"def build_acl_key()\n @acl_key ||= OldACLManager.build_acl_key(type, acl_id)\n end",
"def validate_key_uniqueness\n self.errors.add :key if Setting.find_by_context_and_key(self[:context], self[:key]).present?\n end",
"def build_requested\n end",
"def build_options(options, scope)\n options.each do |key, value|\n scope[key] = Thor::Option.parse(key, value)\n end\n end",
"def submit_missing_keys\n return if @missing_keys_by_sources.nil? or @missing_keys_by_sources.empty?\n register_keys(@missing_keys_by_sources)\n @missing_keys_by_sources = nil\n end",
"def key!\n @_key = true\n end",
"def initialize(key)\n key ||= Apicasso::Key.new\n cannot :manage, :all\n cannot :read, :all\n key.scope&.each do |permission, klasses_clearances|\n build_permissions(permission: permission, clearance: klasses_clearances)\n end\n end",
"def prepare key\n @key = key\n end",
"def max_key_width=(_arg0); end",
"def build_option(name, options, scope); end",
"def build_options(options, scope) #:nodoc:\n options.each do |key, value|\n scope[key] = Foreman::Thor::Option.parse(key, value)\n end\n end",
"def depend_on_env(key); end",
"def valid_build hash\n attrs = { project_id: project.id, branch: 'master' }.merge(hash)\n Build.create! attrs\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
You can't proxy nil or false, so remove those possibilities if they're in the type. | def proxy_simplify
ts = @types.dup.delete_if do |t|
t.is_a? NominalType and
(t.klass == NilClass or t.klass == FalseClass)
end
if ts.length == @types.length
then self
else if ts.length == 1
then ts.to_a[0]
else UnionType.new(ts)
end
end
end | [
"def upcasted?\n false\n end",
"def type_available\n type.property_names_to_types.map do |name, type|\n attr_accessor(name) unless method_defined?(name)\n alias_method(:\"#{name}?\", name) if type.excluding_null.is_a?(Type::Boolean)\n end\n end",
"def nonregular_type; end",
"def allow_missing?\r\n false\r\n end",
"def proxy_as_optional(model, var)\n proxy_as(model, var, true)\n end",
"def unsafe_munge(value)\n # downcase strings\n if value.respond_to? :downcase\n value = value.downcase\n end\n\n case value\n when *BooleanProperty.values_for_true\n :true\n when *BooleanProperty.values_for_false\n :false\n else\n fail \"expected a boolean value, got #{value.inspect}\"\n end\n end",
"def advanced_typecasting?\n @advanced_typecasting != false\n end",
"def http_proxy?; proxy? && [nil, :http].include?(@options[:proxy][:type]); end",
"def strict_value_coercions; end",
"def polymorphic(true_or_false = true)\n @expected_options[:polymorphic] = true_or_false\n self\n end",
"def be_truthy; end",
"def boolify(val)\n\nend",
"def forced_nil_inverse?; false; end",
"def attr_get_boolean(attr_type, strict = true)\n #This is a stub, used for indexing\n end",
"def replace_late_bound_types_with_built_in(types); end",
"def set_proxy_type(opts)\n opts = check_params(opts,[:types])\n super(opts)\n end",
"def be_falsey; end",
"def check_default_type!; end",
"def lenient_value_coercions; end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /hookups/1 DELETE /hookups/1.xml | def destroy
@hookup = Hookup.find(params[:id])
@hookup.destroy
respond_to do |format|
format.html { redirect_to(hookups_url) }
format.xml { head :ok }
end
end | [
"def delete_webhook\n send_request('deleteWebhook', {})\n end",
"def delete_hook(id)\n delete(\"/hooks/#{id}\")\n end",
"def destroy\n @hookup = Hookup.find(params[:id])\n @hookup.destroy\n\n respond_to do |format|\n format.html { redirect_to hookups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @hookup.destroy\n respond_to do |format|\n format.html { redirect_to hookups_url, notice: 'Hookup was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_webhook(webhook_id)\n start.uri('/api/webhook')\n .url_segment(webhook_id)\n .delete()\n .go()\n end",
"def remove_all_webhooks\n delete(\"/webhooks\")\n end",
"def webhook_del(id, url)\n _params = {:id => id, :url => url}\n return @master.call 'lists/webhook-del', _params\n end",
"def destroy\n @cup_hook.destroy\n respond_to do |format|\n format.html { redirect_to cup_hooks_url }\n format.json { head :no_content }\n end\n end",
"def remove_hipchat_webhooks hipchat, room_name, webhook_name\n hipchat.delete_webhooks_by_name room_name, webhook_name\nend",
"def delete_webhook(list_id, url)\n call(\"listWebhookDel\", list_id, url)\n end",
"def delete(webhook_id)\n @client.call(method: :delete, path: \"relay-webhooks/#{webhook_id}\")\n end",
"def delete\n require \"pact_broker/webhooks/triggered_webhook\"\n TriggeredWebhook.where(webhook: self).update(webhook_id: nil)\n super\n end",
"def delete(id)\n Mailgun.submit :delete, webhook_url(id)\n end",
"def destroy\n @say = Say.find(params[:id])\n @say.destroy\n\n respond_to do |format|\n format.html { redirect_to(says_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @trigger = Trigger.find(params[:id])\n @trigger.destroy\n\n respond_to do |format|\n format.html { redirect_to(triggers_url) }\n format.xml { head :ok }\n end\n end",
"def delete_webhook_on_kapp(kapp_slug, name, headers=default_headers)\n @logger.info(\"Deleting the #{name} webhook on the #{kapp_slug}\\ kapp.\")\n delete(\"#{@api_url}/kapps/#{kapp_slug}/webhooks/#{encode(name)}\", headers)\n end",
"def destroy\n @incoming_hook.destroy\n respond_to do |format|\n format.html { redirect_to incoming_hooks_url, notice: 'Incoming hook was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_webhook(target_url)\n Uploadcare::Webhook.delete(target_url)\n end",
"def destroy\n @hook.destroy\n respond_to do |format|\n format.html { redirect_to hooks_url, notice: 'Hook was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /product_groups/1 GET /product_groups/1.json | def show
@product_group = ProductGroup.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @product_group }
end
end | [
"def show\n @product_grouping = ProductGrouping.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product_grouping }\n end\n end",
"def index\n @product_product_groups = ProductProductGroup.all\n end",
"def get_product_groups(params, headers = {})\n request = {\n utoken: params[:utoken]\n }\n\n app_key = params[:app_key]\n get(\"v1/apps/#{app_key}/products_groups\", request, headers)\n end",
"def index\n render json: ProductAuth.where({group_id: params[:group_id]})\n end",
"def retrieve_groups()\n start.uri('/api/group')\n .get()\n .go()\n end",
"def retrieve_groups()\n start.uri('/api/group')\n .get()\n .go()\n end",
"def new\n @product_group = ProductGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_group }\n end\n end",
"def new\n @product_grouping = ProductGrouping.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_grouping }\n end\n end",
"def index\n @group_products = GroupProduct.all\n end",
"def index\n @cart_product_groups = CartProductGroup.all\n end",
"def show\n @item_group = ItemGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @item_group }\n end\n end",
"def show\n @grp = Grp.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @grp }\n end\n end",
"def show\n @product_group = ProductGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @product_group }\n end\n end",
"def show\n @pgroup = Pgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pgroup }\n end\n end",
"def show\n @group_category = GroupCategory.find(params[:id])\n @group_categories = GroupCategory.all\n @groups = @group_category.groups\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group_category }\n end\n end",
"def show\n @module_group = ModuleGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @module_group }\n end\n end",
"def index\n @product_subgroups = ProductSubgroup.all\n end",
"def show\n @agroup = Agroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @agroup }\n end\n end",
"def index\n load_group if products_params[:group_id]\n load_producer if products_params[:producer_id]\n\n products = ProductsCollection.new(group: @group, producer: @producer).relation\n\n render json: ProductsSerializer.new(products)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /product_groups/new GET /product_groups/new.json | def new
@product_group = ProductGroup.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @product_group }
end
end | [
"def new\n @product_grouping = ProductGrouping.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_grouping }\n end\n end",
"def new\n @group = scoped_groups.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end",
"def new\n @module_group = ModuleGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @module_group }\n end\n end",
"def new\n @group = Group.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end",
"def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end",
"def new\n @pgroup = Pgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pgroup }\n end\n end",
"def new\n @grp = Grp.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @grp }\n end\n end",
"def new\n @product_tree_group = ProductTreeGroup.new\n \n drop_breadcrumb('新增')\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_tree_group }\n end\n end",
"def new\n @item_group = ItemGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @item_group }\n end\n end",
"def new\n @breadcrumb = 'create'\n @professional_group = ProfessionalGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @professional_group }\n end\n end",
"def create\n @product_group = ProductGroup.new(params[:product_group])\n\n respond_to do |format|\n if @product_group.save\n format.html { redirect_to @product_group, notice: 'Product group was successfully created.' }\n format.json { render json: @product_group, status: :created, location: @product_group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @product_group = ProductGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @product_group }\n end\n end",
"def new\n @join_group = JoinGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @join_group }\n end\n end",
"def new\n @class_group = ClassGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @class_group }\n end\n end",
"def new\n @classgroup = Classgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @classgroup }\n end\n end",
"def new\n @title = t('view.customers_groups.new_title')\n @customers_group = CustomersGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @customers_group }\n end\n end",
"def new\n @groupaddrobj = Groupaddrobj.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @groupaddrobj }\n end\n end",
"def new\n @prpt_grp = PrptGrp.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @prpt_grp }\n end\n end",
"def new\n @variable_group = VariableGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @variable_group }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /product_groups POST /product_groups.json | def create
@product_group = ProductGroup.new(params[:product_group])
respond_to do |format|
if @product_group.save
format.html { redirect_to @product_group, notice: 'Product group was successfully created.' }
format.json { render json: @product_group, status: :created, location: @product_group }
else
format.html { render action: "new" }
format.json { render json: @product_group.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @product_group = ProductGroup.new(product_group_params)\n\n respond_to do |format|\n if @product_group.save\n format.html { redirect_to @product_group, notice: \"Product group was successfully created.\" }\n format.json { render :show, status: :created, location: @product_group }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_grouping = ProductGrouping.new(params[:product_grouping])\n\n respond_to do |format|\n if @product_grouping.save\n format.html { redirect_to @product_grouping, notice: 'Product grouping was successfully created.' }\n format.json { render json: @product_grouping, status: :created, location: @product_grouping }\n else\n format.html { render action: \"new\" }\n format.json { render json: @product_grouping.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_product_group = ProductProductGroup.new(product_product_group_params)\n\n respond_to do |format|\n if @product_product_group.save\n format.html { redirect_to @product_product_group, notice: 'Product product group was successfully created.' }\n format.json { render :show, status: :created, location: @product_product_group }\n else\n format.html { render :new }\n format.json { render json: @product_product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @group_product = GroupProduct.new(group_product_params)\n\n respond_to do |format|\n if @group_product.save\n format.html { redirect_to @group_product, notice: 'Group product was successfully created.' }\n format.json { render :show, status: :created, location: @group_product }\n else\n format.html { render :new }\n format.json { render json: @group_product.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_product_group(params, headers = {})\n request = {\n utoken: params[:utoken],\n group_name: params[:group_name]\n }\n\n app_key = params[:app_key]\n post(\"v1/apps/#{app_key}/products_groups\", request, headers)\n end",
"def create_product_group\n ProductGroup.create(:group_id => self.group_id, :product_id => self.product_id)\n return true\n end",
"def create\n @group_product = GroupProduct.new(group_product_params)\n\n respond_to do |format|\n if @group_product.save\n format.html { redirect_to @group_product, notice: 'Group product was successfully created.' }\n format.json { render action: 'show', status: :created, location: @group_product }\n else\n format.html { render action: 'new' }\n format.json { render json: @group_product.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_subgroup = ProductSubgroup.new(product_subgroup_params)\n\n respond_to do |format|\n if @product_subgroup.save\n format.html { redirect_to @product_subgroup, notice: 'Product subgroup was successfully created.' }\n format.json { render :show, status: :created, location: @product_subgroup }\n else\n format.html { render :new }\n format.json { render json: @product_subgroup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @cart_product_group = CartProductGroup.new(cart_product_group_params)\n\n respond_to do |format|\n if @cart_product_group.save\n format.html { redirect_to @cart_product_group, notice: 'Cart product group was successfully created.' }\n format.json { render :show, status: :created, location: @cart_product_group }\n else\n format.html { render :new }\n format.json { render json: @cart_product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_group = ProductGroup.new(params[:product_group])\n\n respond_to do |format|\n if @product_group.save\n format.html { redirect_to(@product_group, :notice => 'Product group was successfully created.') }\n format.xml { render :xml => @product_group, :status => :created, :location => @product_group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @product_group.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @product_tree_group = ProductTreeGroup.new product_tree_group_params\n\n respond_to do |format|\n if @product_tree_group.save\n format.html { redirect_to admin_product_tree_group_path(@product_tree_group), notice: '产品新建成功.' }\n format.json { render json: @product_tree_group, status: :created, location: @product_tree_group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @product_tree_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @group_params = group_params.to_json\n @reponse = HTTParty.post(\"https://rails-api-ipo.herokuapp.com/api/v1/groups.json\",\n :body => @group_params,\n :headers => { 'Content-Type' => 'application/json' } )\n respond_to do |format|\n format.html { redirect_to '/groups/'+(@reponse['id'].to_s), notice: 'Group was successfully created.' }\n end\n end",
"def create\n #logger.info \"Post parameters: #{params}\"\n @group = Group.new(name: params[:group][:name], expiration: params[:group][:expiration], owner: current_user)\n if @group.save\n @group.memberships.create!(user: current_user, admin: true)\n if params[:group][:users]\n params[:group][:users].each do |u|\n @group.memberships.create!(user: User.where(\"id = ? OR email = ?\", u[:id], u[:email]).first, admin:u[:admin])\n end\n end\n render json: @group, status: :created, location: @group\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end",
"def new_nodegroup(nodegroup_json)\n nodemgr_rest_call(\"POST\", \"classifier\", \"groups\", $credentials, id=\"\", nodegroup_json)\nend",
"def new\n @product_grouping = ProductGrouping.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_grouping }\n end\n end",
"def new\n @product_group = ProductGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product_group }\n end\n end",
"def create\n @item_group = ItemGroup.new(item_group_params)\n\n respond_to do |format|\n if @item_group.save\n format.html { redirect_to @item_group, notice: 'Item group was successfully created.' }\n format.json { render :show, status: :created, location: @item_group }\n else\n format.html { render :new }\n format.json { render json: @item_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @products_grupo = ProductsGrupo.new(products_grupo_params)\n\n respond_to do |format|\n if @products_grupo.save\n format.html { redirect_to @products_grupo, notice: 'Products grupo was successfully created.' }\n format.json { render :show, status: :created, location: @products_grupo }\n else\n format.html { render :new }\n format.json { render json: @products_grupo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @itemgroup = Itemgroup.new(itemgroup_params)\n\n respond_to do |format|\n if @itemgroup.save\n format.html { redirect_to @itemgroup, notice: 'Itemgroup was successfully created.' }\n format.json { render :show, status: :created, location: @itemgroup }\n else\n format.html { render :new }\n format.json { render json: @itemgroup.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /product_groups/1 PUT /product_groups/1.json | def update
@product_group = ProductGroup.find(params[:id])
respond_to do |format|
if @product_group.update_attributes(params[:product_group])
format.html { redirect_to @product_group, notice: 'Product group was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @product_group.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n respond_to do |format|\n if @product_group.update(product_group_params)\n format.html { redirect_to @product_group, notice: \"Product group was successfully updated.\" }\n format.json { render :show, status: :ok, location: @product_group }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @product_grouping = ProductGrouping.find(params[:id])\n\n respond_to do |format|\n if @product_grouping.update_attributes(params[:product_grouping])\n format.html { redirect_to @product_grouping, notice: 'Product grouping was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @product_grouping.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @product_group = ProductGroup.find(params[:id])\n\n respond_to do |format|\n if @product_group.update_attributes(params[:product_group])\n format.html { redirect_to(@product_group, :notice => 'Product group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @product_group.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n logger.info \"Put parameters: #{params.to_json}\"\n @group = Group.find(params[:id])\n\n if @group.update_attributes(params[:group])\n head :no_content\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @product_subgroup.update(product_subgroup_params)\n format.html { redirect_to @product_subgroup, notice: 'Product subgroup was successfully updated.' }\n format.json { render :show, status: :ok, location: @product_subgroup }\n else\n format.html { render :edit }\n format.json { render json: @product_subgroup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @cart_product_group.update(cart_product_group_params)\n format.html { redirect_to @cart_product_group, notice: 'Cart product group was successfully updated.' }\n format.json { render :show, status: :ok, location: @cart_product_group }\n else\n format.html { render :edit }\n format.json { render json: @cart_product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @group_product.update(group_product_params)\n format.html { redirect_to @group_product, notice: 'Group product was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group_product.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_group(group_id, request)\n start.uri('/api/group')\n .url_segment(group_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end",
"def update\n respond_to do |format|\n if @os_groups_group.update(os_groups_group_params)\n format.html { redirect_to @os_groups_group, notice: 'Os groups group was successfully updated.' }\n format.json { render :show, status: :ok, location: @os_groups_group }\n else\n format.html { render :edit }\n format.json { render json: @os_groups_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_group(group_id, request)\n start.uri('/api/group')\n .url_segment(group_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end",
"def update\n if @product_item_group.update(product_item_group_params)\n render :show, status: :ok, location: [:partner, @brand, @product_item_group]\n else\n render json: @product_item_group.errors, status: :unprocessable_entity\n end\n end",
"def update\n @item_group = ItemGroup.find(params[:id])\n\n respond_to do |format|\n if @item_group.update_attributes(params[:item_group])\n format.html { redirect_to @item_group, notice: 'Item group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_nodegroup(nodegroup_json, nodegroup_id)\n nodemgr_rest_call(\"POST\", \"classifier\", \"groups\", $credentials, id=nodegroup_id, nodegroup_json)\nend",
"def update\n @admin_group = Admin::Group.find(params[:id])\n\n respond_to do |format|\n if @admin_group.update_attributes(params[:admin_group])\n format.html { redirect_to @admin_group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @admin_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch_update\n group = @company.public_send(ScimRails.config.scim_groups_scope).find(params[:id])\n\n json_scim_missing_field_response(params) and return if patch_path.blank?\n\n if patch_path == :members\n case patch_operation.downcase\n when \"add\"\n add_members(group) if members_param.present?\n when \"remove\"\n remove_members(group) if members_param.present?\n end\n else\n update_attribute(group)\n end\n\n json_scim_response(object: group)\n end",
"def update\n set_feature_group\n respond_to do |format|\n if @feature_group.update(feature_group_params)\n format.html { redirect_to @product, notice: 'Feature group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @feature_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_group = ProductGroup.new(product_group_params)\n\n respond_to do |format|\n if @product_group.save\n format.html { redirect_to @product_group, notice: \"Product group was successfully created.\" }\n format.json { render :show, status: :created, location: @product_group }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @product_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @product_tree_group = ProductTreeGroup.find(params[:id])\n\n respond_to do |format|\n if @product_tree_group.update_attributes product_tree_group_params\n format.html { redirect_to admin_product_tree_group_path(@product_tree_group), notice: '产品修改成功.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @product_tree_group.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product_group = ProductGroup.new(params[:product_group])\n\n respond_to do |format|\n if @product_group.save\n format.html { redirect_to @product_group, notice: 'Product group was successfully created.' }\n format.json { render json: @product_group, status: :created, location: @product_group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @product_group.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /product_groups/1 DELETE /product_groups/1.json | def destroy
@product_group = ProductGroup.find(params[:id])
@product_group.destroy
respond_to do |format|
format.html { redirect_to product_groups_url }
format.json { head :no_content }
end
end | [
"def destroy\n @product_grouping = ProductGrouping.find(params[:id])\n @product_grouping.destroy\n\n respond_to do |format|\n format.html { redirect_to product_groupings_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @group_product.destroy\n respond_to do |format|\n format.html { redirect_to group_products_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @product_tree_group = ProductTreeGroup.find(params[:id])\n @product_tree_group.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_product_tree_groups_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n @product_group = ProductGroup.find(params[:id])\n @product_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(product_groups_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n group = Group.find(params[:id])\n group.destroy\n render json: {}\n end",
"def destroy\n @product_subgroup.destroy\n respond_to do |format|\n format.html { redirect_to product_subgroups_url, notice: 'Product subgroup was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item_group = ItemGroup.find(params[:id])\n @item_group.destroy\n\n respond_to do |format|\n format.html { redirect_to item_groups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @group_product.destroy\n respond_to do |format|\n format.html { redirect_to group_products_url, notice: 'Group product was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @admin_group = Admin::Group.find(params[:id])\n @admin_group.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_groups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @group = @dataset.groups.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to dataset_groups_url(@owner), flash: {success: t('app.msgs.success_deleted', :obj => t('mongoid.models.group.one'))} }\n format.json { head :no_content }\n end\n end",
"def destroy\n @api_v1_group.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_groups_url, notice: 'Group was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cart_product_group.destroy\n respond_to do |format|\n format.html { redirect_to cart_product_groups_url, notice: 'Cart product group was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @os_groups_group.destroy\n respond_to do |format|\n format.html { redirect_to os_groups_groups_url, notice: 'Os groups group was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @request_group.destroy\n respond_to do |format|\n format.html { redirect_to request_groups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @esol_group = EsolGroup.find(params[:id])\n @esol_group.destroy\n\n respond_to do |format|\n format.html { redirect_to esol_groups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n # @group = @hub.groups.get(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to hub_groups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @small_group.destroy\n respond_to do |format|\n format.html { redirect_to small_groups_url, notice: '' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @restaurant_group = Restaurant::Group.find(params[:id])\n @restaurant_group.destroy\n\n respond_to do |format|\n format.html { redirect_to restaurant_groups_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @resource_group.destroy\n \n respond_to do |format|\n format.html { redirect_to root_path }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
pass in a user_id and period in params and it will find the next or previous fab | def cycle_fab_by_period(direction, params)
user = User.find(params[:user_id])
current_fab = find_or_create_base_fab(user, params)
@fab = (direction == :forward) ? current_fab.exactly_next_fab : current_fab.exactly_previous_fab
end | [
"def determinate_next_page_by_period\n if vehicle_details('weekly_possible')\n select_period_dates_path(id: transaction_id)\n else\n daily_charge_dates_path(id: transaction_id)\n end\n end",
"def upcoming_fab\n fabs.find_or_build_this_periods_fab\n end",
"def one_user\n @user = User.find(params[:user_id])\n @date = (params[:date])? Date.parse(params[:date]) : Date.today\n \n next_3_days_date = @date + 3\n todos = DailyTodo.all(\n :conditions => [\"user_id = ? AND (date <= ? AND date >= ?)\", params[:user_id], next_3_days_date, @date - ONE_WEEK + 1],\n :order => 'date' # ASC because \"range\" below is increasing\n )\n range = (@date - ONE_WEEK + 1)..next_3_days_date\n \n @todos = if todos.empty?\n range.map do |date|\n DailyTodo.new(:user_id => @user.id, :date => date)\n end\n else\n i = 0\n range.map do |date|\n if i >= todos.size || date < todos[i].date\n DailyTodo.new(:user_id => @user.id, :date => date)\n else\n i += 1\n todos[i - 1]\n end\n end\n end\n \n @todos.reverse!\n end",
"def next_user\n redirect_to_next_object(:next, User, params[:id].to_s)\n end",
"def upcoming_fee\n if params[:search]\n @members = Member.search(params[:search]).paginate(:page => params[:page], :per_page => 15)\n else\n @membersall =Member.all\n @memberstotal = []\n @membersall.each do |member|\n if member.next_fee_date != nil\n if ((member.next_fee_date > Date.today && member.next_fee_date < (Date.today + 4.days)))\n @memberstotal << member\n end\n else\n @memberstotal << member\n end\n end\n @members = @memberstotal.paginate(:page => params[:page],:per_page => 15)\n end\n end",
"def prev_user\n redirect_to_next_object(:prev, User, params[:id].to_s)\n end",
"def prev_user # :norobots:\n redirect_to_next_object(:prev, User, params[:id].to_s)\n end",
"def next_user # :norobots:\n redirect_to_next_object(:next, User, params[:id].to_s)\n end",
"def filtra_acceso(current_usuario, pf, params = nil)\n pf\n end",
"def create\n @fab = @user.fabs.new(fab_params.merge(period: DateTime.now))\n\n respond_to do |format|\n if @fab.save\n format.html { redirect_to [@user, @fab], notice: 'Fab was successfully created.' }\n format.json { render :show, status: :created, location: @fab }\n else\n format.html { render :new }\n format.json { render json: @fab.errors, status: :unprocessable_entity }\n end\n end\n end",
"def pending_fee\n if params[:search]\n @members = Member.search(params[:search]).paginate(:page => params[:page], :per_page => 15)\n else\n @membersall = Member.all\n @memberstotal =[ ]\n @membersall.each do |member|\n if member.next_fee_date != nil \n if member.next_fee_date < Date.today \n \n @memberstotal << member\n end\n else\n @memberstotal << member\n end\n end\n @members = @memberstotal.paginate(:page => params[:page], :per_page => 15)\n end\n \n end",
"def future\n if @user\n @enrollments = @user.enrollments.future\n add_breadcrumb 'Future', future_user_enrollments_path(@user)\n end\n index\n end",
"def next_active_date\n\t\tget_active_date\n\tend",
"def schedule_paginate(date, period, page, reverse = false)\n\t\t\t\t\t\t \n\t\t\t\t\t\t# Correct page\n\t\t\t\t\t\tif !page.nil?\n\t\t\t\t\t\t\tpage = page.to_i\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tpage = 1\n\t\t\t\t\t\tend\n\n\t\t\t\t\t\t# Correct period\n\t\t\t\t\t\tif period.nil?\n\t\t\t\t\t\t\tperiod = \"week\"\n\t\t\t\t\t\tend\n\n\t\t\t\t\t\tif period == \"week\"\n\n\t\t\t\t\t\t\t# From\n\t\t\t\t\t\t\tfrom = date + (1 - date.cwday).days # Monday before date\n\n\t\t\t\t\t\t\t# Pagination\n\t\t\t\t\t\t\tif reverse == true\n\t\t\t\t\t\t\t\tfrom = from - (page - 1).week\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tfrom = from + (page - 1).week\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t# To\n\t\t\t\t\t\t\tto = from + 1.week\n\n\t\t\t\t\t\telsif period == \"month\"\n\n\t\t\t\t\t\t\t# From\n\t\t\t\t\t\t\tfrom = date + (1 - date.mday).days # First day of this month\n\n\t\t\t\t\t\t\t# Pagination\n\t\t\t\t\t\t\tif reverse == true\n\t\t\t\t\t\t\t\tfrom = from - (page - 1).month\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tfrom = from + (page - 1).month\n\t\t\t\t\t\t\tend\n\n\t\t\t\t\t\t\t# To\n\t\t\t\t\t\t\tto = from + 1.month\n\n\t\t\t\t\t\t end\n\n\t\t\t\t\t\t return [from, to, period, page]\n\t\t\t\t\tend",
"def referrals_for_period\n if params[:data]\n if params[:data]['start_date(1i)'].present?\n start_date = Date.new params[:data][\"start_date(1i)\"].to_i, params[:data][\"start_date(2i)\"].to_i, params[:data][\"start_date(3i)\"].to_i\n end_date = Date.new params[:data][\"end_date(1i)\"].to_i, params[:data][\"end_date(2i)\"].to_i, params[:data][\"end_date(3i)\"].to_i\n elsif params[:data][:start_date].present?\n start_date = Date.parse(params[:data][:start_date])\n end_date = Date.parse(params[:data][:end_date])\n else\n start_date = Date.parse(\"2010-01-01\")\n end_date = Date.parse(\"2020-01-01\")\n end\n else\n start_date = Date.parse(\"2010-01-01\")\n end_date = Date.parse(\"2020-01-01\")\n end\n\n\n refgroups = []\n\n referrals = Referral.by_dataset(@key.app_dataset_id)\n .where(created_at: start_date...end_date)\n\n refcounts = referrals.group(:referred_by).count\n refcounts.delete(nil)\n\n # sort each referrer group into it's own collection:\n refcounts.each do |k,v|\n referer = referrals.find_by(id: k)\n if referer\n # curl -X POST -H \"Content-Type: application/json\" -d '{\"api_key\" : \"42a8871d56d39ab3181a39cf95507ba6\", \"event_name\" : \"graduate_owed_fees\", \"user_id\" : \"2433\", \"outstanding_amount\" : '45.50', \"amount_earned_already\" : '30.00'}' http://evergrad.localhost.volcanic.co:3000/api/v1/event_services.json\n # @response = HTTParty.post('http://evergrad.localhost.volcanic.co:3000/api/v1/event_services.json', {:body => {event_name: 'graduate_owed_fees', api_key: @key.api_key, user_id: referer.user_id, outstanding_amount: '45.50', amount_earned_already: '30.00'}, :headers => { 'Content-Type' => 'application/json' }})\n ref = [\"#{referer.full_name} (#{v} Referrals)\", referrals.select{ |r| r.referred_by == k } ]\n refgroups << ref\n end\n end\n \n respond_to do |format|\n format.html {\n @referrals = refgroups\n render action: 'overview', layout: false\n }\n format.json { render json: {\n success: true, length: referrals.count, referrals: refgroups\n }\n }\n end\n end",
"def current_period_and_client\n Period.where([\"currentperiod = ? and client_id = ?\", \"t\", current_user.client_id]).first\n end",
"def nextTasks\n @user = current_user\n if current_user.admin? and params[:user_id]\n @user = current_user.company.users.find(params[:user_id])\n end\n\n html = render_to_string :partial => \"tasks/next_tasks_panel\", :locals => { :count => params[:count].to_i, :user => @user }\n render :json => { :html => html, :has_more => (@user.tasks.open_only.not_snoozed.count > params[:count].to_i) }\n end",
"def show\n date = params[:date] || Date.today\n\n \t@office = if params[:id] === \"next\"\n \t\tOffice.get_next(date.to_date)\n elsif params[:id] === \"previous\"\n Office.get_previous(date.to_date)\n \telse\n \t\tOffice.find(id)\n \tend\n\n render json: { error: \"No previous office found\" }, status: :not_found if !@office\n end",
"def get_user_progress(params)\n conditions = Marshal.load(Marshal.dump(params))\n\n if params[:recalculate]\n get_recalculated_milestones(params[:user_id], params[:time_unit_id], params[:module])\n end\n\n userQ = Querier.factory(User).select([:role, :time_unit_id, :avatar], [:organization_id]).where(conditions.slice(:user_id))\n userMilestoneQ = Querier.factory(UserMilestone).where(conditions.except(:module))\n userClassQ = Querier.factory(UserClass).select([:updated_at, :time_unit_id], [:user_id]).where(conditions)\n userExtracurricularActivityDetailQ = Querier.factory(UserExtracurricularActivityDetail).select([:updated_at, :time_unit_id], [:user_id]).where(conditions)\n userServiceHourQ = Querier.factory(UserServiceHour).select([:updated_at, :time_unit_id], [:user_id]).where(conditions)\n userTestQ = Querier.factory(UserTest).select([:updated_at, :time_unit_id], [:user_id]).where(conditions)\n userQ.set_subQueriers([userMilestoneQ, userClassQ, userExtracurricularActivityDetailQ,\n userServiceHourQ, userTestQ])\n\n conditions[:organization_id] = userQ.pluck(:organization_id).first\n\n organizationQ = Querier.factory(Organization).select([:name]).where(conditions.slice(:organization_id))\n timeUnitQ = Querier.factory(TimeUnit).select([:name, :id], [:organization_id]).where(conditions.slice(:organization_id))\n milestoneQ = Querier.factory(Milestone).where(conditions.slice(:organization_id, :time_unit_id))\n organizationQ.set_subQueriers([userQ, timeUnitQ, milestoneQ])\n\n view = organizationQ.view.first\n view[:enabled_modules] = EnabledModules.new.get_enabled_module_titles(conditions[:organization_id].to_i)\n\n #ToDo: Super hacky but the Querier doesn't allow me to get the objects I need\n # ** This needs to be changed **\n # ** UPDATE: This won't work - the milestone object is not a DOT object but all the\n # ** milestone classes assume a DOT object\n #Rails.logger.debug(\"*********** Milestones: #{view[:milestones]}\")\n #view[:milestones] = MilestoneFactory.get_milestone_objects_TEMPORARY(view[:milestones])\n\n return ReturnObject.new(:ok, \"Progress for user_id: #{params[:user_id]}, time_unit_id: #{params[:time_unit_id]}, module_title: #{params[:module]}.\", view)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
scores for each letter | def letter_scores
{ "A"=>1, "B"=>3, "C"=>3, "D"=>2,
"E"=>1, "F"=>4, "G"=>2, "H"=>4,
"I"=>1, "J"=>8, "K"=>5, "L"=>1,
"M"=>3, "N"=>1, "O"=>1, "P"=>3,
"Q"=>10, "R"=>1, "S"=>1, "T"=>1,
"U"=>1, "V"=>4, "W"=>4, "X"=>8,
"Y"=>4, "Z"=>10
}
end | [
"def individual_letter_scores\n @scores ||= begin\n # every letter starts with a default score\n scores = Array.new text.length, DEFAULT_LETTER_SCORE\n\n # first letter of each \"word\" gets a bonus:\n\n # start of string\n scores[0] += START_OF_WORD_BONUS\n\n # words start after a non-word character\n ss = StringScanner.new text\n while ss.scan_until /[^a-zA-Z0-9]/\n scores[ss.pos] += START_OF_WORD_BONUS if ss.pos < text.length\n end\n\n # words start with a capital letter\n ss.reset\n while ss.scan_until /[A-Z]/\n scores[ss.pos - 1] += START_OF_WORD_BONUS\n end\n\n scores\n end\n end",
"def score\n string.upcase.chars.sum do |char| \n LETTERS.include?(char) ? calculate_letter_score(char) : 0\n end\n end",
"def score\n @score = 0\n self.to_array.each do |letter|\n @score += TILE_VALUES[letter]\n end\n \"Your word score: #{@score}\"\n end",
"def scrabble_score(word)\n\nscores = {\n 'a' => 1, 'e' => 1, 'i' => 1, 'o' => 1,\n 'u' => 1, 'l' => 1, 'n' => 1, 'r' => 1,\n 's' => 1, 't' => 1, 'd' => 2, 'g' => 2,\n 'b' => 3, 'c' => 3, 'm' => 3, 'p' => 3,\n 'f' => 4, 'h' => 4, 'v' => 4, 'w' => 4,\n 'y' => 4, 'k' => 5, 'j' => 8, 'x' => 8,\n 'q' => 10, 'z' => 10\n}\n total = 0\n\n word.each_char do |letter|\n total += scores[letter]\n\n end\n\n total\n\nend",
"def score\n points = 0\n @score = user_word.each_char do |c|\n points += LETTER_VALUES[c.to_sym]\n end\n puts \"You'll score #{points} total points if you play #{user_word.downcase}.\"\n end",
"def score(name)\n name_score = 0\n name.each_char do |character|\n name_score = name_score + character_score(character)\n end\n name_score\nend",
"def score\n return word.chars.inject(0) { |total, c| total + VALUES[c.to_sym] }\n end",
"def scrabble_scorer(word)\n cleaned_word = word.to_s.strip.upcase\n score = 0\n return score if cleaned_word.empty?\n\n cleaned_word.chars.each do |letter|\n score += LETTER_VALUES.fetch(letter.to_sym)\n end\n\n score\nend",
"def score_word(word)\n # - Each letter within `word` has a point value. The number of points of each letter is summed up to represent the total score of `word`\n # - Each letter's point value is described in the table below\n # - If the length of the word is 7, 8, 9, or 10, then the word gets an additional 8 points\n # return int points\nend",
"def scrabble_score(word)\n points = { \"a\" => 1, \"b\" => 3, \"c\" => 3, \"d\" => 2, \"e\" => 1, \"f\" => 4, \"g\" => 2, \"h\" => 4, \"i\" => 1, \"j\" => 8, \"k\" => 5, \"l\" => 1, \"m\" => 3, \"n\" => 1, \"o\" => 1, \"p\" => 3, \"q\" => 10, \"r\" => 1, \"s\" => 1, \"t\" => 1, \"u\" => 1, \"v\" => 4, \"w\" => 4, \"x\" => 8, \"y\" => 4, \"z\" => 10 }\n word.chars.map { |letter| points[letter.downcase] }.reduce(:+)\nend",
"def calc_score(name)\n \n count = [0]*$alphabet.size\n count_letters(name, count)\n\n score = count.each_with_index {|val, idx| puts count[idx] *= $pscounts[idx]}.inject(:+)\n puts \"score: #{score}\"\n score = score / (name.size - name.count(' '))\n score *= 2 if $firstnames.include? name.downcase.split(' ').first\n\n score\nend",
"def word_score(word)\n score_table = { \"a\" => 1, \"b\" => 2, \"c\" => 3, \"d\" => 4, \"e\" => 5,\n \"f\" => 6, \"g\" => 7, \"h\" => 8, \"i\" => 9, \"j\" => 10,\n \"k\" => 11, \"l\" => 12, \"m\" => 13, \"n\" => 14, \"o\" => 15,\n \"p\" => 16, \"q\" => 17, \"r\" => 18, \"s\" => 19, \"t\" => 20,\n \"u\" => 21, \"v\" => 22, \"w\" => 23, \"x\" => 24, \"y\" => 25,\n \"z\" => 26 }\n score = 0\n # turn word into array of characters and their count, consecutively\n dissected = word.split(\"\").chunk{ |char| char }.map{ |char, count| [char, count.length.to_i] }\n dissected.each do |char, count|\n # if not repeated consecutively, add score directly from hash table\n if count == 1\n score += score_table[char]\n # if repeated consecutively, calculate score then add to score\n else\n score += score_table[char] ** count + score_table[char]\n end\n end\n puts \"score of \" + word + \": \" + score.to_s\nend",
"def get_character_score(char)\n value = @scores.find_all { |el| el[0] == char.upcase }\n score = value.size > 0 ? value[0][1]:0\n score + ((@score_bonus.include? char.upcase) ? 5 : 0)\n end",
"def scrabble(word)\n @scrabble_scores = { 'A' => 1, 'E' => 1, 'I' => 1, 'O' => 1, 'U' => 1, 'L' => 1, 'N' => 1, 'R' => 1, 'S' => 1, 'T' => 1, 'D' => 2, 'G' => 2, 'B' => 3, 'C' => 3, 'M' => 3, 'P' => 3, 'F' => 4, 'H' => 4, 'V' => 4, 'W' => 4, 'Y' => 4, 'K' => 5, 'J' => 8, 'X' => 8, 'Q' => 10, 'Z' => 10\n }\n\n score = 0;\n\n word.upcase.each_char do |letter|\n score += @scrabble_scores[letter]\n end\n puts score\nend",
"def scores(names)\n scores = []\n # go thru the name list\n names.each_index do |i|\n sum = 0\n names[i].each_char do |letter|\n # sum of the alphabetical value of all letters of a name\n value = letter.ord - 'A'.ord + 1\n sum += value\n # puts(\"char: #{letter}, value: #{value}\")\n end\n # puts(\"total: #{sum}, index: #{i}\")\n # multiply the sum with the alphabetical position of the name\n sum *= i + 1\n\n # retain that product in a list\n scores << sum\n end\n\n scores\nend",
"def high(string)\n letter_scores = ('a'..'z').zip(1..26).to_h\n words = string.split\n\n word_scores = words.map do |word|\n score = 0\n word.chars.each { |char| score += letter_scores[char]}\n score \n end\n words[word_scores.index(word_scores.max)]\nend",
"def letter_score(letter)\n ('a'..'z').include?(letter) ? (letter.ord - 'a'.ord + 1) : (letter.ord - 'A'.ord + 27)\n end",
"def word_ranking(str)\n \n # return empty string if it it empty \n if str.length == 0\n return str\n # else split str to an array of words\n else\n word_arr = str.split(\" \")\n end\n \n # map each letter to a score\n letter_score = {}\n letters = [*(\"a\"..\"z\")]\n letters.each.with_index do |x, index|\n letter_score[x] = index + 1\n end\n# return letter_score\n\n# calculate each word\n word_score = {}\n\n \n word_arr.each do |x|\n score = x.chars.collect {|letter| letter_score[letter] }.reduce(&:+)\n word_score[x] = score\n end\n\n return word_score.key(word_score.values.max)\n\nend",
"def hand_score(hand)\n\tpossible = {\n \t'A' => 4,\n \t'K' => 3,\n \t'Q' => 2,\n \t'J' => 1\n }\n \tscore = 0\n \thand.each_char { |letter|\n score += possible[letter.upcase]\n }\n\treturn score\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a Scalyr log endpoint. Create a Scalyr for a particular service and version. | def create_log_scalyr_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: LoggingScalyrApi.create_log_scalyr ...'
end
# unbox the parameters from the hash
service_id = opts[:'service_id']
version_id = opts[:'version_id']
# verify the required parameter 'service_id' is set
if @api_client.config.client_side_validation && service_id.nil?
fail ArgumentError, "Missing the required parameter 'service_id' when calling LoggingScalyrApi.create_log_scalyr"
end
# verify the required parameter 'version_id' is set
if @api_client.config.client_side_validation && version_id.nil?
fail ArgumentError, "Missing the required parameter 'version_id' when calling LoggingScalyrApi.create_log_scalyr"
end
allowable_values = ["none", "waf_debug", "null"]
if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])
fail ArgumentError, "invalid value for \"placement\", must be one of #{allowable_values}"
end
allowable_values = [1, 2]
if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])
fail ArgumentError, "invalid value for \"format_version\", must be one of #{allowable_values}"
end
allowable_values = ["US", "EU"]
if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])
fail ArgumentError, "invalid value for \"region\", must be one of #{allowable_values}"
end
# resource path
local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# HTTP header 'Content-Type'
content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])
if !content_type.nil?
header_params['Content-Type'] = content_type
end
# form parameters
form_params = opts[:form_params] || {}
form_params['name'] = opts[:'name'] if !opts[:'name'].nil?
form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?
form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?
form_params['format'] = opts[:'format'] if !opts[:'format'].nil?
form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?
form_params['region'] = opts[:'region'] if !opts[:'region'].nil?
form_params['token'] = opts[:'token'] if !opts[:'token'].nil?
form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'
# auth_names
auth_names = opts[:debug_auth_names] || ['token']
new_options = opts.merge(
:operation => :"LoggingScalyrApi.create_log_scalyr",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: LoggingScalyrApi#create_log_scalyr\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end | [
"def create_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.create_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.create_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.create_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = 
opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.create_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#create_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def initialize(version, base_url)\n @version = version\n @base_url = base_url \n @logger = Logger.new(STDERR)\n @endpoint_url = \"#{@base_url}/rpc/soap/jirasoapservice-v#{version}\"\n end",
"def update_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.update_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_https_name = opts[:'logging_https_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'logging_https_name' is set\n if @api_client.config.client_side_validation && logging_https_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_https_name' when calling LoggingHttpsApi.update_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https/{logging_https_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_https_name' + '}', CGI.escape(logging_https_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if 
!opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.update_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#update_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_log_splunk_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSplunkApi.create_log_splunk ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSplunkApi.create_log_splunk\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSplunkApi.create_log_splunk\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/splunk'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['use_tls'] = opts[:'use_tls'] if !opts[:'use_tls'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSplunkResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSplunkApi.create_log_splunk\",\n :header_params => header_params,\n :query_params 
=> query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSplunkApi#create_log_splunk\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_log_loggly_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogglyApi.create_log_loggly ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogglyApi.create_log_loggly\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogglyApi.create_log_loggly\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/loggly'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogglyResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogglyApi.create_log_loggly\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingLogglyApi#create_log_loggly\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_log_logshuttle_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogshuttleApi.create_log_logshuttle ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogshuttleApi.create_log_logshuttle\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogshuttleApi.create_log_logshuttle\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/logshuttle'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogshuttleResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogshuttleApi.create_log_logshuttle\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingLogshuttleApi#create_log_logshuttle\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_log_https(opts = {})\n data, _status_code, _headers = create_log_https_with_http_info(opts)\n data\n end",
"def get_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.get_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_https_name = opts[:'logging_https_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.get_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.get_log_https\"\n end\n # verify the required parameter 'logging_https_name' is set\n if @api_client.config.client_side_validation && logging_https_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_https_name' when calling LoggingHttpsApi.get_log_https\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https/{logging_https_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_https_name' + '}', CGI.escape(logging_https_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.get_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#get_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def list_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.list_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.list_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.list_log_https\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Array<LoggingHttpsResponse>'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.list_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#list_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_scalyr_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingScalyrApi.update_log_scalyr ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_scalyr_name = opts[:'logging_scalyr_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n # verify the required parameter 'logging_scalyr_name' is set\n if @api_client.config.client_side_validation && logging_scalyr_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_scalyr_name' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"US\", \"EU\"]\n if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])\n fail ArgumentError, \"invalid value for \\\"region\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr/{logging_scalyr_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_scalyr_name' + '}', CGI.escape(logging_scalyr_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['region'] = opts[:'region'] if !opts[:'region'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n 
new_options = opts.merge(\n :operation => :\"LoggingScalyrApi.update_log_scalyr\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingScalyrApi#update_log_scalyr\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_log_sumologic_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSumologicApi.create_log_sumologic ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSumologicApi.create_log_sumologic\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSumologicApi.create_log_sumologic\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/sumologic'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSumologicResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSumologicApi.create_log_sumologic\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSumologicApi#create_log_sumologic\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_endpoint\n \"\"\n end",
"def delete_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.delete_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_https_name = opts[:'logging_https_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.delete_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.delete_log_https\"\n end\n # verify the required parameter 'logging_https_name' is set\n if @api_client.config.client_side_validation && logging_https_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_https_name' when calling LoggingHttpsApi.delete_log_https\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https/{logging_https_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_https_name' + '}', CGI.escape(logging_https_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'InlineResponse200'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.delete_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#delete_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_new_version\n version = Google::Apis::AppengineV1beta5::Version.new(\n :id => @version_id,\n :name => \"apps/#{@app_id}/services/#{@service_id}/versions/#{@version_id}\",\n :api_version => @version_info['api_version'],\n :runtime => @version_info['runtime'],\n :threadsafe => @version_info['threadsafe'],\n :handlers => @version_info['handlers'].map do |handler|\n if handler.key?('static_files')\n Google::Apis::AppengineV1beta5::UrlMap.new(\n :url_regex => handler['url'],\n :static_files => Google::Apis::AppengineV1beta5::StaticFilesHandler.new(\n :path => handler['static_files'],\n :upload_path_regex => \"#{@bucket_uri}/#{@uploaded_files[handler][:bucket]}\"))\n elsif handler.key?('static_dir')\n # TODO(nelsona): Add support to directories\n raise 'Uploading directories is not supported'\n elsif handler.key?('script')\n Google::Apis::AppengineV1beta5::UrlMap.new(\n :url_regex => handler['url'],\n :script => Google::Apis::AppengineV1beta5::ScriptHandler.new(\n :script_path => handler['script']))\n else\n raise 'Unknown handler tye'\n end\n end\n .compact,\n :deployment => {\n :files => Hash[*@uploaded_files.map do |_, path|\n [::File.basename(path[:filename]), Google::Apis::AppengineV1beta5::FileInfo.new(\n :source_url => \"#{@bucket_uri}/#{path[:bucket]}\")]\n end\n .flatten]\n }\n )\n\n STDERR.print 'Requesting create new version for the application...'\n new_version = @app_engine.create_app_service_version(\n @app_id, @service_id, version)\n STDERR.puts ' done.'\n\n @operation_id = new_version.name.split('/').last\n end",
"def endpoint_generator(api_host, api_version, ssl)\n ssl ? scheme = 'https' : scheme = 'http'\n if api_version\n \"#{scheme}://#{api_host}/#{api_version}\"\n else\n \"#{scheme}://#{api_host}\"\n end\n end",
"def create_service_version(opts = {})\n data, _status_code, _headers = create_service_version_with_http_info(opts)\n data\n end",
"def post_create\n response = self.class.post(\"/service/#{$service_id}/version/#{$service_version}/logging/sftp\", \n headers: { \"Fastly-Key\" => $key},\n body: { \"name\" => \"#{$name}\",\n \"address\" => \"#{$address}\",\n \"port\" => \"22\",\n \"format\" => \"#{$log_format}\",\n \"user\" => \"#{$user}\",\n \"secret_key\" => \"#{$secret_key}\",\n \"public_key\" => \"#{$public_key}\" })\n end",
"def create\n req = VCAP::Services::Api::BindingTokenRequest.decode(request_body)\n\n cfg = ServiceConfig.find_by_name(req.service_id)\n raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg\n raise CloudError.new(CloudError::FORBIDDEN) unless cfg.user_id == user.id\n\n tok = ::BindingToken.generate(\n :label => cfg.service.label,\n :service_config => cfg,\n :binding_options => req.binding_options\n )\n tok.save!\n\n resp = {\n :label => tok.label,\n :binding_token => tok.uuid\n }\n render_ok resp\n end",
"def update_log_loggly_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogglyApi.update_log_loggly ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_loggly_name = opts[:'logging_loggly_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'logging_loggly_name' is set\n if @api_client.config.client_side_validation && logging_loggly_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_loggly_name' when calling LoggingLogglyApi.update_log_loggly\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/loggly/{logging_loggly_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_loggly_name' + '}', CGI.escape(logging_loggly_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogglyResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogglyApi.update_log_loggly\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"API called: LoggingLogglyApi#update_log_loggly\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Update the Scalyr log endpoint. Update the Scalyr for a particular service and version. | def update_log_scalyr(opts = {})
data, _status_code, _headers = update_log_scalyr_with_http_info(opts)
data
end | [
"def update_log_scalyr_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingScalyrApi.update_log_scalyr ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_scalyr_name = opts[:'logging_scalyr_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n # verify the required parameter 'logging_scalyr_name' is set\n if @api_client.config.client_side_validation && logging_scalyr_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_scalyr_name' when calling LoggingScalyrApi.update_log_scalyr\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"US\", \"EU\"]\n if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])\n fail ArgumentError, \"invalid value for \\\"region\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr/{logging_scalyr_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_scalyr_name' + '}', CGI.escape(logging_scalyr_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['region'] = opts[:'region'] if !opts[:'region'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n 
new_options = opts.merge(\n :operation => :\"LoggingScalyrApi.update_log_scalyr\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingScalyrApi#update_log_scalyr\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.update_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_https_name = opts[:'logging_https_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'logging_https_name' is set\n if @api_client.config.client_side_validation && logging_https_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_https_name' when calling LoggingHttpsApi.update_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https/{logging_https_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_https_name' + '}', CGI.escape(logging_https_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if 
!opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.update_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#update_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_https(opts = {})\n data, _status_code, _headers = update_log_https_with_http_info(opts)\n data\n end",
"def update \n#--{{{\n init_logging\n updater = Updater::new self\n updater.update\n#--}}}\n end",
"def put_update\n response = self.class.put(\"/service/#{$service_id}/version/#{$service_version}/logging/sftp/#{$name}\", \n headers: { \"Fastly-Key\" => $key }, \n body: $put_form_data )\n end",
"def update_log_loggly_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogglyApi.update_log_loggly ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_loggly_name = opts[:'logging_loggly_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'logging_loggly_name' is set\n if @api_client.config.client_side_validation && logging_loggly_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_loggly_name' when calling LoggingLogglyApi.update_log_loggly\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/loggly/{logging_loggly_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_loggly_name' + '}', CGI.escape(logging_loggly_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogglyResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogglyApi.update_log_loggly\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"API called: LoggingLogglyApi#update_log_loggly\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_sumologic_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSumologicApi.update_log_sumologic ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_sumologic_name = opts[:'logging_sumologic_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n # verify the required parameter 'logging_sumologic_name' is set\n if @api_client.config.client_side_validation && logging_sumologic_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_sumologic_name' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/sumologic/{logging_sumologic_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_sumologic_name' + '}', CGI.escape(logging_sumologic_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSumologicResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSumologicApi.update_log_sumologic\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 
return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSumologicApi#update_log_sumologic\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_splunk_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSplunkApi.update_log_splunk ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_splunk_name = opts[:'logging_splunk_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSplunkApi.update_log_splunk\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSplunkApi.update_log_splunk\"\n end\n # verify the required parameter 'logging_splunk_name' is set\n if @api_client.config.client_side_validation && logging_splunk_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_splunk_name' when calling LoggingSplunkApi.update_log_splunk\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/splunk/{logging_splunk_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_splunk_name' + '}', CGI.escape(logging_splunk_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['token'] = opts[:'token'] if 
!opts[:'token'].nil?\n form_params['use_tls'] = opts[:'use_tls'] if !opts[:'use_tls'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSplunkResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSplunkApi.update_log_splunk\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSplunkApi#update_log_splunk\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_logshuttle_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogshuttleApi.update_log_logshuttle ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_logshuttle_name = opts[:'logging_logshuttle_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n # verify the required parameter 'logging_logshuttle_name' is set\n if @api_client.config.client_side_validation && logging_logshuttle_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_logshuttle_name' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/logshuttle/{logging_logshuttle_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_logshuttle_name' + '}', CGI.escape(logging_logshuttle_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogshuttleResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogshuttleApi.update_log_logshuttle\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 
return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingLogshuttleApi#update_log_logshuttle\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_logger_config(service_setup_data)\n driver = VaultDriver.from_secrets_file service_setup_data[:environment]\n service_name = service_setup_data['deployment']['service_name']\n logger = Syslogger.new(driver)\n if logger.check_record_exists(self)\n puts 'record already exists'\n return\n end\n result, error = logger.add_record_to_rsyslog(self)\n if result\n puts 'syslog updated!'\n else\n puts \"error #{error}\" unless error.nil?\n puts 'not updated'\n end\n end",
"def update\n # PUNDIT_REVIEW_AUTHORIZE\n # PUNDIT_CHECK_AUTHORIZE (did not find instance)\n # authorize @bundle_logger\n @dataservice_bundle_logger = Dataservice::BundleLogger.find(params[:id])\n\n respond_to do |format|\n if @dataservice_bundle_logger.update_attributes(params[:dataservice_bundle_logger])\n flash[:notice] = 'Dataservice::BundleLogger was successfully updated.'\n format.html { redirect_to(@dataservice_bundle_logger) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @dataservice_bundle_logger.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n msg = 'CLC servers do not support updates'\n raise NotImplementedError.new(msg)\n end",
"def update\n get_credentials\n begin\n response = resource[\"/update/#{app}\"].post(:apikey => @credentials[1], :frequency => options[:frequency])\n rescue RestClient::InternalServerError\n display \"An error has occurred.\"\n end\n display response.to_s\n end",
"def update_log_splunk(opts = {})\n data, _status_code, _headers = update_log_splunk_with_http_info(opts)\n data\n end",
"def create_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.create_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.create_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.create_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = 
opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.create_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#create_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update\n cloudformation(:update)\n end",
"def update\n respond_to do |format|\n if @log_csp_datum.update(log_csp_datum_params)\n format.html { redirect_to @log_csp_datum, notice: 'Log csp datum was successfully updated.' }\n format.json { render :show, status: :ok, location: @log_csp_datum }\n else\n format.html { render :edit }\n format.json { render json: @log_csp_datum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_log_scalyr_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingScalyrApi.create_log_scalyr ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingScalyrApi.create_log_scalyr\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingScalyrApi.create_log_scalyr\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"US\", \"EU\"]\n if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])\n fail ArgumentError, \"invalid value for \\\"region\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['region'] = opts[:'region'] if !opts[:'region'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingScalyrApi.create_log_scalyr\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"API called: LoggingScalyrApi#create_log_scalyr\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def activate\n STDERR.print \"Activating version #{@version_id}...\"\n service = @app_engine.get_app_service(@app_id, @service_id)\n service.split.allocations.clear\n service.split.allocations[@version_id] = 1\n @app_engine.patch_app_service(@app_id, @service_id, service, :mask => 'split')\n STDERR.puts ' done.'\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Update the Scalyr log endpoint. Update the Scalyr for a particular service and version. | def update_log_scalyr_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: LoggingScalyrApi.update_log_scalyr ...'
end
# unbox the parameters from the hash
service_id = opts[:'service_id']
version_id = opts[:'version_id']
logging_scalyr_name = opts[:'logging_scalyr_name']
# verify the required parameter 'service_id' is set
if @api_client.config.client_side_validation && service_id.nil?
fail ArgumentError, "Missing the required parameter 'service_id' when calling LoggingScalyrApi.update_log_scalyr"
end
# verify the required parameter 'version_id' is set
if @api_client.config.client_side_validation && version_id.nil?
fail ArgumentError, "Missing the required parameter 'version_id' when calling LoggingScalyrApi.update_log_scalyr"
end
# verify the required parameter 'logging_scalyr_name' is set
if @api_client.config.client_side_validation && logging_scalyr_name.nil?
fail ArgumentError, "Missing the required parameter 'logging_scalyr_name' when calling LoggingScalyrApi.update_log_scalyr"
end
allowable_values = ["none", "waf_debug", "null"]
if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])
fail ArgumentError, "invalid value for \"placement\", must be one of #{allowable_values}"
end
allowable_values = [1, 2]
if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])
fail ArgumentError, "invalid value for \"format_version\", must be one of #{allowable_values}"
end
allowable_values = ["US", "EU"]
if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])
fail ArgumentError, "invalid value for \"region\", must be one of #{allowable_values}"
end
# resource path
local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr/{logging_scalyr_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_scalyr_name' + '}', CGI.escape(logging_scalyr_name.to_s))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# HTTP header 'Content-Type'
content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])
if !content_type.nil?
header_params['Content-Type'] = content_type
end
# form parameters
form_params = opts[:form_params] || {}
form_params['name'] = opts[:'name'] if !opts[:'name'].nil?
form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?
form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?
form_params['format'] = opts[:'format'] if !opts[:'format'].nil?
form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?
form_params['region'] = opts[:'region'] if !opts[:'region'].nil?
form_params['token'] = opts[:'token'] if !opts[:'token'].nil?
form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'
# auth_names
auth_names = opts[:debug_auth_names] || ['token']
new_options = opts.merge(
:operation => :"LoggingScalyrApi.update_log_scalyr",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: LoggingScalyrApi#update_log_scalyr\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end | [
"def update_log_scalyr(opts = {})\n data, _status_code, _headers = update_log_scalyr_with_http_info(opts)\n data\n end",
"def update_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.update_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_https_name = opts[:'logging_https_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.update_log_https\"\n end\n # verify the required parameter 'logging_https_name' is set\n if @api_client.config.client_side_validation && logging_https_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_https_name' when calling LoggingHttpsApi.update_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https/{logging_https_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_https_name' + '}', CGI.escape(logging_https_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if 
!opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.update_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#update_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_https(opts = {})\n data, _status_code, _headers = update_log_https_with_http_info(opts)\n data\n end",
"def update \n#--{{{\n init_logging\n updater = Updater::new self\n updater.update\n#--}}}\n end",
"def put_update\n response = self.class.put(\"/service/#{$service_id}/version/#{$service_version}/logging/sftp/#{$name}\", \n headers: { \"Fastly-Key\" => $key }, \n body: $put_form_data )\n end",
"def update_log_loggly_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogglyApi.update_log_loggly ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_loggly_name = opts[:'logging_loggly_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogglyApi.update_log_loggly\"\n end\n # verify the required parameter 'logging_loggly_name' is set\n if @api_client.config.client_side_validation && logging_loggly_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_loggly_name' when calling LoggingLogglyApi.update_log_loggly\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/loggly/{logging_loggly_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_loggly_name' + '}', CGI.escape(logging_loggly_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogglyResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogglyApi.update_log_loggly\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"API called: LoggingLogglyApi#update_log_loggly\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_sumologic_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSumologicApi.update_log_sumologic ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_sumologic_name = opts[:'logging_sumologic_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n # verify the required parameter 'logging_sumologic_name' is set\n if @api_client.config.client_side_validation && logging_sumologic_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_sumologic_name' when calling LoggingSumologicApi.update_log_sumologic\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/sumologic/{logging_sumologic_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_sumologic_name' + '}', CGI.escape(logging_sumologic_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSumologicResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSumologicApi.update_log_sumologic\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 
return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSumologicApi#update_log_sumologic\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_splunk_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingSplunkApi.update_log_splunk ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_splunk_name = opts[:'logging_splunk_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingSplunkApi.update_log_splunk\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingSplunkApi.update_log_splunk\"\n end\n # verify the required parameter 'logging_splunk_name' is set\n if @api_client.config.client_side_validation && logging_splunk_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_splunk_name' when calling LoggingSplunkApi.update_log_splunk\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/splunk/{logging_splunk_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_splunk_name' + '}', CGI.escape(logging_splunk_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n form_params['token'] = opts[:'token'] if 
!opts[:'token'].nil?\n form_params['use_tls'] = opts[:'use_tls'] if !opts[:'use_tls'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingSplunkResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingSplunkApi.update_log_splunk\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingSplunkApi#update_log_splunk\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_log_logshuttle_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingLogshuttleApi.update_log_logshuttle ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n logging_logshuttle_name = opts[:'logging_logshuttle_name']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n # verify the required parameter 'logging_logshuttle_name' is set\n if @api_client.config.client_side_validation && logging_logshuttle_name.nil?\n fail ArgumentError, \"Missing the required parameter 'logging_logshuttle_name' when calling LoggingLogshuttleApi.update_log_logshuttle\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/logshuttle/{logging_logshuttle_name}'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s)).sub('{' + 'logging_logshuttle_name' + '}', CGI.escape(logging_logshuttle_name.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['url'] = opts[:'url'] if !opts[:'url'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingLogshuttleResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingLogshuttleApi.update_log_logshuttle\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 
return_type\n )\n\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingLogshuttleApi#update_log_logshuttle\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_logger_config(service_setup_data)\n driver = VaultDriver.from_secrets_file service_setup_data[:environment]\n service_name = service_setup_data['deployment']['service_name']\n logger = Syslogger.new(driver)\n if logger.check_record_exists(self)\n puts 'record already exists'\n return\n end\n result, error = logger.add_record_to_rsyslog(self)\n if result\n puts 'syslog updated!'\n else\n puts \"error #{error}\" unless error.nil?\n puts 'not updated'\n end\n end",
"def update\n # PUNDIT_REVIEW_AUTHORIZE\n # PUNDIT_CHECK_AUTHORIZE (did not find instance)\n # authorize @bundle_logger\n @dataservice_bundle_logger = Dataservice::BundleLogger.find(params[:id])\n\n respond_to do |format|\n if @dataservice_bundle_logger.update_attributes(params[:dataservice_bundle_logger])\n flash[:notice] = 'Dataservice::BundleLogger was successfully updated.'\n format.html { redirect_to(@dataservice_bundle_logger) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @dataservice_bundle_logger.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n msg = 'CLC servers do not support updates'\n raise NotImplementedError.new(msg)\n end",
"def update\n get_credentials\n begin\n response = resource[\"/update/#{app}\"].post(:apikey => @credentials[1], :frequency => options[:frequency])\n rescue RestClient::InternalServerError\n display \"An error has occurred.\"\n end\n display response.to_s\n end",
"def update_log_splunk(opts = {})\n data, _status_code, _headers = update_log_splunk_with_http_info(opts)\n data\n end",
"def create_log_https_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingHttpsApi.create_log_https ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingHttpsApi.create_log_https\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingHttpsApi.create_log_https\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"POST\", \"PUT\"]\n if @api_client.config.client_side_validation && opts[:'method'] && !allowable_values.include?(opts[:'method'])\n fail ArgumentError, \"invalid value for \\\"method\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"0\", \"1\", \"2\"]\n if @api_client.config.client_side_validation && opts[:'json_format'] && !allowable_values.include?(opts[:'json_format'])\n fail ArgumentError, \"invalid value for \\\"json_format\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/https'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['tls_ca_cert'] = opts[:'tls_ca_cert'] if !opts[:'tls_ca_cert'].nil?\n form_params['tls_client_cert'] = opts[:'tls_client_cert'] if !opts[:'tls_client_cert'].nil?\n form_params['tls_client_key'] = opts[:'tls_client_key'] if !opts[:'tls_client_key'].nil?\n form_params['tls_hostname'] = opts[:'tls_hostname'] if !opts[:'tls_hostname'].nil?\n form_params['request_max_entries'] = opts[:'request_max_entries'] if !opts[:'request_max_entries'].nil?\n form_params['request_max_bytes'] = opts[:'request_max_bytes'] if !opts[:'request_max_bytes'].nil?\n form_params['url'] = 
opts[:'url'] if !opts[:'url'].nil?\n form_params['content_type'] = opts[:'content_type'] if !opts[:'content_type'].nil?\n form_params['header_name'] = opts[:'header_name'] if !opts[:'header_name'].nil?\n form_params['message_type'] = opts[:'message_type'] if !opts[:'message_type'].nil?\n form_params['header_value'] = opts[:'header_value'] if !opts[:'header_value'].nil?\n form_params['method'] = opts[:'method'] if !opts[:'method'].nil?\n form_params['json_format'] = opts[:'json_format'] if !opts[:'json_format'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingHttpsResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingHttpsApi.create_log_https\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: LoggingHttpsApi#create_log_https\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update\n cloudformation(:update)\n end",
"def update\n respond_to do |format|\n if @log_csp_datum.update(log_csp_datum_params)\n format.html { redirect_to @log_csp_datum, notice: 'Log csp datum was successfully updated.' }\n format.json { render :show, status: :ok, location: @log_csp_datum }\n else\n format.html { render :edit }\n format.json { render json: @log_csp_datum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_log_scalyr_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: LoggingScalyrApi.create_log_scalyr ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling LoggingScalyrApi.create_log_scalyr\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling LoggingScalyrApi.create_log_scalyr\"\n end\n allowable_values = [\"none\", \"waf_debug\", \"null\"]\n if @api_client.config.client_side_validation && opts[:'placement'] && !allowable_values.include?(opts[:'placement'])\n fail ArgumentError, \"invalid value for \\\"placement\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [1, 2]\n if @api_client.config.client_side_validation && opts[:'format_version'] && !allowable_values.include?(opts[:'format_version'])\n fail ArgumentError, \"invalid value for \\\"format_version\\\", must be one of #{allowable_values}\"\n end\n allowable_values = [\"US\", \"EU\"]\n if @api_client.config.client_side_validation && opts[:'region'] && !allowable_values.include?(opts[:'region'])\n fail ArgumentError, \"invalid value for \\\"region\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/logging/scalyr'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n form_params['placement'] = opts[:'placement'] if !opts[:'placement'].nil?\n form_params['response_condition'] = opts[:'response_condition'] if !opts[:'response_condition'].nil?\n form_params['format'] = opts[:'format'] if !opts[:'format'].nil?\n form_params['format_version'] = opts[:'format_version'] if !opts[:'format_version'].nil?\n form_params['region'] = opts[:'region'] if !opts[:'region'].nil?\n form_params['token'] = opts[:'token'] if !opts[:'token'].nil?\n form_params['project_id'] = opts[:'project_id'] if !opts[:'project_id'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'LoggingScalyrResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"LoggingScalyrApi.create_log_scalyr\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n 
@api_client.config.logger.debug \"API called: LoggingScalyrApi#create_log_scalyr\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def activate\n STDERR.print \"Activating version #{@version_id}...\"\n service = @app_engine.get_app_service(@app_id, @service_id)\n service.split.allocations.clear\n service.split.allocations[@version_id] = 1\n @app_engine.patch_app_service(@app_id, @service_id, service, :mask => 'split')\n STDERR.puts ' done.'\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parses a box given as a string of four numbers separated by commas. | def parse_box(key, box)
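# Accept either an already-parsed box or a "x1,y1,x2,y2" string and
# normalize it to a pair of corner points [[x1, y1], [x2, y2]].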
if box.kind_of?(String)
begin
raw_box = box.split(",").map(&:to_f)
box = [[raw_box[0], raw_box[1]], [raw_box[2], raw_box[3]]]
rescue
raise_error(type: "box.format", key: key, value: box)
end
end
return box
end | [
"def parse_mp4_box(cursor, box_names, hierarchy = [], max_cursor = nil, &proc)\n #log_debug \"=== @#{cursor} - Parsing #{@data[cursor..cursor+31].inspect} ...\"\n nbr_boxes = 0\n nbr_direct_subboxes = 0\n container_box_max_cursor = ((max_cursor == nil) ? @end_offset : max_cursor)\n # Compute the size of data before looking for sub-boxes\n data_size = 0\n data_size += box_names[:box_info][:data_size] if ((box_names[:box_info] != nil) and (box_names[:box_info][:data_size] != nil))\n nbr_expected_subboxes = nil\n if ((box_names[:box_info] != nil) and\n (box_names[:box_info][:nbr_children_range] != nil))\n str_nbr_subboxes = @data[cursor+box_names[:box_info][:nbr_children_range][0]..cursor+box_names[:box_info][:nbr_children_range][1]]\n case str_nbr_subboxes.size\n when 1\n nbr_expected_subboxes = str_nbr_subboxes.ord\n when 2\n nbr_expected_subboxes = BinData::Uint16be.read(str_nbr_subboxes)\n when 3\n nbr_expected_subboxes = BinData::Uint24be.read(str_nbr_subboxes)\n when 4\n nbr_expected_subboxes = BinData::Uint32be.read(str_nbr_subboxes)\n when 8\n nbr_expected_subboxes = BinData::Uint64be.read(str_nbr_subboxes)\n else\n # Can't read it. Will not check for them.\n end\n end\n cursor += data_size\n # Compute the map of possible box names\n complete_box_names = box_names.merge(ACCEPTABLE_BOX_TYPES_ALL)\n while (cursor < container_box_max_cursor)\n size = BinData::Uint32be.read(@data[cursor..cursor+3])\n name = @data[cursor+4..cursor+7]\n # Check the validity of the box\n if (!complete_box_names.has_key?(name))\n log_debug \"@#{cursor} - Invalid box type: #{name.inspect} within #{hierarchy.join('/')}. Known ones are: #{complete_box_names.keys.join(', ')}.\"\n if ((box_names[:box_info] == nil) or\n (box_names[:box_info][:ignore_unknown_boxes] != true))\n if (max_cursor == nil)\n # We consider the file is finished, as the box being parsed is the root one.\n return cursor, nbr_boxes\n else\n truncated_data(\"@#{cursor} - No valid box type found, but container box has not been parsed completely.\")\n end\n end\n end\n # This box is valid, or we don't care (in this case we will have to skip its contents)\n nbr_boxes += 1\n nbr_direct_subboxes += 1\n box_cursor = cursor\n box_hierarchy = hierarchy + [name]\n log_debug \"=== @#{cursor} - Found box #{box_hierarchy.join('/')} of size #{size}\"\n cursor += 8\n if (size == 1)\n size = BinData::Uint64be.read(@data[cursor..cursor+7])\n log_debug \"=== @#{cursor} - Real size is #{size}\"\n cursor += 8\n end\n if (max_cursor == nil)\n # For root elements, this error is synonym of truncated data as container_box_max_cursor is set arbitrarily to @end_offset\n truncated_data(\"@#{cursor} - Box #{box_hierarchy.join('/')} with size #{size} should finish at cursor #{box_cursor + size}, but container box set maximal cursor to #{container_box_max_cursor}.\", container_box_max_cursor) if (box_cursor + size > container_box_max_cursor)\n else\n invalid_data(\"@#{cursor} - Box #{box_hierarchy.join('/')} with size #{size} should finish at cursor #{box_cursor + size}, but container box set maximal cursor to #{container_box_max_cursor}.\") if (box_cursor + size > container_box_max_cursor)\n end\n yield(box_hierarchy, box_cursor, size)\n if (size == 0)\n # Last box, to the end.\n return nil, nbr_boxes\n else\n if (complete_box_names[name] != nil)\n # Now call sub-boxes that should start at current cursor\n new_cursor, nbr_subboxes = parse_mp4_box(cursor, complete_box_names[name], box_hierarchy, box_cursor + size, &proc)\n nbr_boxes += nbr_subboxes\n # Check cursor is at 
the correct position\n invalid_data(\"@#{new_cursor} - After parsing box #{box_hierarchy.join('/')}, cursor should have been @#{box_cursor+size}\") if ((new_cursor != nil) and (new_cursor != box_cursor + size))\n end\n cursor = box_cursor + size\n end\n # Check for an eventual padding if any\n cursor += box_names[:box_info][:nbr_bytes_possible_padding] if ((box_names[:box_info] != nil) and\n (box_names[:box_info][:nbr_bytes_possible_padding] != nil) and\n (cursor == container_box_max_cursor - box_names[:box_info][:nbr_bytes_possible_padding]) and\n (@data[cursor..container_box_max_cursor-1] == \"\\x00\" * box_names[:box_info][:nbr_bytes_possible_padding]))\n progress(cursor)\n end\n # If we were expecting a given number of direct subboxes, compare them now\n invalid_data(\"@#{cursor} - Was expecting #{nbr_expected_subboxes} sub-boxes, but read #{nbr_direct_subboxes}.\") if ((nbr_expected_subboxes != nil) and (nbr_direct_subboxes != nbr_expected_subboxes))\n\n return cursor, nbr_boxes\n end",
"def bbox_from_string(string, factory)\n return unless string\n minlon, minlat, maxlon, maxlat = string.split(',').collect { |i| i.to_f }\n bbox = RGeo::Cartesian::BoundingBox.new(factory)\n bbox.add(factory.point(minlon, minlat)).add(factory.point(maxlon, maxlat))\n end",
"def bbox_from_string(string, factory)\n return unless string\n minlon, minlat, maxlon, maxlat = string.split(',').collect(&:to_f)\n bbox = RGeo::Cartesian::BoundingBox.new(factory)\n bbox.add(factory.point(minlon, minlat)).add(factory.point(maxlon, maxlat))\n end",
"def bbox_from_string(string, factory)\n return unless string\n\n minlon, minlat, maxlon, maxlat = string.split(\",\").collect(&:to_f)\n bbox = RGeo::Cartesian::BoundingBox.new(factory)\n bbox.add(factory.point(minlon, minlat)).add(factory.point(maxlon, maxlat))\n end",
"def parse_coord(coord)\n coord = coord.gsub(/\\s/, '')\n return nil unless coord =~ /^\\d,\\d$/\n\n parts = coord.split(/,/)\n result = Coord.new(parts[0].to_i, parts[1].to_i)\n return nil unless result.valid?\n return result\nend",
"def parse(input)\t\n\t\tlines = input.each_line \n\n\t\trows = []\n\t\tboxes = []\n\t\tcols = []\n\t\t1.upto(9) {\n\t\t\trows << Block.new\n\t\t\tboxes << Block.new\n\t\t\tcols << Block.new\t\t\n\t\t}\n\n\t\tr = 0 # The current row\n\t\tcell_index = 0 # The current cell\n\n\t\tlines.each do |line|\n\t\t\tif r > 8 \n\t\t\t\tbreak\n\t\t\tend\n\t\t\tnumbers = line.split(',')\t\n\t\t\tc = 0 # The current column\n\t\t\t\n\t\t\tnumbers.each do |number|\t\t\t\t\n\t\t\t\tb = c / 3 + 3 * (r / 3) # The current box\n\t\t\t\tnumber = number.delete \",\"\n\t\t\t\tnumber = number.to_i\n\t\t\t\t\n\t\t\t\tcell= Cell.new(rows[r], cols[c], boxes[b])\n\t\t\t\t#print r.to_s + c.to_s + b.to_s + \" \"\n\t\t\t\tnumber = 0 if number == nil\n\t\t\t\tcell.set(number)\n\n\t\t\t\tif number == 0\n\t\t\t\t\t@unsolved << cell\n\t\t\t\tend\n\t\t\t\t\t\t\t\t\n\t\t\t\t@cells[cell_index] = cell;\n\t\t\t\t\n\t\t\t\tc += 1\n\t\t\t\tcell_index += 1 \n\t\t\tend\t\n\t\t\tr += 1 \n\t\t\t#puts\n\t\tend\n\t\t#puts\n\t\t#show\n\t\t#gets\n\tend",
"def separate_comma(integer)\n strinteger = integer.to_s.split('')\n figures = strinteger.size / 3\n if strinteger.size <= 3\n \tp integer.to_s\n elsif strinteger.size%3 == 0\n \tt = -4\n \t(figures.to_i - 1).times do |x|\n \t strinteger.insert(t, ',')\n \t t -= 4\n \tend\n p strinteger.join\n else\n \tt = -4\n \t(figures.to_i).times do |x|\n \t strinteger.insert(t, ',')\n \t t -= 4\n \tend\n p strinteger.join\n end\nend",
"def parse_line(match)\n raise InvalidFormat.new \"Couldn't parse box list\" if match.nil?\n Derelict::Box.new *match.captures[0..1]\n end",
"def parseLine(line)\n # Split the line by each 'x' in the input to a list\n parts = line.split(\"x\")\n # Don't assume your input is good!\n # Check that we actually got the 3 dimensions we were expecting\n if parts.size != 3\n raise Exception.new(\"expected exactly 3 dimensions, got #{parts.size}\")\n end\n # Return the length, width, and height\n # We need to convert the strings in the split string into integers using `int`\n # The map function is a useful way of running a single function on every element in a list\n # So the following is the same as doing this:\n # lst = []\n # parts.each do |part|\n # lst << part.to_i\n # end\n parts.map(&:to_i)\nend",
"def parse(input_string)\n operand_list = input_string.split(/[,|\\n|\\\\n]/)\n operand_list\n end",
"def formatted_number(str)\n str = str.first\n return false if str =~ /[^\\d\\.\\,]/\n if str.include?('.')\n return false if str.count('.') > 1 || str.start_with?('.') || str.end_with?('.')\n str = str.split('.').first\n end\n if str.include?(',')\n return false if str.start_with?(',') || str.end_with?(',')\n parts = str.split(',')\n parts.each_with_index do |part, idx|\n return false if part.empty? || part.size > 3\n next if idx == 0\n return false if part.size < 3\n end\n else\n return false if str.size > 3\n end\n true\nend",
"def parse_grid_data(input_string)\n # pull grid information in the format <integer>x<integer> from the start of the input string\n grid_segment = input_string.match(@grid_format_regex)\n\n # first check that the grid has matched something\n if grid_segment == nil\n raise ArgumentError, \"Unable to find grid settings at start of input string: #{input_string}\"\n end\n\n # assign the grid dimensions\n @grid_dimensions = [grid_segment[1].to_i, grid_segment[2].to_i]\n\n @logger.info \"Grid dimensions are: #{@grid_dimensions}\"\n\n # check here that the grid contains only positive numbers\n @grid_dimensions.each do |a|\n raise ArgumentError, \"Invalid grid input: #{a} Only positive integers allowed.\" unless a >= 0\n end\n end",
"def separate_comma(integer)\n x = integer.to_s.split(\"\")\n length = x.count\n chunks = length / 3 #how many chunks\n\ncommas = Array.new\n\nwhile chunks > 0 do\n every3 = x.pop(3)\n chunks -= 1\n every3.unshift(\",\")\n commas = every3 + commas\n puts commas.join\nend\nresult = (x + commas)\n if result[0] == \",\"\n result.delete_at(0)\n end\n return result.join\nend",
"def extract_numbers(string)\n results = []\n numbers = string.scan(/(\\d+[.?,?\\d*]*\\s?)/i)\n\n if numbers != nil\n numbers.each do |number|\n puts number[0]\n formatted_number = number[0].gsub(\",\", \"\")\n final_number = formatted_number.gsub(\" \", \"\").to_f\n results << final_number\n end\n end\n\n results\nend",
"def separate_comma(integer)\n\n string = integer.to_s\n if string.length > 3\n array = string.split(\"\")\n i = (string.length - 3)\n while i > 0\n array.insert(i, \",\")\n i -= 3\n end\n array.join\n else\n string\n end\n\nend",
"def separate_comma(integer)\n\n if(integer.is_a?(Integer))\n \n if integer < 0\n print \"Please enter a positive integer.\"\n elsif integer == 0\n print \"0\"\n else\n i_string = integer.to_s\n num_commas = (i_string.length - 1)/3.floor\n i_split = i_string.split('')\n \n i = -4\n num_commas.times do\n i_split.insert(i,',')\n i += -4\n end\n \n return i_split.join('')\n \n end\n else\n \n print \"Please enter an integer only\"\n \n end\nend",
"def parse_number_of_units\n number = @arguments.scan(/[\\d]+/).first.strip.to_i\n self.number_of_units = number\n end",
"def seperate_comma(integer)\n digits = Array.new\n string_maker = Array.new\n s_integer = integer.to_s\n digits += s_integer.each_char.to_a\n num_fix = digits.length\n repeat = num_fix / 3\n remainder = num_fix % 3\n if num_fix == 0\n puts \"There is no number!\"\n elsif num_fix <= 3\n string_maker << digits\n elsif remainder == 0\n string_maker << digits[0] + digits[1] + digits[2]\n counter = 1\n (repeat-1).times do\n string_maker << \",\"\n string_maker << digits[0 +(3 * counter)]\n string_maker << digits[1 +(3 * counter)]\n string_maker << digits[2 +(3 * counter)]\n counter += 1\n end\n elsif remainder == 1\n string_maker << digits[0]\n counter = 0\n repeat.times do\n string_maker << \",\"\n string_maker << digits[1 +(3 * counter)]\n string_maker << digits[2 +(3 * counter)]\n string_maker << digits[3 +(3 * counter)]\n counter += 1\n end\n elsif remainder == 2\n string_maker << digits[0] + digits[1]\n counter = 0\n repeat.times do\n string_maker << \",\"\n string_maker << digits[2 +(3 * counter)]\n string_maker << digits[3 +(3 * counter)]\n string_maker << digits[4 +(3 * counter)]\n counter += 1\n end\n end\n p string_maker.join\nend",
"def separate_comma(number)\n \n number = number.to_s \n total_commas_needed = number.length / 3 \n \n if number.length < 4\n return number\n end \n \n if number.length > 3 && number.length % 3 == 0 \n # 6 digits => 2 commas but need only 1\n \n total_commas_needed = total_commas_needed -1 \n p total_commas_needed\n \n counter = -4\n total_commas_needed.times do \n number.insert(counter, \",\")\n counter = -4\n return number\n end\n end \n \nif number.length > 3 \n counter = -4\n total_commas_needed.times do \n number.insert(counter, \",\")\n counter -= 4 \n end\n end\n return number\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /folha/fonte_recursos/1 GET /folha/fonte_recursos/1.xml | def show
@folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @folha_fonte_recurso }
end
end | [
"def new\n @folha_fonte_recurso = Folha::FonteRecurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @folha_fonte_recurso }\n end\n end",
"def index\n @fontes_de_recurso = FonteDeRecurso.all\n end",
"def index\n @ficha_tematicas = FichaTematica.busqueda(params[:page], params[:generico], params[:buscar])\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @ficha_tematicas }\n end\n end",
"def index\n @feria2010rechazados = Feria2010rechazado.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2010rechazados }\n end\n end",
"def index\n @feria2009rechazados = Feria2009rechazado.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2009rechazados }\n end\n end",
"def lista\n @receitas = Receita.all\n\n respond_to do |format|\n format.html # lista.html.erb\n format.xml { render :xml => @receitas }\n end\n end",
"def index\n @recettes = @paramun.recettes\n\n respond_to do |format|\n if @recettes.empty?\n format.xml { render request.format.to_sym => \"rrecErreurA\" } ## Aucune Recettes\n else\n format.xml { render xml: @recettes }\n end\n end\n end",
"def index\n @feria2010observaciones = Feria2010observacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2010observaciones }\n end\n end",
"def index\n @feria2009cruces = Feria2009cruce.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2009cruces }\n end\n end",
"def index\n @fichas = Ficha.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @fichas }\n end\n end",
"def index\n @cuentas = Cuenta.all\n\n @cadena = getcuentasxml\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @cadena }\n end\n end",
"def index\n @feria2010calificacionrecursos = Feria2010calificacionrecurso.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2010calificacionrecursos }\n end\n end",
"def index\n @texte_accueils = TexteAccueil.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @texte_accueils }\n end\n end",
"def index\n @feria2009calificaciones = Feria2009calificacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2009calificaciones }\n end\n end",
"def index\n retrieve_vtodos\n\n respond_to do |format|\n format.html # index.html.erb\n format.rdf { render :xml => ICAL::Vtodo.to_xml }\n end\n end",
"def show\n @receta = Receta.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @receta }\n end\n end",
"def index\n @foto_de_legajos = @legajo.foto_de_legajos.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @foto_de_legajos }\n end\n end",
"def index\n @glosarios = Glosario.busqueda(params[:page],params[:generico], params[:buscar], 20)\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @glosarios }\n end\n end",
"def index\n @feria2010calificaciones = Feria2010calificacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2010calificaciones }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /folha/fonte_recursos/new GET /folha/fonte_recursos/new.xml | def new
@folha_fonte_recurso = Folha::FonteRecurso.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @folha_fonte_recurso }
end
end | [
"def new\n @receta = Receta.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @receta }\n end\n end",
"def create\n @folha_fonte_recurso = Folha::FonteRecurso.new(params[:folha_fonte_recurso])\n\n respond_to do |format|\n if @folha_fonte_recurso.save\n format.html { redirect_to(@folha_fonte_recurso, :notice => 'Fonte recurso cadastrado com sucesso.') }\n format.xml { render :xml => @folha_fonte_recurso, :status => :created, :location => @folha_fonte_recurso }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @folha_fonte_recurso.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @receita = Receita.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @receita }\n end\n end",
"def new\n @regiaos = Regiao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @regiaos }\n end\n end",
"def new\n @repasse_fabrica = RepasseFabrica.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @repasse_fabrica }\n end\n end",
"def new\n @remocao = Remocao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @remocao }\n end\n end",
"def new\n @contratista = Contratista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @contratista }\n end\n end",
"def new\n @ficha_tematica = FichaTematica.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @ficha_tematica }\n end\n end",
"def new\n @servico = Servico.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @servico }\n end\n end",
"def new\n @protocolo = Protocolo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @protocolo }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nomina }\n end\n end",
"def new\n @revista = Revista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @revista }\n end\n end",
"def new\n @tiposcaracteristica = Tiposcaracteristica.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tiposcaracteristica }\n end\n end",
"def new\n @orc_ficha = OrcFicha.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @orc_ficha }\n end\n end",
"def new\n @contato_interno = ContatoInterno.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @contato_interno }\n end\n end",
"def new\n @feria2010observacion = Feria2010observacion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @feria2010observacion }\n end\n end",
"def new\n @registro = Registro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @registro }\n end\n end",
"def new\n @tipo_fuente = TipoFuente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tipo_fuente }\n end\n end",
"def new\n @comentario = Comentario.new\n respond_to do |format| \n format.xml { render xml: @comentario }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /folha/fonte_recursos POST /folha/fonte_recursos.xml | def create
@folha_fonte_recurso = Folha::FonteRecurso.new(params[:folha_fonte_recurso])
respond_to do |format|
if @folha_fonte_recurso.save
format.html { redirect_to(@folha_fonte_recurso, :notice => 'Fonte recurso cadastrado com sucesso.') }
format.xml { render :xml => @folha_fonte_recurso, :status => :created, :location => @folha_fonte_recurso }
else
format.html { render :action => "new" }
format.xml { render :xml => @folha_fonte_recurso.errors, :status => :unprocessable_entity }
end
end
end | [
"def create\n @fonte_de_recurso = FonteDeRecurso.new(fonte_de_recurso_params)\n\n respond_to do |format|\n if @fonte_de_recurso.save\n addlog(\"Fonte e recurso criada\")\n format.html { redirect_to @fonte_de_recurso, notice: 'Fonte de recurso criado com sucesso.' }\n format.json { render :show, status: :created, location: @fonte_de_recurso }\n else\n format.html { render :new }\n format.json { render json: @fonte_de_recurso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @folha_fonte_recurso = Folha::FonteRecurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @folha_fonte_recurso }\n end\n end",
"def destroy\n @folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])\n @folha_fonte_recurso.destroy\n\n respond_to do |format|\n format.html { redirect_to(folha_fonte_recursos_url) }\n format.xml { head :ok }\n end\n end",
"def index\n @fontes_de_recurso = FonteDeRecurso.all\n end",
"def create\n @fonte = Fonte.new(fonte_params)\n\n respond_to do |format|\n if @fonte.save\n format.html { redirect_to fontes_path, notice: \"Fonte #{@fonte.nome} Criado com Sucesso!\" }\n format.json { render :show, status: :created, location: @fonte }\n else\n format.html { render :new }\n format.json { render json: @fonte.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @fonte_de_recurso.destroy\n addlog(\"Fonte de recurso apagada\")\n respond_to do |format|\n format.html { redirect_to fontes_de_recurso_url, notice: 'Fonte de recurso apagado com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def show\n @folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @folha_fonte_recurso }\n end\n end",
"def create\n @receta = Receta.new(params[:receta])\n\n respond_to do |format|\n if @receta.save\n format.html { redirect_to(@receta, :notice => 'La receta se ha creado correctamente.') }\n format.xml { render :xml => @receta, :status => :created, :location => @receta }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @receta.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @regiaos = Regiao.new(params[:regiao])\n\n respond_to do |format|\n if @regiaos.save\n flash[:notice] = 'REGIÃO SALVA COM SUCESSO'\n format.html { redirect_to(new_regiao_path)}\n format.xml { render :xml => @regiaos, :status => :created, :location => @regiaos }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @regiaos.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fonte_de_recurso.update(fonte_de_recurso_params)\n addlog(\"Fonte de recurso atualizada\")\n format.html { redirect_to @fonte_de_recurso, notice: 'Fonte de recurso atualizado com sucesso.' }\n format.json { render :show, status: :ok, location: @fonte_de_recurso }\n else\n format.html { render :edit }\n format.json { render json: @fonte_de_recurso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def lista\n @receitas = Receita.all\n\n respond_to do |format|\n format.html # lista.html.erb\n format.xml { render :xml => @receitas }\n end\n end",
"def create\n @reclamacao = Reclamacao.new(params[:reclamacao])\n\n respond_to do |format|\n if @reclamacao.save\n format.html { redirect_to(@reclamacao, :notice => 'Reclamacao was successfully created.') }\n format.xml { render :xml => @reclamacao, :status => :created, :location => @reclamacao }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @reclamacao.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])\n\n respond_to do |format|\n if @folha_fonte_recurso.update_attributes(params[:folha_fonte_recurso])\n format.html { redirect_to(@folha_fonte_recurso, :notice => 'Fonte recurso atualizado com sucesso.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @folha_fonte_recurso.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @orc_ficha = OrcFicha.new(params[:orc_ficha])\n\n respond_to do |format|\n if @orc_ficha.save\n flash[:notice] = 'SALVO COM SUCESSO.'\n format.html { redirect_to(@orc_ficha) }\n format.xml { render :xml => @orc_ficha, :status => :created, :location => @orc_ficha }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @orc_ficha.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @recapito = Recapito.new(params[:recapito])\n\n respond_to do |format|\n if @recapito.save\n flash[:notice] = 'Recapito was successfully created.'\n format.html { redirect_to(@recapito) }\n format.xml { render :xml => @recapito, :status => :created, :location => @recapito }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @recapito.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def index\n @ficha_tematicas = FichaTematica.busqueda(params[:page], params[:generico], params[:buscar])\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @ficha_tematicas }\n end\n end",
"def create\n @receita = Receita.new(receita_params)\n respond_to do |format|\n if @receita.save\n flash[:notice] = 'Cabeçalho de Receita foi criado com sucesso.'\n format.html { redirect_to new_receita_receitai_url(@receita) }\n format.xml { render :xml => @receita, :status => :created, :location => @receita }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @receita.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @font = OpenTypeFont.find(params[:open_type_font_id])\n @otf_unicode = @font.unicodes.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @otf_unicode }\n end\n end",
"def create\n @font = Font.new(params[:font])\n\n respond_to do |format|\n if @font.save\n format.html { redirect_to(@font, :notice => 'Font was successfully created.') }\n format.xml { render :xml => @font, :status => :created, :location => @font }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @font.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /folha/fonte_recursos/1 PUT /folha/fonte_recursos/1.xml | def update
@folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])
respond_to do |format|
if @folha_fonte_recurso.update_attributes(params[:folha_fonte_recurso])
format.html { redirect_to(@folha_fonte_recurso, :notice => 'Fonte recurso atualizado com sucesso.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @folha_fonte_recurso.errors, :status => :unprocessable_entity }
end
end
end | [
"def update\n respond_to do |format|\n if @fonte_de_recurso.update(fonte_de_recurso_params)\n addlog(\"Fonte de recurso atualizada\")\n format.html { redirect_to @fonte_de_recurso, notice: 'Fonte de recurso atualizado com sucesso.' }\n format.json { render :show, status: :ok, location: @fonte_de_recurso }\n else\n format.html { render :edit }\n format.json { render json: @fonte_de_recurso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fonte.update(fonte_params)\n format.html { redirect_to fontes_path, notice: \"Fonte #{@fonte.nome} Atualizado com Sucesso!\" }\n format.json { render :show, status: :ok, location: @fonte }\n else\n format.html { render :edit }\n format.json { render json: @fonte.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @folha_fonte_recurso = Folha::FonteRecurso.new(params[:folha_fonte_recurso])\n\n respond_to do |format|\n if @folha_fonte_recurso.save\n format.html { redirect_to(@folha_fonte_recurso, :notice => 'Fonte recurso cadastrado com sucesso.') }\n format.xml { render :xml => @folha_fonte_recurso, :status => :created, :location => @folha_fonte_recurso }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @folha_fonte_recurso.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def destroy\n @folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])\n @folha_fonte_recurso.destroy\n\n respond_to do |format|\n format.html { redirect_to(folha_fonte_recursos_url) }\n format.xml { head :ok }\n end\n end",
"def new\n @folha_fonte_recurso = Folha::FonteRecurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @folha_fonte_recurso }\n end\n end",
"def update\n respond_to do |format|\n if @pregoestitulosgrafico.update(pregoestitulosgrafico_params)\n format.html { redirect_to @pregoestitulosgrafico, notice: 'Pregoestitulosgrafico was successfully updated.' }\n format.json { render :show, status: :ok, location: @pregoestitulosgrafico }\n else\n format.html { render :edit }\n format.json { render json: @pregoestitulosgrafico.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @orc_ficha = OrcFicha.find(params[:id])\n\n respond_to do |format|\n if @orc_ficha.update_attributes(params[:orc_ficha])\n flash[:notice] = 'SALVO COM SUCESSO.'\n format.html { redirect_to(@orc_ficha) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @orc_ficha.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @objeto.update(caracteristica_params)\n set_redireccion\n format.html { redirect_to @redireccion, notice: 'Caracteristica was successfully updated.' }\n format.json { render :show, status: :ok, location: @objeto }\n else\n format.html { render :edit }\n format.json { render json: @objeto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @objeto.update(caracterizacion_params)\n set_redireccion\n format.html { redirect_to @redireccion, notice: 'Caracterizacion was successfully updated.' }\n format.json { render :show, status: :ok, location: @objeto }\n else\n format.html { render :edit }\n format.json { render json: @objeto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @open_type_font = OpenTypeFont.find(params[:id])\n\n respond_to do |format|\n if @open_type_font.update_attributes(params[:open_type_font])\n format.html { redirect_to(@open_type_font, :notice => 'Open type font was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @open_type_font.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fichatecnica.update(fichatecnica_params)\n format.html { redirect_to reference_fichatecnicas_path(@reference), notice: 'Fichatecnica was successfully updated.' }\n format.json { render :show, status: :ok, location: @fichatecnica }\n else\n format.html { render :edit }\n format.json { render json: @fichatecnica.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @regiaos = Regiao.find(params[:id])\n\n respond_to do |format|\n if @regiaos.update_attributes(params[:regiao])\n flash[:notice] = 'REGIÃO SALVA COM SUCESSO'\n format.html { redirect_to(@regiaos) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @regiaos.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @tarefa = Tarefa.find(params[:id])\n titulo = params[:tarefa][:titulo]\n descricao = params[:tarefa][:descricao]\n prazo = params[:tarefa][:prazo]\n\n respond_to do |format|\n if @tarefa.update(:titulo => titulo, :descricao => descricao, :prazo => prazo)\n format.html { redirect_to @tarefa, notice: 'Tarefa was successfully updated.' }\n format.json { render :show, status: :ok, location: @tarefa }\n else\n format.html { render :edit }\n format.json { render json: @tarefa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @inventario_cosa_registro.update(inventario_cosa_registro_params)\n format.html { redirect_to @inventario_cosa_registro, notice: 'Inventario cosa registro was successfully updated.' }\n format.json { render :show, status: :ok, location: @inventario_cosa_registro }\n else\n format.html { render :edit }\n format.json { render json: @inventario_cosa_registro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @folha_fonte_recurso }\n end\n end",
"def update\n @arquivo_documento = Arquivo::Documento.find(params[:id])\n\n respond_to do |format|\n if @arquivo_documento.update_attributes(params[:arquivo_documento])\n format.html { redirect_to(@arquivo_documento, :notice => 'Documento atualizado com sucesso.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @arquivo_documento.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @font = Font.find(params[:id])\n\n respond_to do |format|\n if @font.update_attributes(params[:font])\n format.html { redirect_to(@font, :notice => 'Font was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @font.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @detalle_documento_de_compra.update(detalle_documento_de_compra_params)\n format.html { redirect_to @detalle_documento_de_compra, notice: 'Detalle documento de compra was successfully updated.' }\n format.json { render :show, status: :ok, location: @detalle_documento_de_compra }\n else\n format.html { render :edit }\n format.json { render json: @detalle_documento_de_compra.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fonte_de_recurso = FonteDeRecurso.new(fonte_de_recurso_params)\n\n respond_to do |format|\n if @fonte_de_recurso.save\n addlog(\"Fonte e recurso criada\")\n format.html { redirect_to @fonte_de_recurso, notice: 'Fonte de recurso criado com sucesso.' }\n format.json { render :show, status: :created, location: @fonte_de_recurso }\n else\n format.html { render :new }\n format.json { render json: @fonte_de_recurso.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /folha/fonte_recursos/1 DELETE /folha/fonte_recursos/1.xml | def destroy
@folha_fonte_recurso = Folha::FonteRecurso.find(params[:id])
@folha_fonte_recurso.destroy
respond_to do |format|
format.html { redirect_to(folha_fonte_recursos_url) }
format.xml { head :ok }
end
end | [
"def destroy\n @orc_ficha = OrcFicha.find(params[:id])\n @orc_ficha.destroy\n\n respond_to do |format|\n format.html { redirect_to(orc_fichas_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @fichaselemento = Fichaselemento.find(params[:id])\n @fichaselemento.destroy\n\n respond_to do |format|\n format.html { redirect_to(fichaselementos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @arquivo_documento = Arquivo::Documento.find(params[:id])\n @arquivo_documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(arquivo_documentos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @texte_accueil = TexteAccueil.find(params[:id])\n @texte_accueil.destroy\n\n respond_to do |format|\n format.html { redirect_to(texte_accueils_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @referencia = Referencia.find(params[:id])\n @referencia.destroy\n\n respond_to do |format|\n format.html { redirect_to(referencias_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @ficha_tematica = FichaTematica.find(params[:id])\n @ficha_tematica.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_ficha_tematicas_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @tipo_documento = TipoDocumento.find(params[:id])\n @tipo_documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(tipo_documentos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @detalle_documento_de_compra.destroy\n respond_to do |format|\n format.html { redirect_to :back, notice: 'Linea eliminada' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tipo_de_documento = TipoDeDocumento.find(params[:id])\n @tipo_de_documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(tipos_de_documento_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @feriadocenterechazado = Feriadocenterechazado.find(params[:id])\n @feriadocenterechazado.destroy\n\n respond_to do |format|\n format.html { redirect_to(feriadocenterechazados_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @correspondencia = Correspondencia.find(params[:id])\n @correspondencia.destroy\n\n respond_to do |format|\n format.html { redirect_to(correspondencias_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @faixa_de_desconto = FaixaDeDesconto.find(params[:id])\n @faixa_de_desconto.destroy\n\n respond_to do |format|\n format.html { redirect_to(faixas_de_desconto_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n arquivo = Arquivo.find(@pregoestitulosgrafico.arquivo_id)\n\n File.delete(arquivo.caminho)\n\n pregoestitulo = Pregoestitulo.find(@pregoestitulosgrafico.pregoestitulo_id)\n \n @pregoestitulosgrafico.destroy\n respond_to do |format|\n format.html { redirect_to pregoestitulo, notice: 'Arquivo excluído com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @dependencia = Dependencia.find(params[:id])\n @dependencia.destroy\n\n respond_to do |format|\n format.html { redirect_to(dependencias_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @cobros_detalhe = CobrosDetalhe.find(params[:id])\n @cobros_detalhe.destroy\n\n respond_to do |format|\n format.html { redirect_to \"/cobros/#{@cobros_detalhe.cobro_id}\" }\n format.xml { head :ok }\n end\n end",
"def destroy\n @receita = Receita.find(params[:id])\n @receita.destroy\n\n respond_to do |format|\n format.html { redirect_to(receitas_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @feriadocentecalificacion = Feriadocentecalificacion.find(params[:id])\n @feriadocentecalificacion.destroy\n\n respond_to do |format|\n format.html { redirect_to(feriadocentecalificaciones_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @econtrato = Econtrato.find(params[:id])\n @econtrato.destroy\n\n respond_to do |format|\n format.html { redirect_to(econtratos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @registro_alimento = RegistroAlimento.find(params[:id])\n @registro_alimento.destroy\n\n respond_to do |format|\n format.html { redirect_to(registro_alimentos_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /spaceships/1 GET /spaceships/1.json | def show
@spaceship = Spaceship.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @spaceship }
end
end | [
"def destroy\n @spaceship = Spaceship.find(params[:id])\n @spaceship.destroy\n\n respond_to do |format|\n format.html { redirect_to spaceships_url }\n format.json { head :no_content }\n end\n end",
"def show\n @space = Space.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @space }\n end\n end",
"def index\n @ships = Ship.all\n end",
"def index\n if params['user_id']\n @user = User.find(params['user_id'])\n @spaces = @user.spaces.visible_by(current_user)\n else\n @spaces = Space.visible_by(current_user).first(10)\n end\n #render json: @spaces.as_json(only: [:id, :name, :description, :updated_at, :user_id])\n render json: SpacesRepresenter.new(@spaces).to_json\n end",
"def show\n @clientship = current_user.clientships.find(params[:id]) \n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @clientship }\n end\n end",
"def index\n @spaces = Space.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @spaces }\n end\n end",
"def index\n @assigned_ships = AssignedShip.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @assigned_ships }\n end\n end",
"def index\n #Wenn Schiffe existieren\n @ship = Ship.find(params[:ship_id])\n @ships_stations = @ship.ships_stations\n end",
"def getShips()\n return @ships\n end",
"def index\n @clientships = current_user.clientships.all \n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @clientships }\n end\n end",
"def new\n @spaceship = Spaceship.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spaceship }\n end\n end",
"def index\n @player_ships = PlayerShip.all\n end",
"def ship(ship_id)\n @ships[ship_id]\n end",
"def show\n @hostship = Hostship.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @hostship }\n end\n end",
"def show\n @ship_class = ShipClass.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ship_class }\n end\n end",
"def spaceships\n space_flights.map{|space_flight|space_flight.spaceship}\n end",
"def starship\n fetch('hitchhikers_guide_to_the_galaxy.starships')\n end",
"def index\n @user_ships = UserShip.all\n end",
"def show\n @assigned_ship = AssignedShip.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @assigned_ship }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /spaceships/new GET /spaceships/new.json | def new
@spaceship = Spaceship.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @spaceship }
end
end | [
"def create\n @spaceship = Spaceship.new(params[:spaceship])\n\n respond_to do |format|\n if @spaceship.save\n format.html { redirect_to @spaceship, notice: 'Spaceship was successfully created.' }\n format.json { render json: @spaceship, status: :created, location: @spaceship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @spaceship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @space = Space.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @space }\n end\n end",
"def new\n \n @ship = Ship.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @ship }\n end\n end",
"def new\n @ship = Ship.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @ship }\n end\n end",
"def new\n @assigned_ship = AssignedShip.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @assigned_ship }\n end\n end",
"def new\n @ship_class = ShipClass.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @ship_class }\n end\n end",
"def create\n @space = Space.new(params[:space])\n\n respond_to do |format|\n if @space.save\n format.html { redirect_to admin_spaces_url, notice: 'Space was successfully created.' }\n format.json { render json: @space, status: :created, location: @space }\n else\n format.html { render action: \"new\" }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @hostship = Hostship.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hostship }\n end\n end",
"def create\n @space = Space.new(space_params)\n @space.user = current_user\n if @space.save\n render json: @space\n else\n render json: @space.errors, status: :unprocessable_entity\n end\n end",
"def create\n @internship = current_user.internships.new(internship_params)\n\n respond_to do |format|\n if @internship.save\n format.html { redirect_to internships_url, notice: 'Internship was successfully created.' }\n else\n format.html { render action: 'new' }\n format.json { render json: @internship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @spaceforlease = Spaceforlease.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spaceforlease }\n end\n end",
"def create\n @ship = Ship.new(params[:ship])\n respond_to do |format|\n if @ship.save\n format.html { redirect_to @ship, notice: 'Ship was successfully created.' }\n format.json { render json: @ship, status: :created, location: @ship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @internship = Internship.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @internship }\n end\n end",
"def create\n @ship = current_user.ships.new(ship_params)\n if @ship.save\n redirect_to ship_path(@ship), notice: 'ship was successfully created.'\n else\n render 'new'\n end\nend",
"def new\n @space_entry = SpaceEntry.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @space_entry }\n end\n end",
"def create\n @starship = Starship.new(starship_params)\n\n respond_to do |format|\n if @starship.save\n format.html { redirect_to @starship, notice: \"Starship was successfully created.\" }\n format.json { render :show, status: :created, location: @starship }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @starship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @space = @location.spaces.build(space_params)\n\n respond_to do |format|\n if @space.save\n format.html { redirect_to [:admin, @space.location.org, @space.location, @space], notice: 'Space was successfully created.' }\n format.json { render :show, status: :created, location: [:admin, @space.location.org, @space.location, @space] }\n else\n format.html { render :new }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @space_cat = SpaceCat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @space_cat }\n end\n end",
"def create\n @internship = Internship.new(params[:internship])\n\n respond_to do |format|\n if @internship.save\n format.html { redirect_to @internship, notice: 'Internship was successfully created.' }\n format.json { render json: @internship, status: :created, location: @internship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @internship.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /spaceships POST /spaceships.json | def create
@spaceship = Spaceship.new(params[:spaceship])
respond_to do |format|
if @spaceship.save
format.html { redirect_to @spaceship, notice: 'Spaceship was successfully created.' }
format.json { render json: @spaceship, status: :created, location: @spaceship }
else
format.html { render action: "new" }
format.json { render json: @spaceship.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @space = Space.new(space_params)\n @space.user = current_user\n if @space.save\n render json: @space\n else\n render json: @space.errors, status: :unprocessable_entity\n end\n end",
"def create\n @space = Space.new(params[:space])\n\n respond_to do |format|\n if @space.save\n format.html { redirect_to admin_spaces_url, notice: 'Space was successfully created.' }\n format.json { render json: @space, status: :created, location: @space }\n else\n format.html { render action: \"new\" }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ship = Ship.new(params[:ship])\n respond_to do |format|\n if @ship.save\n format.html { redirect_to @ship, notice: 'Ship was successfully created.' }\n format.json { render json: @ship, status: :created, location: @ship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @space = @location.spaces.build(space_params)\n\n respond_to do |format|\n if @space.save\n format.html { redirect_to [:admin, @space.location.org, @space.location, @space], notice: 'Space was successfully created.' }\n format.json { render :show, status: :created, location: [:admin, @space.location.org, @space.location, @space] }\n else\n format.html { render :new }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n @ship = current_user.create_ship(ship_params)\n \n\n respond_to do |format|\n if @ship!=nil\n current_user.activeShip = @ship.id\n if @ship.save\n format.html { redirect_to @ship, notice: 'Ship was successfully created.' }\n format.json { render :show, status: :created, location: @ship }\n else\n format.html { render :new }\n format.json { render json: @ship.errors, status: :unprocessable_entity }\n end\n else\n format.html { redirect_to ships_path, notice: 'Kauf nicht erfolgreich!' }\n \n end\n end\n end",
"def destroy\n @spaceship = Spaceship.find(params[:id])\n @spaceship.destroy\n\n respond_to do |format|\n format.html { redirect_to spaceships_url }\n format.json { head :no_content }\n end\n end",
"def create\n @starship = Starship.new(starship_params)\n\n respond_to do |format|\n if @starship.save\n format.html { redirect_to @starship, notice: \"Starship was successfully created.\" }\n format.json { render :show, status: :created, location: @starship }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @starship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @api_space = Api::Space.new(api_space_params)\n\n respond_to do |format|\n if @api_space.save\n format.json { render json: @api_space }\n #format.html { redirect_to @api_space, notice: 'Space was successfully created.' }\n #format.json { render :show, status: :created, location: @api_space }\n else\n format.html { render :new }\n format.json { render json: @api_space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ships_station = ShipsStation.new(ships_station_params)\n\n respond_to do |format|\n if @ships_station.save\n format.html { redirect_to @ships_station, notice: 'Ships station was successfully created.' }\n format.json { render :show, status: :created, location: @ships_station }\n else\n format.html { render :new }\n format.json { render json: @ships_station.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @space_station = SpaceStation.new(space_station_params)\n\n if @space_station.save\n render json: @space_station, status: :created, location: @space_station\n else\n render json: @space_station.errors, status: :unprocessable_entity\n end\n end",
"def create\n @player_ship = PlayerShip.new(player_ship_params)\n\n respond_to do |format|\n if @player_ship.save\n format.html { redirect_to @player_ship, notice: 'Player ship was successfully created.' }\n format.json { render :show, status: :created, location: @player_ship }\n else\n format.html { render :new }\n format.json { render json: @player_ship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @internship = current_user.internships.new(internship_params)\n\n respond_to do |format|\n if @internship.save\n format.html { redirect_to internships_url, notice: 'Internship was successfully created.' }\n else\n format.html { render action: 'new' }\n format.json { render json: @internship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ship = current_user.ships.new(ship_params)\n if @ship.save\n redirect_to ship_path(@ship), notice: 'ship was successfully created.'\n else\n render 'new'\n end\nend",
"def createShips\n for ship in Ship.find(:all, :order => \"id\")\n self.my_ships.build(ship_id: ship.id)\n self.enemy_ships.build(ship_id: ship.id)\n end\n self.save\n end",
"def create\n @hostship = Hostship.new(params[:hostship])\n\n respond_to do |format|\n if @hostship.save\n format.html { redirect_to @hostship, notice: 'Hostship was successfully created.' }\n format.json { render json: @hostship, status: :created, location: @hostship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @hostship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @safe_space = SafeSpace.new(safe_space_params)\n\n respond_to do |format|\n if @safe_space.save\n format.html { redirect_to @safe_space, notice: 'Safe space was successfully created.' }\n format.json { render :show, status: :created, location: @safe_space }\n else\n format.html { render :new }\n format.json { render json: @safe_space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @space = Space.new(params[:space])\n\n respond_to do |format|\n if @space.save\n format.html { redirect_to(@space, :notice => 'Space was successfully created') }\n format.xml { render :xml => @space, :status => :created, :location => @space }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @space.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @ship_request = ShipRequest.new(ship_request_params)\n\n if @ship_request.save\n render json: @ship_request, status: :created, location: @ship_request\n else\n render json: @ship_request.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user_ship = UserShip.new(user_ship_params)\n\n respond_to do |format|\n if @user_ship.save\n format.html { redirect_to @user_ship, notice: 'User ship was successfully created.' }\n format.json { render :show, status: :created, location: @user_ship }\n else\n format.html { render :new }\n format.json { render json: @user_ship.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /spaceships/1 PUT /spaceships/1.json | def update
@spaceship = Spaceship.find(params[:id])
respond_to do |format|
if @spaceship.update_attributes(params[:spaceship])
format.html { redirect_to @spaceship, notice: 'Spaceship was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @spaceship.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n if @space.update(space_params)\n render json: @space, status: :ok\n else\n render json: @space.errors, status: :unprocessable_entity\n end\n end",
"def destroy\n @spaceship = Spaceship.find(params[:id])\n @spaceship.destroy\n\n respond_to do |format|\n format.html { redirect_to spaceships_url }\n format.json { head :no_content }\n end\n end",
"def update\n @space = Space.find(params[:id])\n \n respond_to do |format|\n if @space.update_attributes(params[:space])\n format.html { redirect_to @space, notice: 'Space was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @space = Space.find(params[:id])\n\n respond_to do |format|\n if @space.update_attributes(params[:space])\n format.html { redirect_to admin_spaces_url, notice: 'Space was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @spaceship = Spaceship.new(params[:spaceship])\n\n respond_to do |format|\n if @spaceship.save\n format.html { redirect_to @spaceship, notice: 'Spaceship was successfully created.' }\n format.json { render json: @spaceship, status: :created, location: @spaceship }\n else\n format.html { render action: \"new\" }\n format.json { render json: @spaceship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n respond_to do |format|\n if @api_space.update(api_space_params)\n format.json { render json: @api_space }\n # format.html { redirect_to @api_space, notice: 'Space was successfully updated.' }\n # format.json { render :show, status: :ok, location: @api_space }\n else\n format.html { render :edit }\n format.json { render json: @api_space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(space_id_or_space_name, body = {})\n @client.space.update(space_id_or_space_name, body)\n end",
"def update\n respond_to do |format|\n old_name = @space_type.name\n if @space_type.update(space_type_params)\n @space_type.spaces.each { |s| s.touch }\n format.html { redirect_to @space_type, notice: t('.update_ok') }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @space_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ship = Ship.find(params[:id])\n\n respond_to do |format|\n if @ship.update_attributes(params[:ship])\n format.html { redirect_to @ship, notice: 'Ship was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ship = Ship.find(params[:id])\n\n respond_to do |format|\n if @ship.update_attributes(params[:ship])\n format.html { redirect_to @ship, notice: 'Ship was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @space_station = SpaceStation.find(params[:id])\n\n if @space_station.update(space_station_params)\n head :no_content\n else\n render json: @space_station.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @space_info.update(space_info_params)\n format.html { redirect_to @space_info, notice: 'Space info was successfully updated.' }\n format.json { render :show, status: :ok, location: @space_info }\n else\n format.html { render :edit }\n format.json { render json: @space_info.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @clientship = current_user.clientships.find(params[:id])\n\n respond_to do |format|\n if @clientship.update_attributes(params[:clientship])\n format.html { redirect_to @clientship, notice: 'Clientship was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @clientship.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n authorization(\"update\", @sharespace)\n\n respond_to do |format|\n if @sharespace.update(sharespace_params)\n format.html { redirect_to @sharespace, notice: 'Sharespace was successfully updated.' }\n format.json { render :show, status: :ok, location: @sharespace }\n else\n format.html { render :edit }\n format.json { render json: @sharespace.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @rack_space.update(rack_space_params)\n format.html { redirect_to @rack_space, notice: 'Rack space was successfully updated.' }\n format.json { render :show, status: :ok, location: @rack_space }\n else\n format.html { render :edit }\n format.json { render json: @rack_space.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @ship_placement.update(ship_placement_params)\n format.html { redirect_to @ship_placement, notice: 'Ship placement was successfully updated.' }\n format.json { render :show, status: :ok, location: @ship_placement }\n else\n format.html { render :edit }\n format.json { render json: @ship_placement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @ships_station.update(ships_station_params)\n format.html { redirect_to @ships_station, notice: 'Ships station was successfully updated.' }\n format.json { render :show, status: :ok, location: @ships_station }\n else\n format.html { render :edit }\n format.json { render json: @ships_station.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ship_request = ShipRequest.find(params[:id])\n\n if @ship_request.update(ship_request_params)\n head :no_content\n else\n render json: @ship_request.errors, status: :unprocessable_entity\n end\n end",
"def update\n @assigned_ship = AssignedShip.find(params[:id])\n\n respond_to do |format|\n if @assigned_ship.update_attributes(params[:assigned_ship])\n format.html { redirect_to @assigned_ship, notice: 'Assigned ship was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @assigned_ship.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /spaceships/1 DELETE /spaceships/1.json | def destroy
@spaceship = Spaceship.find(params[:id])
@spaceship.destroy
respond_to do |format|
format.html { redirect_to spaceships_url }
format.json { head :no_content }
end
end | [
"def destroy\n @clientship = current_user.clientships.find(params[:id])\n @clientship.destroy\n\n respond_to do |format|\n format.html { redirect_to clientships_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @hostship = Hostship.find(params[:id])\n @hostship.destroy\n\n respond_to do |format|\n format.html { redirect_to hostships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @discipleship.destroy\n respond_to do |format|\n format.html { redirect_to discipleships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ship = Ship.find(params[:id])\n @ship.destroy\n\n respond_to do |format|\n format.html { redirect_to(ships_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n # Destroy all associated ship items\n ShipItem.where(:ship_id => @player_ship.id).delete_all\n \n @player_ship.destroy\n respond_to do |format|\n format.html { redirect_to player_ships_path, notice: 'Ship was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @space = Space.find(params[:id])\n\n @space.destroy\n\n respond_to do |format|\n format.html { redirect_to spaces_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @space.destroy\n respond_to do |format|\n format.html { redirect_to spaces_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @internship.destroy\n respond_to do |format|\n format.html { redirect_to internships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @internship = Internship.find(params[:id])\n @internship.destroy\n\n respond_to do |format|\n format.html { redirect_to internships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @assigned_ship = AssignedShip.find(params[:id])\n @assigned_ship.destroy\n\n respond_to do |format|\n format.html { redirect_to assigned_ships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ab_reltionship.destroy\n respond_to do |format|\n format.html { redirect_to ab_reltionships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ship_request.destroy\n\n head :no_content\n end",
"def destroy\n @monitorship = Monitorship.find(params[:id])\n @monitorship.destroy\n\n respond_to do |format|\n format.html { redirect_to monitorships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @space = Space.find(params[:id])\n @space.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_spaces_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @discipleship.destroy\n respond_to do |format|\n format.html { redirect_to discipleships_url, notice: 'Discipulado eliminado correctamente' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @roleship.destroy\n respond_to do |format|\n format.html { redirect_to roleships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @starship.destroy\n respond_to do |format|\n format.html { redirect_to starships_url, notice: \"Starship was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @team_player_ship.destroy\n respond_to do |format|\n format.html { redirect_to team_player_ships_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @shipfleet.destroy\n respond_to do |format|\n format.html { redirect_to shipfleets_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Make sure the message has valid message ids for the message, and fetch them | def fetch_message_ids field
self[field] ? self[field].message_ids || [self[field].message_id] : []
end | [
"def has_message_id?; end",
"def test_get_existing_message_id\n id = Message.all.first&.id\n return if id.nil?\n\n get \"/messages/#{id}\"\n assert last_response.ok?\n assert_equal 'application/vnd.api+json', last_response.headers['Content-Type']\n\n response_body = JSON.parse last_response.body\n data = response_body['data']\n\n verify_message Message[data['id']], data\n end",
"def message_ids\n return @message_ids\n end",
"def get(message_id)\r\n messages.detect { |message| message.message_id.to_s == message_id.to_s }\r\n end",
"def has_message_id?\n !fields.select { |f| f.responsible_for?('Message-ID') }.empty?\n end",
"def fetch\n new_messages = []\n \n # Ignore any messages that don't have the hashtag.\n # Use twitter search to do this instead?\n @twitter_client.replies.reject{|m| !m.text[/#wase/]}.each do |reply|\n message = Message.new(reply.id, reply.text)\n \n # Skip if we've already processed this message.\n unless @all_messages.include?(message)\n @all_messages << message\n new_messages << message\n end\n end\n new_messages\n end",
"def get_messages\n @connection.uid_search(@filter).each do |message|\n puts \"PROCESSING MESSAGE #{message}\"\n body=@connection.uid_fetch(message,\"RFC822\")[0].attr[\"RFC822\"]\n @processor.process(body, @options)\n @connection.uid_copy(message, 'Processed')\n\n @connection.uid_store(message,\"+FLAGS\",[:Deleted])\n end\n @connection.expunge\n #@connection.delete_all\n end",
"def find_matching_recipient_by_message_id(message_id, state: :incomplete)\n restext = \"[{\\\"aws_sns_sms_message_id\\\":\\\"#{message_id}\\\"}]\"\n\n res = if state == :incomplete\n incomplete_recipients\n else\n DynamicModel::ZeusBulkMessageRecipient\n end\n res = res.where(response: restext)\n res.first\n end",
"def new_message_ids\n url = unread_messages_url\n\n Enumerator.new do |block|\n loop do\n messages, next_page_url = unread_messages(url: url)\n messages.each { |msg| block.yield msg }\n\n break unless next_page_url\n\n url = next_page_url\n end\n end\n end",
"def find_messages\n @box = @boxes.find { |box| @mailbox =~ /#{box}/ } # TODO: needs more work\n raise unless @box\n @email = @flag[@box]\n raise unless @email\n return [answered_in_curr, wrote_in_curr, responses_in_curr].flatten\n end",
"def to_gcloud_messages message_ids #:nodoc:\n msgs = @messages.zip(Array(message_ids)).map do |arr, id|\n Message.from_gapi \"data\" => arr[0],\n \"attributes\" => jsonify_hash(arr[1]),\n \"messageId\" => id\n end\n # Return just one Message if a single publish,\n # otherwise return the array of Messages.\n if @mode == :single && msgs.count <= 1\n msgs.first\n else\n msgs\n end\n end",
"def new_ids mailbox\n result = []\n imap.examine mailbox\n\n uidvalidity = imap.responses['UIDVALIDITY'].first\n uidnext = imap.responses['UIDNEXT'].first\n\n if (mbox_state = state[mailbox]) && (mbox_state[:uidvalidity] == uidvalidity)\n result = gm_ids(mbox_state[:uidnext]...uidnext)\n end\n\n state[mailbox] = { uidvalidity: uidvalidity, uidnext: uidnext }\n\n logger.info \"#{result.length} new messages in #{mailbox}\"\n logger.debug result\n\n return result\n end",
"def parse_message_id\n return `grep \"Message-Id\" #{self.full_pathname}`.split(\"\\n\")[1].to_s.gsub(\"Message-Id: \",\"\")\n end",
"def spec_message_with_id(id)\n return @messages_by_id[id]\n end",
"def check_msgs\n\t\tmessages = Message.all\t\n\t\tmessages.each do |m|\n\t\t\tstatus = get_msg_info(m.sid)\n\t\t\tm.status = status unless status==\"Error\" and m.status != \"Error\"\n\t\t\tm.save\n\t\tend\n\tend",
"def sync_messages\n Mail.connection do |imap|\n imap.select 'INBOX'\n validity_id = imap.responses[\"UIDVALIDITY\"].last if imap.responses[\"UIDVALIDITY\"]\n if Message.validity.eql? validity_id\n uids = imap.uid_search([\"NOT\", \"DELETED\"]).sort\n local_uids = Message.ids\n if uids != local_uids\n Sidekiq::Logging.logger.info \"*** Syncing Some ***\"\n new_ids = uids - local_uids\n deleted_ids = local_uids - uids\n unless new_ids.blank?\n fetchdata = imap.uid_fetch(new_ids, ['RFC822'])\n fetchdata.each do |rec|\n validity_id = imap.responses[\"UIDVALIDITY\"].last if imap.responses[\"UIDVALIDITY\"]\n msg = Message.new(uid: rec.attr['UID'], validity_id: validity_id, raw_message: rec.attr['RFC822'])\n msg.save\n end\n end\n self.sync_deleted(deleted_ids.map{|id| [validity_id,id].join ':'}) unless deleted_ids.blank?\n end\n else\n self.sync_all\n end\n Message.ids\n end\n end",
"def fetch\n capture_errors(MessageError) do\n if defined?(@message) && !@message.nil?\n @message\n else\n Mail.new(conn.fetch(@uid).ok![0])\n end\n end\n end",
"def fetch(uids, _format)\n uids.map { |uid|\n raw = File.read(File.join(fixture_path, \"#{uid}.json\"))\n hash = JSON.parse(raw)\n IMAPMessage.new(hash)\n }\n end",
"def get_safebox_messages(safebox_guid)\n handle_error { sendsecure_connection.get(\"api/v2/safeboxes/#{safebox_guid}/messages.json\") }\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
unnests all the mime stuff and returns a list of [type, filename, content] tuples. for multipart/alternative parts, will only return the subpart that matches preferred_type. if none of them, will only return the first subpart. | def decode_mime_parts part, preferred_type, level=0
if part.multipart?
if mime_type_for(part) =~ /multipart\/alternative/
target = part.body.parts.find { |p| mime_type_for(p).index(preferred_type) } || part.body.parts.first
if target # this can be nil
decode_mime_parts target, preferred_type, level + 1
else
[]
end
else # decode 'em all
part.body.parts.compact.map { |subpart| decode_mime_parts subpart, preferred_type, level + 1 }.flatten 1
end
else
type = mime_type_for part
filename = mime_filename_for part
id = mime_id_for part
content = mime_content_for part, preferred_type
[[type, filename, id, content]]
end
end | [
"def decode_mime_parts part, preferred_type, level=0\n if part.multipart?\n if mime_type_for(part) =~ /multipart\\/alternative/\n target = part.body.find { |p| mime_type_for(p).index(preferred_type) } || part.body.first\n if target # this can be nil\n decode_mime_parts target, preferred_type, level + 1\n else\n []\n end\n else # decode 'em all\n part.body.compact.map { |subpart| decode_mime_parts subpart, preferred_type, level + 1 }.flatten 1\n end\n else\n type = mime_type_for part\n filename = mime_filename_for part\n id = mime_id_for part\n content = mime_content_for part, preferred_type\n [[type, filename, id, content]]\n end\n end",
"def mime_content_for mime_part, preferred_type\n return \"\" unless mime_part.body # sometimes this happens. not sure why.\n\n mt = mime_type_for(mime_part) || \"text/plain\" # i guess\n content_type = if mt =~ /^(.+);/ then $1.downcase else mt end\n source_charset = if mt =~ /charset=\"?(.*?)\"?(;|$)/i then $1 else \"US-ASCII\" end\n\n content = mime_part.decode\n converted_content, converted_charset = if(converter = CONVERSIONS[[content_type, preferred_type]])\n send converter, content, source_charset\n else\n [content, source_charset]\n end\n\n if content_type =~ /^text\\//\n Decoder.transcode \"utf-8\", converted_charset, converted_content\n else\n converted_content\n end\n end",
"def mime_types\n [].tap do |result|\n @parts.each do |part|\n result << part.content_type\n end\n end\n end",
"def flatten_body(*types)\n types = types.flatten\n if self.multipart?\n case self.content_type.type\n when 'multipart/alternative'\n part = self.parts.reverse.find {|part| part.consists_of_mime_types?(types) }\n part ? part.flatten_body(types) : []\n when 'multipart/mixed', 'multipart/related'\n # FIXME: For multipart/related, this should look for a start parameter and try that first. \n parts = self.parts.collect {|part| part.flatten_body(types) }\n parts.flatten\n when 'multipart/signed'\n self.parts.first.flatten_body(types)\n when 'multipart/appledouble'\n self.parts[1].flatten_body(types)\n else\n # FIXME: should we also have an entry for message/rfc822 etc.\n []\n end\n else\n self.consists_of_mime_types?(types) ? [self] : []\n end\n end",
"def mime_content_for mime_part, preferred_type\n return \"\" unless mime_part.body # sometimes this happens. not sure why.\n\n content_type = mime_part.fetch_header(:content_type) || \"text/plain\"\n source_charset = mime_part.charset || \"US-ASCII\"\n\n content = mime_part.decoded\n converted_content, converted_charset = if(converter = CONVERSIONS[[content_type, preferred_type]])\n send converter, content, source_charset\n else\n [content, source_charset]\n end\n\n if content_type =~ /^text\\//\n Decoder.transcode \"utf-8\", converted_charset, converted_content\n else\n converted_content\n end\n end",
"def get_mime_part(part, type)\n return part.body if part[\"content-type\"].to_s =~ %r!#{type}!\n # Recurse the multi-parts\n part.parts.each do |sub_part|\n r = get_mime_part(sub_part, type)\n return r if r\n end\n nil\n end",
"def extract_mime(message, mime)\n message[\"_mime_parts\"] ||= {}\n\n # Loop over each MIME part.\n if(mime && mime[\"Parts\"])\n mime[\"Parts\"].each do |part|\n content_types = part[\"Headers\"][\"Content-Type\"]\n if(content_types && content_types.any?)\n # Extract the first Content-Type header (there should only be one),\n # and only pull out the primary part, ignoring extra suffix\n # information after the \";\" (like charset).\n content_type = content_types.first.split(\";\").first.downcase\n\n # Extract the body text, taking into account base64 encoded\n # content.\n part = part.dup\n if(part[\"Body\"] && part[\"Headers\"][\"Content-Transfer-Encoding\"] == [\"base64\"])\n part[\"_body\"] = Base64.decode64(part[\"Body\"])\n else\n part[\"_body\"] = part[\"Body\"]\n end\n\n # Add this information in an easier to lookup hash.\n message[\"_mime_parts\"][content_type] = part\n\n # Recursively extract MIME types, which accounts for nested\n # multipart/mixed entires.\n if(part[\"MIME\"])\n extract_mime(message, part[\"MIME\"])\n end\n end\n end\n end\n end",
"def mime_parts(uids, mime_type)\n media_type, subtype = mime_type.upcase.split('/', 2)\n\n structures = imap.fetch uids, 'BODYSTRUCTURE'\n\n structures.zip(uids).map do |body, uid|\n section = nil\n structure = body.attr['BODYSTRUCTURE']\n\n case structure\n when Net::IMAP::BodyTypeMultipart then\n parts = structure.parts\n\n section = parts.each_with_index do |part, index|\n break index if part.media_type == media_type and\n part.subtype == subtype\n end\n\n next unless Integer === section\n when Net::IMAP::BodyTypeText, Net::IMAP::BodyTypeBasic then\n section = 'TEXT' if structure.media_type == media_type and\n structure.subtype == subtype\n end\n\n [uid, section]\n end.compact\n end",
"def email_parts_of_type(email, content_type = \"text/plain\")\n email.body.parts.select {|part|\n if part.respond_to?(:content_type)\n part.content_type.downcase.include? content_type\n end\n }\n end",
"def mimeTypes(type)\n data = readJSONFile(settings.startConfig[\"dataMIME\"])\n result = \"\"\n if type == nil\n data[\"mimeTypes\"].each do |element|\n result += element[\"mimeId\"]+\"\\n\"\n end\n else\n data[\"mimeTypes\"].each do |element|\n if element[\"mimeId\"] == type\n result = element[\"desc\"]\n end\n end\n end\n return result\n end",
"def calculate_content_part\n pts = parts\n \n #Look for unnamed text/html\n for pt in pts\n return pt if pt.content_type == 'text/html' and pt.name == nil\n end\n\n #Look for unnamed text/plain\n for pt in pts\n return pt if pt.content_type == 'text/plain' and pt.name == nil\n end\n \n return pts.first\n end",
"def additional_mime_types\n @additional_mime_types ||= [\n # [ content-type , [ array, of, filename, extentions] ]\n [\"images/svg+xml\", [\"svg\"]],\n [\"video/x-flv\", [\"flv\"]],\n [\"application/x-shockwave-flash\", [\"swf\"]],\n [\"text/plain\", [\"rb\", \"rhtml\", \"md\", \"markdown\"]],\n ]\n end",
"def parse_multipart(part, index)\n multipart_selector = \"BODY[#{index + 1}]\"\n multipart = @email_connection.fetch(@current_message_id, multipart_selector)\n multipart_body = multipart.first.attr[multipart_selector]\n case part.media_type\n when 'TEXT' then @ingested_item.message = multipart_body\n when 'MULTIPART' then @ingested_item.message = multipart_body\n else\n type = \"#{part.media_type}/#{part.subtype}\"\n attachment = { type: type, encoding: part.encoding, contents: multipart_body }\n @ingested_item.attachments << attachment\n end\n end",
"def first_content_type(acceptable, available)\n return acceptable.first if available.empty?\n available.flatten!\n acceptable.each do |pattern|\n type = available.detect { |t| File.fnmatch(pattern, t) }\n return type if type\n end\n nil\n end",
"def mime_types(type)\n type = mime_type type\n type =~ /^application\\/(xml|javascript)$/ ? [type, \"text/#$1\"] : [type]\n end",
"def describe_mime_structure(depth = 0)\n result = (' '*depth) + self.content_type.type + \"\\n\"\n if self.multipart?\n self.parts.each do |part|\n result << part.describe_mime_structure(depth+1)\n end\n end\n result.chomp! if depth == 0\n result\n end",
"def mime_types(type)\n type = mime_type type\n type =~ %r{^application/(xml|javascript)$} ? [type, \"text/#{$1}\"] : [type]\n end",
"def parse_content_type(content)\n parts = content.split(';')\n mimetype = MIME::Types[parts[0]].first\n mimetype.preferred_extension.upcase()\n end",
"def get_main_body_text_part\n leaves = get_attachment_leaves\n \n # Find first part which is text/plain\n leaves.each do |p|\n if p.content_type == 'text/plain'\n return p\n end\n end\n\n # Otherwise first part which is any sort of text\n leaves.each do |p|\n if p.main_type == 'text'\n return p\n end\n end\n \n # ... or if none, consider first part \n p = leaves[0]\n # if it is a known type then don't use it, return no body (nil)\n if mimetype_to_extension(p.content_type)\n # this is guess of case where there are only attachments, no body text\n # e.g. http://www.whatdotheyknow.com/request/cost_benefit_analysis_for_real_n\n return nil\n end\n # otherwise return it assuming it is text (sometimes you get things\n # like binary/octet-stream, or the like, which are really text - XXX if\n # you find an example, put URL here - perhaps we should be always returning\n # nil in this case)\n return p\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
the content of a mime part itself. if the contenttype is text/, it will be converted to utf8. otherwise, it will be left in the original encoding | def mime_content_for mime_part, preferred_type
return "" unless mime_part.body # sometimes this happens. not sure why.
content_type = mime_part.fetch_header(:content_type) || "text/plain"
source_charset = mime_part.charset || "US-ASCII"
content = mime_part.decoded
converted_content, converted_charset = if(converter = CONVERSIONS[[content_type, preferred_type]])
send converter, content, source_charset
else
[content, source_charset]
end
if content_type =~ /^text\//
Decoder.transcode "utf-8", converted_charset, converted_content
else
converted_content
end
end | [
"def mime_content_for mime_part, preferred_type\n return \"\" unless mime_part.body # sometimes this happens. not sure why.\n\n mt = mime_type_for(mime_part) || \"text/plain\" # i guess\n content_type = if mt =~ /^(.+);/ then $1.downcase else mt end\n source_charset = if mt =~ /charset=\"?(.*?)\"?(;|$)/i then $1 else \"US-ASCII\" end\n\n content = mime_part.decode\n converted_content, converted_charset = if(converter = CONVERSIONS[[content_type, preferred_type]])\n send converter, content, source_charset\n else\n [content, source_charset]\n end\n\n if content_type =~ /^text\\//\n Decoder.transcode \"utf-8\", converted_charset, converted_content\n else\n converted_content\n end\n end",
"def utf_content\n return @blob.content if @encoding == \"utf-8\"\n Base64.decode64(@blob.content)\n end",
"def decoded_multipart_mail(mail_part)\n encoded = encode_to_unicode(mail_part&.decoded)\n\n encoded if text_mail_body? || html_mail_body?\n end",
"def contents_convert_utf8\n @publication.title = @publication.title ? @publication.title.force_encoding('UTF-8') : \"\"\n @publication.abstract = @publication.abstract ? @publication.abstract.force_encoding('UTF-8') : \"\"\n @publication.contents = @publication.contents ? @publication.contents.force_encoding('UTF-8') : \"\"\n end",
"def _convert_part_body_to_text(part)\n if part.nil?\n text = \"[ Email has no body, please see attachments ]\"\n text_charset = \"utf-8\"\n else\n text = part.body\n text_charset = part.charset\n if part.content_type == 'text/html'\n # e.g. http://www.whatdotheyknow.com/request/35/response/177\n # XXX This is a bit of a hack as it is calling a convert to text routine.\n # Could instead call a sanitize HTML one.\n text = IncomingMessage._get_attachment_text_internal_one_file(part.content_type, text)\n end\n end\n\n # Charset conversion, turn everything into UTF-8\n if not text_charset.nil?\n begin\n # XXX specially convert unicode pound signs, was needed here\n # http://www.whatdotheyknow.com/request/88/response/352\n text = text.gsub(\"£\", Iconv.conv(text_charset, 'utf-8', '£')) \n # Try proper conversion\n text = Iconv.conv('utf-8', text_charset, text)\n rescue Iconv::IllegalSequence, Iconv::InvalidEncoding\n # Clearly specified charset was nonsense\n text_charset = nil\n end\n end\n if text_charset.nil?\n # No specified charset, so guess\n \n # Could use rchardet here, but it had trouble with \n # http://www.whatdotheyknow.com/request/107/response/144\n # So I gave up - most likely in UK we'll only get windows-1252 anyway.\n\n begin\n # See if it is good UTF-8 anyway\n text = Iconv.conv('utf-8', 'utf-8', text)\n rescue Iconv::IllegalSequence\n begin\n # Or is it good windows-1252, most likely\n text = Iconv.conv('utf-8', 'windows-1252', text)\n rescue Iconv::IllegalSequence\n # Text looks like unlabelled nonsense, strip out anything that isn't UTF-8\n text = Iconv.conv('utf-8//IGNORE', 'utf-8', text) + \"\\n\\n[ WhatDoTheyKnow note: The above text was badly encoded, and has had strange characters removed. ]\"\n end\n end\n end\n \n # An assertion that we have ended up with UTF-8 XXX can remove as this should\n # always be fine if code above is\n Iconv.conv('utf-8', 'utf-8', text)\n\n # Fix DOS style linefeeds to Unix style ones (or other later regexps won't work)\n # Needed for e.g. http://www.whatdotheyknow.com/request/60/response/98\n text = text.gsub(/\\r\\n/, \"\\n\")\n\n # Compress extra spaces down to save space, and to stop regular expressions\n # breaking in strange extreme cases. e.g. for\n # http://www.whatdotheyknow.com/request/spending_on_consultants\n text = text.gsub(/ +/, \" \")\n\n return text\n end",
"def normilize_content\n\t\t\t@content = @content.unpack(FORMATS[content_transfer_encoding]) if FORMATS.has_key?(content_transfer_encoding)\n\t\t\t@content\n\t\tend",
"def mime_type_charset_detecter(mime_type); end",
"def content_type\n @content_type ||= begin\n type = Rack::Mime.mime_type(format_extension)\n type[/^text/] ? \"#{type}; charset=utf-8\" : type\n end\n end",
"def encoded_body\n return unless body\n\n set_meta_encoding\n return nil unless mime_type_allowed?\n\n content = body\n unless content.is_utf8?\n encoding_options = { invalid: :replace, undef: :replace, replace: '' }\n content.encode!('UTF-8', webpage.meta_encoding, encoding_options)\n end\n\n content\n end",
"def process_media(part)\n # Mail body auto-magically decodes quoted\n # printable for text/html type.\n file = temp_file(part)\n if part.part_type? =~ /^text\\// ||\n part.part_type? == 'application/smil'\n type, content = transform_text_part(part)\n else\n if part.part_type? == 'application/octet-stream'\n type = type_from_filename(filename?(part))\n else\n type = part.part_type?\n end\n content = part.body.decoded\n end\n return type, nil if content.nil? || content.empty?\n\n log(\"#{self.class} writing file #{file}\", :info)\n File.open(file, 'wb'){ |f| f.write(content) }\n return type, file\n end",
"def encoding_for_mime_type(type)\n encoding = \"BINARY\" if binary_mime_types.any? { |matcher| matcher === type }\n encoding ||= default_external_encoding if respond_to?(:default_external_encoding)\n encoding\n end",
"def read_body\n email.body_to_mime.parts.map { |part| \n [\n part.content_type,\n part.body.encode(\"UTF-8\", \"binary\", invalid: :replace, undef: :replace, replace: \"\")\n ].join(\": \")\n }.join(\"\\n\")\n # .gsub('\\n', \"\\n\").gsub('\\r', \"\\r\").gsub('\\t', \"\\t\")\n end",
"def encoding\n Magic.guess_file_mime_encoding(@path)\n end",
"def encoding\n content_types = self.to_hash[\"content-type\"]\n\n return \"utf-8\" if !content_types\n\n content_types.each do |c_type|\n return $2 if c_type =~ /(^|;\\s?)charset=(.*?)\\s*(;|$)/\n end\n\n \"binary\"\n end",
"def mime_part\n message.mime_part\n end",
"def detect_content_type(text)\n #; [!onjro] returns 'text/html; charset=utf-8' when text starts with '<'.\n #; [!qiugc] returns 'application/json' when text starts with '{'.\n #; [!zamnv] returns nil when text starts with neight '<' nor '{'.\n case text\n when /\\A\\s*</ ; return \"text/html; charset=utf-8\" # probably HTML\n when /\\A\\s*\\{/; return \"application/json\" # probably JSON\n else ; return nil\n end\n end",
"def charset\n type = content_type_parse\n if type && %r{\\Atext/} =~ type && @base_uri && /\\Ahttp\\z/i =~ @base_uri.scheme\n 'iso-8859-1' # RFC2616 3.7.1\n else\n nil\n end\n end",
"def retrieve_text_part(mail)\n return mail.text_part if mail.multipart?\n (mail.mime_type =~ /^text\\/plain$/i) && mail\n end",
"def normalize_mime(content_type)\n MIME::Type.simplified(content_type, remove_x_prefix: true)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the shortest path from source to destination using BFS algorithm | def shortest_path
initial_position_obj = { position: start_position, source: {} }
knights_path = [initial_position_obj]
while knights_path.present?
current_position = knights_path.shift
position = current_position[:position]
if position == end_position
return path_to_destination(current_position, initial_position_obj)
end
add_possible_destination(position, current_position, knights_path)
end
end | [
"def shortest_path\n dist, previous = Hash.new(Infinity), {}\n dist[@source] = 0.0\n queue = @graph.vertex_set.dup\n\n until queue.empty?\n u = queue.min { |a,b| dist[a.name] <=> dist[b.name] }\n break if dist[u.name].infinite?\n queue.delete(u)\n\n u.each_edge do |e, v|\n alt = dist[u.name] + e.weight\n if alt < dist[v.name]\n dist[v.name] = alt\n previous[v.name] = u.name\n end\n end\n end\n\n path = []\n u = @dest\n until previous[u].nil?\n path.unshift(u)\n u = previous[u]\n end\n\n path.unshift(@source)\n end",
"def find_shortest_path(source, dest, graph = @graph)\n queue = [[source]]\n visited = Set.new\n\n until queue.empty?\n path = queue.shift\n vertex = path.last\n return path if vertex == dest\n\n next if visited.include?(vertex)\n\n graph[vertex].each do |curr_node|\n new_path = Array.new(path)\n new_path << curr_node\n return new_path if curr_node == dest\n\n queue << new_path\n end\n visited << vertex\n end\n end",
"def bfs_shortest_path(start, goal, paths=false)\n\t\tdist = nil\n\t explored = {} # keep track of explored nodes\n\t previous = {}\n\t queue = [[start, 0]] # keep track of all the paths to be checked\n\t is_goal = false\n\t while !queue.empty? && !is_goal # keeps looping until all possible paths have been checked\n\t node, dist = queue.pop # pop the first path from the queue\n\t if !explored.include?(node) # get the last node from the path\n\t neighbours = @edges[node] \n\t explored[node] = true # mark node as explored\n\t next if neighbours.nil?\n\t dist += 1 \n\t neighbours.each do |neighbour| # go through all neighbour nodes, construct a new path\n\t \tnext if explored.include?(neighbour)\n\t queue.unshift([neighbour, dist]) # push it into the queue\n\t previous[neighbour] = node if paths\n\t if neighbour == goal # return path if neighbour is goal\n\t \tis_goal = true\n\t \tbreak\n\t end\n\t end\n\t end\n\t end\n\t\tif is_goal\n\t\t\tpath = build_path(previous, start, goal) if paths\n\t\telse\n\t\t\tdist = nil \n\t\t\tpath = []\n\t\tend\n\t return dist, path\n\tend",
"def compute_shortest_path\n update_distance_of_all_edges_to(Float::INFINITY)\n @distance_to[@source_node] = 0\n\n # The prioriy queue holds a node and its distance from the source node.\n @pq.insert(@source_node, 0)\n while @pq.any?\n node = @pq.remove_min\n node.adjacent_edges.each do |adj_edge|\n relax(adj_edge)\n end\n end\n end",
"def shortest_way(source, dest)\n\t\t@source = source\n dijkstra source\n \n if @distances[dest] != @infinity\n return @distances[dest]\n end\n\tend",
"def shortest_path_between_nodes(initial, destination)\n initial.distance = 0\n\n current = initial\n loop do\n # at the destination node, stop calculating\n break if current == destination\n\n unvisited.delete(current)\n\n calculate_neighbor_shortest_distances(current)\n\n return nil if no_reachable_nodes\n\n current = unvisited.min_by(&:distance)\n end\n\n destination.path\n end",
"def shortest_paths(src, destinations)\n return [] if destinations.empty?\n\n paths = []\n visited = Set.new([src])\n queue = Containers::MinHeap.new\n queue.push([1, [src]])\n\n until queue.empty?\n _, path = queue.pop\n\n # Not going to find shorter paths than current best, return.\n break if paths.any? && paths[0].size < path.size\n\n cur = path.last\n paths << path if destinations.include?(cur)\n\n neighbors(cur).each do |pos|\n next if visited.include?(pos) || occupied?(pos)\n\n visited.add(pos)\n new_path = Array.new(path.size) { |i| path[i].dup }\n new_path << pos\n queue.push([new_path.size, new_path])\n end\n end\n\n paths\n end",
"def breadth_first_search(graph, source, target = nil)\n # queue: first-in, first-out (FIFO). keeps track of which vertices have already\n # been visited but have not yet been visited from, so we know where to search.\n queue = Queue.new\n\n # a `Set` instance is a collection of unordered values with no duplicates\n # maintain a list of visited nodes and prevent checking a node more than once\n visited = Set.new\n\n # shortest path information, if applicable\n meta = {}\n\n # enqueue the source key (push it to the empty queue)\n queue.enq(source)\n\n until queue.empty?\n # current node, which we `shift` from the queue\n current = queue.deq\n\n # print the shortest path if it was found\n return path(source, current, meta) if target && current == target\n\n # we don't have to keep track of distance here, since we have a method\n # to access each of the node's neighbors. the neighbors are stored in a set.\n # process each neighboring vertex of the current node,\n # i.e. traverse all outgoing edges from the current node.\n current.neighbors.each do |neighbor|\n # if the neighbor node is unvisited, we ignore this edge\n unless visited.include?(neighbor)\n queue.enq(neighbor)\n visited.add(neighbor) # we just enqueued this node, so mark it as visited\n meta[neighbor] = current # record the path (only done once, b/c of `unless`)\n end\n end\n end\nend",
"def shortest_path_to target\n node = Node.new(pos, 0, nil)\n seen = Set.new\n queue = []\n\n seen << node.pos\n queue << node\n adj = [\n [0, 1],\n [1, 0],\n [0, -1],\n [-1, 0],\n ]\n\n while !queue.empty?\n node = queue.shift\n\n return node if node.pos == target\n\n dist = node.dist\n i, j = node.pos\n\n neighbors = adj.map { |x, y| [x+i, y+j] }.select { |c| is_valid? c, seen }\n neighbors.each do |pos|\n n = Node.new(pos, dist + 1, node)\n queue << n\n seen << pos\n end\n end\n\n # p \"No path found to #{target}\"\n return\n end",
"def dijkstra(graph, source, destination)\n dist = {}\n dist[source] = 0\n visited = {}\n path = {}\n q = FastContainers::PriorityQueue.new(:min)\n q.push(source, 0)\n path[source] = []\n\n q.pop_each do |point, cost|\n if point == destination\n return [cost, path[point]]\n end\n if visited.key?(point) && cost >= dist[point]\n next\n end\n visited[point] = 1\n dist[point] = cost\n\n current_path = path[point]\n\n graph.find_edges(point).map do |next_e| \n path[next_e.destination] = current_path + [next_e]\n\n cp_line = source != point ? current_path.last.line : []\n line_switched = source != point && (cp_line & next_e.line).length == 0\n line_switch = line_switched ? 15 : 0\n\n q.push(next_e.destination, next_e.cost + line_switch + cost)\n end\n end\n\n return nil\nend",
"def bfs_shortest_path(node1, node2)\n distance, route = breadth_first_search(node1)\n step = distance[node2]\n node = node2\n path = [ node2 ]\n while node != node1 and route[node]\n node = route[node]\n path.unshift(node)\n end\n return step, path\n end",
"def shortest_path(source, finish_list)\n dist = {} # The distance of all vertices from start vertex\n prev = {} # Previous node in the optimal path\n q = Depq.new\n\n @dg.each_vertex do |v|\n # dist contains the \"Depq::Locator\" for that value\n dist[v] = (v == source) ? q.insert(v, 0) : q.insert(v, INFINITY)\n end\n\n while not q.empty?\n u = q.delete_min\n if dist[u].priority == INFINITY\n @log.info(\"shortest_path(): dist[#{u}] == Infinity\")\n break\n end\n if finish_list.include?(u)\n finish_vertex = u\n break\n end\n @dg.adjacent_vertices(u).each do |v|\n alt = dist[u].priority + 1\n if alt < dist[v].priority\n dist[v].update(v, alt)\n prev[v] = u\n end\n end\n end # while\n\n s = [] # The shortest path from source to finish_vertex\n w = finish_vertex\n while not prev[w].nil?\n s.push(w)\n w = prev[w]\n end\n\n return s\n end",
"def shortest_path(start_node, end_node, graph)\n adjacent_edges = graph.select{ | edge | edge[NODES].include?(start_node) }\n remaining_edges = graph - adjacent_edges\n shortest_path = Path.new\n adjacent_edges.each do | edge |\n path = Path.new [edge]\n neighbor_node = (edge[NODES] - [start_node])[0] # ['A', 'B'] - ['A'] => ['B']\n unless neighbor_node == end_node\n path_ahead = shortest_path(neighbor_node, end_node, remaining_edges)\n (path_ahead.empty?)? path.clear : path.concat(path_ahead)\n end \n shortest_path = path if path.distance < shortest_path.distance\n end\n shortest_path\n end",
"def shortest_path\n road(@end)\n @path\n end",
"def shortest_path(from_x, from_y, to_x, to_y)\n @visited = Array.new(@matrix.size) { Array.new(@matrix.first.size) { false } }\n @farthest_node = nil\n queue = Queue.new\n queue << Node.new(from_x, from_y, 0)\n\n while !queue.empty? do\n node = queue.pop\n\n if !@farthest_node || node.dist > @farthest_node.dist\n @farthest_node =node\n end\n\n if node.x == to_x && node.y == to_y\n # We pathed to the target\n target_node = node\n break\n end\n [[-1,0],[1,0],[0,1],[0,-1]].each do |dir|\n x = node.x + dir[0]\n y = node.y + dir[1]\n if is_valid?(x, y)\n @visited[y][x] = true\n queue.push(Node.new(x, y, node.dist + 1, node))\n end\n end\n end\n\n # We didn't find a path to the target\n return nil unless target_node\n\n # Trace back the journey\n journey = []\n journey.push [node.x,node.y]\n while !node.parent.nil? do\n node = node.parent\n journey.push [node.x,node.y]\n end\n journey.reverse.drop(1)\n end",
"def shortest_path from, to, init_state\n path_vertices = []\n path_edges = []\n\n spt = shortest_path_tree( from, to, init_state, true )\n curr = spt.get_vertex( to )\n\n #if the end node wasn't found\n unless curr then raise RuntimeError end\n\n path_vertices << curr\n\n while incoming = curr.edge_in( 0 )\n path_edges << incoming\n\n curr = incoming.from\n path_vertices << curr\n end\n\n return path_vertices.reverse, path_edges.reverse\n end",
"def find_shortest_route\n node = @closed_nodes[@goal[0]][@goal[1]]\n loop do\n break if node.from == @start\n @result << node.from\n @shortest_route[node.from[0]][node.from[1]] = true\n node = @closed_nodes[node.from[0]][node.from[1]]\n end\n end",
"def shortest_path(source, target)\n init(source)\n relax_edges(target, true)\n PathBuilder.new(source, @visitor.parents_map).path(target)\n end",
"def shortest_paths s\n dijkstra s\n puts \"Source: #{s}\"\n @nodes.each do |dest|\n puts \"\\nTarget: #{dest}\"\n print_path dest\n if @d[dest] == @INFINITY\n puts \"\\nNO PATH\"\n else\n puts \"\\nDistance: #{@d[dest]}\"\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Saves the given knight's position and returns the board id | def save
if self.class.valid_position?(knight_position)
unique_board_id = board_id
redis_conn.set(unique_board_id, knight_position)
{ status: :success, board_id: unique_board_id }
else
{ status: :failed, message: "Invalid knight's position" }
end
end | [
"def save_board( last_move )\n recent = boards[boards.keys.max]\n @boards[ @boards.keys.max + 1 ] = recent.dup.play_move!( last_move )\n end",
"def store_board(board)\n game_board = board.get_board\n new_state = \"\"\n game_board.each do |cords, stack|\n if stack != nil\n stack.each do |piece|\n new_state += cords[0].to_s + \",\" + cords[1].to_s + \",\" \n \n new_state += \"1\" if piece.color == \"white\"\n new_state += \"0\" if piece.color == \"black\"\n \n new_state += \",\" + piece.type.to_s + \":\"\n end\n end\n end\n #puts \"storing board, state: #{new_state}\"\n #puts \"\"\n update_attributes(:state => new_state, :turn => board.get_turn)\n end",
"def knight_on_board(knight)\n return if knight.nil?\n\n grid[knight[1] - 1][knight[0] - 1] = 'K'\n end",
"def save_game\n filename = \"#{Time.now.strftime(\"%Y%m%d-%H%M%S-\")}#{@player.name}.save\"\n data = Marshal.dump([@player, @board])\n File.open(filename, \"w\") { |gamesave| gamesave.puts data }\n puts \"Game saved with filename #{filename}!\"\n end",
"def save_round(theirmove,mymove,winner)\n nil\n end",
"def place_knights\r\n $board[1][0] = Knight.new('white')\r\n\t\t$board[6][0] = Knight.new('white')\r\n\t\t$board[1][7] = Knight.new('black')\r\n\t\t$board[6][7] = Knight.new('black')\r\n end",
"def save_board_grid\n session[:saved_board_grid] = @board.grid\n end",
"def place_piece piece_id, r, c\r\n puts \"Placing a #{piece_id} at #{r}, #{c}\"\r\n @board[r][c]= piece_id\r\n end",
"def save_moves(moves)\n # moves is a nested hash\n self.current_pieces.each do |piece|\n if moves.has_key?(piece.name)\n piece_moves = moves[piece.name]\n piece_moves.each do |destination, flags|\n if flags.class == String\n if flags == \"\"\n flags = [] \n else \n flags = flags.to_a\n end\n end\n piece.moves.create!(from: piece.position, to: destination, flags: flags) unless destination.nil?\n end\n end\n end\n self.save!\n end",
"def save_moves\n\t\tFile.open(@filename, 'w') {|f| f.write(@past_moves.to_yaml)}\n\tend",
"def save_game\n Save.save\n end",
"def add_move_to_board(loc, game_piece)\n @board[loc] = game_piece\n end",
"def save_round(their_move,my_move,winner)\n @last_round = [their_move,my_move,winner]\n @last_round\n end",
"def apply_move!(position)\n board_will_change!\n\n x, y = position\n board[x, y] = current_player\n self.current_player = next_player\n\n save\n end",
"def save_game\n puts \"Enter the name you want to save your game under:\"\n name = gets.chomp.downcase\n\n file_name = name + \".yaml\"\n file_path = File.join(\"./saves\", file_name)\n data = { :board => @board }\n\n File.open(file_path, 'w') { |file| file.write(data.to_yaml) }\n end",
"def mark_at(column, row)\n @board[column][row]\n end",
"def knight_path(x, y)\n start_coords = get_cell(x, y)\n start_coords.color = \"brown\"\n moves = start_coords.piece.moves\n moves.each do |move|\n new_x = x + move[0]\n new_y = y + move[1]\n if new_x >= 1 && new_x <= 8 && new_y <= 7 && new_y >= 0\n if get_cell_piece(new_x, new_y) == \"\" || get_cell_piece(new_x, new_y).color != start_coords.piece.color\n set_cell_color(new_x, new_y, \"red\")\n end\n end\n end\n end",
"def save\n\t\tputs \"Type in a name to save your changes.\"\n\t\tname = interact\n\t\tsave_this = {\n\t\t\t\"@board\" => @board,\n\t\t\t\"@missed_letters\" => @missed_letters,\n\t\t\t\"@turns\" => @turns,\n\t\t\t\"@secret_word\" => @secret_word,\n\t\t\t\"@winner\" => @winner\n\t\t}\n\t\tFile.open(\"./save/#{name}.json\", \"w\") do |f|\n\t\t\tf.write(save_this.to_json)\n\t\tend\n\t\tputs \"Your game was saved under the name of '#{name}'. Bye\"\n\tend",
"def place_piece(player, piece, position)\n raise BoardLockedError, \"Board was set from FEN string\" if @fen\n rank_index = RANKS[position.downcase.split('').first]\n\n file_index = position.split('').last.to_i-1\n icon = (piece == :knight ? :night : piece).to_s.split('').first\n (player == :black ? icon.downcase! : icon.upcase!)\n @board[file_index][rank_index] = icon\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Adds all possible destinations with respect to the knight's position | def add_possible_destination(position, knights_action, knights_path)
possible_destinations = possible_destinations(position)
possible_destinations.each do |possible_destination|
add_path(possible_destination, knights_action, knights_path)
end
end | [
"def add_possible_destination_movements(position, knight_movement, knight_movements)\n possible_destinations = possible_destinations(position)\n\n possible_destinations.each do |possible_destination|\n add_movement(possible_destination, knight_movement, knight_movements)\n end\n end",
"def add_movement(destination, knight_movement, knight_movements)\n self.visited_destinations << destination\n knight_movements << { position: destination, source: knight_movement }\n end",
"def add_path(destination, knights_action, knights_path)\n visited_destinations << destination\n knights_path << { position: destination, source: knights_action }\n end",
"def destinations(board)\n #selects destinations that are on the board\n dest_array = @moves.select do |move|\n move = [move[0] + @pos[0], move[1] + @pos[1]]\n move.all? {|i| (0..7).include?(i)}\n end\n\n #selects only destinations that are empty or have the opponents piece on it\n dest_array = dest_array.select do |pos|\n piece = board[pos[0]][pos[1]]\n piece.nil? || piece.player != @player\n end \n end",
"def knight_moves(start, destination, queue = @queue)\n if (start == destination)\n p \"Input a different start and destination please.\"\n else\n @visited_spaces = []\n @visited_nodes = []\n queue << Space.new(start, nil)\n until @visited_spaces.include?(destination) do\n generate_spaces(@queue)\n end\n if @visited_spaces.include?(destination)\n @visited_nodes.each do |node|\n if (node.x == destination[0] && node.y == destination[1])\n while node.previous_space do\n @pathing.unshift([node.x, node.y])\n node = node.previous_space\n @moves += 1\n end\n @pathing.unshift([node.x, node.y])\n end\n end\n puts \"Knight reaches #{destination} in #{moves} moves:\"\n p @pathing\n end\n end\n end",
"def knight_in_all_directions\n row_index, column_index = current_square\n\n moves = [[row_index - 1, column_index + 2],\n [row_index - 1, column_index - 2],\n [row_index + 1, column_index + 2],\n [row_index + 1, column_index - 2],\n [row_index - 2, column_index + 1],\n [row_index - 2, column_index - 1],\n [row_index + 2, column_index + 1],\n [row_index + 2, column_index - 1]]\n end",
"def knight_spots_surrounding(location)\n moves = [1, -1].product([2, -2])\n moves += moves.map(&:reverse)\n moves.map { |move| location + Vector.new(move) }\n end",
"def possible_moves\n Knight.possible_moves_from(@position)\n end",
"def apply_location(moves)\r\n moves.delete([0,0])\r\n moves.map { |move| [(move[0] + @location[0]), (move[1] + @location[1])]}\r\n end",
"def knight_moves(position, final)\n position == final ? result = [final] : result = 0\n position = [position]\n queue = []\n while result == 0\n pm = possible_movements(position.last)\n if pm.include?(final)\n result = position << final\n else\n pm.each do |move|\n queue << (position + [move])\n end\n position = queue.delete_at(0)\n end\n end\n pretty_result(result)\nend",
"def knight_moves(origin, destination)\n root = Square.new(convert_to_square_num(origin), 0, nil)\n destination_square = find_destination_of_shortest_path(convert_to_square_num(destination), [root])\n # p destination_square\n shortest_path = find_shortest_path_from_origin(destination_square)\n # p shortest_path\n puts \"You made it in #{shortest_path.length} moves! Here's your path:\"\n shortest_path.each { |coordinate| p coordinate }\n p destination\n shortest_path\n end",
"def shortest_paths(dest)\n position = dest\n final = {}\n analisados = {}\n route = []\n route << dest\n @previous['a'] = -1\n\n @nodes.each do |n|\n analisados[n] = false\n end\n analisados[position] = true\n\n while analisados(analisados)\n adyacentes(position, analisados).each do |n|\n if @distance[n] == (@distance[position] - graph[n][position])\n @previous[position] = n\n position = n\n route << n\n end\n analisados[n] = true\n end\n\n end\n route << 'a'\n route\n end",
"def king_castling_moves\n moves = []\n from_to = []\n from = current_king_coordinate\n moves << [x-2, y] if left_castling?(@current_player.color)\n moves << [x+2, y] if right_castling?(@current_player.color)\n moves.each do |to|\n from_to << [from,to]\n end\n from_to\n end",
"def next_moves\n\t\tmoves = [ [ @x + 2, @y + 1 ], [ @x + 2, @y - 1 ], [ @x + 1, @y + 2 ],\n\t\t\t\t\t\t [ @x + 1, @y - 2 ], [ @x - 1, @y + 2 ], [ @x - 1, @y - 2 ],\n\t\t\t\t\t\t [ @x - 2, @y + 1 ], [ @x - 2, @y -1 ] ]\n\t\tmoves.map { |move| Knight.new(move, @history) }\n\tend",
"def knight_path(from, to)\n# i think open queue is an array made of this structure, seems like object in java\n open_queue = [PositionPath.new( from, [copy(from)] )]\n# discovered is array of from\n discovered = [from]\n\n# until the open queue is empty\n until open_queue.empty?\n# the current is the first in the queue\n# .shift takes the first & removes it also \n current = open_queue.shift\n\n# return the current path if the current position is equal to to ? \n return current.path if current.position == to\n\n# i think it is calling the valid moves method, using current position as \n# the parameter, & for each call is \n valid_moves(current.position).each do |move|\n\n# i think if it is already included in discovered, don't have to worry about it\n unless discovered.include?(move)\n\n# but if it is not discovered yet, then add the move to discovered\n discovered << move\n\n# open queue gets pushed on it the make position path with paramters of current & move\n open_queue.push(make_position_path(current, move)) \n end\n end\n end\n \nend",
"def knight_moves(start_pos, end_pos)\n queue = []\n current_pos = NodeMover.new(start_pos, nil)\n until current_pos.position == end_pos\n current_pos.next_moves.each { |move| queue.push(move) }\n current_pos = queue.shift\n end\n print_parent(current_pos)\nend",
"def possible_moves\n moves = []\n\n @directions.each do |direction|\n move = position\n loop do\n move = [direction[0] + move[0], direction[1] + move[1]]\n break if !@board.in_bounds?(move) || @board[move].color == color\n moves << move\n break unless @board.empty?(move)\n end\n end\n\n moves\n end",
"def update_visited_moves\n visited_coordinates << [x , y]\n end",
"def available_directions\n if origin_direction.nil?\n puts 'it seems that you got lost along the way, best things is to stop here'\n raise ::LunchGame::Errors::ExitCommandReceived\n end\n possibles_directions = []\n possibles_directions << :east if east_room\n possibles_directions << :north if north_room\n possibles_directions << :west if west_room\n possibles_directions << :south if south_room\n LunchGame::Helpers.relative_directions(\n origin_direction: origin_direction,\n directions: possibles_directions\n )\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add destination to visited destinations Adds destination with respect to the action taken by knight and adds the path taken to destination to source to track. | def add_path(destination, knights_action, knights_path)
visited_destinations << destination
knights_path << { position: destination, source: knights_action }
end | [
"def add_possible_destination(position, knights_action, knights_path)\n\n possible_destinations = possible_destinations(position)\n\n possible_destinations.each do |possible_destination|\n add_path(possible_destination, knights_action, knights_path)\n end\n end",
"def add_movement(destination, knight_movement, knight_movements)\n self.visited_destinations << destination\n knight_movements << { position: destination, source: knight_movement }\n end",
"def add_possible_destination_movements(position, knight_movement, knight_movements)\n possible_destinations = possible_destinations(position)\n\n possible_destinations.each do |possible_destination|\n add_movement(possible_destination, knight_movement, knight_movements)\n end\n end",
"def set_destination(destination)\n if @destination\n @destination.remove_entrance(self)\n end\n @destination = destination\n if @destination\n @destination.add_entrance(self)\n end\n return\n end",
"def add_destination(dest)\n dest = [dest].flatten.compact\n dest.each do |d|\n @destinations << MIDISyncOutput.new(d)\n end\n end",
"def knight_moves(start, destination, queue = @queue)\n if (start == destination)\n p \"Input a different start and destination please.\"\n else\n @visited_spaces = []\n @visited_nodes = []\n queue << Space.new(start, nil)\n until @visited_spaces.include?(destination) do\n generate_spaces(@queue)\n end\n if @visited_spaces.include?(destination)\n @visited_nodes.each do |node|\n if (node.x == destination[0] && node.y == destination[1])\n while node.previous_space do\n @pathing.unshift([node.x, node.y])\n node = node.previous_space\n @moves += 1\n end\n @pathing.unshift([node.x, node.y])\n end\n end\n puts \"Knight reaches #{destination} in #{moves} moves:\"\n p @pathing\n end\n end\n end",
"def shortest_paths(dest)\n position = dest\n final = {}\n analisados = {}\n route = []\n route << dest\n @previous['a'] = -1\n\n @nodes.each do |n|\n analisados[n] = false\n end\n analisados[position] = true\n\n while analisados(analisados)\n adyacentes(position, analisados).each do |n|\n if @distance[n] == (@distance[position] - graph[n][position])\n @previous[position] = n\n position = n\n route << n\n end\n analisados[n] = true\n end\n\n end\n route << 'a'\n route\n end",
"def add_path(source, target, cost)\n if !@graph.key?(source)\n @graph[source] = { target => cost }\n else\n @graph[source][target] = cost\n end\n\n @nodes << source unless @nodes.include?(source)\n @nodes << target unless @nodes.include?(target)\n end",
"def add_dest!(transition, state)\n self.add_dest(transition, state).add_src(transition, self)\n state\n end",
"def add=(location)\n @destinations << location\n end",
"def add_destination(object)\n if object.class.to_s == \"String\"\n object = (@net.get_place object or @net.get_transition object)\n end\n if validate_source_destination(object)\n @destination = object\n object.add_input(self)\n else\n raise \"Invalid arc destination object: #{object.class}\"\n end\n end",
"def add_destination(object)\n\t\t\tif validate_source_destination(object)\n\t\t\t\t@destination = object\n\t\t\t\tobject.add_input(self)\n\t\t\telse\n\t\t\t\traise \"Invalid arc destination object: #{object.class}\"\n\t\t\tend\n\t\tend",
"def destinations\n @destinations ||= []\n end",
"def all_paths_util(u, d)\n # Mark the current node as visited and store in path\n @visited[u] = true\n @path << u\n\n # If current vertex is same as destination, then store it\n # current path[]\n if u == d\n @all_paths << @path.dup\n else\n # If current vertex is not destination\n # Recur for all the vertices adjacent to this vertex\n @graph[u].each do |i|\n if @visited[i] == false\n all_paths_util(i, d)\n end\n end\n end\n # Remove current vertex from path[] and mark it as unvisited\n @path.pop()\n @visited[u]= false\n end",
"def set_destination_node(x, y)\n set_node(x, y, :destination)\n @destination_node = @nodes[y][x]\n end",
"def knight_moves(origin, destination)\n root = Square.new(convert_to_square_num(origin), 0, nil)\n destination_square = find_destination_of_shortest_path(convert_to_square_num(destination), [root])\n # p destination_square\n shortest_path = find_shortest_path_from_origin(destination_square)\n # p shortest_path\n puts \"You made it in #{shortest_path.length} moves! Here's your path:\"\n shortest_path.each { |coordinate| p coordinate }\n p destination\n shortest_path\n end",
"def destination_setup_step_add(step)\n\t\t@config[\"destination\"][\"setup\"][step] = true\n\tend",
"def add_edge(source, destination, weight)\n if @edges.key?(source)\n @edges[source][destination] = weight\n else\n @edges[source] = { destination => weight }\n end\n end",
"def addEdge(codeSet,destState)\n @edges.push([codeSet, destState])\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Instantiates a new unifiedRoleManagementPolicyExpirationRule and sets the default values. | def initialize()
super
@odata_type = "#microsoft.graph.unifiedRoleManagementPolicyExpirationRule"
end | [
"def initialize()\n super\n @odata_type = \"#microsoft.graph.unifiedRoleManagementPolicyNotificationRule\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.unifiedRoleManagementPolicyApprovalRule\"\n end",
"def add_default_expiration(vm, exps)\n return exps if vm.default_expiration\n exps << Berta::Entities::Expiration.new(vm.next_expiration_id,\n Time.now.to_i + Berta::Settings.expiration_offset,\n Berta::Settings.expiration.action)\n end",
"def default_expiration\n expirations\n .find_all { |exp| exp.default_action? && exp.in_expiration_interval? }\n .min { |exp| exp.time.to_i }\n end",
"def set_prerequisite_expiration_period(opts)\n opts = check_params(opts,[:expiration_periods])\n super(opts)\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.unifiedRoleManagementPolicyAuthenticationContextRule\"\n end",
"def set_default_password_expiration\n return if password_updated_at\n\n self.password_updated_at = (self.class.expire_password_after - self.class.remind_days_before).days.ago\n save\n end",
"def set_expiration_period(opts)\n opts = check_params(opts,[:times])\n super(opts)\n end",
"def initialize(object, ttl = nil)\n ttl = TTL_ONE_HOUR if ttl.nil?\n @object = object\n @expiry_time = Time.now + ttl\n end",
"def refresh_rates_expiration!\n @rates_expiration = Time.now + ttl_in_seconds\n end",
"def expiration_time\n @expiration_time ||= 1.day\n end",
"def expiration=(value)\n @expiration = value\n end",
"def expiration=(expiration_date)\n unless self.new_record?\n logger.warn(\"Attempted to set expiration on existing record: access_token id=#{self.id}. Update ignored\")\n return\n end\n super(expiration_date)\n\n self.expired = expiration_date.nil? || (expiration_date == '') || expiration_date.past?\n end",
"def low_card_cache_expiration_policy_object\n @_low_card_cache_expiration_policy_object || low_card_cache_expiration_policy_object_inherited\n end",
"def set_expiration_date\n self.expiration_date = DateTime.new(self.expiration_year,\n self.expiration_month,\n 28)\n end",
"def default_expires\n @default_expires ||= 3600\n end",
"def create_expirations(params = nil)\n\t\tself.class.expirations.each_with_index do |sym, index|\n\t\t\tklass = self.namespaced_class(sym)\n\t\t\texpiration = klass.new \n\t\t\texpiration.offset = params.nil? ? klass::DEFAULT_OFFSET : params[:offset]\n\t\t\texpiration.offset_units = params.nil? \\\n\t\t\t\t? klass::DEFAULT_OFFSET_UNITS : params[:offset_units]\n\t\t\texpiration.tranzaction = self\n\t\t\texpiration.save!\n\t\tend\n\tend",
"def get_expiration_defaults(opts = {})\n data, _status_code, _headers = get_expiration_defaults_with_http_info(opts)\n data\n end",
"def ensure_valid_expiration_date\n self.expiration_date = Date.current if self.expiration_date < Date.current\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the isExpirationRequired property value. Indicates whether expiration is required or if it's a permanently active assignment or eligibility. | def is_expiration_required
return @is_expiration_required
end | [
"def is_expiration_required=(value)\n @is_expiration_required = value\n end",
"def expiration_required?\n @expiration_required\n end",
"def expiration_behavior\n return @expiration_behavior\n end",
"def can_expire?\n return !!@expiry\n end",
"def require_expiration\n @expiration_required = true\n end",
"def expiration\n return @expiration\n end",
"def in_expiration_interval?\n time_interval = time.to_i - Time.now.to_i\n time_interval <= Berta::Settings.expiration_offset && time_interval >= 0\n end",
"def expired?\n self.expiration.past? || self[:expired]\n end",
"def expiration_behavior=(value)\n @expiration_behavior = value\n end",
"def has_default_expiration?\n expiredate < Time.now+60*60+60 # The last 60seconds are for safety\n end",
"def terms_expiration\n return @terms_expiration\n end",
"def expiration_must_be_future\n return if expiration.nil?\n errors.add(:expiration, 'is in the past') unless expiration.future?\n end",
"def expired?\n false\n end",
"def expiration_date_time\n return @expiration_date_time\n end",
"def expiry_on?\n :on == self.expiry_option\n end",
"def expired?\n expiry_date.expired?\n end",
"def is_approval_required\n return @is_approval_required\n end",
"def getExpiration; @expires; end",
"def expiration_time\n @expiration_time ||= 1.day\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the isExpirationRequired property value. Indicates whether expiration is required or if it's a permanently active assignment or eligibility. | def is_expiration_required=(value)
@is_expiration_required = value
end | [
"def is_expiration_required\n return @is_expiration_required\n end",
"def require_expiration\n @expiration_required = true\n end",
"def expiration_required?\n @expiration_required\n end",
"def expiration_behavior=(value)\n @expiration_behavior = value\n end",
"def expiration=(value)\n @expiration = value\n end",
"def is_required=(value)\n @is_required = value\n end",
"def is_approval_required=(value)\n @is_approval_required = value\n end",
"def set_ExpirationDate(value)\n set_input(\"ExpirationDate\", value)\n end",
"def expiration_must_be_future\n return if expiration.nil?\n errors.add(:expiration, 'is in the past') unless expiration.future?\n end",
"def ensure_valid_expiration_date\n self.expiration_date = Date.current if self.expiration_date < Date.current\n end",
"def admin_users_setExpiration(options = {})\n raise ArgumentError, 'Required arguments :expiration_ts missing' if options[:expiration_ts].nil?\n raise ArgumentError, 'Required arguments :user_id missing' if options[:user_id].nil?\n post('admin.users.setExpiration', options)\n end",
"def set_ExpirationDate(value)\n set_input(\"ExpirationDate\", value)\n end",
"def terms_expiration=(value)\n @terms_expiration = value\n end",
"def expiration=(expiration_date)\n unless self.new_record?\n logger.warn(\"Attempted to set expiration on existing record: access_token id=#{self.id}. Update ignored\")\n return\n end\n super(expiration_date)\n\n self.expired = expiration_date.nil? || (expiration_date == '') || expiration_date.past?\n end",
"def set_prerequisite_expiration_period(opts)\n opts = check_params(opts,[:expiration_periods])\n super(opts)\n end",
"def set_expiration_and_one_time_state(expire_in, is_one_time)\n self.expire_in = expire_in\n self.is_one_time = is_one_time\n end",
"def set_as_expired(expiration_datetime = DateTime.now)\n self.status = Status::EXPIRED\n self.expired_at = expiration_datetime\n self\n end",
"def is_registration_required=(value)\n @is_registration_required = value\n end",
"def set_expiration_date\n self.expiration_date = DateTime.new(self.expiration_year,\n self.expiration_month,\n 28)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the maximumDuration property value. The maximum duration allowed for eligibility or assignment which is not permanent. Required when isExpirationRequired is true. | def maximum_duration
return @maximum_duration
end | [
"def maximum_duration=(value)\n @maximum_duration = value\n end",
"def maximum_lifetime_in_minutes\n return @maximum_lifetime_in_minutes\n end",
"def max_duration=(max_duration)\n max_duration = nil unless max_duration.present?\n settings.max_duration = max_duration\n end",
"def max_record_duration_in_seconds\n return @max_record_duration_in_seconds\n end",
"def max_lifetime\n return @max_lifetime\n end",
"def max_video_duration\n # 2:20m\n 140\n end",
"def max_record_duration_in_seconds=(value)\n @max_record_duration_in_seconds = value\n end",
"def maximum_lifetime_in_minutes=(value)\n @maximum_lifetime_in_minutes = value\n end",
"def max_lifetime=(value)\n @max_lifetime = value\n end",
"def max_scaled_request_expiry_seconds\n @config_info['Maximum Scaled Request Expiry Duration in Seconds']\n end",
"def get_duration\n (DURATION_IN_MINUTES > 60) ? 60 : DURATION_IN_MINUTES\n end",
"def max_time\n @max_time ||= 0.2\n end",
"def max_time_ms\n spec[MAX_TIME_MS]\n end",
"def duration_in_minutes\n @duration_in_minutes\n end",
"def max_password_age\n AD.interval_to_secs(self['maxPwdAge'])\n end",
"def inactive_duration\n return @inactive_duration\n end",
"def lifetime_in_minutes\n return @lifetime_in_minutes\n end",
"def inactive_duration=(value)\n @inactive_duration = value\n end",
"def duration\n duration_property ? duration_property.ruby_value : nil\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the maximumDuration property value. The maximum duration allowed for eligibility or assignment which is not permanent. Required when isExpirationRequired is true. | def maximum_duration=(value)
@maximum_duration = value
end | [
"def max_duration=(max_duration)\n max_duration = nil unless max_duration.present?\n settings.max_duration = max_duration\n end",
"def max_lifetime=(value)\n @max_lifetime = value\n end",
"def maximum_lifetime_in_minutes=(value)\n @maximum_lifetime_in_minutes = value\n end",
"def max_record_duration_in_seconds=(value)\n @max_record_duration_in_seconds = value\n end",
"def maximum_duration\n return @maximum_duration\n end",
"def set_MaxAgeSeconds(value)\n set_input(\"MaxAgeSeconds\", value)\n end",
"def set_MaxTime(value)\n set_input(\"MaxTime\", value)\n end",
"def inactive_duration=(value)\n @inactive_duration = value\n end",
"def set_VideoDuration(value)\n set_input(\"VideoDuration\", value)\n end",
"def set_Duration(value)\n set_input(\"Duration\", value)\n end",
"def set_duration(duration)\n @duration = duration\n @remaining = duration\n end",
"def max_video_duration\n # 2:20m\n 140\n end",
"def default_duration=(value)\n @default_duration = value\n end",
"def duration=(value)\n @duration = value\n end",
"def duration=(value)\n @duration = value\n end",
"def duration=(int)\n @duration = int\n set_ends_at\n end",
"def set_MaximumLength(value)\n set_input(\"MaximumLength\", value)\n end",
"def duration_in_minutes=(t)\n self.duration = t\n end",
"def set_MaximumAge(value)\n set_input(\"MaximumAge\", value)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates child work with: all the appropriate metadata copied from parent; title/creator copied from file_set; file_set set as member of new child_work. DOES save the new child work. DOES NOT actually add it to parent_work yet. That's expensive and needs to be done in a lock. DOES NOT transfer collection membership, that's a whole different mess. | def create_intermediary_child_work(parent, file_set)
new_child_work = GenericWork.new(title: file_set.title, creator: [current_user.user_key])
new_child_work.apply_depositor_metadata(current_user.user_key)
# make original fileset a member of our new child work
self.class.add_to_parent(new_child_work, file_set, 0, make_thumbnail: true, make_representative: true)
# and set the child work's metadata based on the parent work.
# bibliographic
attrs_to_copy = parent.attributes.sort.map { |a| a[0] }
attrs_to_copy -= ['id', "title", 'lease_id', 'embargo_id', 'head', 'tail', 'access_control_id', 'thumbnail_id', 'representative_id' ]
attrs_to_copy.each do |a|
new_child_work[a] = parent[a]
end
# permissions-related
new_child_work.visibility = parent.visibility
new_child_work.embargo_release_date = parent.embargo_release_date
new_child_work.lease_expiration_date = parent.lease_expiration_date
parent_permissions = parent.permissions.map(&:to_hash)
    # member HAS to be saved to set its permission attributes, not sure why.
# extra saves make things extra slow. :(
new_child_work.save!
if parent_permissions.present?
new_child_work.permissions_attributes = parent_permissions
end
# But now everything seems to be properly saved without need for another save.
return new_child_work
end | [
"def spawn attributes={}\n child = self.dup\n self.work_groups.each do |wg|\n new_wg = WorkGroup.new(:institution=>wg.institution,:project=>child)\n child.work_groups << new_wg\n wg.group_memberships.each do |gm|\n new_gm = GroupMembership.new(:person=>gm.person, :work_group=>wg)\n new_wg.group_memberships << new_gm\n end\n end\n child.assign_attributes(attributes)\n child.avatar=nil\n child.lineage_ancestor=self\n child\n end",
"def spawn(attributes = {})\n child = dup\n work_groups.each do |wg|\n new_wg = WorkGroup.new(institution: wg.institution, project: child)\n child.work_groups << new_wg\n wg.group_memberships.each do |gm|\n new_gm = GroupMembership.new(person: gm.person, work_group: wg)\n new_wg.group_memberships << new_gm\n end\n end\n child.assign_attributes(attributes)\n child.avatar = nil\n child.lineage_ancestor = self\n child\n end",
"def save_parents\n if self.new_parent and !(self.parents.include?(self.new_parent))\n relationship = self.new_parent.related_works.build :work_id => self.id\n relationship.save(false)\n end\n end",
"def attach_to_work(work, file_set_params = {})\n acquire_lock_for(work.id) do\n # Ensure we have an up-to-date copy of the members association, so that we append to the end of the list.\n work.reload unless work.new_record?\n file_set.visibility = work.visibility unless assign_visibility?(file_set_params)\n work.ordered_members << file_set\n # heliotrope change: don't assign representative_id and thumbnail_id\n # work.representative = file_set if work.representative_id.blank?\n # work.thumbnail = file_set if work.thumbnail_id.blank?\n # Save the work so the association between the work and the file_set is persisted (head_id)\n # NOTE: the work may not be valid, in which case this save doesn't do anything.\n work.save\n Hyrax.config.callback.run(:after_create_fileset, file_set, user)\n end\n end",
"def add_copy_of_instance(work, instance, file_ids)\n files = extract_files(file_ids.first)\n i = create_copy_of_instance(instance, files.size)\n i.files << files\n\n work.instances << i\n i.ie = work\n work.save\n i.save\n i\n end",
"def fork_and_work\n @cpid = fork {setup_child; work}\n log(:at => :fork, :pid => @cpid)\n Process.wait(@cpid)\n end",
"def create(work)\n super\n if work.env[:work_members_attributes].blank?\n @message = NO_CHILD\n add_relationships_succeeded\n else\n call_service\n end\n rescue StandardError => e\n add_relationships_failed\n log(\"failed while adding relationships work: #{e.message}\")\n end",
"def start_collection_process\n #Since windows doesn't support fork, the main process will have to collect\n #the submissions.\n # Fork is also skipped if in testing mode\n if RUBY_PLATFORM =~ /(:?mswin|mingw)/ || Rails.env.test?\n while collect_next_submission\n end\n return\n end\n\n m_logger = MarkusLogger.instance\n\n #Check to see if there is still a process running\n m_logger.log('Checking to see if there is already a submission collection' +\n ' process running')\n begin\n unless self.child_pid.nil?\n m_logger.log(\"waitpid on '#{child_pid}'\")\n Process.waitpid(self.child_pid, Process::WNOHANG)\n #If child is still running do nothing, otherwise reset the child_pid\n if $?.nil?\n m_logger.log('Submission collection process still running, doing nothing')\n return\n else\n self.child_pid = nil\n self.save\n end\n end\n\n #If for some reason there is no process with id self.child_pid, simply\n #proceed by forking a new process as usual.\n rescue Errno::ESRCH, Errno::ECHILD\n end\n\n #We have to re-establish a separate database connection for each process\n db_connection = ActiveRecord::Base.remove_connection\n\n pid = fork do\n begin\n ActiveRecord::Base.establish_connection(db_connection)\n m_logger.log('Submission collection process established database' +\n ' connection successfully')\n #Any custom tasks to be performed by the child can be given as a block\n if block_given?\n m_logger.log('Submission collection process now evaluating provided code block')\n yield\n m_logger.log('Submission collection process done evaluating provided code block')\n end\n while collect_next_submission\n if SubmissionCollector.first.stop_child\n m_logger.log('Submission collection process now exiting because it was ' +\n 'asked to stop by its parent')\n exit!(0)\n end\n end\n m_logger.log('Submission collection process done')\n exit!(0)\n ensure\n ActiveRecord::Base.remove_connection\n end\n end\n #parent\n if pid\n ActiveRecord::Base.establish_connection(db_connection)\n self.child_pid = pid\n self.save\n end\n end",
"def fork_and_work\n cpid = fork {setup_child; work}\n log(:at => :fork, :pid => cpid)\n Process.wait(cpid)\n end",
"def assign(work)\n return unless acquire_worker\n\n begin\n w = Worker.new(work, @options)\n Chore.run_hooks_for(:before_fork,w)\n pid = nil\n Chore.run_hooks_for(:around_fork,w) do\n pid = fork do\n work.each do | item |\n Chore.run_hooks_for(:fetched_off_internal_q, item)\n end\n after_fork(w)\n Chore.run_hooks_for(:within_fork,w) do\n Chore.run_hooks_for(:after_fork,w)\n begin\n Chore.logger.info(\"Started worker:#{Time.now}\")\n w.start\n Chore.logger.info(\"Finished worker:#{Time.now}\")\n ensure\n Chore.run_hooks_for(:before_fork_shutdown)\n exit!(true)\n end\n end #within_fork\n end #around_fork\n end\n\n Chore.logger.debug { \"Forked worker #{pid}\"}\n workers[pid] = w\n rescue => ex\n Chore.logger.error { \"Failed to fork worker: #{ex.message} #{ex.backtrace * \"\\n\"}\"}\n release_worker\n end\n end",
"def create_subtask parent_phid, name, assignee, description, due_date\n unless assignee\n # get assignee from parent task\n assignee = run_conduit('maniphest.query', phids: [parent_phid])[parent_phid][\"ownerPHID\"]\n end\n\n parent_name = run_conduit('maniphest.query', phids: [parent_phid])[parent_phid][\"title\"]\n\n name_with_parent = \"#{parent_name} - #{name}\"\n\n due_date_str = due_date ? due_date.strftime('%s') : nil\n\n result = run_conduit \"maniphest.createtask\", {\n \"title\" => name_with_parent,\n \"ownerPHID\" => assignee,\n \"description\" => description,\n \"auxiliary\" => {\n \"std:maniphest:tulip:due-date\" => due_date_str\n }\n }\n\n return result[\"phid\"]\nend",
"def spawn\n\t\tchild = self.children.create!\n\t\tchild.chapter_id = self.chapter_id\n\t\tchild.save\n\t\treturn child\n\tend",
"def create_child_blip\n #TODO\n end",
"def build_work(work_files)\n work_files.each do |file|\n next if file.blank?\n if file.end_with? '-metadata.json'\n @work_metadata = JSON.parse(File.read(file))\n work_metadata[:packaged_by_package_name] = dip_id\n write_json(work_metadata)\n else\n FileUtils.cp_r(file, src)\n end\n end\n write_dc\n end",
"def create_child(name, file_contents)\n raise NotFoundError.new(self) unless exists?\n\n raise OperationNotAllowedError.new(:create_child, self)\n end",
"def create_work(xml_metadata)\n parsed_data = Ingest::Services::MetadataParser.new(xml_metadata,\n @depositor,\n @collection,\n @config).parse\n work_attributes = parsed_data[:work_attributes]\n # Create new work record and save\n new_work = work_record(work_attributes)\n new_work.save!\n\n new_work\n\n end",
"def attach_file_to_work(work, file_set, file_set_params)\n acquire_lock_for(work.id) do\n # Ensure we have an up-to-date copy of the members association, so\n # that we append to the end of the list.\n work.reload unless work.new_record?\n unless assign_visibility?(file_set_params)\n copy_visibility(work, file_set)\n end\n work.ordered_members << file_set\n set_representative(work, file_set)\n set_thumbnail(work, file_set)\n\n # Save the work so the association between the work and the file_set is persisted (head_id)\n work.save\n end\n end",
"def attach_files\n @parent = Work.find_by_friendlier_id!(params[:parent_id])\n authorize! :update, @parent\n\n current_position = @parent.members.maximum(:position) || 0\n\n files_params = (params[:cached_files] || []).\n collect { |s| JSON.parse(s) }.\n sort_by { |h| h && h.dig(\"metadata\", \"filename\")}\n\n files_params.each do |file_data|\n asset = Asset.new()\n\n if derivative_storage_type = params.dig(:storage_type_for, file_data[\"id\"])\n asset.derivative_storage_type = derivative_storage_type\n end\n\n asset.position = (current_position += 1)\n asset.parent_id = @parent.id\n asset.file = file_data\n asset.title = (asset.file&.original_filename || \"Untitled\")\n asset.published = @parent.published\n asset.save!\n end\n\n if @parent.representative_id == nil\n @parent.update(representative: @parent.members.order(:position).first)\n end\n\n redirect_to admin_work_path(@parent.friendlier_id, anchor: \"nav-members\")\n end",
"def attach_to_parent\n @change_set = change_set_class.new(find_resource(params[:id]))\n parent_resource = find_resource(parent_resource_params[:id])\n authorize! :update, parent_resource\n\n parent_change_set = DynamicChangeSet.new(parent_resource)\n if parent_change_set.validate(parent_resource_params)\n current_member_ids = parent_resource.member_ids\n attached_member_ids = parent_change_set.member_ids\n parent_change_set.member_ids = current_member_ids + attached_member_ids\n obj = nil\n change_set_persister.buffer_into_index do |persist|\n obj = persist.save(change_set: parent_change_set)\n end\n after_update_success(obj, @change_set)\n else\n after_update_failure\n end\n rescue Dry::Types::ConstraintError\n after_update_failure\n rescue Valkyrie::Persistence::ObjectNotFoundError => e\n after_update_error e\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the list of node config hashes in the given scenario. | def node_configs(scenario_id)
parse_node_config_files(parse_scenario_file(scenario_id))
end | [
"def parse_node_config_files(scenario)\n scenario[\"nodes\"].map do |node|\n config_path = File.join(configuration_root, \"nodes\", node[\"node_config\"])\n JSON.parse(File.read(config_path))\n end\nend",
"def node_configs(scenario_file)\n parse_node_config_files(parse_scenario_file(scenario_file))\nend",
"def node_subhashes(node)\n l_hash = node.left ? node.left._hash : self.class.null_hash_at(node.depth + 1)\n r_hash = node.right ? node.right._hash : self.class.null_hash_at(node.depth + 1)\n [l_hash, r_hash]\n end",
"def find_scenario_paths(scenarios)\n return [] if scenarios.nil?\n scenarios.select do |path|\n (@location + path + \"scenario.yml\").exist?\n end.uniq.compact\n end",
"def generate_node_config\n run_list = { run_list: @recipes.map{|name| \"recipe[#{name}]\"} }\n @ssh.write \"/tmp/node.json\", content: JSON.generate(run_list), sudo: true\n end",
"def generate_node_config\n run_list = { run_list: @recipes.map{|name| \"recipe[#{name}]\"} }\n ssh.write \"#{CHEF_VAR_PATH}/node.json\", content: JSON.generate(run_list), sudo: true\n end",
"def get_netconf_nodes_in_config \n get_uri = \"/restconf/config/opendaylight-inventory:nodes\"\n response = @rest_agent.get_request(get_uri)\n check_response_for_success(response) do |body|\n if body.has_key?('nodes') && body['nodes'].has_key?('node')\n devices = []\n body['nodes']['node'].each do |node|\n devices << node['id'] unless node['id'].include?('openflow')\n end\n NetconfResponse.new(NetconfResponseStatus::OK, devices)\n else\n NetconfResponse.new(NetconfResponseStatus::DATA_NOT_FOUND)\n end\n end\n end",
"def node_list\n list = {}\n search = Chef::Search::Query.new\n query = config[:query]\n\n ui.msg \"Search nodes '#{query}'\"\n search.search('node', query) do |node|\n if node['chef'] && node['chef']['client_version']\n version = node['chef']['client_version']\n\n list[version] ||= []\n list[version] << node\n end\n end\n ui.msg ''\n\n list\n end",
"def get_env_nodes(env)\n # Get all nodes in environment\n dirty_chef_nodes = @@rest.get_rest(\"environments/#{env}/nodes\")\n chef_nodes = []\n dirty_chef_nodes.each do |key, value|\n chef_nodes << key\n end\n chef_nodes.sort!\n return chef_nodes\n end",
"def get_all_nodes_in_config\n get_uri = \"/restconf/config/opendaylight-inventory:nodes\"\n response = @rest_agent.get_request(get_uri)\n check_response_for_success(response) do |body|\n if body.has_key?('nodes') && body['nodes'].has_key?('node')\n devices = []\n body['nodes']['node'].each do |node|\n devices << node['id']\n end\n NetconfResponse.new(NetconfResponseStatus::OK, devices)\n else\n NetconfResponse.new(NetconfResponseStatus::DATA_NOT_FOUND)\n end\n end\n end",
"def config_from_node(inventory_hash, node_name)\n inventory_hash['groups'].each do |group|\n group['nodes'].each do |node|\n if node['name'] == node_name\n return node['config']\n end\n end\n end\n raise \"No config was found for #{node_name}\"\n end",
"def config_from_node(inventory_hash, node_name)\n inventory_hash['groups'].each do |group|\n group['targets'].each do |node|\n if node['uri'] == node_name\n return node['config']\n end\n end\n end\n raise \"No config was found for #{node_name}\"\n end",
"def extract_hash(node); end",
"def generate_chef_json(node)\n json_info = Hash.new\n json_info[:domain] = node.try_opt(:domain) || \"souffle\"\n json_info.merge!(node.options[:attributes])\n json_info[:run_list] = node.run_list\n JSON.pretty_generate(json_info)\n end",
"def get_scenarios\n scenarios = []\n File.open(@options[:scenarios_file]) do |f|\n f.each_line do |line|\n scenarios << line.chomp\n end\n end\n return scenarios.sort\n end",
"def list\n chef_connection.node.all\n end",
"def selected_node_configurations\n @node_configurations.each do |name, configuration|\n next unless node_names.include?(name)\n\n yield name, configuration\n end\n end",
"def getScenario(scenario_name)\n configYAML = YAML.load_file(File.join(File.dirname(__FILE__),'/../config.yml'))\n [load_scenario(scenario_name, configYAML), Random.new(configYAML['seed'])]\nend",
"def hashes\n return @hashes\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the environment from the node config hash, or 'production' if it is nil or empty. | def node_environment(node_config)
env = node_config['environment']
(env.nil? || env.empty?) ? 'production' : env
end | [
"def node_environment(node_config)\n env = node_config[\"environment\"]\n env.nil? || env.empty? ? \"production\" : env\nend",
"def environment\n if exists?(:stage)\n stage\n elsif exists?(:rails_env)\n rails_env\n elsif(ENV['RAILS_ENV'])\n ENV['RAILS_ENV']\n else\n \"production\"\n end\nend",
"def environment \n if exists?(:stage)\n stage\n elsif exists?(:rails_env)\n rails_env \n elsif(ENV['RAILS_ENV'])\n ENV['RAILS_ENV']\n else\n \"production\" \n end\nend",
"def environment\n return 'Local' if Rails.env.development?\n\n hostname = `hostname -s`\n return 'Development' if hostname =~ /dev$/\n return 'Staging' if hostname =~ /stage$/\n\n # Otherwise returns nil, indicating production environment\n end",
"def environment \n #if exists?(:stage)\n # stage\n #elsif exists?(:rails_env)\n if exists?(:rails_env)\n rails_env \n elsif(ENV['RAILS_ENV'])\n ENV['RAILS_ENV']\n else\n \"production\" \n end\nend",
"def environment\n # Allow override using \"ENVIRONMENT_BANNER\" environment variable.\n env_var = ENV['ENVIRONMENT_BANNER']\n return nil if !env_var.nil? && env_var.downcase == 'production'\n return env_var if !env_var.blank?\n \n return 'Local' if Rails.env.development?\n \n hostname = `hostname -s`\n return 'Development' if hostname =~ /dev$/\n return 'Staging' if hostname =~ /stage$/\n \n # Otherwise return nil, indicating production\n end",
"def environment # rubocop:disable Metrics/CyclomaticComplexity\n # Allow override using \"ENVIRONMENT_BANNER\" environment variable.\n env_var = ENV['ENVIRONMENT_BANNER']\n return nil if !env_var.nil? && env_var.downcase == 'production' # rubocop:disable Performance/Casecmp\n return env_var if env_var.present?\n\n return 'Local' if Rails.env.development?\n\n hostname = `hostname -s`\n return 'Development' if hostname =~ /dev$/\n return 'Staging' if hostname =~ /stage$/\n\n # Otherwise return nil, indicating production\n end",
"def env_tag\n env = provision_object.get_tags[:environment]\n return env[0, 3] unless env.blank?\n end",
"def default_environment_name\n return nil unless config?\n config.default_environment\n end",
"def environment\n unless defined?(@environment)\n @environment = (node.environment and node.environment != \"\") ? node.environment : nil\n end\n Puppet::Node::Environment.current = @environment\n @environment\n end",
"def env_str\n @env_str ||= begin\n env = Rails.env\n env.include?('production') ? '' : env\n end\n end",
"def railsenv\n config['environment_variables']['RAILS_ENV'] || 'production'\nend",
"def environment\n if config_options.fetch(:environment, nil)\n config_options[:environment]\n elsif defined?(Rails.env) && !Rails.env.nil? # rubocop:disable Rails/UnknownEnv\n Rails.env.to_s\n elsif defined?(ENV['environment']) && !ENV['environment'].nil?\n ENV['environment']\n elsif defined?(ENV['RAILS_ENV']) && !ENV['RAILS_ENV'].nil?\n raise \"You're depending on RAILS_ENV for setting your environment. Please use ENV['environment'] for non-rails environment setting: 'rake foo:bar environment=test'\"\n else\n ENV['environment'] = 'development'\n end\n end",
"def environment\n if defined?(Rails) && Rails.respond_to?(:env)\n Rails.env.to_s\n else\n ENV['RAILS_ENV'] || ENV['RACK_ENV'] || ENV['ENV'] || 'development'\n end\n end",
"def env\n @env ||= ActiveSupport::StringInquirer.new(ENV['RAILS_ENV'].presence || 'development')\n end",
"def environment\n environment = 'development'\n\n if ARGV.last.match(/(development|production)/)\n environment = ARGV.last\n end\n \n return environment\nend",
"def environment\n node.environment\n end",
"def environment\n if env[\"STATSD_ENV\"]\n env[\"STATSD_ENV\"]\n elsif defined?(Rails) && Rails.respond_to?(:env)\n Rails.env.to_s\n else\n env[\"RAILS_ENV\"] || env[\"RACK_ENV\"] || env[\"ENV\"] || \"development\"\n end\n end",
"def get_rails_env(default)\n workspace_tags = get_workspace_tags()\n if workspace_tags.has_key?('RAILS_ENV')\n return workspace_tags['RAILS_ENV']\n else\n return default\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Group the list of node configs into a hash keyed by their environments. A nil or empty environment will be interpreted as 'production'. | def group_by_environment(node_configs)
node_configs.group_by do |config|
node_environment(config)
end
end | [
"def modules_per_environment(node_configs)\n node_configs = group_by_environment(node_configs)\n modules = node_configs.map do |env, configs|\n [env, configs.map { |c| c[\"modules\"] }.flatten.uniq]\n end\n Hash[modules]\nend",
"def modules_per_environment(node_configs)\n node_configs = group_by_environment(node_configs)\n modules = node_configs.map do |env, configs|\n [env, configs.map { |c| c['modules'] }.flatten.uniq]\n end\n Hash[modules]\nend",
"def environments\n config['environments'].keys\n end",
"def environments\n fetch(:environments, nil) || [fetch(:environment, 'production')]\n end",
"def environments\n @environments ||= [:production]\n end",
"def environments\n _get(\"/system/environments\") { |json| json }\n end",
"def envs_details\n current_envs = {}\n show_envs.each do |e|\n z = Pem::Env.new(e, self)\n current_envs[e] = z.mods\n end\n\n current_envs\n end",
"def canonicalize_environments(manifest)\n canonicalize_key(manifest, :environments) do |environments|\n Hash[\n environments.map do |key, environment|\n [key, canonicalize(environment)]\n end\n ]\n end\n end",
"def envs_details\n current_envs = {}\n show_envs.each do |e|\n z = PemEnv.new(e, self)\n current_envs[e] = z.mods\n end\n\n current_envs\n end",
"def build_environment_hash\n parsed_variables = {}\n @env.each do |env_row|\n # Each row can potentially contain multiple environment\n # variables\n variables = extract_variables(env_row)\n\n variables.each do |variables_with_values|\n variables_with_values.each do |key, value|\n parsed_variables[key] = value\n end\n end\n end\n\n @override_envs.each do |env|\n parsed_variables = parsed_variables.merge env\n end\n\n parsed_variables\n end",
"def environments\n environments = []\n environment_ids.each do |id|\n e = Environment.find(id)\n environments << e unless e.nil?\n end\n \n environments << self\n end",
"def node_environment(node_config)\n env = node_config[\"environment\"]\n env.nil? || env.empty? ? \"production\" : env\nend",
"def node_environment(node_config)\n env = node_config['environment']\n (env.nil? || env.empty?) ? 'production' : env\nend",
"def known_environments\n return [] unless config?\n config.environments.map { |name, _| environments.find_or_create_by!(name: name) }\n end",
"def merge_hosts\n each do |env, target, data|\n next if data[:host].nil?\n host = data[:host].to_sym\n @data[:environments][env][target] = @data[:hosts][host].deep_merge(data) unless @data[:hosts][host].nil?\n end\n end",
"def environment_names\n environments.map { |environment| environment['name'] }\n end",
"def getDeployedEnvs()\n envArray = Array.new\n stacks = @cfUtil.getKeystoneStacks('keystone-2-')\n stacks.each do |stack|\n stk = stack.downcase\n if stk.include? '-common-'\n env = stk.split('keystone-2-').last.partition('-common').first\n elsif stk.include? '-nagift-'\n env = stk.split('keystone-2-').last.partition('-nagift').first\n elsif stk.include? '-igift-'\n env = stk.split('keystone-2-').last.partition('-igift').first\n elsif stk.include? '-cloop-'\n env = stk.split('keystone-2-').last.partition('-cloop').first\n elsif stk.include? '-bes-'\n env = stk.split('keystone-2-').last.partition('-bes').first\n else\n puts \"getDeployedEnvs: Error: keystone-2- not found in stack name #{stack}\"\n end\n envArray << env.upcase unless env.nil?\n end\n envArray.uniq\n end",
"def load!\n configs = {}\n @array.each_with_index do |obj, idx|\n next unless obj.start_with?('--' + @env_prefix)\n\n value = extract_value(obj, idx + 1)\n key = obj.split('=').first\n .sub(/^--#{@env_prefix}_?/, '')\n .downcase.split('__')\n recursive_set(configs, key, split_env_string(value))\n end\n configs\n end",
"def environments\n properties[:environments].map { |environment| Link.new(environment, nil, client) }\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a hash from environments to modules; removes duplicate modules. | def modules_per_environment(node_configs)
node_configs = group_by_environment(node_configs)
modules = node_configs.map do |env, configs|
[env, configs.map { |c| c['modules'] }.flatten.uniq]
end
Hash[modules]
end | [
"def modules_per_environment(node_configs)\n node_configs = group_by_environment(node_configs)\n modules = node_configs.map do |env, configs|\n [env, configs.map { |c| c[\"modules\"] }.flatten.uniq]\n end\n Hash[modules]\nend",
"def modules_hash\n @modules\n end",
"def modules_hash\n @modules\n end",
"def unique_modules\n @unique_modules\n end",
"def environment_hash\n @environment_hash ||= build_environment_hash\n end",
"def module_names\n @cache[:modules]\n end",
"def module_types\n\t\tmodule_sets.keys.dup\n\tend",
"def rsync_modules\n rsync_resources.reduce({}) do |hash, resource|\n if resource.config_path == new_resource.config_path && (\n resource.action == :add ||\n resource.action.include?(:add)\n )\n hash[resource.name] ||= {}\n resource_attributes.each do |key|\n value = resource.send(key)\n next if value.nil?\n hash[resource.name][snake_to_space(key)] = value\n end\n end\n\n hash\n end\n end",
"def composite_keys \n Hash.new.tap do |hash|\n SPREE_MODULES.each do |mod|\n hash.merge! get_translation_keys(\"spree_#{mod}\")\n end\n end\n end",
"def modules_by_path\n modules_by_path = {}\n modulepath.each do |path|\n Dir.chdir(path) do\n module_names = Dir.glob('*').select do |d|\n FileTest.directory?(d) && (File.basename(d) =~ /^[\\w]+([-]{1}[\\w]+)*$/)\n end\n modules_by_path[path] = module_names.sort.map do |name|\n Puppet::Module.new(name, :environment => self, :path => File.join(path, name))\n end\n end\n end\n modules_by_path\n end",
"def missing_module_list\n # forming hash and then getting its vals to remove dups in same <module,version,namepsace>\n module_hash = @cmp_ref_info_list.inject({}) do |h, r|\n module_name = r[:component_type].split('__').first\n remote_namespace = r[:remote_namespace]\n ndx = \"#{module_name}---#{r[:version]}---#{remote_namespace}\"\n info = {\n name: module_name,\n version: r[:version]\n }\n info.merge!(remote_namespace: remote_namespace) if remote_namespace\n h.merge!(ndx => info)\n end\n\n module_hash.values\n end",
"def get_puppet_module_path_map\n if @puppet_module_path_to_name == UNSET_VALUE\n @puppet_module_path_to_name = {}\n end\n return @puppet_module_path_to_name\n end",
"def env_hash()\n\t\t\tDigest::MD5.hexdigest( name )\n\t\tend",
"def envs_details\n current_envs = {}\n show_envs.each do |e|\n z = PemEnv.new(e, self)\n current_envs[e] = z.mods\n end\n\n current_envs\n end",
"def to_hash\n load_list\n @tools.inject(Hash.new) { |h, kv| k, v = kv; h[k] = v; h }\n end",
"def envs_details\n current_envs = {}\n show_envs.each do |e|\n z = Pem::Env.new(e, self)\n current_envs[e] = z.mods\n end\n\n current_envs\n end",
"def env_hash()\n\t\t\tnil\n\t\tend",
"def env_hash\n read_env || reset_env unless defined?(DataCache.env_hash)\n DataCache.env_hash\n end",
"def find_module(name, version)\n e = []\n\n @envs.each do |k, v|\n next unless v.keys.include?(name) && v[name] == version\n e << k\n end\n\n e\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the path to the local hiera.yaml file for the specified hiera. | def hiera_configpath(hiera)
File.join('config', 'hieras', hiera, 'hiera.yaml')
end | [
"def hiera_config_path_on(sut)\n File.join(puppet_environment_path_on(sut), 'hiera.yaml')\n end",
"def hiera_datadir\n # This output lets us know where Hiera is configured to look on the system\n puppet_lookup_info = run_shell('puppet lookup --explain test__simp__test').stdout.strip.lines\n puppet_config_check = run_shell('puppet agent --configprint manifest').stdout\n\n if puppet_config_check.nil? || puppet_config_check.empty?\n fail(\"No output returned from `puppet config print manifest`\")\n end\n\n puppet_env_path = File.dirname(puppet_config_check)\n\n # We'll just take the first match since Hiera will find things there\n puppet_lookup_info = puppet_lookup_info.grep(/Path \"/).grep(Regexp.new(puppet_env_path))\n\n # Grep always returns an Array\n if puppet_lookup_info.empty?\n fail(\"Could not determine hiera data directory under #{puppet_env_path}\")\n end\n\n # Snag the actual path without the extra bits\n puppet_lookup_info = puppet_lookup_info.first.strip.split('\"').last\n\n # Make the parent directories exist\n run_shell(\"mkdir -p #{File.dirname(puppet_lookup_info)}\", acceptable_exit_codes: [0])\n\n # We just want the data directory name\n datadir_name = puppet_lookup_info.split(puppet_env_path).last\n\n # Grab the file separator to add back later\n file_sep = datadir_name[0]\n\n # Snag the first entry (this is the data directory)\n datadir_name = datadir_name.split(file_sep)[1]\n\n # Constitute the full path to the data directory\n datadir_path = puppet_env_path + file_sep + datadir_name\n\n # Return the path to the data directory\n return datadir_path\nend",
"def yaml_path\n path + \".piston.yml\"\n end",
"def hiera_datadir(sut)\n # This output lets us know where Hiera is configured to look on the system\n puppet_lookup_info = on(sut, 'puppet lookup --explain test__simp__test').output.strip.lines\n\n if sut.puppet['manifest'].nil? || sut.puppet['manifest'].empty?\n fail(\"No output returned from `puppet config print manifest` on #{sut}\")\n end\n\n puppet_env_path = File.dirname(sut.puppet['manifest'])\n\n # We'll just take the first match since Hiera will find things there\n puppet_lookup_info = puppet_lookup_info.grep(/Path \"/).grep(Regexp.new(puppet_env_path))\n\n # Grep always returns an Array\n if puppet_lookup_info.empty?\n fail(\"Could not determine hiera data directory under #{puppet_env_path} on #{sut}\")\n end\n\n # Snag the actual path without the extra bits\n puppet_lookup_info = puppet_lookup_info.first.strip.split('\"').last\n\n # Make the parent directories exist\n sut.mkdir_p(File.dirname(puppet_lookup_info))\n\n # We just want the data directory name\n datadir_name = puppet_lookup_info.split(puppet_env_path).last\n\n # Grab the file separator to add back later\n file_sep = datadir_name[0]\n\n # Snag the first entry (this is the data directory)\n datadir_name = datadir_name.split(file_sep)[1]\n\n # Constitute the full path to the data directory\n datadir_path = puppet_env_path + file_sep + datadir_name\n\n # Return the path to the data directory\n return datadir_path\nend",
"def hiera_datadirs(hiera)\n configpath = hiera_configpath(hiera)\n config = YAML.load_file(configpath)\n backends = [config[:backends]].flatten\n datadirs = backends.map { |be| config[be.to_sym][:datadir] }.uniq\n datadirs.map do |datadir|\n localpath = File.join('config', 'hieras', hiera, File.basename(datadir))\n [localpath, datadir]\n end\nend",
"def load_hiera\n @hiera = YAML.load_file(HIERA_CONFIG)\n end",
"def config_path(file = \"config\")\n Halcyon.root/'config'/\"#{file}.yml\"\n end",
"def local_database_config_path\n File.join 'temp', \"db_settings.#{fetch(:stage)}.yaml\"\n end",
"def yaml_path\n \"#{user_directory}/.yolo/config.yml\"\n end",
"def custom_prefabs_path\n Pathname.new(File.expand_path(File.join(File.basename(__FILE__), '..', '.prefabs.yml')))\n end",
"def full_config_path(path)\n File.expand_path('config/fozzie.yml', path)\n end",
"def hiera_datadir(host)\n host[:type] =~ /aio/ ? File.join(host.puppet['codedir'], 'hieradata') : host[:hieradatadir]\n end",
"def local_backup_path\n [local_directory, Confluence.filename].join('/')\n end",
"def config_path(filename)\n @fixture_path.join(filename)\n end",
"def segments_yml_path\n path = fixtures_path.join(\"segments/#{segments.join('/')}.yml\")\n path.exist? ? path : project_path.join('config/segments.yml.bak') \n end",
"def fixture_path\n configuration.fixture_path\n end",
"def config_file\n Pathname.new(@definition[\"CfgFile\"])\n end",
"def template_path\n File.join(File.dirname(__FILE__), \"..\", \"default_config.yaml\")\n end",
"def local_path\n File.join([\"Subassemblies\", \"#{self.name}.craft\"])\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a list of pairs of datadir filepaths for the given hiera. The pairs contain the local and target filepaths, respectively. | def hiera_datadirs(hiera)
configpath = hiera_configpath(hiera)
config = YAML.load_file(configpath)
backends = [config[:backends]].flatten
datadirs = backends.map { |be| config[be.to_sym][:datadir] }.uniq
datadirs.map do |datadir|
localpath = File.join('config', 'hieras', hiera, File.basename(datadir))
[localpath, datadir]
end
end | [
"def child_paths(prefix, l, r)\n children = (dir_children(l) + dir_children(r)).uniq.sort\n if prefix\n children.map {|x| File.join(prefix, x) }\n else\n children\n end\nend",
"def paths(arrs)\n arrs.inject([[]]) do |paths, arr|\n arr.map {|e| paths.map {|path| path + [e]}}.flatten(1)\n end\n end",
"def paths(arrs)\n arrs.inject([[]]) do |paths, arr|\n arr.map {|e| paths.map {|path| path + [e]}}.flatten(1)\n end\n end",
"def hiera_datadir\n # This output lets us know where Hiera is configured to look on the system\n puppet_lookup_info = run_shell('puppet lookup --explain test__simp__test').stdout.strip.lines\n puppet_config_check = run_shell('puppet agent --configprint manifest').stdout\n\n if puppet_config_check.nil? || puppet_config_check.empty?\n fail(\"No output returned from `puppet config print manifest`\")\n end\n\n puppet_env_path = File.dirname(puppet_config_check)\n\n # We'll just take the first match since Hiera will find things there\n puppet_lookup_info = puppet_lookup_info.grep(/Path \"/).grep(Regexp.new(puppet_env_path))\n\n # Grep always returns an Array\n if puppet_lookup_info.empty?\n fail(\"Could not determine hiera data directory under #{puppet_env_path}\")\n end\n\n # Snag the actual path without the extra bits\n puppet_lookup_info = puppet_lookup_info.first.strip.split('\"').last\n\n # Make the parent directories exist\n run_shell(\"mkdir -p #{File.dirname(puppet_lookup_info)}\", acceptable_exit_codes: [0])\n\n # We just want the data directory name\n datadir_name = puppet_lookup_info.split(puppet_env_path).last\n\n # Grab the file separator to add back later\n file_sep = datadir_name[0]\n\n # Snag the first entry (this is the data directory)\n datadir_name = datadir_name.split(file_sep)[1]\n\n # Constitute the full path to the data directory\n datadir_path = puppet_env_path + file_sep + datadir_name\n\n # Return the path to the data directory\n return datadir_path\nend",
"def paths(arrs)\n arrs.inject([[]]) do |paths, arr|\n flatten(arr.map {|e| paths.map {|path| path + [e]}}, 1)\n end\n end",
"def resolve_paths\n [dataset_dir, parent.dataset_dir]\n end",
"def hierfiles\n paths = []\n\n Dir[\"#{etc}/*\"].select { |f| hierdir? f }.each do |hier|\n Find.find hier do |f|\n # Move on if the file is itself a hier dir or a dotfile\n next if File.basename(f) =~ /\\A(_|\\.)/\n\n # This is a non-hier node\n paths.push f\n\n # Don't recurse into a non-hier dir\n Find.prune if File.directory? f\n end\n end\n\n paths\n end",
"def hiera_datadir(sut)\n # This output lets us know where Hiera is configured to look on the system\n puppet_lookup_info = on(sut, 'puppet lookup --explain test__simp__test').output.strip.lines\n\n if sut.puppet['manifest'].nil? || sut.puppet['manifest'].empty?\n fail(\"No output returned from `puppet config print manifest` on #{sut}\")\n end\n\n puppet_env_path = File.dirname(sut.puppet['manifest'])\n\n # We'll just take the first match since Hiera will find things there\n puppet_lookup_info = puppet_lookup_info.grep(/Path \"/).grep(Regexp.new(puppet_env_path))\n\n # Grep always returns an Array\n if puppet_lookup_info.empty?\n fail(\"Could not determine hiera data directory under #{puppet_env_path} on #{sut}\")\n end\n\n # Snag the actual path without the extra bits\n puppet_lookup_info = puppet_lookup_info.first.strip.split('\"').last\n\n # Make the parent directories exist\n sut.mkdir_p(File.dirname(puppet_lookup_info))\n\n # We just want the data directory name\n datadir_name = puppet_lookup_info.split(puppet_env_path).last\n\n # Grab the file separator to add back later\n file_sep = datadir_name[0]\n\n # Snag the first entry (this is the data directory)\n datadir_name = datadir_name.split(file_sep)[1]\n\n # Constitute the full path to the data directory\n datadir_path = puppet_env_path + file_sep + datadir_name\n\n # Return the path to the data directory\n return datadir_path\nend",
"def absolute_paths\n return [] unless valid?\n drive_path = MountedDrives.path_to(drive)\n relative_paths.map { |p| File.join(drive_path, p) }\n end",
"def get_binary_path_list\n\t\tbinary_path_list = Array.new\n\t\t@b_cpp_files.each do |cpp_file|\n\t\t\tbinary_path_list.push(\"#{@name}\")\n\t\tend\n\t\t@t_cpp_files.each do |cpp_file|\n\t\t\tbinary_path_list.push(\"#{@name}\")\n\t\tend\n\t\t@subdirs.each do |subdir|\n\t\t\tbinary_path_list_temp = subdir.get_binary_path_list\n\t\t\tbinary_path_list_temp.each do |path|\n\t\t\t\tbinary_path_list.push(\"#{@name}/#{path}\")\n\t\t\tend\n\t\tend\n\n\t\treturn binary_path_list\n\tend",
"def hiera_datadir(host)\n host[:type] =~ /aio/ ? File.join(host.puppet['codedir'], 'hieradata') : host[:hieradatadir]\n end",
"def hiera_configpath(hiera)\n File.join('config', 'hieras', hiera, 'hiera.yaml')\nend",
"def possible_paths(opts)\n # file_names is implemented in each store.\n file_names(opts).map { |file_name| possible_paths_file(opts, file_name) }.flatten\n end",
"def paths\n map{ |dir| Pathname.new(dir) }\n end",
"def resolve_paths\n [dataset_dir]\n end",
"def all_data_paths(path)\n each_data_path(path).to_a\n end",
"def file_paths(file_tree, path_so_far = \"\")\n paths = []\n file_tree.keys.each do |key|\n if file_tree[key] == true\n extended_path = path_so_far + \"#{key}\"\n paths << extended_path\n else\n extended_path = path_so_far + \"#{key}/\"\n paths += file_paths(file_tree[key], extended_path)\n end\n end\n paths\nend",
"def guest_paths(folders)\n folders.map { |parts| parts[2] }\n end",
"def possible_paths_for(mappings)\n root_mappings.map{|root|\n mappings.first.map{|inner|\n mappings.last.map{|outer|\n ::File.join(root, inner, outer, '/') }}}.flatten\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /lists/new GET /lists/new.xml | def new
@list = List.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @list }
end
end | [
"def new\n @title = \"New Listing\"\n @list = List.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @list }\n end\n end",
"def new\n @list = List.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @list }\n end\n end",
"def new\n @listitem = Listitem.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @listitem }\n end\n end",
"def new\n @mylist = Mylist.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @mylist }\n end\n end",
"def new\n @list = List.find(params[:id])\n @todolist = @list.todolists.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @todolist }\n end\n end",
"def new\n @thing_list = ThingList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @thing_list }\n end\n end",
"def new\n @list ||= current_user.lists.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @list }\n end\n end",
"def new\n @list = List.new\n \n respond_to do |format|\n format.html { render \"listopia/lists/new\"}\n format.json { render json: @list }\n end\n end",
"def new\n @liste = Liste.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @liste }\n end\n end",
"def new\n @page_title = \"Task List New\"\n @task_list = TaskList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @task_list }\n end\n end",
"def new\n @list = current_user.lists.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @list }\n end\n end",
"def new\n @todo_list = TodoList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @todo_list }\n end\n end",
"def new\n @lista = Lista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @lista }\n end\n end",
"def new\n @list_item = @list.list_items.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @list_item }\n end\n end",
"def new\n @todolist = Todolist.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @todolist }\n end\n end",
"def new\n @item_list = ItemList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @item_list }\n end\n end",
"def new\n @list_view = ListView.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @list_view }\n end\n end",
"def new\n @task_list = TaskList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @task_list }\n end\n end",
"def new\n @seenlist = Seenlist.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @seenlist }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
optional parameters:
name: _method, type: String, optional values: GET|POST
name: _region, type: String
name: _scheme, type: String, optional values: http|https
name: address, type: String
name: client_token, type: String
name: is_public_address, type: String
name: load_balancer_mode, type: String
name: load_balancer_name, type: String
name: owner_account, type: String
name: owner_id, type: Long
name: resource_owner_account, type: String
name: resource_owner_id, type: Long | def create_load_balancer(optional={})
args = self.class.new_params
args[:query]['Action'] = 'CreateLoadBalancer'
args[:region] = optional[:_region] if (optional.key? :_region)
if optional.key? :_method
raise ArgumentError, '_method must be GET|POST' unless 'GET|POST'.split('|').include? optional[:_method]
args[:method] = optional[:_method]
end
if optional.key? :_scheme
raise ArgumentError, '_scheme must be http|https' unless 'http|https'.split('|').include? optional[:_scheme]
args[:scheme] = optional[:_scheme]
end
if optional.key? :address
args[:query]['Address'] = optional[:address]
end
if optional.key? :client_token
args[:query]['ClientToken'] = optional[:client_token]
end
if optional.key? :is_public_address
args[:query]['IsPublicAddress'] = optional[:is_public_address]
end
if optional.key? :load_balancer_mode
args[:query]['LoadBalancerMode'] = optional[:load_balancer_mode]
end
if optional.key? :load_balancer_name
args[:query]['LoadBalancerName'] = optional[:load_balancer_name]
end
if optional.key? :owner_account
args[:query]['OwnerAccount'] = optional[:owner_account]
end
if optional.key? :owner_id
args[:query]['OwnerId'] = optional[:owner_id]
end
if optional.key? :resource_owner_account
args[:query]['ResourceOwnerAccount'] = optional[:resource_owner_account]
end
if optional.key? :resource_owner_id
args[:query]['ResourceOwnerId'] = optional[:resource_owner_id]
end
self.run(args)
end | [
"def create_load_balancer(optional={})\n\t\targs = self.class.new_params\n\t\targs[:query]['Action'] = 'CreateLoadBalancer'\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\tif optional.key? :_method\n\t\t\traise ArgumentError, '_method must be GET|POST' unless 'GET|POST'.split('|').include? optional[:_method]\n\t\t\targs[:method] = optional[:_method]\n\t\tend\n\t\tif optional.key? :_scheme\n\t\t\traise ArgumentError, '_scheme must be http|https' unless 'http|https'.split('|').include? optional[:_scheme]\n\t\t\targs[:scheme] = optional[:_scheme]\n\t\tend\n\t\tif optional.key? :address_type\n\t\t\targs[:query]['AddressType'] = optional[:address_type]\n\t\tend\n\t\tif optional.key? :bandwidth\n\t\t\traise ArgumentError, 'bandwidth must be equal or greater than -1' unless optional[:bandwidth] < -1\n\t\t\traise ArgumentError, 'bandwidth must be equal or less than 1000' unless optional[:bandwidth] > 1000\n\t\t\targs[:query]['Bandwidth'] = optional[:bandwidth]\n\t\tend\n\t\tif optional.key? :client_token\n\t\t\targs[:query]['ClientToken'] = optional[:client_token]\n\t\tend\n\t\tif optional.key? :internet_charge_type\n\t\t\targs[:query]['InternetChargeType'] = optional[:internet_charge_type]\n\t\tend\n\t\tif optional.key? :load_balancer_name\n\t\t\targs[:query]['LoadBalancerName'] = optional[:load_balancer_name]\n\t\tend\n\t\tif optional.key? :master_zone_id\n\t\t\targs[:query]['MasterZoneId'] = optional[:master_zone_id]\n\t\tend\n\t\tif optional.key? :max_conn_limit\n\t\t\targs[:query]['MaxConnLimit'] = optional[:max_conn_limit]\n\t\tend\n\t\tif optional.key? :owner_account\n\t\t\targs[:query]['OwnerAccount'] = optional[:owner_account]\n\t\tend\n\t\tif optional.key? :owner_id\n\t\t\targs[:query]['OwnerId'] = optional[:owner_id]\n\t\tend\n\t\tif optional.key? :resource_owner_account\n\t\t\targs[:query]['ResourceOwnerAccount'] = optional[:resource_owner_account]\n\t\tend\n\t\tif optional.key? :resource_owner_id\n\t\t\targs[:query]['ResourceOwnerId'] = optional[:resource_owner_id]\n\t\tend\n\t\tif optional.key? :security_status\n\t\t\targs[:query]['SecurityStatus'] = optional[:security_status]\n\t\tend\n\t\tif optional.key? :slave_zone_id\n\t\t\targs[:query]['SlaveZoneId'] = optional[:slave_zone_id]\n\t\tend\n\t\tif optional.key? :v_switch_id\n\t\t\targs[:query]['VSwitchId'] = optional[:v_switch_id]\n\t\tend\n\t\tif optional.key? :vpc_id\n\t\t\targs[:query]['VpcId'] = optional[:vpc_id]\n\t\tend\n\t\tself.run(args)\n\tend",
"def describe_load_balancers(optional={})\n\t\targs = self.class.new_params\n\t\targs[:query]['Action'] = 'DescribeLoadBalancers'\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\tif optional.key? :_method\n\t\t\traise ArgumentError, '_method must be GET|POST' unless 'GET|POST'.split('|').include? optional[:_method]\n\t\t\targs[:method] = optional[:_method]\n\t\tend\n\t\tif optional.key? :_scheme\n\t\t\traise ArgumentError, '_scheme must be http|https' unless 'http|https'.split('|').include? optional[:_scheme]\n\t\t\targs[:scheme] = optional[:_scheme]\n\t\tend\n\t\tif optional.key? :address\n\t\t\targs[:query]['Address'] = optional[:address]\n\t\tend\n\t\tif optional.key? :address_type\n\t\t\targs[:query]['AddressType'] = optional[:address_type]\n\t\tend\n\t\tif optional.key? :internet_charge_type\n\t\t\targs[:query]['InternetChargeType'] = optional[:internet_charge_type]\n\t\tend\n\t\tif optional.key? :load_balancer_id\n\t\t\targs[:query]['LoadBalancerId'] = optional[:load_balancer_id]\n\t\tend\n\t\tif optional.key? :master_zone_id\n\t\t\targs[:query]['MasterZoneId'] = optional[:master_zone_id]\n\t\tend\n\t\tif optional.key? :network_type\n\t\t\targs[:query]['NetworkType'] = optional[:network_type]\n\t\tend\n\t\tif optional.key? :owner_account\n\t\t\targs[:query]['OwnerAccount'] = optional[:owner_account]\n\t\tend\n\t\tif optional.key? :owner_id\n\t\t\targs[:query]['OwnerId'] = optional[:owner_id]\n\t\tend\n\t\tif optional.key? :resource_owner_account\n\t\t\targs[:query]['ResourceOwnerAccount'] = optional[:resource_owner_account]\n\t\tend\n\t\tif optional.key? :resource_owner_id\n\t\t\targs[:query]['ResourceOwnerId'] = optional[:resource_owner_id]\n\t\tend\n\t\tif optional.key? :security_status\n\t\t\targs[:query]['SecurityStatus'] = optional[:security_status]\n\t\tend\n\t\tif optional.key? :server_id\n\t\t\targs[:query]['ServerId'] = optional[:server_id]\n\t\tend\n\t\tif optional.key? :slave_zone_id\n\t\t\targs[:query]['SlaveZoneId'] = optional[:slave_zone_id]\n\t\tend\n\t\tif optional.key? :v_switch_id\n\t\t\targs[:query]['VSwitchId'] = optional[:v_switch_id]\n\t\tend\n\t\tif optional.key? :vpc_id\n\t\t\targs[:query]['VpcId'] = optional[:vpc_id]\n\t\tend\n\t\tself.run(args)\n\tend",
"def create_load_balancer(name, options = {})\n params = {}\n params.merge!(Fog::AWS.indexed_param('Subnets.member.%d', options[:subnets]))\n params.merge!(Fog::AWS.indexed_param('SecurityGroups.member.%d', options[:security_groups]))\n params.merge!(Fog::AWS.serialize_keys('Scheme', options[:scheme]))\n params.merge!(Fog::AWS.serialize_keys('Type', options[:type]))\n params.merge!(Fog::AWS.serialize_keys('IpAddressType', options[:ip_address_type]))\n\n\n unless options[:tags].nil?\n tag_keys = options[:tags].keys.sort\n tag_values = tag_keys.map { |key| options[:tags][key] }\n params.merge!(Fog::AWS.indexed_param('Tags.member.%d.Key', tag_keys))\n params.merge!(Fog::AWS.indexed_param('Tags.member.%d.Value', tag_values))\n end\n\n unless options[:subnet_mappings].nil?\n subnet_ids = []\n allocation_ids = []\n private_ipv4_address = []\n options[:subnet_mappings].each do |subnet_mapping|\n subnet_ids.push(subnet_mapping[:subnet_id])\n allocation_ids.push(subnet_mapping[:allocation_id])\n private_ipv4_address.push(subnet_mapping[:private_ipv4_address])\n end\n params.merge!(Fog::AWS.indexed_param('SubnetMappings.member.%d.SubnetId', subnet_ids))\n params.merge!(Fog::AWS.indexed_param('SubnetMappings.member.%d.AllocationId', allocation_ids))\n params.merge!(Fog::AWS.indexed_param('SubnetMappings.member.%d.PrivateIPv4Address', private_ipv4_address))\n end\n\n\n request({\n 'Action' => 'CreateLoadBalancer',\n 'Name' => name,\n :parser => Fog::Parsers::AWS::ELBV2::CreateLoadBalancer.new\n }.merge!(params))\n end",
"def region_base_url_params\n {'url_ver'=>'Z39.88-2004', 'svc_id'=>'info:lanl-repo/svc/getRegion',\n 'svc_val_fmt'=>'info:ofi/fmt:kev:mtx:jpeg2000',\n 'rft_id' => rft_id\n }\n end",
"def op_endpoint; end",
"def request(*)\n raise 'HttpApiBuilder::BaseClient#request must be implemented, see documentation'\n end",
"def rest_endpoint=(_arg0); end",
"def http_options; end",
"def create name, type, attributes = {}\n\n attribute_list = []\n\n attributes.each do |attr_name,values|\n [values].flatten.each do |value|\n attribute_list << { \n :attribute_name => attr_name, \n :attribute_value => value.to_s \n }\n end\n end\n\n client.create_load_balancer_policy(\n :load_balancer_name => load_balancer.name,\n :policy_name => name.to_s,\n :policy_type_name => type.to_s,\n :policy_attributes => attribute_list)\n\n LoadBalancerPolicy.new(load_balancer, name, :type => type.to_s)\n\n end",
"def create_load_balancer(lb_name, options = {})\n params = {}\n params.merge!(Fog::AWS.serialize_keys('Subnets', options[:subnet_ids])) if options[:subnet_ids]\n params.merge!(Fog::AWS.serialize_keys('SecurityGroups', options[:security_group_ids])) if options[:security_group_ids]\n\n if options[:tags]\n tags = options[:tags]\n params.merge!(Fog::AWS.serialize_keys('Tags', tags.map { |k, v| { 'Key' => k, 'Value' => v } }))\n end\n\n request({\n 'Action' => 'CreateLoadBalancer',\n 'Name' => lb_name,\n 'Scheme' => options[:scheme] || 'internet-facing',\n 'IpAddressType' => options[:ip_address_type] || 'ipv4',\n :parser => Fog::Parsers::AWS::ELBV2::CreateLoadBalancer.new\n }.merge(params))\n end",
"def http_request_for_method(method_name, method_url, request_body = nil)\n content_type_header = {\"Content-Type\" => \"application/json\"}\n\n # This is a workaround for a potential problem that arises from mis-using the\n # API. If you call SoftLayer_Virtual_Guest and you call the getObject method\n # but pass a virtual guest as a parameter, what happens is the getObject method\n # is called through an HTTP POST verb and the API creates a new CCI that is a copy\n # of the one you passed in.\n #\n # The counter-intuitive creation of a new CCI is unexpected and, even worse,\n # is something you can be billed for. To prevent that, we ignore the request\n # body on a \"getObject\" call and print out a warning.\n if (method_name == :getObject) && (nil != request_body) then\n $stderr.puts \"Warning - The getObject method takes no parameters. The parameters you have provided will be ignored.\"\n request_body = nil\n end\n\n if request_body && !request_body.empty?\n url_request = Net::HTTP::Post.new(method_url.request_uri(), content_type_header)\n else\n \turl_request = Net::HTTP::Get.new(method_url.request_uri())\n end\n\n # This warning should be obsolete as we should be using POST if the user\n \t # has provided parameters. I'm going to leave it in, however, on the off\n \t # chance that it catches a case we aren't expecting.\n if request_body && !url_request.request_body_permitted?\n $stderr.puts(\"Warning - The HTTP request for #{method_name} does not allow arguments to be passed to the server\")\n else\n # Otherwise, add the arguments as the body of the request\n url_request.body = request_body\n end\n\n \t url_request\n end",
"def allowed_request_methods=(_arg0); end",
"def RestParam(name); end",
"def on_rest_param(name); end",
"def http_parse_args\n [(self.api.base_url + self.method), self.params]\n end",
"def initialize(**options)\n @api_client = PayPoint::Blue::API.new(**options)\n super\n end",
"def initialize(request_url, params, client, options = {})\n if params.is_a?(String)\n @string_params = params\n @hash_params = Hash.from_url_params(params)\n else\n unless options.kind_of?(Hash)\n options = {}\n end\n options[:skip_param_keys] ||= []\n #this is a bit of helpful sugar for rails framework users\n options[:skip_param_keys] |= ['action','controller']\n\n if params.respond_to?(:reject)\n params.reject! {|key, val| options[:skip_param_keys].include?(key) }\n else\n params = {}\n end\n @hash_params = params\n @string_params = InboundRequest.get_http_params(@hash_params)\n end\n #puts \"Params are: #{params.inspect}\"\n @request_url = request_url\n @client = client\n @supplied_signature = @hash_params[self.class::SIGNATURE_KEY]\n @allow_sigv1 = options[:allow_sigv1] || false\n end",
"def request_parameters\n Hash[(Tumblr::Client::POST_OPTIONS | [:id, :type]).map {|key|\n [key.to_s, send(key)] if respond_to?(key) && send(key)\n }]\n end",
"def create_bridge_endpoint_with_http_info(bridge_endpoint, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiNetworkTransportBridgeEndpointsApi.create_bridge_endpoint ...'\n end\n # verify the required parameter 'bridge_endpoint' is set\n if @api_client.config.client_side_validation && bridge_endpoint.nil?\n fail ArgumentError, \"Missing the required parameter 'bridge_endpoint' when calling ManagementPlaneApiNetworkTransportBridgeEndpointsApi.create_bridge_endpoint\"\n end\n # resource path\n local_var_path = '/bridge-endpoints'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(bridge_endpoint)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BridgeEndpoint')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiNetworkTransportBridgeEndpointsApi#create_bridge_endpoint\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find out what new roles need filling in, and (at the same time) which ones need saving. We need filling in if 'dont_know' is set or if an email address is given which isn't in the system. It needs saving if it needs filling in or if it has a valid email address
any_need_filling = false
@roles.select{|r| r.new_entry }.each do |role|
role.needs_saving = role.needs_filling_in = false
if role.dont_know
role.needs_filling_in = true
role.needs_saving = true
elsif !role.email.empty?
role.needs_saving = true
user = User.with_email(role.email)
if user
role.role.role_user = user.user_id
else
role.needs_filling_in = true
role.user.contact.con_email = role.email
end
end
any_need_filling ||= role.needs_filling_in
end
any_need_filling
end | [
"def set_possible_roles\n\tif User.has_role Role.ADMINISTRATOR,session[:roles]\n\t @roles=Role.all\n\t return\n\tend\n\n\t@logged_in_user_role_id = UserRoleMap.getRoleidByUserid(session[:session_user])\n\t#@roles = Role.where(:id => RoleReportTo.select(\"user_role_id\").where(:manager_role_id => @logged_in_user_role_id))\n\t@roles = Role.getRolesByRoleid(RoleReportTo.getUserRoleidByManagerRoleid(@logged_in_user_role_id))\n\thas_volunteer=false\n\t@roles.each do |role|\n\t if role.role == Role.VOLUNTEER\n\t\thas_volunteer=true\n\t\tbreak\n\t end\n\tend\n\tunless has_volunteer\n\t @volunteer_role=Role.where(role:Role.VOLUNTEER)\n\t @volunteer_role.each do |role|\n\t\t@roles.push role\n\t end\n\tend\n end",
"def save_and_send_mails(learners_for_course_adding,learners_valid_for_signup,current_user,assessment_id)\n @final_signup_learners = Array.new #Creates a global hash to store filtered learners for sending signup mail.\n final_course_added_learners = Array.new\n @total_final_course_added_learners = Array.new\n @already_assigned = Array.new\n @admin = Array.new\n @other_org = Array.new\n @assessment = Assessment.find(assessment_id)\n unless learners_valid_for_signup.nil? or learners_valid_for_signup.blank? then\n learners_valid_for_signup.each { |learner|\n if valid_assign(@assessment.id,current_user)\n @user = User.find_by_email(learner.email)\n # final_signup_learners << learner.email\n case\n when (@user.user_id == current_user.id or @user.id == current_user.id) then # whether the learner is for current admin or not or whether admin is assigning himself as learner\n @learner = Learner.find_by_assessment_id_and_user_id_and_type_of_test_taker(assessment_id,@user.id,\"learner\")\n if @learner.nil? then # whether the learner is already present or not\n if valid_assign(@assessment.id,current_user)\n if @assessment.assessment_rules.nil? or @assessment.assessment_rules.blank?\n store_learner_details(@assessment,current_user,@user,\"\",\"learner\")\n else\n store_learner_details_using_rules(@assessment,current_user,@user,\"\",\"learner\")\n end\n fill_signup_course_added_array(@user)\n end\n elsif !@learner.nil? and @learner.active == \"no\" then # if learner is existing in learners table with active status \"no\" then just change the status to \"yes\"No need to add new record for him again\n @learner.update_attribute(:active,\"yes\")\n fill_signup_course_added_array(@learner.user)\n increase_assessment_columns_while_assigning(@assessment,@learner)\n end\n end\n end\n }\n end\n \n #dont write final_course_added_learners= total_final_course_added_learners = learners_for_course_adding. This doesnt work.. they make use of same address and mem space.\n # so x=y=3, here if u make any changes on x they reflect on y also.\n final_course_added_learners = learners_for_course_adding\n # total_final_course_added_learners.replace(final_course_added_learners)\n final_course_added_learners.each { |learner|\n @user = User.find_by_email(learner.email)\n\n @learner = Learner.find_by_assessment_id_and_user_id_and_type_of_test_taker(assessment_id,@user.id,\"learner\")\n if @learner.nil? then # whether the learner is already present or not\n if valid_assign(@assessment.id,current_user)\n if @assessment.assessment_rules.nil? or @assessment.assessment_rules.blank?\n store_learner_details(@assessment,current_user,@user,\"\",\"learner\")\n else\n store_learner_details_using_rules(@assessment,current_user,@user,\"\",\"learner\")\n end\n fill_signup_course_added_array(@user)\n end\n elsif !@learner.nil? and @learner.active == \"no\" then # if learner is existing in learners table with active status \"no\" then just change the status to \"yes\"No need to add new record for him again\n @learner.update_attribute(:active,\"yes\")\n fill_signup_course_added_array(@learner.user)\n increase_assessment_columns_while_assigning(@assessment,@learner)\n end\n }\n \n test_added_count = send_test_added_mails(@total_final_course_added_learners,@assessment,current_user)\n signup_count = send_signup_mails(@final_signup_learners,@assessment,current_user)\n return (signup_count + test_added_count)\n end",
"def validate!\n requested.to_a.flatten.uniq.each do |roleid|\n validate_role_exists! roleid\n end\n end",
"def update_user_attributes\n unless username.to_s.empty?\n person = Person.person_by_username(username)\n\n # Update the e-mail address if required... \n self.email = person.email_address unless self.email == person.email_address\n\n # Roles generated from the Person fields, user/department/faculty roles are all \n # different RoleTypes\n user_role = Role.match_user_role_by_name(person.user_type)\n department_role = Role.match_department_role_by_name(person.department_ou)\n faculty_role = Role.match_faculty_role_by_name(person.faculty_code)\n\n # Only update the role if it doesn't match the currently stored role.\n update_role(user_role) unless self.roles.include?(user_role)\n update_role(department_role) unless self.roles.include?(department_role)\n update_role(faculty_role) unless self.roles.include?(faculty_role)\n end\n end",
"def reconcile_privileges! occasion=nil, *more_info\n logger.info \"Reassigning privileges for #{self.class} id #{self.id}: #{occasion} #{more_info.to_json}\" if occasion\n # :active if and only if email is verified.\n set_role! :active, email_verified?, :skip_save\n # :veteran if older than 1 month\n # set_role! :veteran, ( (Time.now - self.created_at) >= 1.months ), :skip_save\n end",
"def assign_managers\n\tauto_assigned_managers_roles={Role.CFR_POC => [Role.CFR_FELLOW],\n\t\t\t\t\t\t\t\t Role.VOLUNTEER => [Role.EVENTS_FELLOW],\n\t\t\t\t\t\t\t\t Role.CFR_FELLOW => [Role.CITY_PRESIDENT,Role.NATIONAL_CFR_HEAD],\n\t\t\t\t\t\t\t\t Role.EVENTS_FELLOW => [Role.CITY_PRESIDENT,Role.NATIONAL_EVENTS_HEAD],\n\t\t\t\t\t\t\t\t Role.CITY_FINANCIAL_CONTROLLER=> [Role.NATIONAL_FINANCIAL_CONTROLLER]}\n\t@user_role_maps = UserRoleMap.new\n\t@user_role_maps.assign_attributes({:role_id => @user.role_id, :user_id => @user.id})\n\t@user_role_maps.save(:validate=>false)\n\tuser_role=Role.find(@user.role_id)\n\tif user_role.has_no_managers?\n\t user_manager_map=ReportsTo.new\n\t administrator = Role.where(role: Role.ADMINISTRATOR).first\n\t user_manager_map.assign_attributes(:user_id => @user.id,:manager_id => administrator.id)\n\t user_manager_map.save\n\t return\n\telse\n\t @reports_tos=[]\n\t manager = ReportsTo.new\n\t if @user.manager_id.present?\n\t\tmanager.assign_attributes(:user_id => @user.id, :manager_id => @user.manager_id)\n\t\t@reports_tos.push manager\n\t elsif user_role.role!=Role.CITY_FINANCIAL_CONTROLLER && user_role.role!=Role.CFR_FELLOW\n\t\tflash[:error] = MadConstants.error_message_no_manager_selected\n\t\traise ActiveRecord::Rollback, 'City Level roles not assigned to anybody'\n\t end\n\tend\n\tuser_role = Role.find @user.role_id\n\tauto_assigned_managers_roles[user_role.role].each do |role_name|\n\t @manager=nil\n\t if Role.is_national_level_role? role_name\n\t\t@manager=User.find_single_manager_by_role_name role_name\n\t else\n\t\t@manager=User.find_single_manager_by_role_name_and_city_id role_name,@user.city_id\n\t end\n\t if @manager.nil?\n\t\tflash[:error] = MadConstants.error_message_no_city_or_national_level_managers\n\t\traise ActiveRecord::Rollback, 'City Level roles not assigned to anybody'\n\t end\n\t user_manager_map = ReportsTo.new\n\t user_manager_map.user_id = @user.id\n\t user_manager_map.manager_id = @manager.id\n\t @reports_tos.push user_manager_map\n\tend\n\t@reports_tos.each do |reports_to|\n\t reports_to.save(:validate => false)\n\tend\n end",
"def all_roles_assigned?(roles)\n \n all_roles_assigned = false\n self.board_design_entry_users.reload\n \n roles.each do | role|\n all_roles_assigned = self.board_design_entry_users.detect { |user| \n user.role_id == role.id \n }\n break if !all_roles_assigned\n end\n\n all_roles_assigned != nil\n \n end",
"def create_roles\n Role.where(name: 'organizer', resource: self).first_or_create(description: 'For the organizers of the conference (who shall have full access)')\n Role.where(name: 'cfp', resource: self).first_or_create(description: 'For the members of the CfP team')\n Role.where(name: 'info_desk', resource: self).first_or_create(description: 'For the members of the Info Desk team')\n Role.where(name: 'volunteers_coordinator', resource: self).first_or_create(description: 'For the people in charge of volunteers')\n end",
"def after_save\n unless roles_user.blank?\n if ( _critical = AlertGroup.critical )\n _critical.alert_types.each do |_alert_type|\n # * find or instantiate a row\n _alert_option = _alert_type.alert_options.find_by_roles_user_id( roles_user) # fetch existing\n # * WARNING: do not use the rails shortcut. some error here. check console output below\n # * user \"build\", then assign roles_user\n # # (rdb:1) _alert_type.alert_options.build( :roles_user_id => roles_user)\n # # #<AlertOption id: nil, roles_user_id: 1, alert_type_id: 27, phone_active: nil, email_active: nil, text_active: nil, created_at: nil, updated_at: nil>\n # # (rdb:1) roles_user\n # # #<RolesUser id: 3534, user_id: 3568, role_id: 3357, created_at: \"2010-12-21 17:18:25\", updated_at: \"2010-12-21 17:18:25\">\n _alert_option ||= _alert_type.alert_options.build # ( :roles_user_id => roles_user) # not found? build new\n _alert_option.roles_user = roles_user\n # * update all 3 attributes from self\n _alert_option.email_active = email_active\n _alert_option.phone_active = phone_active\n _alert_option.text_active = text_active\n # * code below is not working somehow. need all assignments explicitly, not dynamically\n # [\"phone_active\", \"email_active\", \"text_active\"].each do |_attribute|\n # # * fetch from self\n # # * assign to _alert_option\n # _alert_option.send( \"#{_attribute}=\".to_sym, self.send(\"#{_attribute}\"))\n # end\n _alert_option.save\n end # loop\n end # \"critical\" found\n end # roles_user missing?\n end",
"def can_modify_org_details?\n roles.include? Role.find_by(name: constant(\"user_role_types.change_org_details\"))\n end",
"def save_and_activate_package_learners\n if !params[:user][:email].nil? and !params[:user][:email].blank?\n @tenant = Tenant.find_by_custom_url(request.subdomain)\n admin_user = @tenant.user\n group = Group.find_by_user_id_and_group_name(admin_user.id,'Coupons')\n if User.find_by_email(params[:user][:email]).nil?\n\n #the below gets executed for DC books . control comes from /views/coupons/first_time_activation.html where learner fills many details like name,phonenumber,dateofbirth etc\n if !session[:user].nil?\n @user = User.new()\n @user.login = session[:user][:login]\n @user.email = params[:user][:email].strip()\n @user.typeofuser = \"learner\"\n @user.address = session[:user][:address]\n @user.date_of_birth = session[:user][:date_of_birth]\n @user.mob_number = session[:user][:mob_number]\n @user.designation = session[:user][:designation]\n @user.alternate_email = params[:user][:alternate_email]\n @user.student_course = session[:user][:student_course]\n @user.student_course_year = session[:user][:student_course_year]\n @user.student_college = session[:user][:student_college]\n @user.student_college_city = session[:user][:student_college_city]\n @user.user_id = admin_user.id\n @user.tenant_id = @tenant.id\n @user.group_id = group.id\n @user.save\n else\n\n if !params[:user][:login].nil? and !params[:user][:login].blank?\n @user = User.new()\n @user.login = params[:user][:login]\n @user.email = params[:user][:email].strip()\n @user.typeofuser = \"learner\"\n # @user.alternate_email = params[:user][:alternate_email]\n @user.user_id = admin_user.id\n @user.tenant_id = @tenant.id\n @user.group_id = group.id\n @user.save\n else\n flash[:enter_details] = \"Enter details\"\n redirect_to(\"/coupons/package_signup/#{params[:id]}\")\n end\n end\n unless @user.nil?\n coupon = Coupon.find(params[:id])\n #assign the first test/course to the learner\n assign_first_course_or_assessment_for_coupon(coupon,@user,admin_user)\n package_send_activation_mail(@user,'signup_package_learner_notification',@user.tenant,@user.tenant.user)\n flash[:email_notice] = \"Email was sent.\"\n redirect_to(\"/coupons/package_signup_confirmation/#{@user.id}\")\n end\n else\n flash[:enter_details] = \"Email already exists\"\n redirect_to(\"/coupons/package_signup/#{params[:id]}\")\n end\n else\n flash[:enter_details] = \"Enter details\"\n redirect_to(\"/coupons/package_signup/#{params[:id]}\")\n end\n end",
"def admin_update(attributes)\n if User.current_user.is_a?(Admin)\n success = true\n success = set_roles(attributes[:roles])\n if success && attributes[:email]\n self.email = attributes[:email]\n success = self.save(:validate => false)\n end\n success\n end\n end",
"def validate_on_create\n if self.omnipotent? && Role.find_by_omnipotent(true)\n errors.add_to_base(\"There can only be one omnipotent role.\")\n end\n end",
"def setup_role_and_users\n # Allow both role and users to be specified\n @receiving_user_ids = []\n\n if @role\n\n @role = @role.reject(&:blank?) if @role.is_a? Array\n @role_name = calc_field_or_return(@role)\n @role_name = @role_name.reject(&:blank?) if @role_name.is_a? Array\n\n @receiving_user_ids += Admin::UserRole.active_user_ids role_name: @role_name, app_type: @user.app_type\n end\n\n if @users\n @users = @users.reject(&:blank?) if @users.is_a? Array\n user_ids = calc_field_or_return(@users)\n @receiving_user_ids += User.where(id: user_ids).active.pluck(:id)\n end\n\n @receiving_user_ids.uniq!\n end",
"def create_staff_for_education_organization(roles, required)\n members = []\n if !required.nil? and required.size > 0\n required[\"staff\"].each do |member|\n # skip this entry if its an Educator --> handled in 'create_teachers' method\n next if [\"Student\", \"Educator\"].include? member[:role]\n\n @num_staff_members += 1\n members << {\"id\" => member[:staff_id], \"role\" => member[:role], \"name\" => member[:name], \"begin\" => member[:begin], \"end\" => member[:end]}\n for index in (0..(roles.size - 1)) do\n if Roles.to_string(roles[index]) == member[:role]\n @log.info \"Removing role: #{member[:role]} from default roles --> specified by member in staff catalog.\"\n roles.delete_at(index)\n break\n end\n end\n end\n end\n if !roles.nil? and roles.size > 0\n for index in (0..(roles.size - 1)) do\n @num_staff_members += 1\n members << {\"id\" => @num_staff_members, \"role\" => roles[index]}\n end\n end\n members\n end",
"def add_review_role\n\n @design = Design.find(params[:id])\n @review_roles = Role.get_review_roles + Role.get_manager_review_roles\n @review_roles.delete_if { |r| @design.role_open_review_count(r) > 0 }\n if ( params[:role_id]) #update the design\n role_id = params[:role_id]\n reviewer = User.find(params[:add][:name_id])\n #add the role to each design review results\n design_reviews = @design.design_reviews.sort_by { |dr| dr.review_type.sort_order }\n design_reviews.delete_if { |dr| dr.review_complete? }\n role = Role.find(role_id)\n assigned = false\n design_reviews.each do |dr| \n if ! dr.role_reviewer(role) # don't reassign\n next if !role.included_in_design_review?(dr.design)\n next if !role.review_types.include?(dr.review_type)\n drr = DesignReviewResult.new(:reviewer_id => reviewer.id, :role_id => role_id )\n drr.result = \"No Response\" if dr.active? \n dr.design_review_results << drr\n assigned = true\n end\n end\n if assigned\n DesignMailer::review_role_creation_notification(@design,\n role, reviewer).deliver\n flash[\"notice\"] = \"The #{role.display_name} role has been added\"\n flash[\"notice\"] += \"<br />Mail has been sent to #{reviewer.name}\"\n else\n flash[\"notice\"] = \"No available reviews to assign #{role.name} role to\"\n end\n redirect_to :action => 'design_review_reviewers', :id => params[:id]\n else\n @url = get_role_users_design_path\n\n \n #display the form to fill in the data\n end\n end",
"def rebuild_accessibilities( emails=nil )\r\n emails ||= self.contact_people.collect{ |contact| contact.email }\r\n emails << self.email\r\n StoreUser.transaction do\r\n StoreUser.update_all( \"erp_account_number = NULL\", [\"erp_account_number = ?\", self.account_num] )\r\n StoreUser.update_all(\r\n \"erp_account_number = '#{self.account_num}'\",\r\n ['email_address in (?)', emails]\r\n ) unless emails.empty?\r\n end\r\n end",
"def ensure_there_is_a_role\n if role.blank?\n self.role = \"student\"\n end\n end",
"def important_role\n _roles = roles.collect(&:name).compact.uniq\n _role = ( (_roles - ['super_admin', 'admin', 'halouser', 'caregiver']).blank? ? 'None' : 'Other' )\n ['super_admin', 'admin', 'halouser', 'caregiver'].each do |e|\n if _roles.include?( e)\n if ['admin', 'halouser'].include?( e)\n _role = \"#{e} / \" + self.send( \"is_#{e}_of_what\").compact.collect(&:name).join(', ')\n else\n _role = e\n end\n end\n end\n _role\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Build notification channel with recipient's id for uniqueness | def build_notification_id(id)
"Notification-#{id}"
end | [
"def create_notification(chatroom)\n last_message = chatroom.messages.last\n user = last_message.user\n other_user = chatroom.other_user user.id\n n = other_user.notification || Notification.create(user_id: other_user.id)\n n.count += 1\n n.save\n last_message.notification_id = n.id\n last_message.save\n NotificationsChannel.broadcast_to other_user,\n user: last_message.user.name,\n content: last_message.content,\n picture: last_message.user.picture,\n topic: self.topic,\n id: last_message.id\n end",
"def create_notification\n if !self.channel.blank? && !self.notifiable_followers.blank?\n Notification.create(\n notifiable_id: self.channel.id, notifiable_type: 'Channel', \n notification_type: 'Post', source_user_id: self.user_id, target_user_hash: {},\n target_user_ids: self.notifiable_followers , seen: false, custom_text: self.post.title)\n end\n end",
"def unique_id\n channel_id[/channel:([^<]+)/, 1]\n end",
"def get_channel hash, to\n if config.recipient then\n make_channel config.recipient\n elsif to then\n make_channel to\n elsif hash[:channel]\n make_channel hash[:channel]\n else\n raise CollinsNotify::ConfigurationError.new \"No slack.channel or config.recipient specified\"\n end\n end",
"def generate_chat_invite_link(chat_id)\n broadcast('@type' => 'generateChatInviteLink',\n 'chat_id' => chat_id)\n end",
"def genreate_channel_id(type = 'call')\n \"#{type}_#{Time.now.to_f.to_s.sub!('.', '')}\"\n end",
"def channel_to\n Channel.find(channel_id_to)\n end",
"def create_notification\n Notification.create(\n notifiable_id: self.interactionable_id, notifiable_type: self.interactionable_type, \n notification_type: self.interaction_type, source_user_id: self.user_id, target_user_hash: {\"#{self.owner.id}\": 'true'},\n target_user_ids: [self.owner.id] , seen: false, custom_text: self.user.profile.fullname)\n end",
"def create_mail\n # Removes the current user from the recipients and cc\n # if he doesn't want to receive notifications about what he does\n @author ||= User.current\n if @author.pref[:no_self_notified]\n recipients.delete(@author.mail) if recipients\n cc.delete(@author.mail) if cc\n end\n \n notified_users = [recipients, cc].flatten.compact.uniq\n # Rails would log recipients only, not cc and bcc\n mylogger.info \"Sending email notification to: #{notified_users.join(', ')}\" if mylogger\n \n # Blind carbon copy recipients\n if Setting.bcc_recipients?\n bcc(notified_users)\n recipients []\n cc []\n end\n super\n end",
"def gen_chat_uuid\n SecureRandom.uuid\n end",
"def build(payload)\n !payload.empty? && Channel.new(payload)\n end",
"def presence_message_generate(channel_name, action, subscriptor)\n presence_channel = \"#{channel_name}-presence\"\n a = { :channel => presence_channel,\n :data => {\n :user_id => subscriptor[:user_id],\n :action => action,\n :data => subscriptor[:data]\n }\n }\n end",
"def send_new_chat(data)\n stream_from \"Chat:#{data['chat']['id']}\"\n recipient = User.find(data['chat']['recipient_id'])\n unless current_user.id == recipient.id\n ActionCable.server.broadcast \"user_#{recipient.id}_chats\", chat: data['chat'], type: 'new_chat'\n end\n end",
"def channel(user)\n if user.is_a? String\n user\n else\n user.send(:push_id)\n end\n end",
"def joinChannel(idUser,idChannel)\n \n end",
"def notify_receiver\n conversation = ConversationBlueprint.render_as_json(self, view: :normal)\n ActionCable.server.broadcast \"user_#{ self.user_two_id }_notification_channel\", conversation\n end",
"def notification_id\n @id\n end",
"def gen_message_id\n\t\treturn \"<%s.%s@%s>\" % [\n\t\t\t(Time.now.to_f * 10000).to_i.to_s( 36 ),\n\t\t\t(rand( 2 ** 64 - 1 )).to_s( 36 ),\n\t\t\tSocket.gethostname\n\t\t]\n\tend",
"def createNotificationChannel(channel_type, content_type, version=1.0)\n url = \"#{@fqdn}#{NOTIFICATION_RESOURCE}\"\n headers = { \n :accept => 'application/json', \n :content_type => \"application/json\",\n }\n body = Webhooks.createChannel(channel_type, content_type, version)\n\n begin\n r = self.post(url, body.to_json, headers)\n rescue RestClient::Exception => e\n raise(ServiceException, e.response || e.message, e.backtrace)\n end\n Model::NotificationChannel.from_response(r)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /v1_users/new GET /v1_users/new.xml | def new
@user = V1::User.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @user }
end
end | [
"def new\n logger.debug(\"Create a new user\")\n @user = User.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user }\n end\n end",
"def new\n @user = User.new\n\n respond_to do |format|\n format.xml { render xml: @user}\n end\n end",
"def new\n @user = user.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user }\n end\n end",
"def new\n @user = @client.users.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user }\n end\n end",
"def new\n @new_user = NewUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @new_user }\n end\n end",
"def new\n @username = Username.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @username }\n end\n end",
"def new\n @user_name = UserName.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user_name }\n end\n end",
"def new_rest\n @dialogix_user = DialogixUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @dialogix_user }\n end\n end",
"def new\n @usr = Usr.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @usr }\n end\n end",
"def new\n @new_user = User.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @new_user }\n end\n end",
"def new\n @newuser = Newuser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @newuser }\n end\n end",
"def new\n @user1 = User1.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user1 }\n end\n end",
"def new\n @userinfo = Userinfo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @userinfo }\n end\n end",
"def new\n @email_user = Email::User.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @email_user }\n end\n end",
"def new\n @external_user = ExternalUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @external_user }\n end\n end",
"def new\n @cet_user = CetUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cet_users }\n end\n end",
"def new\n @client_user = ClientUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @client_user }\n end\n end",
"def new\n @user_template = UserTemplate.new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user_template }\n end\n end",
"def new\n @project_user = ProjectUser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @project_user }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /v1_users/1 PUT /v1_users/1.xml | def update
@user = V1::User.find(params[:id])
respond_to do |format|
if @user.update_attributes(params[:user])
flash[:notice] = 'V1::User was successfully updated.'
format.html { redirect_to(@user) }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @user.errors, :status => :unprocessable_entity }
end
end
end | [
"def update!\n @authorize = nil\n update_plan! &&\n resp = put(\"/users/#{username}.xml\", {\n :user_key => apikey,\n \"user[first_name]\" => first_name,\n \"user[last_name]\" => last_name\n })\n end",
"def update\n if @api_v1_user.update(api_v1_user_params)\n render json: @api_v1_user\n else\n render json: @api_v1_user.errors, status: :unprocessable_entity\n end\n end",
"def update_rest\n @dialogix_user = DialogixUser.find(params[:id])\n\n respond_to do |format|\n if @dialogix_user.update_attributes(params[:dialogix_user])\n flash[:notice] = 'DialogixUser was successfully updated.'\n format.html { redirect_to(@dialogix_user) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @dialogix_user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post opts.fetch(:path, update_path), opts\n end",
"def update\n @user1 = User1.find(params[:id])\n\n respond_to do |format|\n if @user1.update_attributes(params[:user1])\n format.html { redirect_to(@user1, :notice => 'User1 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user1.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_user id, payload\n\t\t\t\t\t(@connection.put USERS, id, payload).code\n\t\t\t\tend",
"def update\n @api_user = ApiUser.find(params[:id])\n\n respond_to do |format|\n if @api_user.update_attributes(params[:api_user])\n format.html { redirect_to(@api_user, :notice => 'Api user was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @api_user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = @users.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n flash[:title] = \"User Updated\"\n flash[:class] = \"information\"\n flash[:notice] = 'User was successfully updated.'\n format.html { redirect_to(line_user_url(@line, @user)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_user\n end",
"def update_user(user_name, options={})\n request_hash = { 'UserName' => user_name}\n request_hash['NewUserName'] = options[:new_user_name] unless options[:new_user_name].right_blank?\n request_hash['NewPath'] = options[:new_path] unless options[:new_path].right_blank?\n link = generate_request(\"UpdateUser\", request_hash)\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n end",
"def update_tomcat_usersxml strFile\n\n\tFile.open(strFile) do |tomcatusersfile|\n\t\tusersxml = REXML::Document.new(tomcatusersfile)\n\t\t#update the server port number \n\t\ttomcat_users_root = usersxml.elements['tomcat-users']\n\t\ttomcat_users_root.add_element(get_tomcat_admin_role())\n\t\ttomcat_users_root.add_element(get_tomcat_admin_user())\n\t\tfusersxml = File.open(\"#{dspace_tomcatusers_xml}\", \"w\")\n\t\tfusersxml.puts usersxml\n\t\tusersXmlPath = fusersxml.path\n\t\tfusersxml.close\n\t\tend\nend",
"def update\n @user_name = UserName.find(params[:id])\n\n respond_to do |format|\n if @user_name.update_attributes(params[:user_name])\n format.html { redirect_to(@user_name, :notice => 'User name was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user_name.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @v2_user.update(v2_user_params)\n format.html { redirect_to @v2_user, notice: \"V2 user was successfully updated.\" }\n format.json { render :show, status: :ok, location: @v2_user }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @v2_user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @email_user = Email::User.find(params[:id])\n\n respond_to do |format|\n if @email_user.update_attributes(params[:email_user])\n format.html { redirect_to(@email_user, :notice => 'User was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @email_user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def edit(id, options={})\n request(:put, \"/users/#{id}.json\", default_params(options))\n end",
"def update\n @user2 = User2.find(params[:id])\n\n respond_to do |format|\n if @user2.update_attributes(params[:user2])\n format.html { redirect_to(@user2, :notice => 'User2 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user2.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n @user.name = params[:name]\n @user.email = params[:email]\n @user.password = params[:password]\n @user.photo = params[:photo]\n @user.role = params[:type]\n @user.save\n render json:@user\n end",
"def destroy\n @user = V1::User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to(v1_users_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /v1_users/1 DELETE /v1_users/1.xml | def destroy
@user = V1::User.find(params[:id])
@user.destroy
respond_to do |format|
format.html { redirect_to(v1_users_url) }
format.xml { head :ok }
end
end | [
"def delete\n @user = User.find(params[:id])\n @user.rvsps.delete_all()\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to(users_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @users = User.find(params[:id])\n @users.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_users_url) }\n format.xml { head :ok }\n end\n end",
"def delete_users\n delete(users_path)\n end",
"def destroy\n @user1 = User1.find(params[:id])\n @user1.destroy\n\n respond_to do |format|\n format.html { redirect_to(user1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @api_user = ApiUser.find(params[:id])\n @api_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(api_users_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @devex_user = DevexUser.find(params[:id])\n @devex_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(devex_users_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n # @api_v1_user.destroy\n\n head :no_content\n end",
"def destroy_rest\n @dialogix_user = DialogixUser.find(params[:id])\n @dialogix_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(dialogix_users_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @client_user = ClientUser.find(params[:id])\n @client_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(client_users_url) }\n format.xml { head :ok }\n end\n end",
"def delete(user)\n Rails.logger.debug \"Call to user.delete\"\n reqUrl = \"/api/user/#{self.email}\" #Set the request url\n rest_response = MwHttpRequest.http_delete_request(reqUrl,user['email'],user['password']) #Make the DELETE request to the server with the required parameters\n Rails.logger.debug \"Response from server: #{rest_response.code} #{rest_response.message}: #{rest_response.body}\"\n if rest_response.code == \"200\" #Validate if the response from the server is 200, which means OK\n return true, rest_response #Return success\n else\n return false, \"#{rest_response.code}\", \"#{rest_response.message}\" #Return error\n end\n end",
"def delete_user_for_tenant(args = {}) \n delete(\"/tenants.json/#{args[:tenantId]}/users/#{args[:userId]}\", args)\nend",
"def destroy\n @email_user = Email::User.find(params[:id])\n @email_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(email_users_url) }\n format.xml { head :ok }\n end\n end",
"def delete\n @user.destroy\n respond_to do |format|\n format.html { redirect_to v1_resources_users_all_path, notice: 'User was deleted.' }\n format.json { head :no_content }\n end\n end",
"def deleteUser\n end",
"def destroy\n @ecnuser = Ecnuser.find(params[:id])\n @ecnuser.destroy\n\n respond_to do |format|\n format.html { redirect_to(ecnusers_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @external_user = ExternalUser.find(params[:id])\n @external_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(external_users_url) }\n format.xml { head :ok }\n end\n end",
"def delete_user\n begin\n @user = User.find(params[:id])\n if(@user)\n #@sections = get_sections_for_user(@user.id);\n if @user.destroy\n #if(@sections)\n #@sections.each do |section|\n #section.destroy\n #end\n #end\n render_xml_output(\"Success\")\n else\n render_xml_output(\"Failure\")\n end\n else\n render_xml_output(\"user doesn\\'t exist\")\n end\n rescue ActiveRecord::RecordNotFound\n render_xml_output(\"No record found\")\n end\nend",
"def delete_users\n get_auth_token do\n delete(\"/api/users?access_token=#{@token}\")\n end\n end",
"def destroy\n @cet_user = CetUser.find(params[:id])\n @cet_user.destroy\n\n respond_to do |format|\n format.html { redirect_to(cet_users_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a hash of cards: SheetName => CardName => Card. Modifies defaultFile
cards = {}
keys = []
(1..ws.getNumRows()).each do |row|
if (row == 1)
cards[ws.getTitle()] = {}
end
card = {}
(1..ws.getNumCols()).each do |col|
cell = ws.getCell(row, col)
if cell.empty? then next end
# first row should be keys
if (row == 1)
keys << cell.gsub(' ', '_').downcase
else
card[keys[col-1]] = cell
end
end
unless (card.empty?)
cards[ws.getTitle()][card['name']] = card
end
if (defaultFile and card['count'])
defaultFile['cards'] << card.select do |field|
['name', 'count'].include?(field)
end
end
end
return cards
end | [
"def sheets_info\n if @info[:default_sheet]\n @info[:default_sheet] = verify_sheet_name(@info[:default_sheet])\n @info[:sheets].merge!(sheet_info(@info[:default_sheet]))\n else\n @info[:sheets_name].each { |name| @info[:sheets].merge!(sheet_info(name)) }\n @workbook.default_sheet = @info[:sheet_current]\n end\n end",
"def file_name_hash\n Digest::MD5.hexdigest(file_name)\n end",
"def dynamic_card_map\n map = {}\n Dir.glob(\"data/hsreplay-cards/*.html\").each do |file|\n card_data = HsreplayCardHtmlParser.new(open(file, 'r')).card_data\n map[card_data[\"card_id\"]] = card_data[\"dbf_id\"]\n end\n map\n end",
"def card_path_map\n return @@card_path_map if defined? @@card_path_map\n @@card_path_map ||= Hash[card_json_data.map {|card|\n [card_name_to_path(card['name']), card_data(card)]\n }]\n end",
"def hash_filename\n self.original_name = self.file_file_name.to_s\n self.file.instance_write(:file_name, \"#{Digest::MD5.hexdigest(self.file_file_name)}#{File.extname(self.file_file_name)}\")\n false\n end",
"def card_hash\n card_hash_indv_keys = [:question, :answer, :category]\n card_hash_array = self.string_split_card.map do |card_array|\n card_hash_indv_keys.zip(card_array).to_h\n end\n card_hash_array\n end",
"def make_card(card)\n image = card.image.file_name\n name = card.name\n return image, name\n end",
"def file_hash(file_name)\n file = file_name.split('/').last.split('.amps').first\n fhash = Hash.new\n file.split(':').each{|x| eqpos = x.index('=') \n fhash[x.slice(0...eqpos)] = x.slice(eqpos+1..x.length-1)\n }\n fhash\nend",
"def content_file_checksums\n @manifest_files.map { |f| identity_hash(f) }\n end",
"def hash_file\n\t\t\treturn @hash_file ||= Pathname.new(@@config.hash_dir).join(name).to_s\n\t\tend",
"def update_default_card\n if @hash['default']\n FakeBraintree.registry.customers[@hash['customer_id']]['credit_cards'].each do |card|\n card['default'] = false\n end\n @hash['default'] = true\n end\n end",
"def hash\n Digest::MD5.hexdigest(abs_filepath)[0..5]\n end",
"def excel_to_recoded_hash(filename)\n require 'spreadsheet'\n h={}\n book = Spreadsheet.open filename\n sheet= book.worksheet 0\n row_i=0\n sheet.each do |row|\n row_i += 1\n next if row_i == 1 or row[0].nil? or row[1].nil? or row[2].nil?\n key = row[0].to_sym\n h[key] ||= {}\n h[key][row[1]] = row[2]\n end\n h\n end",
"def replace_with_defaults\n require 'suggested_decks'\n\n @deck.cards = []\n\n card_list = SuggestedDecks::CARD_LISTS[@deck.character.name]\n\n card_list.keys.each do |type|\n card_list[type].each do |card_name|\n card = Card.joins(:card_type).where(name: card_name).where([\"card_types.name = ?\", type.capitalize]).first\n @deck.cards << card\n end\n end\n\n redirect_to deck_path(@deck), notice: \"Cards replaced by defaults\"\n end",
"def file_key\n \"#{work.friendlier_id}/#{deriv_type}_#{inputs_checksum}.#{deriv_type_definition[:suffix]}\"\n end",
"def duplicated_cards\n Cache.hash_get_all(\"#{@batch_id}_duplicated_cards\").presence || {}\n end",
"def get_hash(file)\n Digest::MD5.digest file\n end",
"def file_hash(file_path)\r\n\r\n @digest.reset\r\n @base_str = nil\r\n @digest.file(file_path).to_s\r\n end",
"def filename_hash2( file)\n expanded_assetsdir = File.expand_path(@output_type_obj.assetsdir)\n if File.expand_path(File.dirname( file )) != expanded_assetsdir \n digest_over_content( file )\n else\n digest_over_file_basename( file )\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Export the entire class definition as an OWL ontology. ==== Example Person.to_owl returns string representation of the OWL ontology in Turtle. Person.to_owl(:json) returns string representation of the OWL ontology in RDFJSON. | def to_owl(*args)
g = Graph.new
owl = Namespace.new('http://www.w3.org/2002/07/owl', 'owl', true)
foaf = Namespace.new("http://xmlns.com/foaf/0.1/", "foaf")
rdf = Namespace.new("http://www.w3.org/1999/02/22-rdf-syntax-ns", "rdf", true)
rdfs = Namespace.new("http://www.w3.org/2000/01/rdf-schema", 'rdfs', true)
xsd = Namespace.new('http://www.w3.org/2001/XMLSchema', 'xsd', true)
for property in properties
g << Triple.new(BNode.new('john'), foaf.knows, BNode.new('jane'))
end
return g
end | [
"def ontology\n load_ontology if @ontology.nil?\n @ontology\n end",
"def owl_equivalent_class\n end",
"def ontology_objects\n LinkedData::SampleData::Ontology.ontology_objects\n end",
"def rdfs_from_owl\n # Get all OWL classes\n qry = Query.new.distinct.select(:s)\n qry.where(:s, make_res(N::RDF::type), make_res(N::OWL + 'Class'))\n klasses = qry.execute\n modified = 0\n \n klasses.each do |klass|\n already_exists = false\n assit_kind_of(RDFS::Resource, klass)\n unless(klass.rdf::type.is_a?(RDFS::Resource) || klass.rdf::type.size < 2)\n klass.rdf::type.each do |type|\n already_exists = true if(type == make_res(N::RDFS + \"Class\"))\n end\n end\n updated = 0\n unless(already_exists)\n klass.rdf::type = make_res(N::RDFS + \"Class\")\n klass.save\n updated = 1\n end\n \n modified += updated\n yield(klasses.size, updated) if(block_given?)\n end\n \n return [klasses.size, modified]\n end",
"def create_ontology(ont_info)\n uri = URI.parse(TARGET_API)\n http = Net::HTTP.new(uri.host, uri.port)\n\n req = Net::HTTP::Put.new(\"/ontologies/#{ont_info['acronym']}\")\n req['Content-Type'] = 'application/json'\n req['Authorization'] = \"apikey token=#{TARGET_APIKEY}\"\n\n if ont_info['viewingRestriction'] == 'private'\n # In case of private ontology (acl: list of user that have the right to see the ontology)\n req.body = { 'acronym': ont_info['acronym'], 'name': ont_info['name'],\n 'group': ont_info['group'], 'hasDomain': ont_info['hasDomain'],\n 'administeredBy': [TARGETED_PORTAL_USER],\n 'viewingRestriction': 'private',\n 'acl': [TARGETED_PORTAL_USER] }.to_json\n else\n req.body = { 'acronym': ont_info['acronym'], 'name': ont_info['name'],\n 'group': ont_info['group'], 'hasDomain': ont_info['hasDomain'],\n 'administeredBy': [TARGETED_PORTAL_USER] }.to_json\n end\n\n response = http.start do |http|\n http.request(req)\n end\n\n return response\nend",
"def create_ontology(ont_info)\n new_ontology = LinkedData::Models::Ontology.new\n\n new_ontology.acronym = ont_info['acronym']\n new_ontology.name = ont_info['name']\n new_ontology.administeredBy = [USER]\n if ont_info['viewingRestriction'] == 'private'\n # In case of private ontology (acl: list of user that have the right to see the ontology)\n new_ontology.viewingRestriction = 'private'\n new_ontology.acl = [USER]\n end\n new_ontology\nend",
"def ontology; ontologies.first; end",
"def map_from_express( mapinput )\nputs ' '\nputs 'EXPRESS to OWL Structural Mapping V0.5'\nputs ' '\n\n######### Mapping Configuration Starts Here ##############\n\n# Set the base of the URI (i.e. the namespace) for OWL constructs created during the mapping\nuri_base = 'http://www.reeper.org'\n\n# Add RDFS andor Dublin Core basic annotations\n\n# Set to true if any annotation_list elements use Dublin Core (dc: or dcterms:)\ninclude_dublin_core = false\n\nannotation_list = Array.new\n# rdfs:comment must be position 0 as definition assignment hardcoded there\n#annotation_list[0] = ['rdfs:comment', nil]\n#annotation_list[1] = ['owl:versionInfo', '1']\n#annotation_list[2] = ['dc:creator', 'David Price, TopQuadrant Limited']\n#annotation_list[3] = ['dcterms:created','2010-10-22']\n#annotation_list[4] = ['dc:source', '{ iso standard 10303 part(214) version(2) object(1) automotive-design-schema(1) }']\n\n# Read definitions from csv file if found\ndefinition_hash = Hash.new\nif FileTest.exist?('definitions.csv')\n\trequire 'csv'\n\tputs '-- Definitions CSV File Found'\n\tCSV.open('definitions.csv', 'r') do |row|\n\t\tdefinition_hash[row[0].downcase] = row[1]\n\tend\nend\n\n# set to class name (e.g. 'TOP-THING') to make all created OWL Classes subclasses of a topmost class\ntop_class = 'AP233-ARM-THING'\n\n# add common string attributes to use OWL Thing as domain rather than schema classes\nthing_attributes = []\n#thing_attributes.push 'id'\n#thing_attributes.push 'name'\n#thing_attributes.push 'description'\n\n# datatypes for simple and aggregates of simple type\ndatatype_hash = Hash.new\ndatatype_hash[\"INTEGER\"] = 'http://www.w3.org/2001/XMLSchema#integer'\ndatatype_hash[\"REAL\"] = 'http://www.w3.org/2001/XMLSchema#float'\ndatatype_hash[\"NUMBER\"] = 'http://www.w3.org/2001/XMLSchema#float'\ndatatype_hash[\"BINARY\"] = 'http://www.w3.org/2001/XMLSchema#hexBinary'\ndatatype_hash[\"BOOLEAN\"] = 'http://www.w3.org/2001/XMLSchema#boolean'\ndatatype_hash[\"LOGICAL\"] = 'http://www.w3.org/2001/XMLSchema#boolean'\ndatatype_hash[\"STRING\"] = 'http://www.w3.org/2001/XMLSchema#string'\n\n# Write the property_type_hash for inclusion in other scripts or not\npost_processed_schema = false\npost_processed_schema_file_name = 'postprocessed_schema.rb'\npost_processed_schema_file = File.new(post_processed_schema_file_name,'w') if post_processed_schema\n\n######### Mapping Configuration Ends Here ##############\n\n\n# Template covering the start of the output file \noverall_start_template = %{<rdf:RDF \nxmlns:owl=\"http://www.w3.org/2002/07/owl#\" \nxmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\nxmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\" \nxmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n<% if include_dublin_core %>\t \nxmlns:dc=\"http://purl.org/dc/elements/1.1/\"\nxmlns:dcterms=\"http://purl.org/dc/terms/\"\n<% end %>\t \n}\n\n# Template covering the output file contents for each schema start\nschema_start_template = %{xmlns=\"<%= uri_base %>/<%= schema.name %>#\"\nxml:base=\"<%= uri_base %>/<%= schema.name %>#\" > \n\n<owl:Ontology rdf:about='' rdfs:label='<%= schema.name %>' >\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n<% if include_dublin_core %>\t\n<owl:imports rdf:resource=\"http://purl.org/dc/terms/\"/>\n<owl:imports rdf:resource=\"http://purl.org/dc/elements/1.1/\"/>\n<% end %>\t \n</owl:Ontology>\n<% if top_class != nil 
%>\t\n<owl:Class rdf:ID='<%= top_class %>' />\n<% end %>\n}\n\n# Template covering the output file contents for each entity type start or start of type = type that is a select\nentity_start_template = %{\n<owl:Class rdf:ID='<%= class_name %>' >\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n}\n# Template covering the output file contents for each entity type end or end of type = type that is a select\nentity_end_template = %{\n<% if top_class != nil %>\t\n<rdfs:subClassOf rdf:resource='#<%= top_class %>' />\n<% end %>\n</owl:Class> \n}\n\n# Template covering the output file contents for end of type = type that is a select\nclass_end_template = %{</owl:Class>}\n\n# Template covering the output file contents for each select type start\nselect_start_template = %{\n<owl:Class rdf:ID='<%= select.name %>' >\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n}\n# Template covering the output file contents for each select type end\nselect_end_template = %{<% if top_class != nil %>\t\n<rdfs:subClassOf rdf:resource='#<%= top_class %>' />\n<% end %>\n</owl:Class> \n}\n\n# Template covering the supertype(s) for each entity type and type = type that is a select\nsupertype_template = %{<rdfs:subClassOf rdf:resource='#<%= superclass_name %>' />\n}\n\n# Template covering OWL collections of class contents\nclass_collection_template = %{<owl:Class><owl:unionOf rdf:parseType=\"Collection\">\n<% item_name_list.each do |name| %>\n <rdf:Description rdf:about='#<%= name %>'/>\n <% end %>\t \n</owl:unionOf> \n</owl:Class>}\n\n# Template covering OWL collections of RDFS datatypes\ndatatype_collection_template = %{<rdfs:Datatype rdf:ID='<%= type_name %>'>\n<owl:equivalentClass><rdfs:Datatype><owl:unionOf rdf:parseType=\"Collection\">\n<% type_name_list.each do |name| %>\n <rdf:Description rdf:about='#<%= name %>' />\n <% end %>\n</owl:unionOf> \n</rdfs:Datatype></owl:equivalentClass>\n</rdfs:Datatype>}\n\n# Template covering abstract entity types\nabstract_entity_template = %{<rdfs:subClassOf rdf:resource='#<%= supertype.name %>' />\n}\n\n# Template covering the output file contents for each defined type of builtin datatype\ntype_builtin_template = %{\n<rdfs:Datatype rdf:ID='<%= datatype_name %>'>\n<rdfs:subClassOf rdf:resource='<%= superdatatype_name %>'/>\n</rdfs:Datatype>\n}\n\n# Template covering the mappings to rdf list\nrdflist_template = %{<rdfs:Class rdf:ID='<%= list_name %>'>\n<rdfs:subClassOf rdf:resource='<%= superlist_name %>'/>\n</rdfs:Class>}\n\n# Template covering the output file contents for each attribute that is an aggregate\nattribute_aggregate_template = %{}\n\n# Template covering the output file contents for each attribute that is an aggregate of select of entity\nattribute_aggregate_entity_select_template = %{}\n\n# Template covering the output file contents for each attribute that is a select of entity\nattribute_entity_select_template = %{}\n\n# Template covering the output file contents for each attribute\nattribute_template = %{}\n\n# Template covering the output file contents for each attribute that is builtin datatype\nattribute_builtin_template = %{<owl:DatatypeProperty rdf:ID='<%= owl_property_name %>'>\n<% if owl_property_domain != nil %>\t\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n <% end 
%>\n<rdfs:range rdf:resource='<%= owl_property_range %>' />\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n<% if owl_super_property_name != nil %>\t\n<rdfs:subPropertyOf rdf:resource='#<%= owl_super_property_name %>' />\n <% end %>\n</owl:DatatypeProperty>\n}\n\n# Template covering the output file contents for each attribute that is type that is builtin datatype\nattribute_typebuiltin_template = %{<owl:DatatypeProperty rdf:ID='<%= owl_property_name %>'>\n<% if owl_property_domain != nil %>\t\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n <% end %>\n<rdfs:range rdf:resource='#<%= owl_property_range %>' />\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n</owl:DatatypeProperty>\n}\n\n# Template covering the output file contents for each attribute that is entity or select type\nattribute_entity_template = %{<owl:ObjectProperty rdf:ID='<%= owl_property_name %>'>\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n<rdfs:range rdf:resource='#<%= owl_property_range %>' />\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n</owl:ObjectProperty>\n}\n\n# Template covering the output file contents for each attribute that is entity or select type\ninverse_entity_template = %{<owl:ObjectProperty rdf:ID='<%= owl_property_name %>'>\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n<rdfs:range rdf:resource='#<%= owl_property_range %>' />\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\n<owl:inverseOf rdf:resource=\"#<%= owl_inverted_property_name %>\"/>\t \n</owl:ObjectProperty>\n}\n\n# Template covering the output file contents for each attribute that is a redeclaration and reference to an entity or select type\nattribute_redeclare_entity_template = %{<owl:ObjectProperty rdf:ID='<%= owl_property_name %>'>\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n<rdfs:range rdf:resource='#<%= owl_property_range %>' />\n<rdfs:subPropertyOf rdf:resource='#<%= owl_super_property_name %>' />\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \n</owl:ObjectProperty>\n}\n\n\n\n# Template covering the output file contents for each attribute that is enum datatype\nclass_enum_template = %{<owl:Class rdf:ID='<%= owl_class_name %>'>\n<rdfs:subClassOf rdf:resource=\"http://www.w3.org/2002/07/owl#Thing\"/>\n<owl:oneOf rdf:parseType=\"Collection\">\n<% enumitem_name_list.each do |name| %>\n<rdf:Description rdf:ID=\"<%= owl_class_name + '.' 
+ name %>\"/>\n <% end %>\t\n</owl:oneOf>\n</owl:Class>}\n\n\n# Template covering the output file contents for each attribute that is enum datatype\nattribute_enum_template = %{<owl:DatatypeProperty rdf:ID='<%= owl_property_name %>'>\n<rdfs:domain rdf:resource='#<%= owl_property_domain %>' />\n <rdfs:range><owl:DataRange><owl:oneOf><rdf:List>\n<% enumitem_name_list.each do |name| %>\n <rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= name %></rdf:first>\n <% if enumitem_name_list[enumitem_name_list.size-1] != name %>\t\n <rdf:rest><rdf:List>\n <% end %>\t\n <% if enumitem_name_list[enumitem_name_list.size-1] == name %>\t\n <rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/> </rdf:List>\n <% end %>\t\n <% end %>\t \n <% (1..enumitem_name_list.size-1).each do %>\n </rdf:rest></rdf:List>\n <% end %>\t \n </owl:oneOf></owl:DataRange></rdfs:range>\n<% annotation_list.each do |i| %><% if i[1] != nil and i[1] != '' %><<%= i[0] %> rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"><%= i[1] %></<%= i[0] %>><% end %>\n<% end %>\t \t \n</owl:DatatypeProperty>}\n\n\n\n# Template covering min cardinality \nmin_cardinality_template = %{<rdf:Description rdf:about='#<%= class_name %>'>\n<rdfs:subClassOf>\n<owl:Restriction>\n<owl:onProperty rdf:resource='#<%= owl_property_name %>'/>\n <owl:minCardinality rdf:datatype=\"http://www.w3.org/2001/XMLSchema#nonNegativeInteger\"><%= min_cardinality %></owl:minCardinality>\n</owl:Restriction>\n</rdfs:subClassOf>\n</rdf:Description>}\n\n\n# Template covering max cardinality \nmax_cardinality_template = %{<rdf:Description rdf:about='#<%= class_name %>'>\n<rdfs:subClassOf>\n<owl:Restriction>\n <owl:onProperty rdf:resource='#<%= owl_property_name %>'/>\n<owl:maxCardinality rdf:datatype=\"http://www.w3.org/2001/XMLSchema#nonNegativeInteger\"><%= max_cardinality %></owl:maxCardinality>\n</owl:Restriction>\n</rdfs:subClassOf>\n</rdf:Description>}\n\n# Template covering cardinality \ncardinality_template = %{<rdf:Description rdf:about='#<%= class_name %>'>\n<rdfs:subClassOf>\n<owl:Restriction>\n <owl:onProperty rdf:resource='#<%= owl_property_name %>'/>\n<owl:cardinality rdf:datatype=\"http://www.w3.org/2001/XMLSchema#nonNegativeInteger\"><%= min_cardinality %></owl:cardinality>\n</owl:Restriction>\n</rdfs:subClassOf>\n</rdf:Description>}\n\n# Template covering the output file contents for each schema end\ndisjoint_template = %{\n<rdf:Description rdf:about='#<%= first_class_name %>'>\n<owl:disjointWith rdf:resource=\"#<%= disjoint_class_name %>\"/>\n</rdf:Description>}\n\n\n# Template covering the output file contents for each schema end\nschema_end_template = %{}\n\n# Template covering the end of the output file \noverall_end_template = %{ </rdf:RDF>}\n\n\ndef post_process_entity(entity, post_processed_schema_file, datatype_hash)\n\tattribute_list = entity.attributes_all_array.find_all{ |a| a.kind_of? EXPSM::Explicit}\n\n\tfor attribute in attribute_list\n\n\t\texpress_attribute_domain = nil\n\t\tif !attribute.isBuiltin\n\t\t\t\texpress_attribute_domain = NamedType.find_by_name( attribute.domain )\n\t\tend\n\t\tp28_property_name = entity.name.capitalize + '.' 
+ attribute.name.capitalize\n\t\tproperty_quoted = false\n\t\tproperty_list = false\n\t\tproperty_type = '\"' + attribute.domain + '\"'\n\t\tif attribute.isBuiltin\n\t\t\tproperty_type = '\"' + datatype_hash[attribute.domain] + '\"'\n\t\tend\n\t\tif attribute.isBuiltin and attribute.domain == 'STRING'\n\t\t\tproperty_quoted = true\n\t\tend\n\t\tif (!attribute.redeclare_entity and express_attribute_domain.instance_of? EXPSM::Type and express_attribute_domain.isBuiltin and express_attribute_domain == 'STRING')\n\t\t\tproperty_quoted = true\n\t\t\tproperty_type = '\"' + datatype_hash[express_attribute_domain.domain] + '\"'\n\t\tend\n\t\tif ( attribute.isBuiltin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'LIST')\n\t\t\tproperty_list = true\n\t\t\tproperty_type = '\"' + datatype_hash[attribute.domain] + '\"'\n\t\tend\n\n\t\tif (express_attribute_domain.instance_of? EXPSM::Type and express_attribute_domain.isBuiltin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'LIST')\n\t\t\tproperty_list = true\n\t\t\tproperty_type = '\"' + datatype_hash[express_attribute_domain.domain] + '\"'\n\t\tend\n\n\t\tproperty_type = property_type.gsub('http://www.w3.org/2001/XMLSchema#','xsd:')\n\t\tpost_processed_schema_file.puts 'property_range_hash[\"' + p28_property_name + '\"] = [' + property_quoted.to_s + ',' + property_list.to_s + ',' + property_type.to_s + ']' \n\tend\nend\n\n\n# A recursive function to return complete set of items, following nested selects, for a select that are not selects themselves\ndef get_all_selections( item_list )\n\tselect_items = item_list.find_all{ |a| a.kind_of? EXPSM::TypeSelect}\n\tif select_items.size == 0\n\t\treturn item_list\n\tend\n\ttemp_item_list = item_list\n\tfor select in select_items\n\t\ttemp_item_list.delete( select )\n\t\ttemp_item_list = temp_item_list + select.selectitems_array\n\tend\n\tfinal_list = get_all_selections( temp_item_list )\nend\n\n# A recursive function to return the ultimate underlying type of a type\ndef is_utlimate_type_builtin( the_type )\n\treturn true if the_type.isBuiltin\n\tbase_type = NamedType.find_by_name( the_type.domain )\n\tcase\n\t\twhen (base_type.instance_of? EXPSM::TypeSelect or base_type.instance_of? EXPSM::Entity)\n\t\t\treturn false\n\t\twhen (base_type.kind_of? EXPSM::Type and base_type.isBuiltin)\n\t\t\treturn true\n\t\telse\n\t\t\treturn is_utlimate_type_builtin( base_type )\n\tend\nend\n\n# Set up list of schemas to process, input may be a repository containing schemas or a single schema\nif mapinput.kind_of? EXPSM::Repository\n\tschema_list = mapinput.schemas\nelsif mapinput.kind_of? 
EXPSM::SchemaDefinition\n\tschema_list = [mapinput]\nelse\n\tputs \"ERROR : map_from_express input no Repository instance or Schema instance\"\n\texit\nend\n\n\nfor schema in schema_list\n\n\ttype_mapped_list = []\n\tentity_mapped_list = []\n\texplicit_mapped_list = []\n\tinverse_mapped_list = []\t\n\tthing_attr_mapped_list = []\n\tsuperexpression_mapped_list = []\n\tlist_mapped_attr_list = []\n\tlist_mapped_type_list = []\n\ttype_union_mapped_list = []\n\tmixed_select_list = []\n\tall_explicit_list = []\n\tall_derived_list = []\n\tall_inverse_list = []\n\tall_superexpression_list = []\n\n# Set up separate file for each schema \n\tfilename = schema.name.to_s + \".owl\"\n\tfile = File.new(filename, \"w\")\n\n# Evaluate and write file start template \n res = ERB.new(overall_start_template)\n t = res.result(binding)\n\tfile.puts t\n\n# Evaluate and write schema start template \n\tres = ERB.new(schema_start_template)\n\tt = res.result(binding)\n\tfile.puts t\n\n\tselect_list = schema.contents.find_all{ |e| e.kind_of? EXPSM::TypeSelect }\n\tentity_list = schema.contents.find_all{ |e| e.kind_of? EXPSM::Entity }\n\tdefined_type_list = schema.contents.find_all{ |e| e.kind_of? EXPSM::Type }\n\tenum_type_list = schema.contents.find_all{ |e| e.kind_of? EXPSM::TypeEnum }\n\tdefined_type_not_builtin_list = defined_type_list.find_all{ |e| !e.isBuiltin }\n\tdefined_type_builtin_list = defined_type_list.find_all{ |e| e.isBuiltin } \n\n# Handle enumeration type maps to RDFS Datatype string\n\n\tfor type_enum in enum_type_list\n\t\ttype_mapped_list.push type_enum\n\t\towl_class_name = type_enum.name\n\t\tenumitem_name_list = type_enum.items.scan(/\\w+/)\n\t\tres = ERB.new(class_enum_template)\n\t\tt = res.result(binding)\n\t\tfile.puts t\n\tend\n\n\n# Handle defined type maps to RDFS Datatype\n\n\tfor type_builtin in defined_type_builtin_list\n\n# Handle defined type maps to RDFS Datatype\n\t\tif !type_builtin.instance_of? EXPSM::TypeAggregate\n\t\t\ttype_mapped_list.push type_builtin\n\t\t\tdatatype_name = type_builtin.name\n\t\t\tsuperdatatype_name = datatype_hash[type_builtin.domain]\n\t\t\tres = ERB.new(type_builtin_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n# Handle defined type maps to RDF List\n\t\telse\n\t\t\tlist_mapped_type_list.push type_builtin\n\t\t\ttype_mapped_list.push type_builtin\n\t\t\tlist_name = type_builtin.name\n\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\tres = ERB.new(rdflist_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\t\tend\n\tend\n\t\n\n\tfor type_not_builtin in defined_type_not_builtin_list\n\n\t\ttype_domain = NamedType.find_by_name( type_not_builtin.domain )\n\t\ttype_is_builtin = is_utlimate_type_builtin( type_not_builtin )\n\n\t\tcase\n\n# Handle simple defined type refining simple defined type of builtin map to RDFS Datatype\n\t\t\twhen (!type_not_builtin.instance_of? EXPSM::TypeAggregate and type_is_builtin and !type_domain.instance_of? EXPSM::TypeAggregate)\n\t\t\ttype_mapped_list.push type_not_builtin\n\t\t\tdatatype_name = type_not_builtin.name\n\t\t\tsuperdatatype_name = '#' + type_domain.name\n\t\t\tres = ERB.new(type_builtin_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\n# Handle simple defined type of aggr defined type of builtin map to RDF List\n\t\t\twhen (!type_not_builtin.instance_of? EXPSM::TypeAggregate and type_domain.instance_of? 
EXPSM::TypeAggregate and type_is_builtin)\n\t\t\ttype_mapped_list.push type_not_builtin\n\t\t\tlist_mapped_type_list.push type_not_builtin\n\t\t\tlist_name = type_not_builtin.name\n\t\t\tsuperlist_name = '#' + type_domain.name\n\t\t\tres = ERB.new(rdflist_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\n# Handle aggr defined type of simple defined type of builtin map to RDF List\n\t\t\twhen (type_not_builtin.instance_of? EXPSM::TypeAggregate and type_domain.instance_of? EXPSM::Type and type_is_builtin)\n\t\t\ttype_mapped_list.push type_not_builtin\n\t\t\tlist_mapped_type_list.push type_not_builtin\n\t\t\tlist_name = type_not_builtin.name\n\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\tres = ERB.new(rdflist_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\n# Handle aggr of entity/select type map to RDF List\n\t\t\twhen (type_not_builtin.instance_of? EXPSM::TypeAggregate and (type_domain.instance_of? EXPSM::Entity or type_domain.instance_of? EXPSM::TypeSelect))\n\t\t\ttype_mapped_list.push type_not_builtin\n\t\t\tlist_mapped_type_list.push type_not_builtin\n\t\t\tlist_name = type_not_builtin.name\n\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\tres = ERB.new(rdflist_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\t\t\t\n\n# Handle defined type that are type = type that is a select \n\t\t\twhen (type_domain.kind_of? EXPSM::TypeSelect)\n\t \t\t\ttype_mapped_list.push type_not_builtin\n\t\t\t\tsuperclass_name = type_not_builtin.domain\n\t\t\t\tclass_name = type_not_builtin.name\n\t\t\t\tres = ERB.new(entity_start_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n\t\t\t\tsuperclass_name = type_not_builtin.domain\n\t\t\t\tres = ERB.new(supertype_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t \n\t\t\t\tres = ERB.new(class_end_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n\t\t\telse\n\t\t\tputs 'ERROR : Not mapped = ' + type_not_builtin.name\n\t\tend\n\tend\n\n\n# Handle EXPRESS SELECT Type mapping to OWL Class hierarchy and RDFS Datatypes\n\n\tfor select in select_list\n\t\n\t\titem_name_list = select.selectitems.scan(/\\w+/)\n\n\t\tthis_select_all_items = get_all_selections( select.selectitems_array )\n\t\tthis_select_type_items = this_select_all_items.find_all{ |a| a.kind_of? 
EXPSM::Type}\n\n\t\tcase\t\t\n\n# Handle case of select items resolving to containing only items that are non-select Type\n\t\twhen this_select_type_items.size == this_select_all_items.size\n\t\t\ttype_mapped_list.push select\n\t\t\ttype_union_mapped_list.push select\n\t\t\ttype_name_list = item_name_list\n\t\t\ttype_name = select.name\n\t\t\tres = ERB.new(datatype_collection_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\n# Handle case of select items resolving to containing no item that is a non-select Type\n\t\telse\n\t\t\ttype_mapped_list.push select\n\t\t\t\n\t\t\tres = ERB.new(select_start_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\t file.puts '<owl:equivalentClass>'\t\t\n\n\t\t\tres = ERB.new(class_collection_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\t file.puts '</owl:equivalentClass>'\n\n\t\t\tres = ERB.new(select_end_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\t\t\tif (this_select_type_items.size != 0 and this_select_type_items.size != this_select_all_items.size)\n\t\t\t\tmixed_select_list.push select\n\t\t\tend\n\n\t\tend\n\tend\n\n# Handle EXPRESS Entity mappings to OWL Class hierarchy \n\n\tfor entity in entity_list\n\n\t\tif definition_hash[entity.name.downcase] != nil\n\t\t\tannotation_list[0] = ['rdfs:comment', definition_hash[entity.name.downcase]]\n\t\telse\n\t\t\tannotation_list[0] = ['rdfs:comment', nil]\n\t\tend\n\n\t\tentity_mapped_list.push entity\n\n\t\tclass_name = entity.name\n\t\tres = ERB.new(entity_start_template)\n\t\tt = res.result(binding)\n\t\tfile.puts t\n\n\t\tfor supertype in entity.supertypes_array\n\t\t\tsuperclass_name = supertype.name\n\t\t\tres = ERB.new(supertype_template)\n\t\t\tt = res.result(binding)\n\t\t\tfile.puts t\n\t\tend\n\n\t\tres = ERB.new(entity_end_template)\n\t\tt = res.result(binding)\n\t\tfile.puts t\n\n# Post-process the entity\n\t\tpost_process_entity(entity, post_processed_schema_file, datatype_hash) if post_processed_schema\n\n\t\t\n\t\tif entity.superexpression != nil\n\t\tall_superexpression_list.push entity \n\n# Handle simple case of one ONEOF in supertype expression mapped to disjoint between listed subclasses\n\t\t\tcase\n#\t\t\twhen (entity.superexpression.include?('ONEOF') and !entity.superexpression.include?('ANDOR') and !entity.superexpression.include?('TOTAL_OVER') and !entity.superexpression.include?('AND') and !entity.superexpression.include?('ABSTRACT'))\n\n\t\t\twhen (entity.superexpression.include?('ONEOF'))\n\t\t\t\tsuperexpression_mapped_list.push entity\t\n\t\t\t\ttempexpression = entity.superexpression\n\t\t\t\tif entity.superexpression.index('ONEOF') != 0\n\t\t\t\t\tputs 'WARNING: ' + entity.name + ' supertype mapping may be incomplete, only ONEOFs processed'\n\t\t\t\tend\n\t\t\t\twhile (tempexpression.include?('ONEOF'))\n\t\t\t\t\tposoneof = tempexpression.index('ONEOF')\n\t\t\t\t\ttempexpression = tempexpression[posoneof + 5,tempexpression.length - 5]\n\t\t\t\t\tposclose = tempexpression.index(')')\n\t\t\t\t\toneof_name_list = tempexpression[0,posclose].scan(/\\w+/)\n\t\t\t\t\twhile oneof_name_list.size != 0\n\t\t\t\t\t\tfirst_class_name = oneof_name_list[0]\n\t\t\t\t\t\toneof_name_list.delete(first_class_name)\n\t\t\t\t\t\tfor disjoint_class_name in oneof_name_list\n\t\t\t\t\t\t\tres = ERB.new(disjoint_template)\n\t\t\t\t\t\t\tt = res.result(binding)\n\t\t\t\t\t\t\tfile.puts t\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\telse\n\t\t\tend\n\t\t\n\t\tend\n\t\t\n\n\tend\n\n# Handle mapping common string attributes to OWL 
DatatypeProperties of OWL Thing \n\n\tfor thing_attribute in thing_attributes\n\t\towl_property_name = thing_attribute\n\t\towl_property_range = 'http://www.w3.org/2001/XMLSchema#string'\n\t\towl_property_domain = nil\n\t\towl_super_property_name = nil\n\t\tres = ERB.new(attribute_builtin_template)\n\t\tt = res.result(binding)\n\t\tfile.puts t\n\tend\n\n# Handle EXPRESS attributes on an entity-by-entity basis\n\tfor entity in entity_list\n\t\t\n\t\tclass_name = entity.name\n\n\t\tattribute_list = entity.attributes.find_all{ |a| a.kind_of? EXPSM::Explicit and !thing_attributes.include?(a.name)}\n\t\tthing_attr_list = entity.attributes.find_all{ |a| a.kind_of? EXPSM::Explicit and thing_attributes.include?(a.name)}\n\t\tinverse_list = entity.attributes.find_all{ |a| a.kind_of? EXPSM::Inverse and !thing_attributes.include?(a.name)}\n\t\t\n\t\tthing_attr_mapped_list = thing_attr_mapped_list + thing_attr_list\n\t\t\n\t\tall_explicit_list = all_explicit_list + entity.attributes.find_all{ |a| a.kind_of? EXPSM::Explicit}\n\t\tall_derived_list = all_derived_list + entity.attributes.find_all{ |a| a.kind_of? EXPSM::Derived}\n\n\t\tall_inverse_list = all_inverse_list + inverse_list\n\n# Handle EXPRESS inverse attribute mapping to OWL inverse property\t\t\n\t\tfor attribute in inverse_list\n\t\t\tif attribute.reverseAttr != []\n\t\t\t\tinverse_mapped_list.push attribute\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_inverted_property_name = attribute.reverseEntity.name + '.' + attribute.reverseAttr.name\n\t\t\t\tres = ERB.new(inverse_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\tend\n\t\tend\n\n# Handle EXPRESS explicit attribute mapping to OWL property\n\t\tfor attribute in attribute_list\n\t\t\t\n\t\t\texpress_attribute_domain = nil\n\t\t\ttype_is_builtin = false\n\t\t\tif !attribute.isBuiltin\n\t\t\t\texpress_attribute_domain = NamedType.find_by_name( attribute.domain )\n\t\t\t\tif express_attribute_domain.instance_of? EXPSM::Type\n\t\t\t\t\ttype_is_builtin = is_utlimate_type_builtin( express_attribute_domain )\n\t\t\t\tend\n\t\t\tend\n\t\n\t\t\tcase\n\n# Handle EXPRESS explicit attributes of LIST of built-in simple type, including redeclaration\n\t\t\twhen (attribute.isBuiltin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'LIST')\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\tlist_mapped_attr_list.push attribute\n\t\t\t\tlist_name = entity.name + '.' + attribute.name + '-' + attribute.domain.to_s + '-List'\n\t\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\t\tres = ERB.new(rdflist_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_property_range = list_name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attributes of ARRAY of built-in simple type, including redeclaration\n\t\t\twhen (attribute.isBuiltin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'ARRAY')\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\tlist_mapped_attr_list.push attribute\n\t\t\t\tlist_name = entity.name + '.' 
+ attribute.name + '-' + attribute.domain.to_s + '-Array'\n\t\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\t\tres = ERB.new(rdflist_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_property_range = list_name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attributes of LIST of TYPE that is built-in simple type, including redeclaration\t\t\n\t\t\twhen (express_attribute_domain.instance_of? EXPSM::Type and type_is_builtin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'LIST')\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\tlist_mapped_attr_list.push attribute\n\t\t\t\tlist_name = entity.name + '.' + attribute.name + '-' + attribute.domain.to_s + '-List'\n\t\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\t\tres = ERB.new(rdflist_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_property_range = list_name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attributes of ARRAY of TYPE that is built-in simple type, including redeclaration\t\t\n\t\t\twhen (express_attribute_domain.instance_of? EXPSM::Type and type_is_builtin and attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype = 'ARRAY')\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\tlist_mapped_attr_list.push attribute\n\t\t\t\tlist_name = entity.name + '.' + attribute.name + '-' + attribute.domain.to_s + '-Array'\n\t\t\t\tsuperlist_name = \"http://www.w3.org/1999/02/22-rdf-syntax-ns#List\"\n\t\t\t\tres = ERB.new(rdflist_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_property_range = list_name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n\n# Handle EXPRESS explicit attributes of built-in simple type, including redeclaration\n\t\t\twhen (attribute.isBuiltin)\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_range = datatype_hash[attribute.domain]\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_super_property_name = nil\n\t\t\t\tif attribute.redeclare_entity != nil\n\t\t\t\t\tif attribute.redeclare_oldname == nil\n\t\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.name\n\t\t\t\t\telse\n\t\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.redeclare_oldname\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tres = ERB.new(attribute_builtin_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attribute, not redeclaration, refers to Type that is Type = builtin datatype\t\t\t\t\n\t\t\twhen (!attribute.redeclare_entity and express_attribute_domain.instance_of? EXPSM::Type and type_is_builtin)\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' 
+ attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\tres = ERB.new(attribute_typebuiltin_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attributes of enum type, ignoring redeclarations\n\t\t\twhen (express_attribute_domain.kind_of? EXPSM::TypeEnum)\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\n#\t\t\t\tif !type_mapped_list.include?(express_attribute_domain)\n#\t\t\t\t\ttype_mapped_list.push express_attribute_domain\n#\t\t\t\tend\n#\t\t\t\tenumitem_name_list = express_attribute_domain.items.scan(/\\w+/)\n#\t\t\t\tres = ERB.new(attribute_enum_template)\n\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\tif attribute.redeclare_entity\n\t\t\t\t\tputs 'WARNING: ' + entity.name + ' ' + attribute.name + ' Attribute redeclaration for Enumerations not mapped'\n\t\t\t\tend\n\n# Handle EXPRESS explicit attribute, not redeclaration, and only refer to EXPRESS Select or Entity\t\t\t\t\n\t\t\twhen (!attribute.redeclare_entity and (express_attribute_domain.kind_of? EXPSM::Entity or express_attribute_domain.kind_of? EXPSM::TypeSelect))\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attribute, redeclaration,\tand only refer to EXPRESS Select or Entity\t\n\t\t\twhen (attribute.redeclare_entity and (express_attribute_domain.kind_of? EXPSM::Entity or express_attribute_domain.kind_of? EXPSM::TypeSelect))\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\tif attribute.redeclare_oldname == nil\n\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.name\n\t\t\t\telse\n\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.redeclare_oldname\n\t\t\t\tend\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\tres = ERB.new(attribute_redeclare_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attribute of EXPRESS Type = A Type name that is a Select\t\t\t\t\t\n\t\t\twhen (express_attribute_domain.kind_of? EXPSM::Type and NamedType.find_by_name( express_attribute_domain.domain ).kind_of? EXPSM::TypeSelect)\n\t\t\t\texplicit_mapped_list.push attribute\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' + attribute.name\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\tres = ERB.new(attribute_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n# Handle EXPRESS explicit attribute, redeclaration,\tand only refer to EXPRESS Type = A Type name that is a Select\t\t\t\t\t\t\n\t\t\twhen (attribute.redeclare_entity and (express_attribute_domain.kind_of? EXPSM::Type and NamedType.find_by_name( express_attribute_domain.domain ).kind_of? EXPSM::TypeSelect))\n\t\n\t\t\t\texplicit_mapped_list.push attribute\t\t\n\t\t\t\towl_property_range = attribute.domain\n\t\t\t\towl_property_name = entity.name + '.' 
+ attribute.name\n\t\t\t\tif attribute.redeclare_oldname == nil\n\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.name\n\t\t\t\telse\n\t\t\t\t\towl_super_property_name = attribute.redeclare_entity + '.' + attribute.redeclare_oldname\n\t\t\t\tend\n\t\t\t\towl_property_domain = entity.name\n\t\t\t\tres = ERB.new(attribute_redeclare_entity_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\n\t\t\telse\n\t\t\t\tputs 'WARNING: ' + entity.name + ' ' + attribute.name + ' Attribute type not yet mapped'\n\t\t\tend\n\n\t\t\tmin_cardinality = 1\n\t\t\tmax_cardinality = 1\n\t\n\t\t\tif attribute.isOptional == true\n\t\t\t\tmin_cardinality = 0\n\t\t\tend\n\n\t\t\tif (attribute.instance_of? EXPSM::ExplicitAggregate and attribute.rank == 1 and attribute.dimensions[0].aggrtype != 'LIST' and attribute.dimensions[0].aggrtype != 'ARRAY')\n\t\t\t\tif attribute.isOptional == false\n\t\t\t\t\tmin_cardinality = attribute.dimensions[0].lower.to_i\n\t\t\t\tend\n\t\t\t\tif attribute.dimensions[0].upper == '?'\n\t\t\t\t\tmax_cardinality = -1\n\t\t\t\telse\n\t\t\t\t\tmax_cardinality = attribute.dimensions[0].upper.to_i\n\t\t\t\tend\n\t\t\t\tif attribute.rank > 1\n\t\t\t\t\tputs 'WARNING: ' + owl_property_name + ' n-dimensional aggregate attribute cardinalities not mapped ' \n\t\t\t\t\tmax_cardinality = -1\n\t\t\t\tend\n\t\t\tend\t\t\t\n\t\t\t\n\t\t\tif min_cardinality == max_cardinality\n\t\t\t\tres = ERB.new(cardinality_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\telse\n\t\t\t\tres = ERB.new(min_cardinality_template)\n\t\t\t\tt = res.result(binding)\n\t\t\t\tfile.puts t\n\t\t\t\tif max_cardinality != -1\n\t\t\t\t\tres = ERB.new(max_cardinality_template)\n\t\t\t\t\tt = res.result(binding)\n\t\t\t\t\tfile.puts t\n\t\t\t\tend\n\t\t\tend\t\t\t\n\n\n\t\t\tif (attribute.redeclare_entity and !(express_attribute_domain.kind_of? EXPSM::Entity or express_attribute_domain.kind_of? EXPSM::TypeSelect))\n\t\t\t\tif \t(attribute.redeclare_entity and !(express_attribute_domain.kind_of? EXPSM::Type and NamedType.find_by_name( express_attribute_domain.domain ).kind_of? EXPSM::TypeSelect))\n\t\t\t\t\tif (attribute.redeclare_entity and !attribute.isBuiltin)\n\t\t\t\t\t\tputs 'WARNING: ' + entity.name + ' ' + attribute.name + ' Attribute redeclaration may need hand editing'\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t\n\t\tend\n\n\tend\n\n\n\tres = ERB.new(schema_end_template)\n\tt = res.result(binding)\n\tfile.puts t\n\t\n\tputs ' '\n\tputs 'Schema Mapping Summary for : ' + schema.name\n\tall_type_list = schema.contents.find_all{ |e| e.kind_of? EXPSM::DefinedType }\n\t\n\t\n\tputs ' ENTITYs mapped = ' + entity_mapped_list.size.to_s + ' of ' + entity_list.size.to_s\n\tputs ' - ' + superexpression_mapped_list.size.to_s + ' of ' + all_superexpression_list.size.to_s + ' ENTITY SUPERTYPE expressions mapped (only simple ONEOF supported)'\n\tfor t in all_superexpression_list - superexpression_mapped_list\n\t\tputs ' Not mapped: ' + t.name\n\tend\n\tputs ' - ' + inverse_mapped_list.size.to_s + ' of ' + all_inverse_list.size.to_s + ' inverse attributes mapped (inverse of inherited not supported)'\t\n\tnotmapped_list = all_inverse_list - inverse_mapped_list\n\tfor t in notmapped_list\n\t\tputs ' Not mapped: ' + t.entity.name + '.' 
+ t.name\n\tend\t\n\tputs ' - ' + thing_attr_mapped_list.size.to_s + ' of ' + all_explicit_list.size.to_s + ' explicit attributes mapped with owl:Thing as domain (configurable)'\t\n\tputs ' - ' + explicit_mapped_list.size.to_s + ' of ' + all_explicit_list.size.to_s + ' explicit attributes mapped'\n\tfor t in list_mapped_attr_list\n\t\tputs ' rdf:List mapped: ' + t.name\n\tend\n\n\tnotmapped_list = all_explicit_list - explicit_mapped_list - thing_attr_mapped_list\n\tfor t in notmapped_list\n\t\tputs ' Not mapped: ' + t.entity.name + '.' + t.name\n\tend\n\tputs ' - ' + all_derived_list.size.to_s + ' derived attributes not mapped, not supported'\n\tputs ' TYPEs mapped = ' + type_mapped_list.size.to_s + ' of ' + all_type_list.size.to_s\n\tfor t in list_mapped_type_list\n\t\tputs ' rdf:List mapped: ' + t.name\n\tend\n\tfor t in mixed_select_list\n\t\tputs ' Select of entity & type Class mapped: ' + t.name\n\tend\n\tfor t in type_union_mapped_list\n\t\tputs ' Select RDFS Datatype owl:unionOf mapped: ' + t.name\n\tend\n\tnotmapped_list = all_type_list - type_mapped_list\n\tfor t in notmapped_list\n\t\tputs ' Not mapped: ' + t.name\n\tend\n\tputs ' '\n\tputs 'Wrote post-processed schema to file: ' + post_processed_schema_file_name if post_processed_schema\n\n\nend\n\nres = ERB.new(overall_end_template)\nt = res.result(binding)\nfile.puts t\n\npost_processed_schema_file.close if post_processed_schema\n\nend",
"def rdfs_from_owl\n # Remove previous auto rdfs triples\n ActiveRDF::FederationManager.clear(N::TALIA.auto_rdfs.context)\n \n # This gets all OWL classes in the store\n all_qry = ActiveRDF::Query.new(N::URI).distinct.select(:class)\n all_qry.where(:class, N::RDF::type, N::OWL.Class)\n all_owl = all_qry.execute\n \n # This gets all OWL classes that already have an RDF class attached\n qry_rdfs = ActiveRDF::Query.new(N::URI).distinct.select(:class)\n qry_rdfs.where(:class, N::RDF::type, N::OWL.Class)\n qry_rdfs.where(:class, N::RDF::type, N::RDFS.Class)\n classes_with_rdfs = qry_rdfs.execute\n \n \n modified = 0\n blanks = 0\n \n class_hash = {}\n \n # Put all the existing owl classes in a hash\n all_owl.each do |owl_class|\n if(owl_class.is_a?(RDFS::BNode))\n blanks = blanks + 1\n next\n end\n \n class_hash[owl_class] = :has_rdfs_class\n end\n \n # Now remove the ones that already have an RDF class\n classes_with_rdfs.each do |owl_class|\n next if(owl_class.is_a?(RDFS::BNode))\n \n class_hash[owl_class] = :no_rdfs_class\n end\n \n # Now go through all klasses and add the missing triples\n class_hash.each do |klass, status|\n if(status == :has_rdfs_class)\n modified = modified + 1\n ActiveRDF::FederationManager.add(N::URI.new(klass), N::RDF.type, N::RDFS.Class, N::TALIA.auto_rdfs_context)\n end\n yield(class_hash.size) if(block_given?)\n end\n \n return [class_hash.size, modified, blanks]\n end",
"def to_rdf\n self\n end",
"def index\n @ontologies = Ontology.all\n end",
"def create_ontologies_and_submissions(options = {})\n LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)\n end",
"def ontologies\n @ontologies ||= {}\n if @ontologies.empty?\n response = request :get_ontology_names\n response[:item].each do |ont|\n @ontologies[ont[:key]] = ont[:value]\n end\n end\n @ontologies\n end",
"def to_rdf\n retrieve_triples_from_database(accept_header=\"application/rdf+xml\")\n end",
"def create_ontologies_and_submissions(options = {})\n LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)\n end",
"def to_jsonld(rdf)\n rdf.dump(:jsonld, standard_prefixes: true)\n end",
"def index\n\t\t@ontologies = Ontology.all\n\tend",
"def goo_namespaces\n Goo.configure do |conf|\n conf.add_namespace(:omv, RDF::Vocabulary.new(\"http://omv.ontoware.org/2005/05/ontology#\"))\n conf.add_namespace(:skos, RDF::Vocabulary.new(\"http://www.w3.org/2004/02/skos/core#\"))\n conf.add_namespace(:owl, RDF::Vocabulary.new(\"http://www.w3.org/2002/07/owl#\"))\n conf.add_namespace(:rdfs, RDF::Vocabulary.new(\"http://www.w3.org/2000/01/rdf-schema#\"))\n conf.add_namespace(:metadata,\n RDF::Vocabulary.new(\"http://data.bioontology.org/metadata/\"),\n default = true)\n conf.add_namespace(:metadata_def,\n RDF::Vocabulary.new(\"http://data.bioontology.org/metadata/def/\"))\n conf.add_namespace(:dc, RDF::Vocabulary.new(\"http://purl.org/dc/elements/1.1/\"))\n conf.add_namespace(:xsd, RDF::Vocabulary.new(\"http://www.w3.org/2001/XMLSchema#\"))\n conf.add_namespace(:oboinowl_gen,\n RDF::Vocabulary.new(\"http://www.geneontology.org/formats/oboInOwl#\"))\n conf.add_namespace(:obo_purl, RDF::Vocabulary.new(\"http://purl.obolibrary.org/obo/\"))\n conf.add_namespace(:umls,\n RDF::Vocabulary.new(\"http://bioportal.bioontology.org/ontologies/umls/\"))\n conf.id_prefix = \"http://data.bioontology.org/\"\n conf.pluralize_models(true)\n end\n end",
"def as_rdf()\n result_graph = RDF::Graph.new class_variable_get(:@@rdf_graph)\n result_graph << [self_uri, RDF::type, RDFS['Class']]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create the basic OWL triples for the class, without the properties. ==== Example Person.triples_for_class_definition returns a graph of the triples representing the bare class in OWL. ==== Rules for creating the ontology The class itself will be represented by a bnode. ==== Returns | def triples_for_class_definition
declare_namespaces
g = Graph.new
b = BNode.new(self.name)
g << Triple.new(b, URIRef.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), URIRef.new('http://www.w3.org/2002/07/owl#Class'))
return g
end | [
"def create_triples(clsauthor, bibo)\n myuri = RDF::URI(@liiScholarID)\n myssrnuri = RDF::URI(LII_SSRN_AUTHOR_URI_PREFIX + @ssrnAuthorID)\n RDF::Writer.for(:ntriples).new($stdout) do |writer|\n writer << RDF::Graph.new do |graph|\n graph << [myuri, RDF.type, clsauthor.CLSAuthor]\n graph << [myssrnuri, RDF.type, clsauthor.SSRNAuthor]\n graph << [myuri, OWL.sameAs, myssrnuri] unless @ssrnAuthorID.empty?\n\n graph << [myuri, clsauthor.birthYear, @birthYear] unless @birthYear.empty?\n graph << [myuri, clsauthor.deathYear, @deathYear] unless @deathYear.empty?\n graph << [myuri, FOAF.givenName, @firstName] unless @firstName.empty?\n graph << [myuri, clsauthor.middlename, @middleName] unless @middleName.empty?\n graph << [myuri, FOAF.name, @firstName + ' ' + @lastName] unless (@firstName.empty? || @lastName.empty?)\n graph << [myuri, FOAF.familyName, @lastName] unless @lastName.empty?\n\n unless @gPlusID.empty?\n # fakeid = RDF::URI(\"http://liicornell.org/googleplus/\" + @gPlusID)\n # graph << [fakeid, RDF.type, clsauthor.GooglePlusProfile]\n # graph << [myuri, clsauthor.hasGooglePlusProfile, fakeid]\n graph << [myuri, clsauthor.gPlusProfile, RDF::URI(GPLUS_URI_PREFIX+@gPlusID)]\n end\n\n unless @gScholarID.empty?\n # fakeid = RDF::URI('http://liicornell.org/googlescholar/' + @gScholarID)\n # graph << [fakeid, RDF.type, clsauthor.GoogleScholarPage]\n # graph << [myuri, clsauthor.hasGoogleScholarPage, fakeid]\n graph << [myuri, clsauthor.gScholarPage, RDF::URI(GSCHOLAR_URI_PREFIX + @gScholarID)]\n end\n\n unless @openGraphID.empty?\n # graph << [RDF::URI(OPENGRAPH_URI_PREFIX + @openGraphID), RDF.type, clsauthor.openGraphID]\n # graph << [myuri, OWL.sameAs, RDF::URI(OPENGRAPH_URI_PREFIX + @openGraphID)]\n end\n\n graph << [myuri, clsauthor.orcID, @orcidID] unless @orcidID.empty?\n graph << [myuri, clsauthor.ssrnAuthorID, @ssrnAuthorID] unless @ssrnAuthorID.empty?\n\n unless @worldCatID.empty?\n #fakeid = RDF::URI('http://liicornell.org/worldcat/' + Digest::MD5.hexdigest(@worldCatID))\n #graph << [fakeid, RDF.type, clsauthor.WorldCatPage]\n #graph << [myuri, clsauthor.hasWorldCatPage, fakeid]\n graph << [myuri, clsauthor.worldCatID, RDF::URI(@worldCatID)]\n graph << [myuri, OWL.sameAs, RDF::URI(@worldCatID)]\n end\n\n graph << [myuri, clsauthor.institutionBio, RDF::URI(@clsBio)] unless @clsBio.empty?\n\n unless @linkedInProfile.empty?\n #fakeid = RDF::URI('http://liicornell.org/linkedin/' + Digest::MD5.hexdigest(@linkedInProfile))\n #graph << [fakeid, RDF.type, clsauthor.LinkedInProfile]\n #graph << [myuri, clsauthor.hasLinkedInProfile, fakeid]\n graph << [myuri, clsauthor.linkedInProfile, RDF::URI(@linkedInProfile)]\n end\n\n\n graph << [myuri, FOAF.homepage, RDF::URI(@homepage)] unless @homepage.empty?\n\n unless @viafID.empty?\n #fakeid = RDF::URI('http://liicornell.org/viaf/' + Digest::MD5.hexdigest(@viafID))\n #graph << [fakeid, RDF.type, clsauthor.ViafPage]\n #graph << [myuri, clsauthor.hasViafPage, fakeid]\n graph << [myuri, clsauthor.viafID, RDF::URI(@viafID)]\n graph << [myuri, OWL.sameAs, RDF::URI(@viafID)]\n end\n\n graph << [myuri, clsauthor.crossRefID, @crossRefID] unless @crossRefID.empty?\n\n unless @bePressID.empty?\n #fakeid = RDF::URI('http://liicornell.org/bepress/' + Digest::MD5.hexdigest(@bePressID))\n #graph << [fakeid, RDF.type, clsauthor.BePressPage]\n #graph << [myuri, clsauthor.hasBePressPage, fakeid]\n graph << [myuri, clsauthor.bePressPage, RDF::URI(@bePressID)]\n end\n\n graph << [myuri, OWL.sameAs, RDF::URI(@dbPediaID)] unless @dbPediaID.empty?\n #graph << [myuri, 
OWL.sameAs, RDF::URI(@freeBaseID)] unless @freeBaseID.empty?\n end\n end\n end",
"def create_triples\n clsauthor = RDF::Vocabulary.new(CLS_VOCABULARY)\n bibo = RDF::Vocabulary.new(BIBO_VOCABULARY)\n myuri = RDF::URI(@paper_URI)\n RDF::Writer.for(:ntriples).new() do |writer|\n writer << RDF::Graph.new do |graph|\n graph << [myuri, DC.contributor, RDF::URI(@cls_author_uri)]\n graph << [myuri, RDF.type, bibo.Article]\n graph << [myuri, clsauthor.abstractPage, RDF::URI(@url)]\n graph << [myuri, DC.abstract, @abstract] unless @abstract.nil?\n graph << [myuri, clsauthor.ssrnOnlineDate, @online_date] unless @online_date.nil?\n graph << [myuri, clsauthor.ssrnPubDate, @pub_date] unless @pub_date.nil?\n graph << [myuri, bibo.doi, @doi] unless @doi.nil?\n graph << [myuri, DC.title, @title] unless @title.nil?\n graph << [myuri, clsauthor.ssrnAbsViewCount, @abstract_views] unless @abstract_views.nil?\n graph << [myuri, clsauthor.ssrnDLCount, @paper_dls] unless @paper_dls.nil?\n graph << [myuri, clsauthor.ssrnCitationCount, @paper_citations] unless @paper_citations.nil?\n graph << [myuri, clsauthor.ssrnFNCount, @paper_footnotes] unless @paper_citations.nil?\n graph << [myuri, clsauthor.ssrnDLRank, @dl_rank] if @dl_rank\n @keywords.each do |subj|\n # normalize to lowercase\n subj.downcase!\n graph << [myuri, DC.subject, subj]\n end\n @jelcodes.each do |jel|\n graph << [RDF::URI(LII_JEL_URI_PREFIX + jel), RDF.type, clsauthor.JelClass]\n graph << [myuri, clsauthor.jelClass, RDF::URI(LII_JEL_URI_PREFIX + jel)]\n end\n @coauthors.each do |scribbler|\n scribURI = RDF::URI(LII_SSRN_AUTHOR_URI_PREFIX + scribbler)\n graph << [scribURI, RDF.type, clsauthor.SSRNAuthor]\n graph << [myuri, DC.contributor, scribURI]\n # stick in name information, just to be informative\n coauthpage = SSRNAuthorPage.new(scribbler, scribURI)\n coauthpage.scrape\n graph << [scribURI, FOAF.givenName, coauthpage.firstName]\n graph << [scribURI, FOAF.familyName, coauthpage.lastName]\n end\n end\n end\n end",
"def to_owl(*args)\n g = Graph.new\n owl = Namespace.new('http://www.w3.org/2002/07/owl', 'owl', true)\n foaf = Namespace.new(\"http://xmlns.com/foaf/0.1/\", \"foaf\")\n rdf = Namespace.new(\"http://www.w3.org/1999/02/22-rdf-syntax-ns\", \"rdf\", true)\n rdfs = Namespace.new(\"http://www.w3.org/2000/01/rdf-schema\", 'rdfs', true)\n xsd = Namespace.new('http://www.w3.org/2001/XMLSchema', 'xsd', true)\n \n for property in properties\n g << Triple.new(BNode.new('john'), foaf.knows, BNode.new('jane'))\n end\n return g\n end",
"def rdfs_from_owl\n # Remove previous auto rdfs triples\n ActiveRDF::FederationManager.clear(N::TALIA.auto_rdfs.context)\n \n # This gets all OWL classes in the store\n all_qry = ActiveRDF::Query.new(N::URI).distinct.select(:class)\n all_qry.where(:class, N::RDF::type, N::OWL.Class)\n all_owl = all_qry.execute\n \n # This gets all OWL classes that already have an RDF class attached\n qry_rdfs = ActiveRDF::Query.new(N::URI).distinct.select(:class)\n qry_rdfs.where(:class, N::RDF::type, N::OWL.Class)\n qry_rdfs.where(:class, N::RDF::type, N::RDFS.Class)\n classes_with_rdfs = qry_rdfs.execute\n \n \n modified = 0\n blanks = 0\n \n class_hash = {}\n \n # Put all the existing owl classes in a hash\n all_owl.each do |owl_class|\n if(owl_class.is_a?(RDFS::BNode))\n blanks = blanks + 1\n next\n end\n \n class_hash[owl_class] = :has_rdfs_class\n end\n \n # Now remove the ones that already have an RDF class\n classes_with_rdfs.each do |owl_class|\n next if(owl_class.is_a?(RDFS::BNode))\n \n class_hash[owl_class] = :no_rdfs_class\n end\n \n # Now go through all klasses and add the missing triples\n class_hash.each do |klass, status|\n if(status == :has_rdfs_class)\n modified = modified + 1\n ActiveRDF::FederationManager.add(N::URI.new(klass), N::RDF.type, N::RDFS.Class, N::TALIA.auto_rdfs_context)\n end\n yield(class_hash.size) if(block_given?)\n end\n \n return [class_hash.size, modified, blanks]\n end",
"def as_rdf()\n result_graph = RDF::Graph.new class_variable_get(:@@rdf_graph)\n result_graph << [self_uri, RDF::type, RDFS['Class']]\n end",
"def rdfs_from_owl\n # Get all OWL classes\n qry = Query.new.distinct.select(:s)\n qry.where(:s, make_res(N::RDF::type), make_res(N::OWL + 'Class'))\n klasses = qry.execute\n modified = 0\n \n klasses.each do |klass|\n already_exists = false\n assit_kind_of(RDFS::Resource, klass)\n unless(klass.rdf::type.is_a?(RDFS::Resource) || klass.rdf::type.size < 2)\n klass.rdf::type.each do |type|\n already_exists = true if(type == make_res(N::RDFS + \"Class\"))\n end\n end\n updated = 0\n unless(already_exists)\n klass.rdf::type = make_res(N::RDFS + \"Class\")\n klass.save\n updated = 1\n end\n \n modified += updated\n yield(klasses.size, updated) if(block_given?)\n end\n \n return [klasses.size, modified]\n end",
"def triples(triple = Triple.new(nil, nil, nil), &block) # :yields: triple, context\n @graphs.inject([]) {|memo, g| memo += g.triples(triple, &block)}\n end",
"def triples_for_property(property)\n g = Graph.new\n b = BNode.new(property.field)\n t = Triple.new(b, URIRef.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), URIRef.new('http://www.w3.org/2002/07/owl#DatatypeProperty'))\n g << t\n return g\n end",
"def r_class\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n\n __ID1__ = nil\n\n\n begin\n # at line 157:5: CLASS ID ( inherits )? START ( attributes )* constructor ( method )* R_END\n match( CLASS, TOKENS_FOLLOWING_CLASS_IN_r_class_648 )\n\n # --> action\n $scope = \"class\"\n # <-- action\n\n __ID1__ = match( ID, TOKENS_FOLLOWING_ID_IN_r_class_652 )\n\n # --> action\n $classId = __ID1__.text\n # <-- action\n\n # at line 157:58: ( inherits )?\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0 == INHER )\n alt_4 = 1\n end\n case alt_4\n when 1\n # at line 157:58: inherits\n @state.following.push( TOKENS_FOLLOWING_inherits_IN_r_class_657 )\n inherits\n @state.following.pop\n\n end\n match( START, TOKENS_FOLLOWING_START_IN_r_class_660 )\n # at line 157:75: ( attributes )*\n while true # decision 5\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0.between?( BOOL, CHAR ) || look_5_0 == FLOAT || look_5_0 == INTEGER )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line 157:77: attributes\n @state.following.push( TOKENS_FOLLOWING_attributes_IN_r_class_665 )\n attributes\n @state.following.pop\n\n else\n break # out of loop for decision 5\n end\n end # loop for decision 5\n\n\n # --> action\n $program.add_attrs()\n # <-- action\n\n @state.following.push( TOKENS_FOLLOWING_constructor_IN_r_class_673 )\n constructor\n @state.following.pop\n # at line 157:129: ( method )*\n while true # decision 6\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0.between?( BOOL, CHAR ) || look_6_0 == FLOAT || look_6_0 == INTEGER || look_6_0 == VOID )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 157:131: method\n @state.following.push( TOKENS_FOLLOWING_method_IN_r_class_678 )\n method\n @state.following.pop\n\n else\n break # out of loop for decision 6\n end\n end # loop for decision 6\n\n\n # --> action\n $program.add_class($classId)\n # <-- action\n\n match( R_END, TOKENS_FOLLOWING_R_END_IN_r_class_686 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n\n end\n\n return \n end",
"def class_def!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 90 )\n\n type = CLASS_DEF\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 211:13: 'class'\n match( \"class\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 90 )\n\n end",
"def create_ontologies_and_submissions(options = {})\n LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)\n end",
"def create_ontologies_and_submissions(options = {})\n LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)\n end",
"def get_triples_for_this_resource\n triples_graph = RDF::Graph.new\n @repository.query([RDF::URI.new(self.uri), :predicate, :object]) do |stmt|\n triples_graph << stmt\n end\n triples_graph\n end",
"def to_rdf()\n graph = RDF::Graph.new\n as_rdf(Array.new).each do |triple|\n puts triple \n graph << triple\n end\n return graph\n end",
"def create_triple(subject, predicate, object, datatype = nil)\n subject_uri = subject\n subject_uri = subject_uri.to_s unless subject_uri.instance_of?(String)\n subject_uri = \"<#{subject_uri}>\".sub(/\\s/, '%20')\n\n predicate_uri = predicate\n predicate_uri = predicate_uri.to_s unless predicate_uri.instance_of?(String)\n predicate_uri = \"<#{predicate_uri}>\".sub(/\\s/, '%20')\n\n object_representation = nil\n if object.kind_of?(RDF::URI) then\n object_uri = object.to_s\n object_representation = \"<#{object_uri}>\".sub(/\\s/, '%20')\n elsif object.kind_of?(Integer) then\n object_representation = object.to_s\n elsif object.kind_of?(Float) then\n object_representation = object.to_s\n elsif object.instance_of?(TrueClass) or object.instance_of?(FalseClass) then\n object_representation = object.to_s\n else\n if datatype then\n object_representation = \"\\\"#{object.to_s}\\\"^^<#{datatype.to_s}>\"\n else\n object_representation = \"\\\"#{object.to_s}\\\"\"\n end\n end\n\n begin\n if BioInterchange::format == :turtle then\n serialize_turtle(subject_uri, predicate_uri, object_representation)\n else\n @ostream.puts(\"#{subject_uri} #{predicate_uri} #{object_representation} .\")\n end\n rescue Errno::EPIPE\n # Whenever an output pipe disappears, then the user may be happy with what he/she\n # has seen and hit Ctrl-C, or, piped the output through a UNIX command line tool\n # such as \"head\".\n exit 0\n end\n\n subject\n end",
"def create_node_class\n class_name = \"#{parser.module_name}_#{name}_node\".camelize\n parser.const_set class_name, Class.new(RuleNode)\n end",
"def rule_triples_creator(rule_json, time, graph)\n rule = rule_json['rule']\n key = rule_json['key']\n parameters = rule['parameters']\n\n rule_uuid = generate_uuid\n rule_uri = '<' + validation_result_collection + rule_uuid + '>'\n\n # add type, uuid and timestamp, status\n rule_triples = rule_uri + \" a mu:validationResultCollection ;\\n\"\n rule_triples += \" mu:uuid '\" + rule_uuid + \"' ;\\n\"\n rule_triples += \" mu:timestamp '\" + time + \"'^^xsd:dateTime ;\\n\"\n rule_triples += \" mu:Graph '\" + graph + \"' ;\\n\" unless graph.nil?\n\n # end withadding a ruleId\n # and returning triples, uri and parameters\n rule_triples += \" mu:ruleId '\" + key + \"' .\\n\\n\"\n {\n 'rule_triples' => rule_triples,\n 'rule_uri' => rule_uri,\n 'parameters' => parameters,\n 'ruleId' => key\n }\n end",
"def create_ontology(ont_info)\n new_ontology = LinkedData::Models::Ontology.new\n\n new_ontology.acronym = ont_info['acronym']\n new_ontology.name = ont_info['name']\n new_ontology.administeredBy = [USER]\n if ont_info['viewingRestriction'] == 'private'\n # In case of private ontology (acl: list of user that have the right to see the ontology)\n new_ontology.viewingRestriction = 'private'\n new_ontology.acl = [USER]\n end\n new_ontology\nend",
"def triples(triple = Triple.new(nil, nil, nil), &block) # :yields: triple, context\n @store.triples(triple, nil, &block) || []\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create the OWL triples for a DataMapper property. ==== Example Person.triples_for_property(Person.properties[:id]) returns a graph of the triples representing the id property on Person in OWL. ==== Returns | def triples_for_property(property)
g = Graph.new
b = BNode.new(property.field)
t = Triple.new(b, URIRef.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), URIRef.new('http://www.w3.org/2002/07/owl#DatatypeProperty'))
g << t
return g
end | [
"def get_triples_for_this_resource\n triples_graph = RDF::Graph.new\n @repository.query([RDF::URI.new(self.uri), :predicate, :object]) do |stmt|\n triples_graph << stmt\n end\n triples_graph\n end",
"def property_ids\n result = Array.new\n self.properties.each do |p|\n result << p.id\n end\n result\n end",
"def get_properties\n if @properties.nil?\n parents = get_parents\n @properties = []\n term_properties = @ontology.get_properties_for_domain(@id)\n if term_properties\n @properties += term_properties\n end\n parents.each do |parent|\n parent_properties = @ontology.get_properties_for_domain(parent.id)\n if parent_properties\n @properties += parent_properties\n end\n end\n @properties.uniq!\n end\n return @properties\n end",
"def relationship_get_all_props id\n headers = {\n 'Accept' => 'application/json; charset=UTF-8',\n }\n get_request 'relationship/' + id + '/properties', headers\n end",
"def to_owl(*args)\n g = Graph.new\n owl = Namespace.new('http://www.w3.org/2002/07/owl', 'owl', true)\n foaf = Namespace.new(\"http://xmlns.com/foaf/0.1/\", \"foaf\")\n rdf = Namespace.new(\"http://www.w3.org/1999/02/22-rdf-syntax-ns\", \"rdf\", true)\n rdfs = Namespace.new(\"http://www.w3.org/2000/01/rdf-schema\", 'rdfs', true)\n xsd = Namespace.new('http://www.w3.org/2001/XMLSchema', 'xsd', true)\n \n for property in properties\n g << Triple.new(BNode.new('john'), foaf.knows, BNode.new('jane'))\n end\n return g\n end",
"def properties\n properties = []\n relations = self.property_relations\n relations.each do |relationship|\n properties.push relationship.property\n end\n properties\n end",
"def property_tree\n properties = Hash[self.explore.properties.map {|p| [p.id, p]}]\n properties.keys.each do |key|\n prop = properties[key]\n prop.parents.each {|par| properties[par].children << prop if properties[par]}\n end\n roots = properties.values.select {|p| p.parents.empty?}\n root = LinkedData::Client::Models::Property.new\n root.children = roots\n root\n end",
"def write_triples(enumerable, graph: @graph)\n patterns = []\n enumerable.each_triple do |subject, predicate, object|\n patterns << [subject, predicate, object]\n end\n\n begin\n query = RDF::Virtuoso::Query.insert(*patterns).graph(graph)\n res = @repo.insert(query)\n p res if @verbose\n rescue Exception => e\n @log.error e\n end\n end",
"def get_properties\n xml = client.call(\"#{url}/property\").parsed_response\n xml.css('properties property').map { |p| Vebra::Property.new(client, p, branch: self) }\n end",
"def props\n ret = {\"_neo_id\" => getId()}\n iter = getPropertyKeys.iterator\n while (iter.hasNext) do\n key = iter.next\n ret[key] = getProperty(key)\n end\n ret\n end",
"def triples_for_class_definition\n declare_namespaces\n g = Graph.new\n b = BNode.new(self.name)\n g << Triple.new(b, URIRef.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), URIRef.new('http://www.w3.org/2002/07/owl#Class'))\n return g\n end",
"def generate_node_properties(properties)\n File.open(\"nodes.csv\", \"w\") do |file|\n file.write(properties.join(\"\\t\") + \"\\n\")\n end\n end",
"def wb_create_properties(props)\n properties = Hash.new\n\n props.each do |prop|\n handle = prop[0]\n type = prop[1]\n data = '{\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"' + generate_random_string(8) +\n '\"}},\"descriptions\":{\"en\":{\"language\":\"en\",\"value\":\"' + generate_random_string(20) +\n '\"}},\"datatype\":\"' + type + '\"}'\n property = wb_create_entity(data, \"property\")\n properties[handle] = property\n end\n\n properties\n end",
"def triples(triple = Triple.new(nil, nil, nil), &block) # :yields: triple, context\n @graphs.inject([]) {|memo, g| memo += g.triples(triple, &block)}\n end",
"def relationship_set_props id, data\n headers = {\n 'Accept' => 'application/json; charset=UTF-8',\n 'Content-Type' => 'application/json',\n }\n\n put_request 'relationship/' + id + '/properties', data, headers\n end",
"def create_triples\n clsauthor = RDF::Vocabulary.new(CLS_VOCABULARY)\n bibo = RDF::Vocabulary.new(BIBO_VOCABULARY)\n @author_list.each do |author|\n author.create_triples(clsauthor, bibo)\n end\n end",
"def [](*properties)\n # Lesson Learnt\n # ==============\n # Don't change what doesn't belong to you\n properties = properties.map(&:to_s)\n run_in_transaction(:[], *properties) do\n node_properties = props # Fetch all properties as this is more efficient than firing a HTTP request for every property\n result = []\n properties.each { |k| result << node_properties[k] }\n # If a single property was asked then return it's value else return an array of values in the correct order\n if properties.length == 1\n result.first\n else\n result\n end\n end\n rescue NoMethodError => e\n _raise_doesnt_exist_anymore_error(e)\n end",
"def get_properties\n xml = client.call(\"#{attributes[:url]}/property\").parsed_response\n xml.css('properties property').map { |p| Vebra::Property.new(p, self) }\n end",
"def read_triples\n prod(:triples, %w{.}) do\n error(\"read_triples\", \"Unexpected end of file\") unless token = @lexer.first\n subject = case token.type || token.value\n when IPLSTART\n # iriPropertyList predicateObjectList? \n read_iriPropertyList || error(\"Failed to parse iriPropertyList\", production: :triples, token: @lexer.first)\n when '['\n # blankNodePropertyList predicateObjectList? \n read_blankNodePropertyList || error(\"Failed to parse blankNodePropertyList\", production: :triples, token: @lexer.first)\n else\n # subject predicateObjectList\n read_path || error(\"Failed to parse subject\", production: :triples, token: @lexer.first)\n end\n read_predicateObjectList(subject) || subject\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Utility method to declare a bunch of useful namespaces. ==== Example Person.declare_namespaces instantiates some useful namespaces. | def declare_namespaces
foaf = Namespace.new("http://xmlns.com/foaf/0.1/", "foaf")
rdf = Namespace.new("http://www.w3.org/1999/02/22-rdf-syntax-ns", "rdf", true)
rdfs = Namespace.new("http://www.w3.org/2000/01/rdf-schema", 'rdfs', true)
xsd = Namespace.new('http://www.w3.org/2001/XMLSchema', 'xsd', true)
owl = Namespace.new('http://www.w3.org/2002/07/owl', 'owl', true)
end | [
"def register_namespaces(namespaces); end",
"def namespaces(*namespaces)\n namespaces.each do |ns|\n namespace ns\n end\n @namespaces\n end",
"def namespaces\n @namespaces ||= self.class.name.split('::').slice(0...-1).map(&:underscore).map(&:to_sym)\n end",
"def exploded_namespace_declarations(ns)\n Hash[ns.map do |prefix, uri|\n if prefix==\"\"\n [\"xmlns\", uri]\n else\n [[prefix, \"xmlns\"], uri]\n end\n end]\n end",
"def ruby_namespace namespaces\n namespaces = namespaces.split \".\" if namespaces.is_a? String\n namespaces.reject(&:empty?).map(&:upcase_first).join \"::\"\n end",
"def collect_namespaces(object); end",
"def add_namespaces node\n raw.root.namespace_definitions.each do |ns|\n node.add_namespace_definition ns.prefix, ns.href if ns.href != NS\n end\n end",
"def unify_namespaces(ns1, ns2); end",
"def namespaces\n map(&:namespace).map(&:prefix).map(&:to_s).map(&:downcase).uniq\n end",
"def hidden_namespaces; end",
"def create_namespaces!\n count = Registry.count\n\n # Create the global team/namespace.\n team = Team.create(\n name: \"portus_global_team_#{count}\",\n owners: User.where(admin: true),\n hidden: true\n )\n Namespace.create!(\n name: \"portus_global_namespace_#{count}\",\n registry: self,\n visibility: Namespace.visibilities[:visibility_public],\n global: true,\n description: \"The global namespace for the registry #{Registry.name}.\",\n team: team\n )\n\n # TODO: change code once we support multiple registries\n User.find_each(&:create_personal_namespace!)\n end",
"def namespaces ns = \"root\", cn = \"__Namespace\"\n result = []\n each_instance( ns, cn ) do |inst|\n name = \"#{ns}/#{inst.Name}\"\n result << name\n result.concat namespaces name, cn\n end\n result.uniq\n end",
"def namespaces\n @namespaces ||= %w(Mithril::Controllers)\n end",
"def namespaces\n @namespaces ||= {\n wse: Akami::WSSE::WSE_NAMESPACE,\n ds: 'http://www.w3.org/2000/09/xmldsig#',\n wsu: Akami::WSSE::WSU_NAMESPACE,\n }\n end",
"def namespaces\n namespace.split(NAMESPACE_PATTERN)\n end",
"def default_namespaces\n @names\n end",
"def default_namespaces=(names)\n if names then\n @names = []\n names.each { |x| @names.push(x.dup) }\n else\n @names = nil\n end\n end",
"def addNameSpaceList(elm, nsList) \n nsList.each{|namespace|\n if(namespace.is_a?(Array))\n (prefix, uri) = namespace ;\n addNameSpace(elm, prefix, uri) ;\n else\n addNameSpace(elm,namespace) ;\n end\n }\n elm ;\n end",
"def namespaces\n names = self.class.name.split(\"::\")\n names.pop\n \n names.map(&:underscore).map(&:to_sym)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /asignaciones GET /asignaciones.json | def index
@asignaciones = Asignacione.all
end | [
"def index\n @asignaturas = Asignatura.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @asignaturas }\n end\n end",
"def index\n @asignacions = Asignacion.all\n end",
"def index\n @asignaturas = Asignatura.all\n end",
"def show\n @motivo_consulta = MotivoConsulta.find(params[:motivo_consulta_id])\n @asignacion = @motivo_consulta.asignaciones.find(params[:id]) \n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @asignacion }\n end\n end",
"def index\n @atividades = Atividade.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @atividades }\n end\n end",
"def index\n @avance_iniciativas = AvanceIniciativa.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @avance_iniciativas }\n end\n end",
"def show\n @asignatura = Asignatura.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @asignatura }\n end\n end",
"def asignaturas\n @seccion = \"Asignaturas\"\n @titulo = \"Asignaturas\"\n if params[:orden]\n case params[:orden]\n when \"Código\"\n @orden = \"codigo\"\n when \"Nombre\"\n @orden = \"nombre\"\n when \"Créditos\"\n @orden = \"unidades_credito\"\n when \"Tipo\"\n @orden = \"tipo\"\n else\n @orden = \"codigo\"\n end\n else\n @orden = \"codigo\"\n end\n \n @carreras = [\"Biología\", \"Computación\", \"Geoquímica\", \"Física\", \"Matemática\", \"Química\", \"Complementaria\"]\n @categorias = ['Código', 'Nombre', 'Créditos','Tipo'] #Este es el orden que se mostrará en el select de la vista\n @categoria_seleccionada = params[:orden]\n\n @asignatura_datos = [] #En este arreglo se guardan los pares \"Codigo - Nombre\" de cada asignatura para mostrar en el campo de busqueda\n Asignatura.order('nombre').all.each do |asignatura|\n #En este foro se agregan uno a uno los valores de cada asignatura al arreglo de la manera indicada arriba\n @asignatura_datos << asignatura.codigo.to_s + \" - \" + asignatura.nombre.to_s\n end\n end",
"def index\n @secciones = Seccion.order(\"grado_id\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @secciones }\n end\n end",
"def index\n @ubicacions = Ubicacion.all\n render json: @ubicacions, status: :ok\n end",
"def index\n @actividades_secciones = ActividadSeccion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @actividades_secciones }\n end\n end",
"def index\n @anuncios = Anuncio.all\n render json: @anuncios, status: :ok\n end",
"def asignar_curso\n render :json => Profesor.asignar_curso(params)\n end",
"def index\n @encriptions = Encription.all\n respond_to do |format|\n \n format.html \n format.json { render json: @encriptions }\n \n end\n end",
"def index\n @solicitacaos = Solicitacao.find(:all, :conditions => {:usuario_id => current_usuario.id})\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @solicitacaos }\n end\n end",
"def index\n @instituicoes = Instituicao.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @instituicoes }\n end\n end",
"def index\n @publicacions = Publicacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @publicacions }\n end\n end",
"def index\n @assuntos = Assunto.all\n\n render json: @assuntos\n end",
"def index\n @dotacions = Dotacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @dotacions }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns coordinates that will be reached after applying the +move+, starting from the +from+ coordinates | def relative_coords(from, move)
[from[0] + move[0], from[1] + move[1]]
end | [
"def valid_moves(from)\n\tpossible_moves(from).select { |move| valid_position?(move) }\nend",
"def valid_moves(from)\r\n\tpossible_moves(from).select { |move| valid_position?(move) }\r\nend",
"def relative_direction(from, to)\n # first, look for the case where the maze wraps, and from and to\n # are on opposite sites of the grid.\n if wrap_x? && from[1] == to[1] && (from[0] == 0 || to[0] == 0) && (from[0] == @width-1 || to[0] == @width-1)\n if from[0] < to[0]\n W\n else\n E\n end\n elsif wrap_y? && from[0] == to[0] && (from[1] == 0 || to[1] == 0) && (from[1] == @height-1 || to[1] == @height-1)\n if from[1] < to[1]\n N\n else\n S\n end\n elsif from[0] < to[0]\n if from[1] < to[1]\n SE\n elsif from[1] > to[1]\n NE\n else\n E\n end\n elsif from[0] > to[0]\n if from[1] < to[1]\n SW\n elsif from[1] > to[1]\n NW\n else\n W\n end\n elsif from[1] < to[1]\n S\n elsif from[1] > to[1]\n N\n else\n # same point!\n nil\n end\n end",
"def get_new_coordinates(choice)\n selected_move = moveset[possible_moves[choice]]\n return x + selected_move[0], y + selected_move[1]\n end",
"def getPossibleFrom move\n possibleFrom = []\n move.board.pieces[move.piece.colour][move.piece.class.to_s.to_sym].each do |coord, piece|\n possibleFrom << coord\n end\n possibleFrom\n end",
"def moving_at(pos)\n @moving.each do |moving|\n if moving.pos == pos\n return moving\n end\n end\n nil\n end",
"def apply_location(moves)\r\n moves.delete([0,0])\r\n moves.map { |move| [(move[0] + @location[0]), (move[1] + @location[1])]}\r\n end",
"def valid_moves(from)\n\n# takes possible moves based on from parameter, \n# each move is checked to see if it leads a valid position\n possible_moves(from).select { |move| valid_position?(move) }\nend",
"def contesting(move)\n future_position, query_position = position_strategy.positions(piece, move)\n contesting_piece = piece_get.call(query_position) if piece_get\n [future_position, contesting_piece]\n end",
"def get_position(full_move)\n full_move[0..1].map { | num | num.to_i - 1 }\n end",
"def required_moves(team, from_coordinate = nil)\n board = state_of_piece\n moves = []\n board.each_with_index do |row, row_index|\n row.each_with_index do |piece, column_index|\n if team == \"red\" && piece == \"RK\"\n next_move = can_eat_down?(row_index, column_index, \"B\") ||\n can_eat_up?(row_index, column_index, \"B\")\n elsif team == \"black\" && piece == \"BK\"\n next_move = can_eat_up?(row_index, column_index, \"R\") ||\n can_eat_down?(row_index, column_index, \"R\")\n elsif team == \"red\" && piece == \"R\"\n next_move = can_eat_up?(row_index, column_index, \"B\")\n elsif team == \"black\" && piece == \"B\"\n next_move = can_eat_down?(row_index, column_index, \"R\")\n end\n unless next_move.blank?\n # discard the move if it would take the piece off the board\n move = [row_index, column_index].concat(next_move)\n if off_the_board(next_move[0], next_move[1])\n # do nothing\n elsif from_coordinate != nil && from_coordinate == [row_index, column_index]\n moves << move\n elsif from_coordinate == nil\n moves << move\n end\n end\n end\n end\n return moves\n end",
"def traverse (from, to, points_visited_so_far = [])\n \n return points_visited_so_far if from.eql?(to)\n\n # Select those adjacent points that that has not been already traversed\n # and that do not represent walls\n possible_steps = adjacent_traversible_points(from).select { |point| \n (not points_visited_so_far.include?(point))\n }\n\n # For each possible step, take that step, and find out the list of points\n # that need to be traversed to reach \"to\" point. In case there were more\n # than one possible steps, pick the one that has smaller number of steps\n # to destination\n points_to_destination_from_here = []\n possible_steps.each do |point|\n traversal_result = traverse(point, to, points_visited_so_far + [point])\n if not traversal_result.empty?\n points_to_destination_from_here = traversal_result if \n (points_to_destination_from_here.empty? or\n traversal_result.size < points_to_destination_from_here.size)\n end\n end\n \n return points_to_destination_from_here\n\n end",
"def new_move_positions(pos)\n moves = KnightPathFinder.valid_moves(pos).reject do |move|\n @visited_positions.include?(move)\n end\n moves\n end",
"def get_move(start_pos = nil)\n until start_pos && valid_start_pos?(start_pos)\n start_pos = respond_to_input\n end\n self.cursor = start_pos\n self.end_cursor = self.cursor.dup\n end_pos = nil\n until end_pos && self[start_pos].moves.include?(end_pos)\n end_pos = respond_to_input\n end\n self.cursor = end_cursor\n self.end_cursor = nil\n move(start_pos, end_pos)\n end",
"def possible_moves\n Knight.possible_moves_from(@position)\n end",
"def move_piece(from, to)\n x_f = from[0]\n y_f = from[1]\n x_t = to[0]\n y_t = to[1]\n\n return if x_f > 7 || x_f < 0 || y_f > 7 || y_f < 0\n return if x_t > 7 || x_t < 0 || y_t > 7 || y_t < 0\n return if at(from).nil?\n\n @en_passant_vulnerable = nil\n if y_f == 1 && y_t == 3 || y_f == 6 && y_t == 4\n piece = at(from)\n @en_passant_vulnerable = piece if piece.notation == 'P'\n end\n \n capture(at(to), false) unless at(to).nil?\n \n @grid[y_t][x_t] = at(from)\n at(to).num_of_moves += 1\n at(to).position = [x_t, y_t]\n\n @grid[y_f][x_f] = nil\n\n @actions << \"#{x_f}#{y_f}-#{x_t}#{y_t}\"\n end",
"def next_move\n case Direction::DIRECTIONS[direction.current]\n when Direction::DIRECTIONS[0] # NORTH\n {x: 0, y: 1}\n when Direction::DIRECTIONS[1] # EAST\n {x: 1, y: 0}\n when Direction::DIRECTIONS[2] # SOUTH\n {x: 0, y: -1}\n when Direction::DIRECTIONS[3] # WEST\n {x: -1, y: 0}\n end\n end",
"def coords_after_move(x, y, direction)\n unless placement_is_valid?(x, y, direction)\n raise \"Invalid initial state (#{x}, #{y}, #{direction})\"\n end\n\n new_x = x + ( { 'WEST' => -1, 'EAST' => +1 }[direction] || 0 )\n new_y = y + ( { 'SOUTH' => -1, 'NORTH' => +1 }[direction] || 0 )\n\n coordinates_are_valid?(new_x, new_y) ? [ new_x, new_y ] : [ x, y ]\n end",
"def move (from, to)\n disk = @positions[from - 1].pop\n @positions[to - 1] << disk\n @moves = @moves + 1\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /despatches GET /despatches.json | def index
@despatches = Despatch.all
end | [
"def destroy\n @despatch.destroy\n respond_to do |format|\n format.html { redirect_to despatches_url, notice: 'Despatch was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def index\n @production_dpts = ProductionDpt.all\n\n render json: @production_dpts\n end",
"def index\n @departs = Depart.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @departs }\n end\n end",
"def index\n @watches = GOD.watches\n\t\tdisplay @watches\n end",
"def get_dispatches(opts = {})\n data, _status_code, _headers = get_dispatches_with_http_info(opts)\n data\n end",
"def index\n @post_prod_dpts = PostProdDpt.all\n\n render json: @post_prod_dpts\n end",
"def index\n @game_watches = GameWatch.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @game_watches }\n end\n end",
"def destroy\n @gratch.destroy\n respond_to do |format|\n format.html { redirect_to gratches_url }\n format.json { head :no_content }\n end\n end",
"def show\n @scratcher = Scratcher.find(params[:id])\n\n render json: @scratcher\n end",
"def delete_dispatches(opts = {})\n delete_dispatches_with_http_info(opts)\n nil\n end",
"def departures\n @station = Station.find_by! uuid: params[:uuid]\n @departure_board = @station.get_departure_board\n\n respond_to do |format|\n format.html\n format.xml { render xml: @departure_board }\n format.json { render json: @departure_board, callback: params['callback'] }\n end\n end",
"def index\n @gratches = Gratch.all\n\n end",
"def index\n @deed_votes = DeedVote.all\n\n render json: @deed_votes\n end",
"def index\n @deaneries = Deanery.all\n\n render json: @deaneries\n end",
"def dropletList()\n http, req = initReq(\"droplets/\")\n JSON.parse(http.request(req).body)\nend",
"def index\n\n @dtests = Dtest.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @dtests }\n end\n end",
"def index\n @desks = Desk.all\n\n render json: @desks\n end",
"def index\n @qa_dpts = QaDpt.all\n\n render json: @qa_dpts\n end",
"def index\n @dusts = Dust.all\n render json: @dusts\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /despatches POST /despatches.json | def create
@despatch = Despatch.new(despatch_params)
respond_to do |format|
if @despatch.save
format.html { redirect_to @despatch, notice: 'Despatch was successfully created.' }
format.json { render :show, status: :created, location: @despatch }
else
format.html { render :new }
format.json { render json: @despatch.errors, status: :unprocessable_entity }
end
end
end | [
"def index\n @despatches = Despatch.all\n end",
"def destroy\n @despatch.destroy\n respond_to do |format|\n format.html { redirect_to despatches_url, notice: 'Despatch was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def update\n respond_to do |format|\n if @despatch.update(despatch_params)\n format.html { redirect_to @despatch, notice: 'Despatch was successfully updated.' }\n format.json { render :show, status: :ok, location: @despatch }\n else\n format.html { render :edit }\n format.json { render json: @despatch.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @gratch.destroy\n respond_to do |format|\n format.html { redirect_to gratches_url }\n format.json { head :no_content }\n end\n end",
"def request_delete(json_rules)\n url_with_delete = \"#{@url}?_method=delete\"\n payload = json_rules\n response = RestClient.post(url_with_delete, payload, {:Authorization => form_auth_header}) \n response\n end",
"def delete_dispatches(opts = {})\n delete_dispatches_with_http_info(opts)\n nil\n end",
"def create\n\n request = RestClient.post File.join(API_SERVER,\"rest-api/departments\"), { \n 'name' => params['department']['name'], \n 'description' => params['department']['description'] }.to_json, :content_type => :json, :accept => :json\n redirect_to :action => :index\n end",
"def destroy\n @scratch = Scratch.find(params[:id])\n @scratch.destroy\n\n respond_to do |format|\n format.html { redirect_to(scratches_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @debug.destroy\n respond_to do |format|\n format.html { redirect_to debugs_url }\n format.json { head :no_content }\n end\n end",
"def create\n @deed = Deed.create(deed_params)\n\n if @deed.save\n render json: @deed, status: :created, location: @deed\n else\n render json: @deed.errors, status: :unprocessable_entity\n end\n end",
"def create\n @scratcher = Scratcher.new(permitted_params)\n\n if @scratcher.save\n render json: @scratcher, status: :created, location: @scratcher\n else\n render json: @scratcher.errors, status: :unprocessable_entity\n end\n end",
"def delete_dispatches_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DispatchApi.delete_dispatches ...'\n end\n # resource path\n local_var_path = '/dispatch/dispatches'\n\n # query parameters\n query_params = {}\n query_params[:'courseId'] = opts[:'course_id'] if !opts[:'course_id'].nil?\n query_params[:'since'] = opts[:'since'] if !opts[:'since'].nil?\n query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?\n query_params[:'datetimeFilter'] = opts[:'datetime_filter'] if !opts[:'datetime_filter'].nil?\n query_params[:'tags'] = @api_client.build_collection_param(opts[:'tags'], :csv) if !opts[:'tags'].nil?\n query_params[:'filter'] = opts[:'filter'] if !opts[:'filter'].nil?\n query_params[:'filterBy'] = opts[:'filter_by'] if !opts[:'filter_by'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['APP_NORMAL', 'OAUTH']\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DispatchApi#delete_dispatches\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def destroy\n @watch.destroy\n respond_to do |format|\n format.html { redirect_to watches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @patch.destroy\n respond_to do |format|\n format.html { redirect_to patches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @device_post_test.destroy\n respond_to do |format|\n format.html { redirect_to device_post_tests_url, notice: 'Device post test was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @swatch.destroy\n respond_to do |format|\n format.html { redirect_to swatches_url, notice: 'Swatch was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @dispatched_drip = DispatchedDrip.find(params[:id])\n @dispatched_drip.destroy\n\n respond_to do |format|\n format.html { redirect_to dispatched_drips_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bike_spec = BikeSpec.find(params[:id])\n @bike_spec.destroy\n\n respond_to do |format|\n format.html { redirect_to bike_specs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @diss_tracker.destroy\n respond_to do |format|\n format.html { redirect_to diss_trackers_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /despatches/1 PATCH/PUT /despatches/1.json | def update
respond_to do |format|
if @despatch.update(despatch_params)
format.html { redirect_to @despatch, notice: 'Despatch was successfully updated.' }
format.json { render :show, status: :ok, location: @despatch }
else
format.html { render :edit }
format.json { render json: @despatch.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n request = RestClient.put File.join(API_SERVER,\"rest-api/departments\"), { \n 'id' => params['id'], \n 'name' => params['department']['name'], \n 'description' => params['department']['description'] }.to_json, :content_type => :json, :accept => :json\n\n redirect_to :action => :index\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def patch(path, params: {}, headers: {})\n request_json :patch, path, params, headers\n end",
"def patch *args\n make_request :patch, *args\n end",
"def destroy\n @patch.destroy\n respond_to do |format|\n format.html { redirect_to patches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @patch = Patch.find(params[:id])\n @patch.destroy\n\n respond_to do |format|\n format.html { redirect_to patches_url }\n format.json { head :no_content }\n end\n end",
"def update\n opts = {\n desc: params[:desc] || @deed.desc,\n points: params[:points] || @deed.points\n }\n if @deed.update(opts)\n render json: opts, status: :ok, location: @deed\n else\n render json: @deed.errors, status: :unprocessable_entity\n end\n end",
"def update\n do_patch { return } # check if patch and do submission and return early if it is a patch (submission)\n # otherwise this is a PUT of the dataset metadata\n check_status { return } # check it's in progress, clone a submitted or raise an error\n respond_to do |format|\n format.json do\n dp = DatasetParser.new(hash: params['dataset'], id: @resource.identifier, user: @user)\n @stash_identifier = dp.parse\n ds = Dataset.new(identifier: @stash_identifier.to_s) # sets up display objects\n render json: ds.metadata, status: 200\n end\n end\n end",
"def update\n @patch = Patch.find(params[:id])\n\n respond_to do |format|\n if @patch.update_attributes(params[:patch])\n format.html { redirect_to @patch, notice: 'Patch was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @patch.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch?; request_method == \"PATCH\" end",
"def update\n do_patch { return } # check if patch and do submission and return early if it is a patch (submission)\n # otherwise this is a PUT of the dataset metadata\n check_status { return } # check it's in progress, clone a submitted or raise an error\n respond_to do |format|\n format.json do\n dp = if @resource\n DatasetParser.new(hash: params['dataset'], id: @resource.identifier, user: @user) # update dataset\n else\n DatasetParser.new(hash: params['dataset'], user: @user, id_string: params[:id]) # upsert dataset with identifier\n end\n @stash_identifier = dp.parse\n ds = Dataset.new(identifier: @stash_identifier.to_s) # sets up display objects\n render json: ds.metadata, status: 200\n end\n end\n end",
"def patch_update_consumption_detail_id\n id_array = JSON.parse(params[:ids])\n consumption_detail_id = params[:consumption_detail_id]\n UserAttachment.find_by_ids(id_array).update(consumption_detail_id: consumption_detail_id)\n consumption_detail_attachments = UserAttachment.find_by_ids(id_array)\n render json: { result: 'success', consumption_detail_attachments:consumption_detail_attachments }, status: 200\n end",
"def destroy\n @despatch.destroy\n respond_to do |format|\n format.html { redirect_to despatches_url, notice: 'Despatch was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def patch(url, data, headers = {})\n request(:patch, url, headers, :data => data)\n end",
"def patch(url, data, options={})\n default_client.patch(url, data, options)\n end",
"def index\n @despatches = Despatch.all\n end",
"def rest_edit(path, options={}, &blk)\n callback = Proc.new { |*args|\n @object = yield(*args) or pass\n rest_params.each { |k, v| @object.send :\"#{k}=\", v unless k == 'id' }\n\n return 400, @object.errors.to_json unless @object.valid?\n\n @object.save\n rest_respond @object\n }\n\n # Make it work with `Backbone.emulateHTTP` on.\n put path, &callback\n post path, &callback\n end",
"def update\n respond_to do |format|\n if @dessert.update(dessert_params)\n format.html { redirect_to @dessert}\n format.json { render :show, status: :ok, location: @dessert }\n else\n format.html { render :edit }\n format.json { render json: @dessert.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n json_update(factType,factType_params, FactType)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /despatches/1 DELETE /despatches/1.json | def destroy
@despatch.destroy
respond_to do |format|
format.html { redirect_to despatches_url, notice: 'Despatch was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @gratch.destroy\n respond_to do |format|\n format.html { redirect_to gratches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @scratch = Scratch.find(params[:id])\n @scratch.destroy\n\n respond_to do |format|\n format.html { redirect_to(scratches_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n request = RestClient.delete File.join(API_SERVER,\"rest-api/departments\",params['id'])\n redirect_to :action => :index\t\n end",
"def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end",
"def destroy\n @json_datum.destroy\n respond_to do |format|\n format.html { redirect_to json_data_url }\n format.json { head :no_content }\n end\n end",
"def delete\n RestClient.delete \"#{@uri}/api/requests/request/#{@data['requestId']||@data['id']}\"\n puts ' Deleted request: '.red + \"#{@data['requestId']||@data['id']}\".light_blue\n end",
"def delete\n spec[DELETE]\n end",
"def delete(path, params={}); make_request(:delete, host, port, path, params); end",
"def destroy\n @my_watch = MyWatch.find(params[:id])\n @my_watch.destroy\n\n respond_to do |format|\n format.html { redirect_to my_watches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @jsonfile = Jsonfile.find(params[:id])\n @jsonfile.destroy\n\n respond_to do |format|\n format.html { redirect_to(jsonfiles_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @dust_bath.destroy\n respond_to do |format|\n format.html { redirect_to dust_baths_url }\n format.json { head :no_content }\n end\n end",
"def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend",
"def delete endpoint\n do_request :delete, endpoint\n end",
"def destroy\n @slashdot_posting.destroy\n respond_to do |format|\n format.html { redirect_to slashdot_postings_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @single_action.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @redline.destroy\n respond_to do |format|\n format.html { redirect_to redlines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @depot = Depot.find(params[:id])\n @depot.destroy\n\n respond_to do |format|\n format.html { redirect_to depots_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @dispatch = Dispatch.find(params[:id])\n @dispatch.destroy\n\n respond_to do |format|\n format.html { redirect_to dispatches_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
remove both the local download file and remote eb config | def clean(mute=false)
return if @options[:dirty]
UI.say "Cleaning up eb remote config and local files" unless mute
eb.delete_configuration_template(
application_name: @updater.app_name,
template_name: current_name
) unless @options[:noop]
FileUtils.rm_f(@current_path)
end | [
"def remove!\n response = connection.exec!(\"rm #{ File.join(remote_path, remote_file) }\")\n if response =~ /No such file or directory/\n Logger.warn \"Could not remove file \\\"#{ File.join(remote_path, remote_file) }\\\".\"\n end\n end",
"def remove\n print \"[\\e[90m%s\\e[0m] Removed the daemon from the config \" % name\n config_was = File.exist?(FOREVER_PATH) ? YAML.load_file(FOREVER_PATH) : []\n config_was.delete_if { |conf| conf[:file] == file }\n File.open(FOREVER_PATH, \"w\") { |f| f.write config_was.to_yaml }\n end",
"def clean_remote\n to_delete = remote_assets - local_compiled_assets\n to_delete.each do |f|\n delete_remote_asset(bucket.files.get(f))\n end\n end",
"def remove_configuration\n File.delete config_file if configuration_present?\n end",
"def clean_staging\n staging_config_file = '_config.staging.yml'\n File.delete staging_config_file if File.exists? staging_config_file\nend",
"def remove_config\n FileUtils.cd(self.project_root) { FileUtils.rm_rf \"config/environments/#{self.name}\" }\n end",
"def decompress_remote_file\n run \"mkdir #{configuration[:release_path]} && cd #{configuration[:release_path]} && #{decompress(remote_filename).join(\" \")} && rm #{remote_filename}\"\n end",
"def remove_deploy_file\n `cd #{self.project_root} && rm -rf config/deploy/#{self.name}.rb`\n end",
"def clear_config\n FileUtils.rm @config_build if File.exists? @config_build\n end",
"def undeploy_config\n return false unless remove_config\n host.disable if reload_bacula\n end",
"def undeploy_config\n remove_config && reload_bacula\n end",
"def delete_file\n\n #delete the file\n Chef::Log.debug \"DEBUG: Removing file #{ node[\"php_fpm\"][\"pools_path\"] }/#{ @new_resource.pool_name }.conf!\"\n ::File.delete(\"#{ node[\"php_fpm\"][\"pools_path\"] }/#{ @new_resource.pool_name }.conf\")\n\nend",
"def remove_files! node_name\n filename = RunnerUtils.app_config.output_dir + \"#{node_name}.cfg\"\n \n\t\t\t# TODO: Make this a warning and don't halt execution\n unless filename.exist?\n RunnerUtils.fatal \"Unregister for #{node_name} expects nonexistant file to exist at #{filename.to_s}\"\n raise \"Can't delete nonexistant file at #{filename.to_s}\"\n end\n\n filename.delete\n RunnerUtils.debug \"Deleted file at #{filename.to_s}\"\n end",
"def clear_remote\n execute(:rm, '-rf', File.join(remote_cache_path, '*')) if test!(\"[ -d #{remote_cache_path} ]\")\n end",
"def action_remove\n notifying_block do\n delete_config\n end\n end",
"def delete_remote_file(node_id, options)\n delete \"/cluster/#{node_id}/files\", options\n end",
"def removed_cookbook_file(path); end",
"def config_delete(name)\n Bundler.settings.set_local(name, nil)\n Bundler.settings.set_global(name, nil)\n end",
"def clear_remote_bucket\n UserAssetService.get_remote_assets.map(&:delete)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the create_device_registry REST call | def create_device_registry request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_create_device_registry_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def create_registry!(params)\n svc = ::Registries::CreateService.new(nil, params)\n svc.execute\n return unless svc.valid?\n\n Rails.logger.tagged(\"registry\") do\n msg = JSON.pretty_generate(params)\n Rails.logger.info \"Registry created with the following parameters:\\n#{msg}\"\n end\nend",
"def create\n @registry = Registry.new(create_params)\n\n # Check the reachability of the registry.\n unless params[:force]\n msg = @registry.reachable?\n unless msg.empty?\n logger.info \"\\nRegistry not reachable:\\n#{@registry.inspect}\\n#{msg}\\n\"\n msg = \"#{msg} You can skip this check by clicking on the \\\"Skip remote checks\\\" checkbox.\"\n hsh = { name: @registry.name, hostname: @registry.hostname,\n use_ssl: @registry.use_ssl, external_hostname: @registry.external_hostname }\n redirect_to new_admin_registry_path(hsh), alert: msg\n return\n end\n end\n\n if @registry.save\n Namespace.update_all(registry_id: @registry.id)\n redirect_to admin_registries_path, notice: \"Registry was successfully created.\"\n else\n redirect_to new_admin_registry_path, alert: @registry.errors.full_messages\n end\n end",
"def create\n @registry = Registry.new(create_params)\n\n # Check the reachability of the registry.\n\n check_reachability(\"new\") unless params[:force]\n return if @unreachable\n\n if @registry.save\n Namespace.update_all(registry_id: @registry.id)\n redirect_to admin_registries_path, notice: \"Registry was successfully created.\"\n else\n redirect_to new_admin_registry_path, alert: @registry.errors.full_messages\n end\n end",
"def register\n physical_entity = {name: @name, description: @description, location:@location}\n virtual_entity = {uuid: nil}\n body = {body: {device: {name: @name, \n description: @description, \n model_name: @model, \n location: @location,\n physical_entity_attributes: physical_entity,\n virtual_entity_attributes: virtual_entity}}}\n response = self.class.post(API+'/devices', body)\n @uuid = response['uuid']\n end",
"def prepare_registry; end",
"def create\n @device = Device.create!(device_params)\n json_response(@device, :created)\n end",
"def create\n start = Time.now\n debug \"Creating device group: \\\"#{resource[:full_path]}\\\"\"\n connection = self.class.get_connection(resource[:account])\n recursive_group_create(connection,\n resource[:full_path],\n resource[:description],\n resource[:properties],\n resource[:disable_alerting])\n debug \"Finished in #{(Time.now-start)*1000.0} ms\"\n end",
"def create\n @bridal_registry = Spree::BridalRegistry.new bridal_registry_attributes\n @bridal_registry.user = spree_current_user\n @bridal_registry.save\n respond_with(@bridal_registry)\n end",
"def create\n @registry = @user.registries.new(params[:registry])\n\n @registry.null_gates\n respond_to do |format|\n if @registry.save\n flash[:notice] = 'Registry was successfully created.'\n format.html { redirect_to new_registry_path }\n format.xml { render :xml => @registry, :status => :created, :location => [ @user, @registry] }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @registry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n svc = ::Registries::CreateService.new(current_user, create_params)\n svc.force = params[:force]\n svc.execute\n\n if svc.valid?\n redirect_to admin_registries_path, notice: \"Registry was successfully created.\"\n elsif svc.reachable?\n flash[:alert] = svc.messages\n redirect_to new_admin_registry_path, alert: svc.messages\n else\n flash[:alert] = svc.messages\n render :new, status: :unprocessable_entity\n end\n end",
"def get_device_registry request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_get_device_registry_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def create\n @registry = @user.registries.new(registry_params)\n\n respond_to do |format|\n if @registry.save\n flash[:notice] = 'Registry was successfully created.'\n format.html { redirect_to user_registry_path(@user, @registry) }\n format.xml { render :xml => @registry, :status => :created, :location => [ @user, @registry] }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @registry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create_device(key, name = \"\", attributes = {}, *sensors)\n device = Device.new(key, name, attributes, *sensors)\n remoter.post(\"/v2/devices\", JSON.dump(device.to_hash)).on_success do |result|\n json = JSON.parse(result.body)\n Device.from_hash(json)\n end\n end",
"def create_registry!\n # Hostname configurable so some tests can check wrong hostnames.\n hostname = ENV[\"PORTUS_INTEGRATION_HOSTNAME\"] || \"172.17.0.1:5000\"\n Registry.create!(name: \"registry\", hostname: hostname, use_ssl: false)\n ENV[\"PORTUS_INTEGRATION_HOSTNAME\"] = nil\n\n User.create_portus_user!\nend",
"def register\n # Grab the facts for this node\n facts = MK.node.facts\n hw_id = MK.node.hw_id\n\n # Format the body of our register as a JSON message\n data = { 'hw_id' => hw_id, 'facts' => facts }.to_json\n\n # Grab our user agent header.\n user_agent = MK.node.user_agent\n\n # Build our headers for the request.\n headers = {\n 'Content-Type' => 'application/json',\n 'User-Agent' => user_agent\n }\n\n # Send our RPC registration command to the server; this will raise if\n # something goes wrong in the submission process.\n result = MK.server.send_register(data, headers)\n\n # Dispatch the command we received, unless there is none...\n execute(result['action']) unless result['action'] == 'none'\n\n # ...and we are good.\n return true\n end",
"def create_registration\n @secret = random_password(500)\n Chef::FileCache.store(File.join(\"registration\", @safe_name), @secret)\n @rest.post_rest(\"registrations\", { :id => @safe_name, :password => @secret, :validation_token => @validation_token })\n true\n end",
"def registry_createkey(key)\n\t\tif session_has_registry_ext\n\t\t\tmeterpreter_registry_createkey(key)\n\t\telse\n\t\t\tshell_registry_createkey(key)\n\t\tend\n\tend",
"def create\n @inventory_registry = InventoryRegistry.new(inventory_registry_params)\n\n respond_to do |format|\n if @inventory_registry.save\n format.html { redirect_to @inventory_registry, notice: 'Inventory registry was successfully created.' }\n format.json { render :show, status: :created, location: @inventory_registry }\n else\n format.html { render :new }\n format.json { render json: @inventory_registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def register\n sensor = Sensor.new(cycles: 0)\n sensor.save\n puts \"Registered Device with id #{sensor.id}\"\n render json: {\"id\": \"#{sensor.id}\"}\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the get_device_registry REST call | def get_device_registry request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_get_device_registry_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def registry\n @registry ||= client.registries.get_from_uri(info[:registry]) unless info[:registry].nil?\n end",
"def create_device_registry request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_create_device_registry_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def registry_statistics\n client = IotHubApiClient.new(@options)\n res = client.get('/statistics/devices', @api_version_param)\n RegistryStatistics.create(response_json(res))\n end",
"def get_service_registry()\n raise NoMethodError, 'This method needs to be overridden'\n end",
"def update_device_registry request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_update_device_registry_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def resource_registry_options\n registry_data['registry_options']\n end",
"def send_registry_lookup(opts = {})\n send_call(\n sock: opts[:sock] || sock,\n call: build_registry_lookup(opts)\n )\n\n return_value = recv_return(\n sock: opts[:sock] || sock\n )\n\n if return_value.nil?\n return nil\n end\n\n if return_value.is_exception?\n raise ::Rex::Proto::Rmi::Exception, return_value.get_class_name\n end\n\n remote_object = return_value.get_class_name\n\n if remote_object.nil?\n return nil\n end\n\n remote_location = parse_registry_lookup_endpoint(return_value)\n\n if remote_location.nil?\n return nil\n end\n\n remote_location.merge(object: remote_object)\n end",
"def retrieve_h5p_registry\n response = RestClient.get 'https://h5p.org/registry.json', { accept: :json }\n JSON.parse(response)\nend",
"def calc_reg_list(device = :all)\n registrars.find_all { |dev, reg| device.to_sym == :all || device.to_s == dev }.map { |vp| vp[1] }\n end",
"def edit\n @can_change_hostname = Repository.none?\n @registry = Registry.find(params[:id])\n @registry_serialized = API::Entities::Registries.represent(\n @registry,\n current_user: current_user\n ).to_json\n end",
"def prepare_registry; end",
"def internal_get_register_values\n max_tries(3, 0.01, 'gdb_api:internal_get_register_values') do\n res = @debugger.send_mi_string('-data-list-register-values x')\n assert('unable to get register values') { res.content_type == 'done' }\n regs = res.value['register-values']\n internal_check_register_length(regs.length)\n return regs\n end\n end",
"def allocate_recovery_registry()\n if @recovery_registry.nil? then\n return {}\n else\n return @recovery_registry \n end\n end",
"def service_registry\n @service_registry ||= {}\n end",
"def send_registry_list(opts = {})\n send_call(\n sock: opts[:sock] || sock,\n call: build_registry_list(opts)\n )\n\n return_value = recv_return(\n sock: opts[:sock] || sock\n )\n\n if return_value.nil?\n return nil\n end\n\n if return_value.is_exception?\n raise ::Rex::Proto::Rmi::Exception, return_value.get_class_name\n end\n\n names = parse_registry_list(return_value)\n\n names\n end",
"def get_device(name); end",
"def initialize\n @registry = {}\n end",
"def registry\n type.registry\n end",
"def oai_registry_search(registry_name, xml_interface)\n results = Array.new\n\n # If OAI Regsitry is disabled in global config, just return no results\n return results if !$oai_registry\n\n puts \"---------------------\"\n puts \"Querying #{registry_name} OAI-PMH registry for likely DSpace sites ...\"\n puts \"(using XML interface at: #{xml_interface})\"\n\n # Request the full registry in XML & check return status\n open_page = open(xml_interface)\n puts \" Status returned: #{open_page.status.join(' ')}\"\n\n # Parse the response as XML\n doc = Nokogiri::XML(open_page)\n # Remove namespaces from result, as Nokogiri gets confused by their XML namespaces\n doc.remove_namespaces!\n\n # In the results, get all <baseURL> tags which contain \"/request\". \n # DSpace OAI interfaces tend to look like this [dspace.url]/oai/request\n links = doc.xpath(\"//baseURL[contains(.,'/request')]\")\n # Get size of result set\n puts \" Results found: #{links.length}\"\n\n # Loop through each result\n links.each do |link|\n # Get result URL\n url = link.content.to_s\n\n # Save URL to our results set, with a source of \"OpenDOAR\"\n results << [ \"#{registry_name} OAI-PMH Registry\", url ]\n end\n\n return results\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the update_device_registry REST call | def update_device_registry request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_update_device_registry_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def update\n attrs = update_params.merge(id: params[:id])\n svc = ::Registries::UpdateService.new(current_user, attrs)\n @registry = svc.execute\n\n if svc.valid?\n # NOTE: if we decide to use rails-observers at some point,\n # we can remove this from here and use it in observers\n Rails.cache.delete \"registry#{@registry.id}_status\"\n redirect_to admin_registries_path, notice: \"Registry updated successfully!\"\n else\n flash[:alert] = svc.messages\n @can_change_hostname = Repository.none?\n render \"edit\", status: :unprocessable_entity\n end\n end",
"def update\n @bridal_registry.update_attributes bridal_registry_attributes\n respond_with(@bridal_registry)\n end",
"def update\n respond_to do |format|\n if @registry.update(registry_params)\n format.html { redirect_to @registry, notice: 'Registry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def edit\n @can_change_hostname = Repository.none?\n @registry = Registry.find(params[:id])\n @registry_serialized = API::Entities::Registries.represent(\n @registry,\n current_user: current_user\n ).to_json\n end",
"def update\n @registry = Registry.find(params[:id])\n @registry.update_attributes(update_params)\n redirect_to admin_registries_path, notice: \"Registry updated successfully!\"\n end",
"def update\n @registry = @user.registries.find(params[:id])\n\n respond_to do |format|\n # if @registry.update_attributes(params[:registry])\n if @registry.update_attributes(registry_params)\n flash[:notice] = 'Registry was successfully updated.'\n format.html { redirect_to([@user,@registry]) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @registry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @lcb_registry.update(lcb_registry_params)\n format.html { redirect_to @lcb_registry, notice: 'Lcb registry was successfully updated.' }\n format.json { render :show, status: :ok, location: @lcb_registry }\n else\n format.html { render :edit }\n format.json { render json: @lcb_registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @inventory_registry.update(inventory_registry_params)\n format.html { redirect_to @inventory_registry, notice: 'Inventory registry was successfully updated.' }\n format.json { render :show, status: :ok, location: @inventory_registry }\n else\n format.html { render :edit }\n format.json { render json: @inventory_registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @registry = Registry.find(params[:id])\n\n @registry.update_attributes(use_ssl: !@registry.use_ssl?)\n render template: \"admin/registries/update\", locals: { registry: @registry }\n end",
"def update\n @registry = @user.registries.find(params[:id])\n\n respond_to do |format|\n if @registry.update_attributes(params[:registry])\n flash[:notice] = 'Registry was successfully updated.'\n format.html { redirect_to([@user,@registry]) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @registry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @device.update(device_params)\n head :no_content\n end",
"def update_registry(mappings)\n @mappings = Payload.new(mappings || {})\n end",
"def update\n @luxe_registry = LuxeRegistry.find(params[:id])\n\n respond_to do |format|\n if @luxe_registry.update_attributes(params[:luxe_registry])\n format.html { redirect_to @luxe_registry, notice: 'Luxe registry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @luxe_registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @device.update(device_params)\n format.json { head :no_content }\n else\n format.json { render json: {errors: @device.errors}, status: :unprocessable_entity }\n end\n end\n end",
"def update_params\n permitted = [:name, :use_ssl, (:hostname unless Repository.any?), :external_hostname].compact\n params.require(:registry).permit(permitted)\n end",
"def update!\n res_name = 'deviceRun'\n endpoint = \"projects/#{@project_id}/runs/#{@run_id}/device-runs/#{id}\"\n\n config = @client.get_api_request(endpoint, res_name)\n\n update(config)\n end",
"def update\n respond_to do |format|\n if @quotation_registry.update(quotation_registry_params)\n format.html { redirect_to @quotation_registry, notice: 'Quotation registry was successfully updated.' }\n format.json { render :show, status: :ok, location: @quotation_registry }\n else\n format.html { render :edit }\n format.json { render json: @quotation_registry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def get_device_registry request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_get_device_registry_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Iot::V1::DeviceRegistry.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def update\n @device = Device.find(params[:id])\n\n if @device.update(device_params)\n head :no_content\n else\n render json: @device.errors, status: :unprocessable_entity\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the delete_device_registry REST call | def delete_device_registry request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_delete_device_registry_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def destroy\n \n @registry = Registry.find(params[:id])\n @registry.destroy\n logger.info \"*-*-*-*-* #{@registry.name} deleted by #{@user.username}.\"\n\n respond_to do |format|\n format.html { redirect_to( user_gifts_url(@user)) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @registry = Registry.find(params[:id])\n @registry.destroy\n\n respond_to do |format|\n format.html { redirect_to registries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @registry.destroy\n respond_to do |format|\n format.html { redirect_to registries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @registry.destroy\n respond_to do |format|\n format.html { redirect_to registries_url, notice: 'Registry was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def removeDevice deviceId\n options = { 'device' => deviceId }\n post REMOVE_DEVICE, options\n end",
"def destroy\n @registry = Registry.find(params[:id])\n @registry.destroy\n logger.info \"*-*-*-*-* #{@registry.name} deleted by #{@user.name}.\"\n\n respond_to do |format|\n format.html { redirect_to( user_gifts_url(@user)) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @luxe_registry = LuxeRegistry.find(params[:id])\n @luxe_registry.destroy\n\n respond_to do |format|\n format.html { redirect_to luxe_registries_url }\n format.json { head :no_content }\n end\n end",
"def delete!\n url = \"#{Rails.configuration.waste_exemplar_services_url}/registrations/#{uuid}.json\"\n Rails.logger.debug \"Registration: about to DELETE\"\n deleted = true\n begin\n response = RestClient.delete url\n\n # result = JSON.parse(response.body)\n self.uuid = nil\n save\n\n rescue => e\n Airbrake.notify(e)\n Rails.logger.error e.to_s\n deleted = false\n end\n deleted\n end",
"def delete\n Device.find(params[:device_id]).destroy\n end",
"def destroy\n @lcb_registry.destroy\n respond_to do |format|\n format.html { redirect_to lcb_registries_url, notice: 'Lcb registry was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bridal_registry.destroy\n respond_with(@bridal_registry) do |format|\n format.html { redirect_to account_path }\n end\n end",
"def destroy\n @device.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @device_registration.destroy\n respond_to do |format|\n format.html { redirect_to device_registrations_url, notice: 'Device registration was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_registry_repository(project, id)\n delete(\"/projects/#{url_encode project}/registry/repositories/#{id}\")\n end",
"def delete(resource_type_identifier); end",
"def unregister\n puts \"APN::Device.unregister\"\n http_delete(\"/api/device_tokens/#{self.token}\")\n end",
"def destroy\n start = Time.now\n debug(\"Deleting device group: \\\"#{resource[:full_path]}\\\"\")\n connection = self.class.get_connection(resource[:account])\n device_group = get_device_group(connection, resource[:full_path], 'id')\n if device_group\n delete_device_group = rest(connection,\n Puppet::Provider::Logicmonitor::DEVICE_GROUP_ENDPOINT % device_group['id'],\n Puppet::Provider::Logicmonitor::HTTP_DELETE)\n valid_api_response?(delete_device_group) ? nil : alert(delete_device_group)\n end\n debug \"Finished in #{(Time.now-start)*1000.0} ms\"\n end",
"def destroy\n @inventory_registry.destroy\n respond_to do |format|\n format.html { redirect_to inventory_registries_url, notice: 'Inventory registry was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_endpoint\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the list_device_registries REST call | def list_device_registries request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_list_device_registries_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::ListDeviceRegistriesResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def index\n @device_registrations = DeviceRegistration.all\n end",
"def calc_reg_list(device = :all)\n registrars.find_all { |dev, reg| device.to_sym == :all || device.to_s == dev }.map { |vp| vp[1] }\n end",
"def registrations\n software_registrations + hardware_registrations\n end",
"def registered_devices=(value)\n @registered_devices = value\n end",
"def registers\n command('-data-list-register-names').\n results[:register_names].\n map { |reg| reg.empty? ? nil : reg.to_sym }\n end",
"def registrations\n recruiters.collect {|recruiter| recruiter.registrations }.flatten\n end",
"def registered_devices\n return @registered_devices\n end",
"def registered_devices()\n return MicrosoftGraph::Me::RegisteredDevices::RegisteredDevicesRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def index\n @bridal_registries = spree_current_user.bridal_registries\n respond_with(@bridal_registry)\n end",
"def registry_statistics\n client = IotHubApiClient.new(@options)\n res = client.get('/statistics/devices', @api_version_param)\n RegistryStatistics.create(response_json(res))\n end",
"def registrations\n @registrations ||= []\n end",
"def send_registry_list(opts = {})\n send_call(\n sock: opts[:sock] || sock,\n call: build_registry_list(opts)\n )\n\n return_value = recv_return(\n sock: opts[:sock] || sock\n )\n\n if return_value.nil?\n return nil\n end\n\n if return_value.is_exception?\n raise ::Rex::Proto::Rmi::Exception, return_value.get_class_name\n end\n\n names = parse_registry_list(return_value)\n\n names\n end",
"def internal_get_register_values\n max_tries(3, 0.01, 'gdb_api:internal_get_register_values') do\n res = @debugger.send_mi_string('-data-list-register-values x')\n assert('unable to get register values') { res.content_type == 'done' }\n regs = res.value['register-values']\n internal_check_register_length(regs.length)\n return regs\n end\n end",
"def index\n @registries = Registry.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @registries }\n end\n end",
"def index\n @registrers = Registrer.all\n end",
"def managed_app_registrations()\n return MicrosoftGraph::DeviceAppManagement::ManagedAppRegistrations::ManagedAppRegistrationsRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def registry\n @registry ||= client.registries.get_from_uri(info[:registry]) unless info[:registry].nil?\n end",
"def internal_get_register_names\n max_tries(3, 0.01, 'gdb_api:internal_get_register_names') do\n res = @debugger.send_mi_string('-data-list-register-names')\n assert('unable to get register names ') { res.content_type == 'done' }\n regs = res.value['register-names']\n # And because gdb is ŝ̜̟̜͇ͨͧ͋h̴̻̘̩͙̪͔ͧͯͤͦͯ̚ị̡̘̜̼̃̃ͨtͬ̈́\n regs_without_empty = regs.select { |x| x != '' }\n internal_check_register_length(regs_without_empty.length)\n return regs_without_empty\n end\n end",
"def get_all_uuids\n ble_uuids = Hardware.all_ble_uuids.map { |hw| hw.identifier }\n \n render :status => 200, :json => { :uuids => ble_uuids }\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the modify_cloud_to_device_config REST call | def modify_cloud_to_device_config request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_modify_cloud_to_device_config_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::DeviceConfig.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def update\n if !current_user.manager?\n head :forbidden\n else\n if @device_configuration.update(device_configuration_params)\n render json: @device_configuration.to_json, status: :ok\n else\n render json: @device_configuration.errors, status: :unprocessable_entity\n end\n end\n end",
"def configuration_modified\n end",
"def update_configuration(config)\n fsi = Vcloud::Core::Fog::ServiceInterface.new\n fsi.post_configure_edge_gateway_services(id, config)\n end",
"def app_client_configuration_change(data)\n Telemetry::V2::AppClientConfigurationChange.new(data[:changes], origin: data[:origin])\n end",
"def build_config\n conf = micro # returns {} if there isn't a saved config\n\n override(conf, :vmx, true) do\n locate_vmx(McfCommand.platform)\n end\n\n override(conf, :vmrun, true) do\n CFMicro::VMrun.locate(McfCommand.platform)\n end\n\n override(conf, :password) do\n ask(\"Please enter your MCF VM password (vcap user) password\", :echo => \"*\")\n end\n\n conf[:platform] = McfCommand.platform\n\n conf\n end",
"def set_base_configuration\n @cfg = {\n :name => @name,\n :guestId => @guestid,\n :files => { :vmPathName => '['+@datastore+']' },\n :numCPUs => @cpucount,\n :memoryMB => @ramsize,\n :memoryHotAddEnabled => true,\n :tools => RbVmomi::VIM.ToolsConfigInfo(\n :syncTimeWithHost => true\n ),\n :deviceChange => [\n {\n :operation => :add,\n :device => RbVmomi::VIM.VirtualLsiLogicController(\n :key => 1000,\n :busNumber => 0,\n :sharedBus => :noSharing\n )\n }, {\n :operation => :add,\n :device => RbVmomi::VIM.VirtualVmxnet3(\n :key => 0,\n :backing => RbVmomi::VIM.VirtualEthernetCardDistributedVirtualPortBackingInfo(\n :port => RbVmomi::VIM.DistributedVirtualSwitchPortConnection(\n :switchUuid => @dvswitch_uuid,\n :portgroupKey => @portgroup_key\n )\n ),\n :addressType => 'generated'\n )\n }\n ],\n :extraConfig => [\n {\n :key => 'bios.bootOrder',\n :value => 'ethernet0'\n }\n ]\n }\n end",
"def system_configure(_)\n RightScale::SystemConfiguration.reload()\n success_result\n end",
"def configureConsulServiceConfig(vmCfg:, config:)\n json_config = JSON.pretty_generate(config)\n\n yaml_config = {\n \"write_files\" => [\n {\n \"path\" => \"/srv/consul/service-config.json\",\n \"permissions\" => \"0640\",\n \"content\" => json_config\n }\n ]\n }\n vmCfg.vm.cloud_init :user_data, content_type: \"text/cloud-config\",\n inline: CLOUD_CONFIG_MERGE_SNIPPET + yaml_config.to_yaml.sub(\"---\", \"\")\n\n return vmCfg\nend",
"def munge_config_xml\n get_config_changes\n xml_base.xpath(\"//Component[contains(@FQDD, 'NIC.') or contains(@FQDD, 'FC.')]\").remove unless @changes['whole'].find_all{|k,v| k =~ /^(NIC|FC)\\./}.empty?\n xml_base['ServiceTag'] = @resource[:servicetag]\n\n handle_missing_devices(xml_base, @changes)\n @nonraid_to_raid = false\n\n if embedded_sata_change\n Puppet.debug(\"Embedded Mode Change detected running with RAID teardown only\")\n @changes.deep_merge!(get_raid_config_changes(xml_base, raid_reset=true))\n else\n @changes.deep_merge!(get_raid_config_changes(xml_base)) if attempt == 0\n end\n\n %w(BiosBootSeq HddSeq).each do |attr|\n existing_attr_val = find_current_boot_attribute(attr.downcase.to_sym)\n requested_val = @changes['partial']['BIOS.Setup.1-1'][attr]\n message = \"Attribute: %s, Existing value: %s, Requested value: %s\" % [attr, existing_attr_val, requested_val]\n Puppet.debug(message)\n if existing_attr_val && requested_val\n seq_diff = requested_val.delete(' ').split(',').zip(existing_attr_val.delete(' ').split(',')).select{|new_val, exist_val| new_val != exist_val}\n #If tearing down, the HDD will already be removed from the boot sequence\n if seq_diff.size ==0 || @resource[:ensure] == :teardown\n @changes['partial']['BIOS.Setup.1-1'].delete(attr)\n end\n end\n end\n\n # If we are tearing down and there are nonraid volumes, we need to make them raid volumes to\n # be able to boot from this controller again\n nonraid_disks = raid_configuration.select{|_,v| !v[:nonraid].empty?}\n if (@resource[:ensure] == :teardown && !nonraid_disks.empty?)\n # Move the nonraids to raid\n nonraid_map = {}\n raid_configuration.each{|k,v| nonraid_map[k] = v[:nonraid] if v[:nonraid]}\n nonraid_map.each do |controller, disks|\n @raid_configuration[controller][:virtual_disks] = [{:disks => disks, :level => \"raid0\", :type => :hdd}]\n @raid_configuration[controller][:nonraid] = []\n end\n # run #get_raid_config_changes again with overwritten raid_configuration\n @nonraid_to_raid = true\n @changes.deep_merge!(get_raid_config_changes(xml_base))\n end\n #Handle whole nodes (node should be replaced if exists, or should be created if not)\n @changes[\"whole\"].keys.each do |name|\n path = \"/SystemConfiguration/Component[@FQDD='#{name}']\"\n existing = xml_base.xpath(path).first\n #if node exists there, just go ahead and remove it\n if !existing.nil?\n existing.remove\n end\n create_full_node(name, @changes[\"whole\"][name], xml_base, xml_base.xpath(\"/SystemConfiguration\").first)\n end\n #Handle partial node changes (node should exist already, but needs data edited/added within)\n @changes['partial'].keys.each do |parent|\n process_partials(parent, @changes['partial'][parent], xml_base)\n end\n #Handle node removal (ensure nodes listed here don't exist)\n @changes[\"remove\"][\"attributes\"].keys.each do |parent|\n process_remove_nodes(parent, @changes[\"remove\"][\"attributes\"][parent], xml_base, \"Attribute\")\n end\n @changes[\"remove\"][\"components\"].keys.each do |parent|\n process_remove_nodes(parent, @changes[\"remove\"][\"components\"][parent], xml_base, \"Component\")\n end\n\n ##Clean up the config file of all the commented text\n xml_base.xpath('//comment()').remove\n remove_invalid_settings(xml_base)\n # Disable SD card and RAID controller for boot from SAN\n\n # Include NVDIMM setting that will only be included after NVDIMM enabled\n unless nvdimm_attrs_in_sync?\n @changes[\"partial\"][\"BIOS.Setup.1-1\"][\"PersistentMemoryScrubbing\"] = \"Auto\"\n end\n\n # Rotate the old xml files\n 
unless attempt == 0\n rotate_config_xml_file\n end\n File.open(@config_xml_path, 'w+') do |file|\n if embsata_in_sync?\n file.write(xml_base.to_xml(:indent => 2))\n else\n # If Embedded Sata mode is out of sync we need to change the FQDD's to what they will be\n # after the EmbSat mode is changed to RAIDmode\n file.write(xml_base.to_xml(:indent => 2).gsub(\"AHCI.Embedded\", \"RAID.Embedded\").gsub(\"ATA.Embedded\",\"RAID.Embedded\"))\n end\n end\n xml_base\n end",
"def configureConsulServiceConfig(vmCfg:, port:)\n vmCfg.vm.cloud_init :user_data, content_type: \"text/cloud-config\",\n inline: CLOUD_CONFIG_MERGE_SNIPPET + <<~EOF\n write_files:\n - path: /srv/consul/service-config.json\n permissions: '0640'\n content: |\n {\n \"name\": \"#{vmCfg.vm.hostname}\",\n \"port\": #{port.to_i}\n }\n EOF\n\n return vmCfg\nend",
"def computed_config; end",
"def create_or_update_guest_access_portal_configuration(args = {}) \n put(\"/guestaccess.json/gap/#{args[:portalId]}/configuration\", args)\nend",
"def put_config(opts)\n opts = check_params(opts,[:instance_format,:data])\n super(opts)\n end",
"def patch_per_instance_configs request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_patch_per_instance_configs_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def update\n @device_config = Probe::DeviceConfig.find(params[:id])\n\n respond_to do |format|\n if @device_config.update_attributes(params[:device_config])\n flash[:notice] = 'Probe::DeviceConfig.was successfully updated.'\n format.html { redirect_to(@device_config) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @device_config.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def apply_owncloud_configuration\n calculate_trusted_domains\n calculate_dbhost\n owncloud_config_update\n save_owncloud_node_configuration\n end",
"def patch_config_host(hostname, host_obj)\n # Handle custom variables\n new_host_obj = write_custom_variable(host_obj.dup)\n\n uri = URI.parse(\"#{@endpoint['url']}/config/host/#{hostname}\")\n request = Net::HTTP::Patch.new(uri.path)\n request.basic_auth(@endpoint_auth['user'], @endpoint_auth['password'])\n request['content-type'] = CONTENTTYPE\n request.body = new_host_obj.to_json\n response = @http.request(request)\n end",
"def update_serving_config request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_update_serving_config_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Retail::V2::ServingConfig.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def cloud_config\n microbosh_config[\"cloud\"]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the list_device_config_versions REST call | def list_device_config_versions request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_list_device_config_versions_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::ListDeviceConfigVersionsResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def versions\n JSON.parse(RestClient.get(\"#{VERSION_URL}/.json\", self.default_headers))[\"versions\"].collect { |v| v[\"id\"] }.uniq\n end",
"def versions\n vars = api.get_config_vars(app).body\n _, versions_data = vars.detect {|k,v| k == 'HEROKU_CONFIG_VERSIONS'}\n\n if versions_data\n versions = JSON.parse(Zlib::Inflate.inflate(Base64.decode64(versions_data)))\n\n display(\"Saved config versions for #{app}: \")\n versions.keys.each do |version|\n time = Time.strptime(version, \"%Y%m%d%H%M%S\", Time.now.utc)\n display(\"#{version} (saved on #{time.to_s})\")\n end\n else\n display(\"#{app} has no versioned config vars.\")\n end\n end",
"def versions\n request_str = \"/gallery/appliances/#{id.to_i}/versions\"\n response = GenericRequest.new(self.class.studio_connection).get request_str\n tree = XmlSimple.xml_in response, \"ForceArray\" => [\"version\"]\n return tree[\"appliance\"][\"versions\"][\"version\"]\n end",
"def get_product_supported_versions(product_code)\n path = \"/d2l/api/#{product_code}/versions/\"\n _get(path)\nend",
"def versions(id)\n @connection.get(\"/v_beta/apps/#{id}/versions\")\n end",
"def fetch_api_versions\n raw, _, st = kubectl.run(\"api-versions\", attempts: 5, use_namespace: false)\n # The \"core\" group is represented by an empty string\n versions = { \"\" => %w(v1) }\n if st.success?\n rows = raw.split(\"\\n\")\n rows.each do |group_version|\n group, version = group_version.split(\"/\")\n versions[group] ||= []\n versions[group] << version\n end\n end\n versions\n end",
"def version_config version, data\n end",
"def list_versions(id)\n uri = URI::HTTP.build( {:host => @host, :port => @port,\n :path => build_resource(id), :query => \"versions\" } )\n\n headers = {}\n\n request = build_request( EsuRestApi::GET, uri, headers, nil )\n\n response = @session.request( request )\n\n handle_error( response )\n\n # Parse returned IDs\n return parse_version_list( response )\n end",
"def versions(page: 1)\n @cached_versions ||= []\n\n if @cached_versions.empty?\n obj = @net.get_object \"/api/2/apps/#{@public_identifier}/app_versions\"\n obj['app_versions'].each do |hashobj|\n @cached_versions << Version.create_from(hashobj, @net)\n end\n end\n\n PagingArray.paginate with: @cached_versions, page: page\n end",
"def versions=(value)\n @versions = value\n end",
"def versions\n registered_versions.keys\n end",
"def versions\n if resource.nil?\n []\n elsif resource.is_a?(Hyrax::FileMetadata)\n if storage_adapter.try(:\"supports?\", :versions)\n storage_adapter.find_versions(id: resource.file_identifier).to_a\n else\n []\n end\n else\n return resource.versions if resource.versions.is_a?(Array)\n resource.versions.all.to_a\n end\n end",
"def listversions(project=self.project)\n get('listversions.json', project: project)['versions']\n end",
"def controller_versions(controller)\n value_from_parents(controller, default: [Apipie.configuration.default_version]) do |c|\n ret = @controller_versions[c.to_s]\n ret unless ret.empty?\n end\n end",
"def version_nodes\n design_response.map do |response|\n response.dig('node', 'versions', 'edges')\n end\n end",
"def get_list_file_versions(path, opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: StorageApi#get_list_file_versions ...\"\n end\n \n # verify the required parameter 'path' is set\n fail \"Missing the required parameter 'path' when calling get_list_file_versions\" if path.nil?\n \n # resource path\n path = \"/storage/version/{path}\".sub('{format}','json').sub('{' + 'path' + '}', path.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'storage'] = opts[:'storage'] if opts[:'storage']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json', 'text/json', 'application/xml', 'text/xml', 'text/javascript']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = []\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = []\n result = @api_client.call_api(:GET, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'FileVersionsResponse')\n if Configuration.debugging\n Configuration.logger.debug \"API called: StorageApi#get_list_file_versions. Result: #{result.inspect}\"\n end\n return result\n end",
"def versions\n # TODO make this a collection proxy, only loading the first, then the\n # rest as needed during iteration (possibly in chunks)\n return nil if @archived\n @versions ||= [self].concat(CloudKit.storage_adapter.query { |q|\n q.add_condition('resource_reference', :eql, @resource_reference)\n q.add_condition('archived', :eql, 'true')\n }.reverse.map { |hash| self.class.build_from_hash(hash) })\n end",
"def get_available_manifest_versions_with_http_info(device_type_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: DeviceTypesApi.get_available_manifest_versions ...\"\n end\n # verify the required parameter 'device_type_id' is set\n fail ArgumentError, \"Missing the required parameter 'device_type_id' when calling DeviceTypesApi.get_available_manifest_versions\" if device_type_id.nil?\n # resource path\n local_var_path = \"/devicetypes/{deviceTypeId}/availablemanifestversions\".sub('{format}','json').sub('{' + 'deviceTypeId' + '}', device_type_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['artikcloud_oauth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'ManifestVersionsEnvelope')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DeviceTypesApi#get_available_manifest_versions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def client_version\n []\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the list_device_states REST call | def list_device_states request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_list_device_states_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::ListDeviceStatesResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def device_states_list\n get \"deviceStates\"\n end",
"def device_states_get(device_name)\n get \"deviceStates/#{device_name}\"\n end",
"def get_states\n perform(:get, 'enum/states', nil, nonauth_headers).body\n end",
"def device_states=(value)\n @device_states = value\n end",
"def device_states\n return @device_states\n end",
"def device_configuration_device_state_summaries()\n return MicrosoftGraph::DeviceManagement::DeviceConfigurationDeviceStateSummaries::DeviceConfigurationDeviceStateSummariesRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def device_configuration_states()\n return MicrosoftGraph::Users::Item::ManagedDevices::Item::DeviceConfigurationStates::DeviceConfigurationStatesRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def index\n @api_states = Api::State.all\n end",
"def service_states\n svcs(\"-H\", \"-o\", \"state,nstate\", @resource[:name]).chomp.split\n end",
"def get_state(device)\n call_rpc('GetState', {\n device: device\n }).split(',')\n end",
"def index\n @requests_states = Requests::State.all\n end",
"def paginated_list\n @all_states = $us_states.split(',').map(& :strip).try(:sort)\n to_drop = (current_page - 1) * per_page\n data = @all_states.drop(to_drop)[0..(per_page - 1)]\n message = data.present? ? 'States successfully fetched' : 'No data exists for this page'\n response(data, message)\n end",
"def device_statuses\n return @device_statuses\n end",
"def device_configuration_states=(value)\n @device_configuration_states = value\n end",
"def index\n @device_statuses = DeviceStatus.all\n end",
"def states\r\n @states.collect {|id| $data_states[id] }\r\n end",
"def index\n @states = states.all\n json_response(@states)\n end",
"def list_states\n if params[:country_2_code].present?\n all_states = [[\"\", \"Select State/Province\"]]\n Country.find_country_by_alpha2(\"#{params[:country_2_code]}\").states.sort.map { |state|\n all_states << [\"#{state[0]}\", \"#{state[1].first[1]}\"]\n }\n render :json => all_states\n else\n render :text => \"Please select a country to continue...\"\n end\n end",
"def instance_states\n states = []\n request(:get, entry_points[:instance_states]) do |response|\n Nokogiri::XML(response).xpath('states/state').each do |state_el|\n state = DeltaCloud::InstanceState::State.new(state_el['name'])\n state_el.xpath('transition').each do |transition_el|\n state.transitions << DeltaCloud::InstanceState::Transition.new(\n transition_el['to'],\n transition_el['action']\n )\n end\n states << state\n end\n end\n states\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the send_command_to_device REST call | def send_command_to_device request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_send_command_to_device_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::SendCommandToDeviceResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def send_cmd(cmd)\n payload = [PAYLOAD_START, cmd, 0, 0, 0, 0, 0, 0]\n @handle.usb_control_msg(REQUEST_TYPE, REQUEST, 0, 0, payload.pack('CCCCCCCC'), 0)\n end",
"def rcon_send(command); end",
"def send_command(cmd, params = {})\n # Send the command\n return @server.command cmd , params\n end",
"def send\n self.message.values = [parameters, false, \"nil\", \"\", command_message]\n self.body = message.encode\n super\n end",
"def _send_command(command)\n # Create the console and get its id\n console = @client.call(\"console.create\")\n\n # Do an initial read / discard to pull out any info on the console\n # then write the command to the console\n @client.call(\"console.read\", console[\"id\"])\n @client.call(\"console.write\", console[\"id\"], \"#{command}\\n\")\n\n # Initial read\n output_string = \"\"\n output = @client.call(\"console.read\", console[\"id\"])\n output_string += \"#{output['data']}\"\n\n # Very very hacky. -- There should be a way to check\n # status of a call to make sure that it isn't in an error\n # state. For now, check the output for known error heuristics\n return output_string if output_string =~ /(\\[-\\]|Error)/\n\n # Read until finished\n while (!output.has_key?(\"result\")) do\n return unless output[\"busy\"]\n output_string += \"#{output['data']}\"\n output = @client.call(\"console.read\", console[\"id\"])\n return \"Error\" if output[\"result\"] == \"failure\"\n # A little bit of sleeping will prevent this infinite loop from\n # hogging up large portions of CPU time. It also adds load to the\n # msfrpc daemon as it will need to process these requests as wel..\n sleep 0.1\n end\n\n # Clean up console\n @client.call(\"console.destroy\", console[\"id\"])\n\n output_string\n end",
"def send_command(command)\n @client.write \"#{command}\\r\\n\"\n end",
"def send_command(actionName:, serviceType:, argList:, controlURL:, quiet: false )\n soapBody = \"<?xml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?><s:Envelope s:encodingStyle=\\\"http://schemas.xmlsoap.org/soap/encoding/\\\" xmlns:s=\\\"http://schemas.xmlsoap.org/soap/envelope/\\\"><s:Body><u:#{actionName} xmlns:u=\\\"urn:schemas-nds-com:service:#{serviceType}\\\">#{argList}</u:#{actionName}></s:Body></s:Envelope>\\0xd\\0xa\"\n \n soapHeaders = { 'Host' => \"#{@skyAddress}:#{@port}\",\n 'Content-Type' => 'text/xml', \n 'SOAPAction' => \"\\\"urn:schemas-nds-com:service:#{serviceType}##{actionName}\\\"\"\n }\n\n res = Net::HTTP.new(@skyAddress, @port).start do |http|\n url = \"/#{@decoder_key}#{controlURL}\"\n response = http.post(url, soapBody, soapHeaders)\n if !quiet\n puts response.code.to_i\n puts response.body #if response.code.to_i == 200\n end\n return response\n end\n end",
"def send command\n @write_fh.write(\"#{command}\\n\")\n end",
"def send(command)\n send_with_callback command, CommandCallback.new\n end",
"def transmit_command(command_str)\n \t puts \"sending command #{command_str}\"\n \t cmd_url = \"http://#{@host}/3?#{command_str}=I=3\"\n open cmd_url do |f|\n puts f.read\n end\n end",
"def communicate_command( *data )\n\t\t\t\tCommunicateCommand.new @interface, data\n\t\t\tend",
"def send_command( command )\n \n puts \"Command: \" + command.message unless not DEBUG\n if command.message.kind_of? String\n \n @daemon_socket.write( command.message )\n @commands << command\n \n # NOTE: This extra get_response is here because LCDd will get confused if sent two messages very\n # quickly when trying to add menu items to a menu. Hopefully it will be removed when they have their\n # commands queued properly.\n if command.message =~ /^menu_add_item/\n r1 = get_response\n \n if r1.message =~ /^success/\n get_response\n end\n \n return r1\n else\n return get_response\n end\n \n elsif command.message.kind_of? Array\n \n @daemon_socket.write( command.message.join(\"\\n\") )\n @commands << command\n return get_response( command.message.length )\n \n end\n end",
"def send_command(command) \n command_str = command.to_encoded_str\n if @out_command_hasher\n send_frame command_str + @out_command_hasher.hash(command_str)\n else\n send_frame command_str\n end\n end",
"def send_command(cmd)\n write cmd.chr\n end",
"def send_command(house_and_unit, command)\n house_code, unit_code = select_code(house_and_unit)\n @marshaller.send(X10_PREFIX + house_code + unit_code + ADDRESS_SUFFIX)\n @marshaller.send(X10_PREFIX + house_code + COMMAND_CODE_TABLE[command] +\n COMMAND_SUFFIX)\n end",
"def send(target, command, msgArray = [])\n msg = \"S #{target} #{command} #{LineSerializer.to_s(msgArray)}\"\n debug(\"Send message: \", msg)\n write(msg)\n end",
"def send_command(cmd, code, filename = \"\")\n cfg = PacketFu::Utils.whoami?(:iface => @opts[:iface])\n #---------------------------------------------------------------------------\n # Send one byte at a time\n #---------------------------------------------------------------------------\n cmd.each_byte do |word|\n tcp = PacketFu::TCPPacket.new\n \n tcp.eth_saddr = cfg[:eth_saddr]\n tcp.tcp_src = rand(0xfff - 1024) + 1024\n tcp.tcp_dst = @opts[:sport]\n tcp.tcp_flags.syn = 1;\n tcp.tcp_win = word\n tcp.tcp_seq = rand(0xffff)\n tcp.ip_saddr = cfg[:ip_saddr]\n tcp.ip_daddr = @opts[:host] \n \n tcp.recalc\n tcp.to_w(@opts[:iface])\n \n sleep @opts[:delay]\n end # each_byte\n\n #---------------------------------------------------------------------------\n # Send FIN packet\n #---------------------------------------------------------------------------\n tcp_fin = PacketFu::TCPPacket.new\n\n tcp_fin.eth_saddr = cfg[:eth_saddr]\n tcp_fin.tcp_src = rand(0xfff - 1024) + 1024\n tcp_fin.tcp_dst = @opts[:sport]\n tcp_fin.tcp_flags.fin = 1;\n tcp_fin.tcp_seq = rand(0xffff)\n tcp_fin.ip_saddr = cfg[:ip_saddr]\n tcp_fin.ip_daddr = @opts[:host] \n\n tcp_fin.recalc\n tcp_fin.to_w(@opts[:iface])\n\n if code == 1 then # Regular Command\n wait_cmd_response\n elsif code == 2 then # Get Command\n wait_get_response(cmd, filename)\n elsif code == 3 then # Put Command\n \n end \nend",
"def send_command(command,arguments=[])\n cmd = { \"command\" => command.to_s, \"arguments\" => arguments }.to_json\n send_message(cmd,\"/tmp/dhun.sock\")\n end",
"def command(command, options = {}, &callback)\n uuid = SecureRandom.uuid\n @command_callbacks << (callback || lambda { |reply| signal uuid, reply })\n string = \"#{command}\\n\"\n body_value = options.delete :command_body_value\n options.each_pair do |key, value|\n string << \"#{key.to_s.gsub '_', '-'}: #{value}\\n\" if value\n end\n string << \"\\n\" << body_value << \"\\n\" if body_value\n string << \"\\n\"\n send_data string\n wait uuid unless callback\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the bind_device_to_gateway REST call | def bind_device_to_gateway request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_bind_device_to_gateway_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::BindDeviceToGatewayResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def bind(&block)\n\t\t\t@endpoint.bind(&block)\n\t\tend",
"def device_passthrough\n @device_passthrough\n end",
"def method_missing(method_name, *arguments, &block)\n device.send(method_name, *arguments, &block)\n end",
"def unbind_device_from_gateway request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_unbind_device_from_gateway_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Iot::V1::UnbindDeviceFromGatewayResponse.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def execute\n begin\n bridgehub_url = @info_values[\"api_server\"] + \"/app/manage-api/v1/bridges\"\n resource = RestClient::Resource.new(bridgehub_url,\n user: @info_values[\"api_username\"],\n password: @info_values[\"api_password\"])\n\n # Create the bridge data\n data = {}\n data.tap do |json|\n json['adapterClass'] = @parameters[\"adapter_class\"]\n json['name'] = @parameters[\"name\"]\n json['slug'] = @parameters[\"slug\"]\n json['ipAddresses'] = @parameters[\"ip_addresses\"]\n json['properties'] = @parameters[\"properties\"].empty? ? {} : JSON.parse(@parameters[\"properties\"])\n end\n\n # POST to the API\n response = resource.post(data.to_json, { accept: :json, content_type: :json })\n\n # build and return the results\n bridge = JSON.parse(response)[\"bridge\"]\n return <<-RESULTS\n <results>\n <result name=\"adapterClass\">#{escape(bridge[\"adapterClass\"])}</result>\n <result name=\"ipAddresses\">#{escape(bridge[\"ipAddresses\"])}</result>\n <result name=\"name\">#{escape(bridge[\"name\"])}</result>\n <result name=\"slug\">#{escape(bridge[\"slug\"])}</result>\n <result name=\"properties\">#{escape(bridge[\"properties\"].to_json)}</result>\n </results>\n RESULTS\n\n # If the credentials are invalid\n rescue RestClient::Unauthorized\n raise StandardError, \"(Unauthorized): You are not authorized.\"\n rescue RestClient::BadRequest => error\n raise StandardError, error.response\n end\n end",
"def create_bridge_endpoint_with_http_info(bridge_endpoint, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementPlaneApiNetworkTransportBridgeEndpointsApi.create_bridge_endpoint ...'\n end\n # verify the required parameter 'bridge_endpoint' is set\n if @api_client.config.client_side_validation && bridge_endpoint.nil?\n fail ArgumentError, \"Missing the required parameter 'bridge_endpoint' when calling ManagementPlaneApiNetworkTransportBridgeEndpointsApi.create_bridge_endpoint\"\n end\n # resource path\n local_var_path = '/bridge-endpoints'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(bridge_endpoint)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BridgeEndpoint')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementPlaneApiNetworkTransportBridgeEndpointsApi#create_bridge_endpoint\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def platform_endpoint=(_arg0); end",
"def create_bridge_endpoint_with_http_info(bridge_endpoint, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: NetworkTransportApi.create_bridge_endpoint ...\"\n end\n # verify the required parameter 'bridge_endpoint' is set\n if @api_client.config.client_side_validation && bridge_endpoint.nil?\n fail ArgumentError, \"Missing the required parameter 'bridge_endpoint' when calling NetworkTransportApi.create_bridge_endpoint\"\n end\n # resource path\n local_var_path = \"/bridge-endpoints\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(bridge_endpoint)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BridgeEndpoint')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: NetworkTransportApi#create_bridge_endpoint\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def handle_client_bridge_to(env, data)\r\n Broker.log(\"[SocketServer] Received bridge_to, forwarding to Invoca\")\r\n Broker.log(\"[SocketServer] Data: #{data}\")\r\n\r\n call_uuid = data['call']['id']\r\n national_number = data['agent']['phone_number']\r\n bridge_msg = {\r\n \"type\" => \"bridge_to\",\r\n \"call_uuid\" => call_uuid,\r\n \"country_code\" => \"1\",\r\n \"national_number\" => national_number\r\n }\r\n\r\n Broker.instrument('broker-invoca') {\r\n Broker.control_queue.publish(bridge_msg.to_json)\r\n EM.add_timer(0.5) { Broker.instrument('invoca') }\r\n }\r\n end",
"def register\n physical_entity = {name: @name, description: @description, location:@location}\n virtual_entity = {uuid: nil}\n body = {body: {device: {name: @name, \n description: @description, \n model_name: @model, \n location: @location,\n physical_entity_attributes: physical_entity,\n virtual_entity_attributes: virtual_entity}}}\n response = self.class.post(API+'/devices', body)\n @uuid = response['uuid']\n end",
"def call(env)\n env['upnp.devices'] = @devices\n @app.call(env)\n end",
"def device_ipaddress=(_arg0); end",
"def connect_device\n @interface.connect_device @device_port.text\n end",
"def call(env)\n if env[:machine].provider.pd_version_satisfies?('>= 10')\n super\n else\n # Just continue if port forwarding is not supported\n @app.call(env)\n end\n end",
"def call(env)\n packet = Orange::Packet.new(@core, env)\n # Set up this application as router if nothing else has\n # assumed routing responsibility (for Sinatra DSL like routing)\n self_routing = opts[:self_routing] || true\n if (!packet['route.router'] && self_routing)\n packet['route.router'] = self\n end\n packet.route\n packet.finish\n end",
"def adb_device options\n id = options[:id] || 1\n dev = DeviceAPI::Android::Device.new(\n serial: \"serial#{id}\",\n state: 'device',\n version: options[:os_version] || '1.2.3'\n )\n allow(dev).to receive(:wifi_mac_address) { options[:wifi_mac_addresss] || '00:11:22:33:44:%02d' % id }\n allow(dev).to receive(:ip_address) { options[:ip_address] || '192.168.100.%d' % id }\n allow(dev).to receive(:manufacturer) { options[:brand] || 'Test Brand' }\n allow(dev).to receive(:model) { options[:model] || 'Test Model' }\n allow(dev).to receive(:imei) { options[:imei] || '123456%d' % id }\n allow(dev).to receive(:version) { options[:os_version] || '1.2.3' }\n allow(dev).to receive(:get_device_type) { :default }\n dev.instance_variable_set(:@remote, options[:remote] ? true : false)\n dev\nend",
"def register_device\n Urbanairship.register_device(self.device_token, :alias => self.email)\n end",
"def create\n active_device = ActiveDevice.find_by_dev_uuid(active_device_params[:dev_uuid])\n if active_device == nil\n active_device = ActiveDevice.new(active_device_params)\n else \n active_device.update!(active_device_params)\n end\n active_device.save!\n render json: active_device\n rescue\n render json: active_device.errors, status: :unprocessable_entity\n end",
"def bind(fname, path)\n print_verbose \"Binding 'extensions/social_engineering/droppers/#{fname}' to '#{path}'\"\n begin\n response = RestClient.post \"#{@url}/server/bind?token=#{@token}\",\n { 'mount' => \"#{path}\",\n 'local_file' => \"#{fname}\" }.to_json,\n :content_type => :json,\n :accept => :json\n print_good \"Bound '#{fname}' successfully\" if response.code == 200\n rescue => e\n print_error \"Could not bind file #{fname}: #{e.message}\"\n end\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Baseline implementation for the unbind_device_from_gateway REST call | def unbind_device_from_gateway request_pb, options = nil
raise ::ArgumentError, "request must be provided" if request_pb.nil?
verb, uri, query_string_params, body = ServiceStub.transcode_unbind_device_from_gateway_request request_pb
query_string_params = if query_string_params.any?
query_string_params.to_h { |p| p.split "=", 2 }
else
{}
end
response = @client_stub.make_http_request(
verb,
uri: uri,
body: body || "",
params: query_string_params,
options: options
)
operation = ::Gapic::Rest::TransportOperation.new response
result = ::Google::Cloud::Iot::V1::UnbindDeviceFromGatewayResponse.decode_json response.body, ignore_unknown_fields: true
yield result, operation if block_given?
result
end | [
"def unbridge bridge_id\n post \"bridges/#{bridge_id}\", {callIds: []}\n\n nil\n end",
"def unregister\n puts \"APN::Device.unregister\"\n http_delete(\"/api/device_tokens/#{self.token}\")\n end",
"def unbind(port:)\n {\n method: \"Tethering.unbind\",\n params: { port: port }.compact\n }\n end",
"def deauthorize_device\n merchant = merchant_params\n authorize merchant\n \n device_id = params.require(:data).require(:device_id)\n merchant.deauthorize_device(device_id)\n render json: {}, status: :ok\n end",
"def unregister_device\n\t\t@device = Device.where(:token => params[:device_token]).first\n\t\t@device.vendor = nil\n\t\t@device.restaurant = nil\n\t\t@device.save\n\n\t\trespond_with @device, :location => nil\n\tend",
"def unbind(endpoint)\n LibZMQ.zmq_unbind(socket, endpoint)\n end",
"def unbind(desired_resource)\n iq = Iq.new(:set)\n unbind = iq.add REXML::Element.new('unbind')\n unbind.add_namespace @stream_features['unbind']\n resource = unbind.add REXML::Element.new('resource')\n resource.text = desired_resource\n\n send_with_id(iq)\n end",
"def lbmonitor_service_unbinding(payload)\n warn '[DEPRECATION] \"lbmonitor_service_binding\" is deprecated. Please use \"Netscaler::Lb::Monitor.bind\" instead.'\n raise ArgumentError, 'payload cannot be null' if payload.nil?\n payload = Netscaler.hash_hack(payload)\n validate_payload(payload, [:serviceName, :monitorName])\n return @netscaler.adapter.post_no_body(\"config/lbmonitor_service_binding/#{payload[:monitorName]}?action=unbind\", {'params' => {'action' => 'unbind'}, 'lbmonitor_service_binding' => payload})\n end",
"def parse_unbind_request(sequence)\n nil\n end",
"def unbind_network(network_id)\r\n # Validate required parameters.\r\n validate_parameters(\r\n 'network_id' => network_id\r\n )\r\n # Prepare query url.\r\n _path_url = '/networks/{networkId}/unbind'\r\n _path_url = APIHelper.append_url_with_template_parameters(\r\n _path_url,\r\n 'networkId' => network_id\r\n )\r\n _query_builder = Configuration.base_uri.dup\r\n _query_builder << _path_url\r\n _query_url = APIHelper.clean_url _query_builder\r\n # Prepare and execute HttpRequest.\r\n _request = @http_client.post(\r\n _query_url\r\n )\r\n CustomHeaderAuth.apply(_request)\r\n _context = execute_request(_request)\r\n validate_response(_context)\r\n end",
"def removeDevice deviceId\n options = { 'device' => deviceId }\n post REMOVE_DEVICE, options\n end",
"def deactivate\n lock\n\n @bridges = list_bridges\n\n attach_nic_id = @vm['TEMPLATE/NIC[ATTACH=\"YES\"]/NIC_ID']\n\n if @bridges\n process do |nic|\n next if attach_nic_id && attach_nic_id != nic[:nic_id]\n\n @nic = nic\n\n next if @bridges[@nic[:bridge]].nil?\n\n # Return if the bridge doesn't exist because it was already\n # deleted (handles last vm with multiple nics on the same\n # vlan)\n next unless @bridges.include? @nic[:bridge]\n\n # Return if we want to keep the empty bridge\n next if @nic[:conf][:keep_empty_bridge]\n\n # Return if the phydev device is not the only left device in\n # the bridge.A\n if @nic[:phydev].nil?\n keep = !@bridges[@nic[:bridge]].empty?\n else\n\n keep = @bridges[@nic[:bridge]].length > 1 ||\n !@bridges[@nic[:bridge]].include?(@nic[:phydev])\n end\n\n next if keep\n\n # Delete the bridge.\n OpenNebula.exec_and_log(\"#{command(:ip)} link delete\"\\\n \" #{@nic[:bridge]}\")\n @bridges.delete(@nic[:bridge])\n end\n end\n\n unlock\n\n 0\n end",
"def destroy\n requires :id\n \n service.delete_nat_gateway(id)\n true\n end",
"def unbind sid, uid, &block\n @app.session_service.unbind sid, uid, &block\n end",
"def unbind() end",
"def unmap(device)\n Command.execute_rc_log(\"#{COMMANDS[:unmap]} #{device}\", false)\n end",
"def lbmonitor_servicegroup_unbinding(payload)\n warn '[DEPRECATION] \"lbmonitor_servicegroup_unbinding\" is deprecated. Please use \"Netscaler::Lb::Monitor.unbind\" instead.'\n raise ArgumentError, 'payload cannot be null' if payload.nil?\n payload = Netscaler.hash_hack(payload)\n validate_payload(payload, [:serviceGroupName, :monitorName])\n return @netscaler.adapter.post_no_body(\"config/lbmonitor_servicegroup_binding/#{payload[:monitorName]}?action=unbind\", {'params' => {'action' => 'unbind'}, 'lbmonitor_servicegroup_binding' => payload})\n end",
"def deactivate\n lock\n\n @bridges = list_bridges\n\n attach_nic_id = @vm['TEMPLATE/NIC[ATTACH=\"YES\"]/NIC_ID']\n\n if @bridges\n process do |nic|\n next if attach_nic_id && attach_nic_id != nic[:nic_id]\n\n @nic = nic\n\n next if @nic[:phydev].nil?\n next if @bridges[@nic[:bridge]].nil?\n\n # Get the name of the vlan device.\n gen_vlan_dev_name\n\n # Return if the bridge doesn't exist because it was already\n # deleted (handles last vm with multiple nics on the same\n # vlan)\n next unless @bridges.include? @nic[:bridge]\n\n # Return if we want to keep the empty bridge\n next if @nic[:conf][:keep_empty_bridge]\n\n # Return if the vlan device is not the only left device in\n # the bridge.\n next if (@bridges[@nic[:bridge]].length > 1) || \\\n !@bridges[@nic[:bridge]].include?(@nic[:vlan_dev])\n\n # Delete the vlan device.\n delete_vlan_dev\n\n @bridges[@nic[:bridge]].delete(@nic[:vlan_dev])\n\n # Delete the bridge.\n OpenNebula.exec_and_log(\"#{command(:ip)} link delete\"\\\n \" #{@nic[:bridge]}\")\n @bridges.delete(@nic[:bridge])\n end\n end\n\n unlock\n\n 0\n end",
"def unbind(exchange, binding_key, arguments: {})\n @client.exchange_unbind(@name, exchange, binding_key, arguments: arguments)\n self\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
If `type` is an interface, and `self` has a type membership for `type`, then make sure it's visible. | def visible_interface_implementation?(type, context, warden)
if type.respond_to?(:kind) && type.kind.interface?
implements_this_interface = false
implementation_is_visible = false
warden.interface_type_memberships(self, context).each do |tm|
if tm.abstract_type == type
implements_this_interface ||= true
if warden.visible_type_membership?(tm, context)
implementation_is_visible = true
break
end
end
end
# It's possible this interface came by way of `include` in another interface which this
# object type _does_ implement, and that's ok
implements_this_interface ? implementation_is_visible : true
else
# If there's no implementation, then we're looking at Ruby-style inheritance instead
true
end
end | [
"def check_type(type) self.class.instance_eval { check_type(type) }; end",
"def check_type(instance, &recursively_check_type)\n raise NotImplementedError\n end",
"def interface_type?(type)\n interface_typelist.include?(resolve_type(type))\n rescue ModelKit::Types::NotFound\n false\n end",
"def applies_type?(scope, type); end",
"def check_type(type)\n return if @type == type\n raise \"Slot #{path} cannot be set as a #{type} because its already a #{@type}\" if @type \n @type = type\n end",
"def subtype_of?(type_)\n self == type_ || self.include?(type_)\n end",
"def subtype_of?(type)\n self == type || include?(type)\n end",
"def interface_class?\r\n interface? && !annotation?\r\n end",
"def check_type(type)\n if type.respond_to?(:validate!)\n type.validate!(self)\n elsif klass = self.class.factory(type.to_s)\n klass.new.validate!(self)\n else\n raise \"invalid type supplied for validation check\"\n end\n end",
"def interface_type?(typename)\n interface_types.include?(resolve_type(typename))\n end",
"def implements?(type)\n @interfaces.find do |interface|\n interface.type_of? type\n end\n end",
"def extends?(type)\n @supertype && @supertype.type_of?(type) && @supertype\n end",
"def type=(type)\n raise ArgumentError, \"Invalid type (#{type}). Must be nil or unavailable\" if type && type.to_sym != :unavailable\n super\n end",
"def type?(type)\n @scope.type?(type)\n end",
"def validate_type!(type)\n raise InvalidType.new(type) unless SUBTYPES.has_key?(type)\n end",
"def is_accessible?\n type != TYPES[:wall]\n end",
"def isChildOf(type)\n if (self.parent.class == type)\n return true\n else\n return self.parent.isChildOf(type) if self.parent.respond_to? :isChildOf\n end\n end",
"def has_item_of_type?(type)\n !items_by_type(type).empty?\n end",
"def can_upcast?(serialized_type)\n raise NotImplementedError\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /finger_prints GET /finger_prints.json | def index
@finger_prints = FingerPrint.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @finger_prints }
format.csv { send_data FingerPrint.scoped.to_csv, filename: "fingerprints-#{Date.today}.csv"}
end
end | [
"def show\n #debugger\n @finger_print = FingerPrint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def index\n # @setup = CloudPrint.setup(hash)\n # @printers = CloudPrint::Printer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @printers }\n end\n end",
"def show\n @magnetic_finger_print = MagneticFingerPrint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @magnetic_finger_print }\n end\n end",
"def new\n @finger_print = FingerPrint.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def show\n @wifi_finger_prints_record = WifiFingerPrintsRecord.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @wifi_finger_prints_record }\n end\n end",
"def lookup_http_fingerprints(opts={})\n uri = opts[:uri] || '/'\n method = opts[:method] || 'GET'\n fprints = []\n\n return fprints unless framework.db.active\n\n ::ApplicationRecord.connection_pool.with_connection {\n wspace = datastore['WORKSPACE'] ?\n framework.db.find_workspace(datastore['WORKSPACE']) : framework.db.workspace\n\n # only one result can be returned, as the +port+ field restricts potential results to a single service\n service = framework.db.services(:workspace => wspace,\n :hosts => {address: rhost},\n :proto => 'tcp',\n :port => rport).first\n return fprints unless service\n\n # Order by note_id descending so the first value is the most recent\n service.notes.where(:ntype => 'http.fingerprint').order(\"notes.id DESC\").each do |n|\n next unless n.data && n.data.kind_of?(::Hash)\n next unless n.data[:uri] == uri && n.data[:method] == method\n # Append additional fingerprints to the results as found\n fprints.unshift n.data.dup\n end\n }\n\n fprints\n end",
"def index\n @printers = Printer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @printers }\n end\n end",
"def index\n @prints = Print.joins(:attachment).all\n\n respond_to do |format|\n format.html # index.html.erb\n @prins = []\n format.json do\n @prints = @prints.collect do |print|\n prind = {}\n print.attributes.each do |key, value|\n prind[key] = value\n end\n attachment = print.attachment.attachment_fields\n print.attachment.attachment_fields.each do |key, value|\n prind[key] = value\n end\n prind[:attachment_name] = CommonActions.linkable(print_path(print), attachment[:attachment_name])\n prind[:links] = CommonActions.object_crud_paths(nil, edit_print_path(print), nil)\n @prins.push(prind)\n end\n render json: { aaData: @prins }\n end\n end\n end",
"def show\n @printing_screen = PrintingScreen.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @printing_screen }\n end\n end",
"def index\n if params[:printer_type].present?\n @printers = Printer.where(printer_type: params[:printer_type])\n else\n @printers = Printer.all\n end\n\n render json: @printers\n end",
"def show\n @printer_check = PrinterCheck.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @printer_check }\n end\n end",
"def index\n @print_works = @user.print_works\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @print_works }\n end\n end",
"def show\n @title = t('view.prints.show_title')\n @print = prints_scope.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @print }\n end\n end",
"def create\n @finger_print = FingerPrint.check_exist(params[:finger_print])\n\n respond_to do |format|\n format.html { redirect_to @finger_print, notice: 'FingerPrint was successfully created.' }\n format.json { render json: @finger_print, status: :created, location: @finger_print }\n end\n end",
"def show\n @finger = Finger.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @finger }\n end\n end",
"def destroy\n @finger_print = FingerPrint.find(params[:id])\n @finger_print.destroy\n\n respond_to do |format|\n format.html { redirect_to finger_prints_url }\n format.json { head :no_content }\n end\n end",
"def show\n begin\n @fucker = Fucker.find(params[:id])\n respond_to do |format|\n format.json { render json: @fucker }\n end\n rescue => err\n $log.warn(err)\n respond_to do |format|\n format.json { render json: err, status: :internal_server_error }\n end\n end\n end",
"def show\n @print_work = @user.print_works.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @print_work }\n end\n end",
"def index\n @fingerprints = Fingerprint.all\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /finger_prints/1 GET /finger_prints/1.json | def show
#debugger
@finger_print = FingerPrint.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @finger_print }
end
end | [
"def index\n @finger_prints = FingerPrint.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @finger_prints }\n format.csv { send_data FingerPrint.scoped.to_csv, filename: \"fingerprints-#{Date.today}.csv\"}\n end\n end",
"def show\n @magnetic_finger_print = MagneticFingerPrint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @magnetic_finger_print }\n end\n end",
"def index\n # @setup = CloudPrint.setup(hash)\n # @printers = CloudPrint::Printer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @printers }\n end\n end",
"def new\n @finger_print = FingerPrint.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def show\n @wifi_finger_prints_record = WifiFingerPrintsRecord.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @wifi_finger_prints_record }\n end\n end",
"def lookup_http_fingerprints(opts={})\n uri = opts[:uri] || '/'\n method = opts[:method] || 'GET'\n fprints = []\n\n return fprints unless framework.db.active\n\n ::ApplicationRecord.connection_pool.with_connection {\n wspace = datastore['WORKSPACE'] ?\n framework.db.find_workspace(datastore['WORKSPACE']) : framework.db.workspace\n\n # only one result can be returned, as the +port+ field restricts potential results to a single service\n service = framework.db.services(:workspace => wspace,\n :hosts => {address: rhost},\n :proto => 'tcp',\n :port => rport).first\n return fprints unless service\n\n # Order by note_id descending so the first value is the most recent\n service.notes.where(:ntype => 'http.fingerprint').order(\"notes.id DESC\").each do |n|\n next unless n.data && n.data.kind_of?(::Hash)\n next unless n.data[:uri] == uri && n.data[:method] == method\n # Append additional fingerprints to the results as found\n fprints.unshift n.data.dup\n end\n }\n\n fprints\n end",
"def index\n @prints = Print.joins(:attachment).all\n\n respond_to do |format|\n format.html # index.html.erb\n @prins = []\n format.json do\n @prints = @prints.collect do |print|\n prind = {}\n print.attributes.each do |key, value|\n prind[key] = value\n end\n attachment = print.attachment.attachment_fields\n print.attachment.attachment_fields.each do |key, value|\n prind[key] = value\n end\n prind[:attachment_name] = CommonActions.linkable(print_path(print), attachment[:attachment_name])\n prind[:links] = CommonActions.object_crud_paths(nil, edit_print_path(print), nil)\n @prins.push(prind)\n end\n render json: { aaData: @prins }\n end\n end\n end",
"def index\n if params[:printer_type].present?\n @printers = Printer.where(printer_type: params[:printer_type])\n else\n @printers = Printer.all\n end\n\n render json: @printers\n end",
"def index\n @printers = Printer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @printers }\n end\n end",
"def show\n @printing_screen = PrintingScreen.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @printing_screen }\n end\n end",
"def show\n @printer_check = PrinterCheck.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @printer_check }\n end\n end",
"def show\n @title = t('view.prints.show_title')\n @print = prints_scope.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @print }\n end\n end",
"def show\n @probe = Probe.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @probe }\n end\n end",
"def show\n @grumble = Grumble.find(params[:id])\n render status: 200, json: @grumble.to_json\n end",
"def show\n begin\n @fucker = Fucker.find(params[:id])\n respond_to do |format|\n format.json { render json: @fucker }\n end\n rescue => err\n $log.warn(err)\n respond_to do |format|\n format.json { render json: err, status: :internal_server_error }\n end\n end\n end",
"def destroy\n @finger_print = FingerPrint.find(params[:id])\n @finger_print.destroy\n\n respond_to do |format|\n format.html { redirect_to finger_prints_url }\n format.json { head :no_content }\n end\n end",
"def show\n @finger = Finger.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @finger }\n end\n end",
"def stats\n get 'stats', format: 'json'\n end",
"def show\n @http_passer = Http::Passer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @http_passer }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /finger_prints/new GET /finger_prints/new.json | def new
@finger_print = FingerPrint.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @finger_print }
end
end | [
"def new\n @printer = Printer.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @printer }\n end\n end",
"def new\n @sprint = Sprint.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sprint }\n end\n end",
"def new\n @printing_screen = PrintingScreen.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @printing_screen }\n end\n end",
"def new\n @sprint = Sprint.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sprint }\n end\n end",
"def new\n @print_template = PrintTemplate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @print_template }\n end\n end",
"def new\n @magnetic_finger_print = MagneticFingerPrint.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @magnetic_finger_print }\n end\n end",
"def new\n @printer_check = PrinterCheck.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @printer_check }\n end\n end",
"def new\n print_params(params)\n @skateboard = Skateboard.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @skateboard }\n end\n end",
"def new\n @wifi_finger_prints_record = WifiFingerPrintsRecord.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @wifi_finger_prints_record }\n end\n end",
"def create\n @finger_print = FingerPrint.check_exist(params[:finger_print])\n\n respond_to do |format|\n format.html { redirect_to @finger_print, notice: 'FingerPrint was successfully created.' }\n format.json { render json: @finger_print, status: :created, location: @finger_print }\n end\n end",
"def new\n @flood = Flood.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @flood }\n end\n end",
"def create\n @magnetic_finger_print = MagneticFingerPrint.new(params[:magnetic_finger_print])\n\n respond_to do |format|\n if @magnetic_finger_print.save\n format.html { redirect_to @magnetic_finger_print, notice: 'Magnetic finger print was successfully created.' }\n format.json { render json: @magnetic_finger_print, status: :created, location: @magnetic_finger_print }\n else\n format.html { render action: \"new\" }\n format.json { render json: @magnetic_finger_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @item_print = ItemPrint.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @item_print }\n end\n end",
"def create\n @title = t('view.prints.new_title')\n @print = current_user.prints.build(params[:print])\n session[:documents_for_printing].try(:clear)\n\n respond_to do |format|\n if @print.save\n format.html { redirect_to(@print, notice: t('view.prints.correctly_created')) }\n format.json { render json: @print, status: :created, location: @print }\n else\n format.html { render action: 'new' }\n format.json { render json: @print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @user_paper = UserPaper.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user_paper }\n end\n end",
"def new\n @print_work = @user.print_works.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @print_work }\n end\n end",
"def new\n @paper = Paper.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @paper }\n end\n end",
"def create\n @printer = Printer.new(printer_params)\n\n respond_to do |format|\n if @printer.save\n format.html { redirect_to root_path }\n format.json { render :show, status: :created, location: @printer }\n else\n format.html { render :new }\n format.json { render json: @printer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @probe = Probe.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @probe }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /finger_prints POST /finger_prints.json | def create
@finger_print = FingerPrint.check_exist(params[:finger_print])
respond_to do |format|
format.html { redirect_to @finger_print, notice: 'FingerPrint was successfully created.' }
format.json { render json: @finger_print, status: :created, location: @finger_print }
end
end | [
"def create\n @magnetic_finger_print = MagneticFingerPrint.new(params[:magnetic_finger_print])\n\n respond_to do |format|\n if @magnetic_finger_print.save\n format.html { redirect_to @magnetic_finger_print, notice: 'Magnetic finger print was successfully created.' }\n format.json { render json: @magnetic_finger_print, status: :created, location: @magnetic_finger_print }\n else\n format.html { render action: \"new\" }\n format.json { render json: @magnetic_finger_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @finger_print = FingerPrint.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def create\n @print = current_user.prints.build(print_params)\n\n respond_to do |format|\n if @print.save\n format.html { redirect_to @print, notice: \"Print was successfully created.\" }\n format.json { render :show, status: :created, location: @print }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @title = t('view.prints.new_title')\n @print = current_user.prints.build(params[:print])\n session[:documents_for_printing].try(:clear)\n\n respond_to do |format|\n if @print.save\n format.html { redirect_to(@print, notice: t('view.prints.correctly_created')) }\n format.json { render json: @print, status: :created, location: @print }\n else\n format.html { render action: 'new' }\n format.json { render json: @print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fingerprint = Fingerprint.new(fingerprint_params)\n\n respond_to do |format|\n if @fingerprint.save\n format.html { redirect_to @fingerprint, notice: 'Fingerprint was successfully created.' }\n format.json { render :show, status: :created, location: @fingerprint }\n else\n format.html { render :new }\n format.json { render json: @fingerprint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @finger_prints = FingerPrint.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @finger_prints }\n format.csv { send_data FingerPrint.scoped.to_csv, filename: \"fingerprints-#{Date.today}.csv\"}\n end\n end",
"def create\n @fine_print = FinePrint.new(fine_print_params)\n\n respond_to do |format|\n if @fine_print.save\n format.html { redirect_to @fine_print, notice: 'Fine print was successfully created.' }\n format.json { render :show, status: :created, location: @fine_print }\n else\n format.html { render :new }\n format.json { render json: @fine_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @printer = Printer.new(printer_params)\n\n respond_to do |format|\n if @printer.save\n format.html { redirect_to root_path }\n format.json { render :show, status: :created, location: @printer }\n else\n format.html { render :new }\n format.json { render json: @printer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @printing_screen = PrintingScreen.new(printing_screen_params)\n\n respond_to do |format|\n if @printing_screen.save\n format.html { redirect_to @printing_screen, notice: 'Printing screen was successfully created.' }\n format.json { render json: @printing_screen, status: :created, location: @printing_screen }\n else\n format.html { render action: \"new\" }\n format.json { render json: @printing_screen.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n #debugger\n @finger_print = FingerPrint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def create\n @print = Print.new(print_params)\n\n respond_to do |format|\n if @print.save\n format.html { redirect_to @print, notice: 'Print was successfully created.' }\n format.json { render action: 'show', status: :created, location: @print }\n else\n format.html { render action: 'new' }\n format.json { render json: @print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @wifi_finger_prints_record = WifiFingerPrintsRecord.new(params[:wifi_finger_prints_record])\n respond_to do |format|\n if @wifi_finger_prints_record.save\n format.html { redirect_to @wifi_finger_prints_record, notice: 'Wifi finger prints record was successfully created.' }\n format.json { render json: @wifi_finger_prints_record, status: :created, location: @wifi_finger_prints_record }\n else\n format.html { render action: \"new\" }\n format.json { render json: @wifi_finger_prints_record.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @print_detail = PrintDetail.new(print_detail_params)\n\n respond_to do |format|\n if @print_detail.save\n format.html { redirect_to @print_detail, notice: 'Print detail was successfully created.' }\n format.json { render :show, status: :created, location: @print_detail }\n else\n format.html { render :new }\n format.json { render json: @print_detail.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @finger = Finger.new(params[:finger])\n @finger.user = current_user\n \n respond_to do |format|\n if @finger.save\n flash[:notice] = 'Finger was successfully created.'\n format.html { redirect_to(fingers_path) }\n format.xml { render :xml => @finger, :status => :created, :location => @finger }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @finger.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def index\n # @setup = CloudPrint.setup(hash)\n # @printers = CloudPrint::Printer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @printers }\n end\n end",
"def destroy\n @finger_print = FingerPrint.find(params[:id])\n @finger_print.destroy\n\n respond_to do |format|\n format.html { redirect_to finger_prints_url }\n format.json { head :no_content }\n end\n end",
"def create\n @foot_print = FootPrint.new(foot_print_params)\n\n respond_to do |format|\n if @foot_print.save\n format.html { redirect_to @foot_print, notice: 'Foot print was successfully created.' }\n format.json { render :show, status: :created, location: @foot_print }\n else\n format.html { render :new }\n format.json { render json: @foot_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @reference_print = ReferencePrint.new(params[:reference_print])\n\n respond_to do |format|\n if @reference_print.save\n format.html { redirect_to @reference_print, notice: 'Reference print was successfully created.' }\n format.json { render json: @reference_print, status: :created, location: @reference_print }\n else\n format.html { render action: \"new\" }\n format.json { render json: @reference_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @item_print = ItemPrint.new(item_print_params)\n\n respond_to do |format|\n if @item_print.save\n format.html { redirect_to @item_print, notice: 'Item print was successfully created.' }\n format.json { render json: @item_print, status: :created, location: @item_print }\n else\n format.html { render action: \"new\" }\n format.json { render json: @item_print.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
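The create action above also exposes a JSON endpoint at POST /finger_prints.json. A minimal client-side sketch against it, assuming a locally running app; the attribute names in the payload (mac, rssi) are illustrative and do not come from the rows above:

require "net/http"
require "json"

# Hypothetical payload; the real FingerPrint attributes are not shown in this dataset.
payload = { finger_print: { mac: "aa:bb:cc:dd:ee:01", rssi: -63 } }
uri = URI("http://localhost:3000/finger_prints.json")
res = Net::HTTP.post(uri, payload.to_json, "Content-Type" => "application/json")
puts res.code # the action above always renders the record as JSON with status :created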
PUT /finger_prints/1 PUT /finger_prints/1.json | def update
@finger_print = FingerPrint.find(params[:id])
respond_to do |format|
if @finger_print.update_attributes(params[:finger_print])
format.html { redirect_to @finger_print, notice: 'Finger print was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @finger_print.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @magnetic_finger_print = MagneticFingerPrint.find(params[:id])\n\n respond_to do |format|\n if @magnetic_finger_print.update_attributes(params[:magnetic_finger_print])\n format.html { redirect_to @magnetic_finger_print, notice: 'Magnetic finger print was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @magnetic_finger_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @sprint.update!(sprint_params)\n json_response(@sprint)\n end",
"def update\n @printer = Printer.find(params[:id])\n @printer.update_from_custom_printer!(printer_params)\n\n if @printer.save\n render json: @printer.map_to_custom_printer\n else\n render json: @printer.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @print.update(print_params)\n format.html { redirect_to @print, notice: \"Print was successfully updated.\" }\n format.json { render :show, status: :ok, location: @print }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @finger_print = FingerPrint.check_exist(params[:finger_print])\n\n respond_to do |format|\n format.html { redirect_to @finger_print, notice: 'FingerPrint was successfully created.' }\n format.json { render json: @finger_print, status: :created, location: @finger_print }\n end\n end",
"def update\n @finger = Finger.find(params[:id])\n @finger.user = current_user\n respond_to do |format|\n if @finger.update_attributes(params[:finger])\n flash[:notice] = 'Finger was successfully updated.'\n format.html { redirect_to(fingers_path) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @finger.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fingerprint.update(fingerprint_params)\n format.html { redirect_to @fingerprint, notice: 'Fingerprint was successfully updated.' }\n format.json { render :show, status: :ok, location: @fingerprint }\n else\n format.html { render :edit }\n format.json { render json: @fingerprint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @printer = Printer.find(params[:id])\n\n respond_to do |format|\n if @printer.update_attributes(params[:printer])\n format.html { redirect_to @printer, notice: 'Printer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @printer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @sprint = Sprint.find(params[:id])\n respond_to do |format|\n if @sprint.update_attributes(params[:sprint])\n format.html { redirect_to @sprint, notice: 'Sprint was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sprint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @sprint = Sprint.find(params[:id])\n\n respond_to do |format|\n if @sprint.update_attributes(params[:sprint])\n format.html { redirect_to @sprint, notice: 'Sprint was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sprint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @print_detail.update(print_detail_params)\n format.html { redirect_to @print_detail, notice: 'Print detail was successfully updated.' }\n format.json { render :show, status: :ok, location: @print_detail }\n else\n format.html { render :edit }\n format.json { render json: @print_detail.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @finger_print = FingerPrint.find(params[:id])\n @finger_print.destroy\n\n respond_to do |format|\n format.html { redirect_to finger_prints_url }\n format.json { head :no_content }\n end\n end",
"def update\n respond_to do |format|\n if @spore_print.update(spore_print_params)\n format.html { redirect_to @spore_print, notice: 'Spore print was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @spore_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @printer = Printer.find(params[:id])\n\n respond_to do |format|\n if @printer.update_attributes(printer_params)\n format.html { redirect_to printers_path, notice: 'Printer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @printer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @printer.update(printer_params)\n format.html { redirect_to @printer, notice: 'Printer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @printer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @item_print = ItemPrint.find(params[:id])\n\n respond_to do |format|\n if @item_print.update_attributes(item_print_params)\n format.html { redirect_to @item_print, notice: 'Item print was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @foot_print.update(foot_print_params)\n format.html { redirect_to @foot_print, notice: 'Foot print was successfully updated.' }\n format.json { render :show, status: :ok, location: @foot_print }\n else\n format.html { render :edit }\n format.json { render json: @foot_print.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @finger_print = FingerPrint.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finger_print }\n end\n end",
"def update\n @wifi_finger_prints_record = WifiFingerPrintsRecord.find(params[:id])\n\n respond_to do |format|\n if @wifi_finger_prints_record.update_attributes(params[:wifi_finger_prints_record])\n format.html { redirect_to @wifi_finger_prints_record, notice: 'Wifi finger prints record was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @wifi_finger_prints_record.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /finger_prints/1 DELETE /finger_prints/1.json | def destroy
@finger_print = FingerPrint.find(params[:id])
@finger_print.destroy
respond_to do |format|
format.html { redirect_to finger_prints_url }
format.json { head :no_content }
end
end | [
"def destroy\n @magnetic_finger_print = MagneticFingerPrint.find(params[:id])\n @magnetic_finger_print.destroy\n\n respond_to do |format|\n format.html { redirect_to magnetic_finger_prints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @print.destroy\n respond_to do |format|\n format.html { redirect_to prints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @spore_print.destroy\n respond_to do |format|\n format.html { redirect_to spore_prints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fingerprint.destroy\n respond_to do |format|\n format.html { redirect_to fingerprints_url, notice: 'Fingerprint was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @wifi_finger_prints_record = WifiFingerPrintsRecord.find(params[:id])\n @wifi_finger_prints_record.destroy\n\n respond_to do |format|\n format.html { redirect_to wifi_finger_prints_records_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @snap = Snap.find(params[:id])\n @snap.destroy\n\n respond_to do |format|\n format.html { redirect_to snaps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @printer = Printer.find(params[:id])\n @printer.destroy\n\n respond_to do |format|\n format.html { redirect_to printers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sprint = Sprint.find(params[:id])\n @sprint.destroy\n\n respond_to do |format|\n format.html { redirect_to sprints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @finger = Finger.find(params[:id])\n @finger.destroy\n\n respond_to do |format|\n format.html { redirect_to(fingers_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @item_print = ItemPrint.find(params[:id])\n @item_print.destroy\n\n respond_to do |format|\n format.html { redirect_to item_prints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @reference_print = ReferencePrint.find(params[:id])\n @reference_print.destroy\n\n respond_to do |format|\n format.html { redirect_to reference_prints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @print.destroy\n respond_to do |format|\n format.html { redirect_to prints_url, notice: \"Print was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @printing_screen = PrintingScreen.find(params[:id])\n @printing_screen.destroy\n\n respond_to do |format|\n format.html { redirect_to printing_screens_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @snap.destroy\n respond_to do |format|\n format.html { redirect_to snaps_url, notice: 'Snap was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend",
"def destroy\n @printer.destroy\n respond_to do |format|\n format.html { redirect_to printers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @printer.destroy\n respond_to do |format|\n format.html { redirect_to printers_url}\n format.json { head :no_content }\n end\n end",
"def destroy\n @flood.destroy\n respond_to do |format|\n format.html { redirect_to floods_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @print = Print.find(params[:id])\n @print.destroy\n\n respond_to do |format|\n format.html { redirect_to(prints_url) }\n format.xml { head :ok }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
responsible for handling the loc_page view | def loc_view
end | [
"def landing_page\n end",
"def selected_page\n end",
"def visited_page(url); end",
"def visit_path\n end",
"def page_view page\n self.views.find(:first, :conditions => [\"resource_type = 'Page' and resource_id = ?\", page.id ])\n end",
"def visit(page)\n end",
"def set_page_view\n @page_view = 1\n end",
"def uses_page page # only works with IE\n new_url = self.class.html_root + page\n browser.goto new_url unless browser.url == new_url\n end",
"def page!\n save_and_open_page\n end",
"def current_site_load_page\n if params[:preview] == 'true'\n @page = current_site.page_by_full_path(\"/#{params[:page_path]}?preview=#{params[:preview]}\") \n @page.destroy unless @page.nil?\n else\n @page = current_site.page_by_full_path(\"/#{params[:page_path]}\")\n end\n return render_404 if @page.nil?\n end",
"def page_view(title, page, utmhid = random_id)\n check_account_params\n hey(page_view_params(title, page, utmhid))\n end",
"def route\n # Page exists?\n if @node and @node.has_page?\n # Redirect to This Item's first category listing if it exists. To ensure the menus display correctly\n if @node.page_type=='Item' and @node.page.has_better_url?\n redirect_to shortcut_path(:shortcut => @node.page.better_url, :display_item => @node.page_id)\n return false\n else\n page_type = (@node.page_type == 'ItemCategory' ? 'Item' : @node.page_type)\n @item = Item.find(params[:display_item]) unless params[:display_item].blank?\n #render(\"#{page_type.tableize.pluralize}/show\", :layout => @node.layout)\n #render :action => \"#{page_type.tableize.pluralize}/show\", :layout => @node.layout\n render_page_from_node(\"#{page_type.tableize.pluralize}/show\", @node.layout)\n end\n else\n return error_redirect\n end\n end",
"def page_url(url)\n define_method(\"goto\") do\n lookup = url.kind_of?(Symbol) ? self.send(url) : url\n erb = ERB.new(%Q{#{lookup}})\n merged_params = self.class.instance_variable_get(\"@merged_params\")\n params = merged_params ? merged_params : self.class.params\n platform.navigate_to erb.result(binding)\n end\n end",
"def render_page_path\n if Settings.website.base_url.present?\n render :partial => \"admin/general/page_path\", :locals => { :page_path => Settings.website.base_url+@page.path }\n else\n render :partial => \"admin/general/page_path\", :locals => { :page_path => @page.path }\n end\n end",
"def page\n # 1.)\n # Before anything else, look to see if it's the most recent page:\n return @most_recent_page if @most_recent_page && @most_recent_page.on_page?\n process_browser\n url = @browser.url\n\n found_page = nil\n # 2.)\n # Ensure that static templates are always prioritized when attempting to\n # match, which will prevent the wrong template from getting matched in this\n # scenario:\n # - \"/accounts/{account_code}\"\n # - \"/accounts/new\"\n #\n # Start by working through the array from FRONT to BACK, since any static\n # templates will be at the front of the array. Stop when we start to see\n # templates whth vars (These will get handled in the next statement.)\n @pages.each do |pg|\n break if pg.url_template.variables.length > 0\n\n if pg.url_matcher && pg.url_matcher =~ url\n found_page = pg\n elsif pg.url_template.match(url)\n found_page = pg\n else\n next\n end\n\n break if found_page\n end\n\n # 3.) Now we've reached the templates that include one or more variables.\n # For these, we want to try to match on the templates with more variables.\n # This prevents an invalid match in the following situation and removes the\n # need to provide a URL matcher to override the URL template:\n # - \"/accounts/{account_code}/edit\"\n # - \"/accounts/{account_code}\"\n # Now work through all the array from BACK to FRONT, stopping when we reach\n # the point where we see templates without a var (since those were already\n # handled above.)\n @pages.reverse.each do |pg|\n break if pg.url_template.variables.length == 0\n\n if pg.url_matcher && pg.url_matcher =~ url\n found_page = pg\n elsif pg.url_template.match(url)\n found_page = pg\n else\n next\n end\n\n break if found_page\n end\n\n if found_page && found_page.required_arguments.present?\n if hsh = found_page.url_template.extract(url)\n return found_page.new(self, found_page.url_template.extract(url))\n else\n return found_page.new(self, found_page.url_template.extract(url.split(/(\\?|#)/)[0]))\n end\n elsif found_page\n return found_page.new(self)\n else\n return UndefinedPage.new(self)\n end\n end",
"def kopal_layout_before_page_meta\n\n end",
"def show\n send(\"page__#{params[:id].gsub(\"\\/\", '_')}\") if respond_to?(\"page__#{params[:id].gsub(\"\\/\", '_')}\")\n\n @id = params[:id]\n\n render :layout => false\n end",
"def find_location; end",
"def page!\n save_and_open_page\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Passes an array of fingerprint readings to the model to localize | def localization
#debugger
searched = params[:finger_print]
@coordinates = FingerPrint.KNN(searched)
puts @coordinates
respond_to do |format|
format.html
format.json {render json: @coordinates}
end
end | [
"def localization\n\t\t\tloc = []\n\t\t\t# we have to sort the hash in this case.\n#\t\t\titems.sort.each_index do |index|\n#\t\t\t\tloc << items[index+1].aln_from.to_s + \"..\" + items[index+1].aln_to.to_s\n#\t\t\tend\n#\t\t\tloc.join(\",\")\n\t\t\teach_domain do |domain|\n\t\t\t\tdomain.each_domainhit do |dh|\n\t\t\t\t\tloc << dh.aln_from.to_s + \"..\" + dh.aln_to.to_s\n\t\t\t\tend\n\t\t\tend\n\t\t\tloc.join(\",\")\n\t\tend",
"def locale_array=(locale_array)\n config.locale_array = locale_array\n end",
"def get_qualification_name_preflabel(type_array)\r\n\r\n#how to handle? there may be multiple qualification names\r\n#have updated the unqueried qualifications, need to do the others. need to update qual authorities and also to update the mappings for the postgrad diplomas\r\n\r\n\r\n#Arrays of qualification name variants\r\n#doctorates\r\nlettersDoctorates = ['Doctor of Letters (DLitt)','Doctor of Letters','DLitt']\r\nmusicDoctorates = ['Doctor of Music (DMus)','Doctor of Music','DMus']\r\nscienceDoctorates = ['Doctor of Science (ScD)','Doctor of Science','ScD']\r\nengineeringDoctorates = ['Doctor of Engineering (EngD)','Doctor of Engineering','EngD']\r\nmedicalDoctoratesbyPubs = ['Doctor of Medicine by publications (MD)','Doctor of Medicine by publications']\r\nmedicalDoctorates = ['Doctor of Medicine (MD)','MD']\r\nphilosophyDoctoratesbyPubs = ['Doctor of Philosophy by publications (PhD)','Doctor of Philosophy by publications']\r\nphilosophyDoctorates = ['Doctor of Philosophy (PhD)','PhD'] \r\n#masters\r\nphilosophyMastersbyPubs = ['Master of Philosophy by publications (MPhil)','Master of Philosophy by publications']\r\nphilosophyMasters = ['Master of Philosophy (MPhil)','MPhil']\r\nartMastersbyResearch = ['Master of Arts (by research) (MA (by research))','Master of Arts (by research)','(MA (by research)']\r\nartMasters = ['Master of Arts (MA)', 'Master of Arts', 'Master of Art (MA)', 'MA (Master of Arts)','Masters of Arts (MA)', 'MA']\r\nscienceMastersbyResearch = ['Master of Science (by research) (MSc (by research))','Master of Science (by research)']\r\nscienceMastersbyThesis = ['Master of Science (by thesis) (MSc (by thesis))','Master of Science (by thesis)','MSc (by thesis)']\r\nscienceMasters = ['Master of Science (MSc.)', '\"Master of Science (MSc),\"',\"'Master of Science (MSc)\",'Master of Science (MSc)','MSc', 'Master of Science']\r\nlawsMasters = ['Master of Laws (LLM)','Master of Laws','LLM']\r\nlawMasters = ['Master of Law (MLaw)','Master of Law','MLaw'] #not an error!\r\npublicAdminMasters = ['Master of Public Administration (MPA)','Master of Public Administration','MPA']\r\nbiologyMasters = ['Master of Biology (MBiol)','Master of Biology','MBiol']\r\nbiochemMasters = ['Master of Biochemistry (MBiochem)','Master of Biochemistry','MBiochem']\r\nbiomedMasters = ['Master of Biomedical Science (MBiomedsci)','Master of Biomedical Science','MBiomedsci']\r\nchemistryMasters = ['Master of Chemistry (MChem)','Master of Chemistry','MChem']\r\nengineeringMasters = ['Master of Engineering (MEng)', 'Master of Engineering (MEng','Master of Engineering','MEng']\r\nmathMasters = ['Master of Mathematics (MMath)','Master of Mathematics (MMAth)','Master of Mathematics','MMath']\r\nphysicsMasters = ['Master of Physics (MPhys)','Master of Physics','MPhys']\r\npsychMasters = ['Master of Psychology (MPsych)','Master of Psychology','MPsych']\r\nenvMasters = ['Master of Environment (MEnv)','Master of Environment','MEnv']\r\nnursingMasters = ['Master of Nursing','Master of Nursing (MNursing)','(MNursing)' ]\r\npublicHealthMasters = ['Master of Public Health (MPH)','Master of Public Health','MPH']\r\nsocialworkMasters = ['Master of Social Work and Social Science (MSWSS)','Master of Social Work and Social Science','(MSWSS)']\r\nresearchMasters = ['Master of Research (Mres)','Master of Research (MRes)','Mres','MRes']\r\n#the variant single quote character in Conservation Studies is invalid and causes invalid multibyte char (UTF-8) error so handled this in nokogiri open document call. 
however we also need to ensure the resulting string is included in the lookup array so the match will still be found. this means recreating it and inserting it into the array\r\n\r\n\r\n#bachelors\r\n\r\nmedicineSurgeryBachelors = ['Bachelor of Medicine, Bachelor of Surgery (MBBS)','Bachelor of Medicine, Bachelor of Surgery','MBBS']\r\nmedsciBachelors = ['Bachelor of Medical Science (BMedSci)','Bachelor of Medical Science','BMedSci']\r\nscienceBachelors = ['Batchelor of science (BSc)', 'Bachelor of Science (BSc)', '\"Bachelor of Science (BSc)\"', 'BSc', 'Bachelor of Science (BA)','Bachelor of Science (BSc )','Bachelor of Science', 'Bachelor of Science (Bsc)','Bachelor of Science (MSc)']\r\nartBachelors = ['Batchelor of Arts (BA)', '\"Bachelor of Arts (BA),\"', 'BA', 'Bachelor of Arts (BA)','Bachelor of Art (BA)', 'Bachelor of Arts', 'Bachelor of Arts (MA)']\r\nphilosophyBachelors = ['Bachelor of Philosophy (BPhil)','Bachelor of Philosophy' ,'BPhil']\r\nengineeringBachelors = ['Bachelor of Engineering (BEng)', 'Bachelor of Engineering','BEng']\r\nlawBachelors = ['Bachelor of Laws (LLB)','Bachelor of Laws','LLB']\r\n\r\n#others\r\nfoundationDegrees = ['Foundation Degree (FD)','Foundation Degree','FD']\r\ncertHEs = ['Certificate of Higher Education (CertHE)','Certificate of Higher Education','CertHE']\r\ndipHEs = ['Diploma of Higher Education (DipHE)','Diploma of Higher Education','DipHE']\r\ngradCerts = ['Graduate Certificate (GradCert)','Graduate Certificate','GradCert']\r\ngradDiplomas = ['Graduate Diploma (GradDip)','Graduate Diploma','GradDip']\r\nuniCerts = ['University Certificate']\r\nfoundationCerts = ['Foundation Certificate (F Cert)','Foundation Certificate','F Cert']\r\nfoundation = ['Foundation Year', 'Foundation Year Stage 0'] #not neccesarily the same as above\r\npreMasters = ['Pre-Masters']\r\n#the variant single quote character in Conservation Studies is invalid and causes invalid multibyte char (UTF-8) error so handled this in nokogiri open document call. however we also need to ensure the resulting string is included in the lookup array so the match will still be found. 
this means recreating it and inserting it into the array\r\nnot_valid = \"Postgraduate Diploma in ‘Conservation Studies’ (PGDip)\"\r\nvalid_now = not_valid.encode('UTF-8', :invalid => :replace, :undef => :replace)\r\n#pgDiplomas = ['Diploma in Conservation Studies', 'Postgraduate Diploma in Conservation Studies ( PGDip)','Postgraduate Diploma in Conservation Studies(PGDip)', 'Postgraduate Diploma in Medieval Studies (PGDip)','PGDip', 'Diploma','(Dip', '(Dip', 'Diploma (Dip)', valid_now] \r\npgDiplomas = ['PGDip', 'Diploma','(Dip', 'Dip', 'Diploma (Dip)','(Dip']\r\nmedievalDiplomas = ['Postgraduate Diploma in Medieval Studies (PGDip)']\r\n#conservationDiplomas = ['Diploma in Conservation Studies', 'Postgraduate Diploma in Conservation Studies ( PGDip)','Postgraduate Diploma in Conservation Studies(PGDip)', 'Postgraduate Diploma in Medieval Studies (PGDip)','PGDip', 'Diploma','(Dip', '(Dip', 'Diploma (Dip)', valid_now] \r\nconservationDiplomas = ['Diploma in Conservation Studies', 'Postgraduate Diploma in Conservation Studies ( PGDip)','Postgraduate Diploma in Conservation Studies(PGDip)', valid_now] #this dealt with an encoding problem in certain records\r\ncpds = ['Continuing Professional Development (CPD)','Continuing Professional Development','CPD']\r\npgces = ['Postgraduate Certificate in Education (PGCE)']\r\npgMedicalCerts = ['Postgraduate Certificate in Medical Education (PGCert)']\r\npgcerts = ['Postgraduate Certificate (PgCert)']\r\ncefrs = ['A1 of the CEFR', 'A1 of CEFR','A1/A2 of the CEFR','A2 of the CEFR','A2/B1 of the CEFR','B1/B2 of the CEFR','B2 of the CEFR','B2/C1 of the CEFR','C1 of the CEFR','C2 of the CEFR','C1/C2 of CEFR','C1/C2 of the CEFR']\r\n\r\n#all listed now need to do processing. add elsifs, but also CEFR and Foundations need stuff ading to as description, this will need doing in the specific migrator\r\n\r\nqualification_name_preflabels = [] \r\n\r\ntype_array.each do |t,|\t #loop1\r\n\ttype_to_test = t.to_s\r\n\tputs \"search term for qualification_name_preflabel was \" + type_to_test\r\n\t#outer loop tests for creation of qualification_name_preflabel\t\t\t \r\n\t\tif lettersDoctorates.include? type_to_test #loop2\r\n\t\t qualification_name_preflabels.push(\"Doctor of Letters (DLitt)\")\r\n\t\telsif musicDoctorates.include? type_to_test \r\n\t\t qualification_name_preflabels.push(\"Doctor of Music (DMus)\")\r\n\t\telsif scienceDoctorates.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Science (ScD)\")\r\n\t\telsif engineeringDoctorates.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Engineering (EngD)\")\r\n\t\telsif medicalDoctoratesbyPubs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Medicine by publications (MD)\")\r\n\t\telsif medicalDoctorates.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Medicine (MD)\")\r\n\t\telsif philosophyDoctoratesbyPubs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Philosophy by publications (PhD)\")\r\n\t\telsif philosophyDoctorates.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Doctor of Philosophy (PhD)\")\r\n\t\t\r\n\t\telsif philosophyMastersbyPubs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Philosophy by publications (MPhil)\") \r\n\t\telsif philosophyMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Philosophy (MPhil)\")\r\n\t\telsif artMastersbyResearch.include? 
type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Arts (by research) (MA (by research))\")\t\t\r\n\t\telsif artMasters.include? type_to_test \r\n\t\t qualification_name_preflabels.push(\"Master of Arts (MA)\")\r\n\t\telsif scienceMastersbyResearch.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Science (by research) (MSc (by research))\")\r\n\t\telsif scienceMastersbyThesis.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Science (by thesis) (MSc (by thesis))\")\r\n\t\telsif scienceMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Science (MSc)\")\t\t\r\n\t\telsif lawsMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Laws (LLM)\") \r\n\t\telsif lawMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Law (MLaw)\")\r\n\t\telsif publicAdminMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Public Administration (MPA)\")\r\n\t\telsif biologyMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Biology (MBiol)\")\r\n\t\telsif biochemMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Biochemistry (MBiochem)\")\r\n\t\telsif biomedMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Biomedical Science (MBiomedsci)\")\r\n\t\telsif chemistryMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Chemistry (MChem)\")\r\n\t\telsif engineeringMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Engineering (MEng)\")\r\n\t\telsif mathMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Mathematics (MMath)\")\r\n\t\telsif physicsMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Physics (MPhys)\")\r\n\t\telsif psychMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Psychology (MPsych)\")\r\n\t\telsif envMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Environment (MEnv)\")\r\n\t\telsif nursingMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Nursing (MNursing)\")\r\n\t\telsif publicHealthMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Public Health (MPH)\")\r\n\t\telsif socialworkMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Master of Social Work and Social Science (MSWSS)\")\r\n\t\t\r\n\t\telsif medicineSurgeryBachelors.include? type_to_test \r\n\t\t qualification_name_preflabels.push(\"Bachelor of Medicine, Bachelor of Surgery (MBBS)\")\r\n\t\telsif medsciBachelors.include? type_to_test \r\n\t\t qualification_name_preflabels.push(\"Bachelor of Medical Science (BMedSci)\")\r\n\t\telsif scienceBachelors.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Bachelor of Science (BSc)\")\r\n\t\telsif artBachelors.include? type_to_test \r\n\t\t qualification_name_preflabels.push(\"Bachelor of Arts (BA)\")\r\n\t\telsif philosophyBachelors.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Bachelor of Philosophy (BPhil)\")\t\t\r\n\t\telsif engineeringBachelors.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Bachelor of Engineering (BEng)\")\r\n elsif lawBachelors.include? 
type_to_test\r\n\t\t qualification_name_preflabels.push(\"Bachelor of Laws (LLB)\")\r\n\t\t \r\n\t\telsif foundationDegrees.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Foundation Degree (FD)\")\r\n\t\telsif certHEs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Certificate of Higher Education (CertHE)\")\r\n\t\telsif dipHEs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Diploma of Higher Education (DipHE)\")\r\n\t\telsif gradCerts.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Graduate Certificate (GradCert)\")\r\n\t\telsif gradDiplomas.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Graduate Diploma (GradDip)\")\r\n\t\telsif uniCerts.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"University Certificate\")\r\n\t\telsif foundationCerts.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Foundation Certificate (F Cert)\")\r\n\t\telsif foundation.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Foundation\")\r\n\t\telsif preMasters.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Pre-Masters\")\r\n\t\telsif conservationDiplomas.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Diploma in Conservation Studies (PGDip)\")\r\n\t\telsif medievalDiplomas.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Diploma in Medieval Studies (PGDip)\")\r\n\t\t#this is more general so crucial it is tested AFTER the more specific diplomas \r\n\t\telsif pgDiplomas.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Diploma (PGDip)\")\r\n\t\telsif pgces.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Certificate in Education (PGCE)\")\r\n\t\telsif pgMedicalCerts.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Certificate in Medical Education (PGCert)\")\r\n\t\telsif cpds.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Continuing Professional Development (CPD)\")\t\t\r\n\t\telsif pgcerts.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"Postgraduate Certificate (PgCert)\")\r\n\t\t#preparation for preflabel assignment when defined\r\n\t\t#order importand, look for most precise first\r\n\t\telsif cefrs.include? type_to_test\r\n\t\t qualification_name_preflabels.push(\"CEFR Module\")\t\t\r\n\tend #end 2\r\n\t\t\r\n\t#not found? check for plain research masters without arts or science specified (order of testing here is crucial) (required for theses)\r\n\t if qualification_name_preflabels.length <= 0 #loop2a\r\n\t\t\tif researchMasters.include? type_to_test #loop 3 not done with main list as \"MRes\" may be listed as separate type as well as a more specific type\r\n\t\t\t\tqualification_name_preflabels.push(\"Master of Research (MRes)\")\r\n\t\t\tend#end loop3\r\n\t\tend #'end loop 2a\r\n\tend #end loop1\r\n\treturn qualification_name_preflabels\r\nend",
"def upcase_local_patient_ids\n self.local_patient_id = local_patient_id.upcase if local_patient_id.present?\n (2..5).each{ |index| upcase_local_patient_id(index) }\n end",
"def names_of_people_who_passed_english(results_set)\nend",
"def localize(*args)\n helpers.localize(*args)\n end",
"def vlq_encode_mappings(ary); end",
"def load_translations(*filenames); end",
"def localize attribs, translate\n locale_classes = [Time, Date, DateTime]\n if translate.is_a?(Array) && I18n\n translate.each do |col|\n attribs[col.to_s] = I18n.l(attribs[col.to_s]) if locale_classes.include?(attribs[col.to_s].class)\n end\n end\n end",
"def localizations=(value)\n @localizations = value\n end",
"def names_of_people_who_took_french(results_set)\nend",
"def decode_vlq_mappings(str, sources: T.unsafe(nil), names: T.unsafe(nil)); end",
"def models_to_translate\n Utility.models_to_translate\n end",
"def initialize (array)\n\t\t@alimentos = array\n\tend",
"def fileLabelVectorTriples\n images.map{ |image| { \"url\" => File.basename(image.url), \"label\" => image.most_likely_label_text, \"vector\" => image.most_likely_label_onehot } }\n end",
"def names_of_people_who_failed_english(results_set)\nend",
"def localize(*args)\n I18n.localize(*args)\n end",
"def inflections(locale = T.unsafe(nil)); end",
"def floormap_data_collection(names)\n floormaps = []\n names.each do |name|\n floormaps << floormap_data(name)\n end\n floormaps\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
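The localization action above reads params[:finger_print] and passes it to FingerPrint.KNN. A sketch of a request against it, assuming a GET route at /finger_prints/localization.json and BSSID/RSSI reading pairs; both the route and the reading shape are assumptions, since neither appears in the row:

require "net/http"
require "json"

# Illustrative readings; the exact structure FingerPrint.KNN expects is not shown above.
readings = [
  { bssid: "aa:bb:cc:dd:ee:01", rssi: -54 },
  { bssid: "aa:bb:cc:dd:ee:02", rssi: -71 }
]
uri = URI("http://localhost:3000/finger_prints/localization.json")
uri.query = URI.encode_www_form("finger_print" => readings.to_json)
puts Net::HTTP.get(uri) # the estimated coordinates, rendered as JSON by the action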
Magnetic variation in degrees | def magnetic_variation_degrees
self.class.nsew_signed_float(@fields[10], @fields[11])
end | [
"def magnetic_variation_degrees\n self.class.nsew_signed_float(@fields[4], @fields[5])\n end",
"def magnetic_deviation_degrees\n self.class.nsew_signed_float(@fields[5], @fields[6])\n end",
"def true_to_magnetic(heading)\r\n value = heading - @variance\r\n value += 360.0 if value < 0\r\n value\r\n end",
"def track_degrees_magnetic\n f = new_format? ? 3 : 2\n return nil if @fields[f].nil? || @fields[f].empty?\n @fields[f].to_f\n end",
"def angular_kinetic_energy(moment_of_inertia, angular_velocity)\n return 0.5 * moment_of_inertia * (angular_velocity ** 2)\n end",
"def fuel_mass\n return self.mass - self.dry_mass\n end",
"def altitude_variance; 5 end",
"def angular_momentum(moment_of_inertia, angular_velocity)\n return moment_of_inertia * angular_velocity.to_f\n end",
"def magnetic_to_true(heading)\r\n value = heading + @variance\r\n value -= 360.0 if value > 360.0\r\n value\r\n end",
"def energy_v4(mass)\n return mass * (SPEED_OF_LIGHT ** 2)\n end",
"def density\n return self.mass / self.volume\n end",
"def wavelength_de_broglie(momentum)\n PLANCK / momentum\n end",
"def angular_velocity_rad_s\n 2 * PI / period_s\n end",
"def airmass\n a = alt\n ang = Math::PI*(90.0-hms2deg(a))/180.0\n return 1.0/Math::cos(ang)\n end",
"def magnification\n 1300\n end",
"def gravitational_constant\n Measure.new(GRAVITATIONAL_CONSTANT, Meter.new(3), Kilogram.new(-1), Second.new(-2))\n end",
"def get_magnetic_field\n send_request(FUNCTION_GET_MAGNETIC_FIELD, [], '', 6, 's s s')\n end",
"def mean_angular_velocity\n if self.closed?\n return 2.0 * Math::PI / self.period\n else\n return nil\n end\n end",
"def deg_per_met\n\t\tr = 6371000\n\t\tlatrad = lat.abs * (2*Math::PI/360) # Converts latitudinal degrees into radians for the sake of sake of Ruby's sin function.\n\t\trprime = r * Math.sin(latrad) # Using SOHCAHTOA to get the horizontal cross-sectional radius of the earth at the user's latitude.\n\t\treturn 360 / (rprime*2*Math::PI) # Takes this radius and uses it to get the cross-sectional circumference at that point in meters\n # and return 360 degrees by this circumferences to get degrees per meter.\n\tend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Disable SQL logging. You can use this method to turn off logging SQL when the migration is munging data that may vary between environments. | def disable_sql_logging(&block)
sql_logging(enabled: false, &block)
end | [
"def disable_activerecord_sql_logging\n ActiveRecord::Base.logger.level = 1\nend",
"def enable_activerecord_sql_logging\n ActiveRecord::Base.logger.level = 0\nend",
"def turn_off_qa_db_logging()\r\n @@logToQADatabase =0\r\n end",
"def disable_verbose_log()\n PureHailDB.ib_cfg_set(\"print_verbose_log\", :bool, false)\n end",
"def log_sql\n @opts[:log_sql]\n end",
"def log_off\n puts \"== Logging to log file.\"\n ActiveRecord::Base.logger.level = 1 # warn (?)\n nil\nend",
"def disable_logging\n @logger = nil\n end",
"def delete_logging_statement\n super\n end",
"def disable_sql_recording\n record_api_supportability_metric(:disable_sql_recording)\n\n return yield unless agent\n\n state = agent.set_record_sql(false)\n begin\n yield\n ensure\n agent.set_record_sql(state)\n end\n end",
"def reset_log_data\n self.class.without_logging { update_column(:log_data, nil) }\n end",
"def change_log_off\n self.change_log_active = false\n end",
"def hide_log\n change_log nil\n end",
"def logging_off\n @logger = NilLogger.new\n end",
"def disable_logging\n @communicator_logger = nil\n end",
"def disable_sql_caching!\n cache_set(:_no_cache_sql, true)\n end",
"def suppress_logger\n logging = params[:logging]\n logging = subsystems.blank? ? true?(logging) : !false?(logging)\n super(logging)\n end",
"def reset_sql_diff_text_log\n @sql_diff_text_log = ''\n end",
"def no_transaction\n migration.use_transactions = false\n end",
"def unset_log_source\n set_log_source(nil)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
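A minimal sketch of how disable_sql_logging might be used inside a data migration; the migration class, model, and column names are illustrative, and the method is assumed to be mixed into the migration as the document above implies:

class BackfillLegacyFlags < ActiveRecord::Migration[7.0]
  def up
    disable_sql_logging do
      # Row-by-row data munging whose SQL would otherwise flood the log
      # and differ between environments.
      LegacyRecord.find_each do |record|
        record.update_column(:flag, record.raw_flag.to_s.strip)
      end
    end
  end
end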
Enable SQL logging. You can call this method within a block where SQL logging was disabled to re-enable it. | def enable_sql_logging(&block)
sql_logging(enabled: true, &block)
end | [
"def log_sql\n @opts[:log_sql]\n end",
"def disable_sql_logging(&block)\n sql_logging(enabled: false, &block)\n end",
"def enable_activerecord_sql_logging\n ActiveRecord::Base.logger.level = 0\nend",
"def enable_logging\n initialize_logger\n end",
"def set_logging_statement(opts)\n opts = check_params(opts,[:statement])\n super(opts)\n end",
"def with_log_level(log_level, &_block)\n current_log_level = Mimi::DB.connection.sql_log_level\n Mimi::DB.connection.sql_log_level = log_level\n yield\n ensure\n Mimi::DB.connection.sql_log_level = current_log_level\n end",
"def disable_activerecord_sql_logging\n ActiveRecord::Base.logger.level = 1\nend",
"def enable_enhanced_logging=(enable)\n self.enable_debug_logging = enable\n self.aces_level = (enable ? 'full' : 'none')\n end",
"def disable_sql_recording\n record_api_supportability_metric(:disable_sql_recording)\n\n return yield unless agent\n\n state = agent.set_record_sql(false)\n begin\n yield\n ensure\n agent.set_record_sql(state)\n end\n end",
"def notice_sql sql\n return unless txn = NewRelic::Agent::Tracer.current_transaction\n\n current_segment = txn.current_segment\n return unless current_segment.is_a?(NewRelic::Agent::Transaction::DatastoreSegment)\n\n if current_segment.sql_statement\n current_segment.sql_statement.append_sql sql\n else\n current_segment._notice_sql sql, self.opts, explainer_for(sql)\n end\n end",
"def set_record_sql(should_record) # THREAD_LOCAL_ACCESS\n state = Tracer.state\n prev = state.record_sql\n state.record_sql = should_record\n prev.nil? || prev\n end",
"def enable\n {\n method: \"Log.enable\"\n }\n end",
"def notice_sql(sql)\n return unless txn = NewRelic::Agent::Tracer.current_transaction\n\n current_segment = txn.current_segment\n return unless current_segment.is_a?(NewRelic::Agent::Transaction::DatastoreSegment)\n\n if current_segment.sql_statement\n current_segment.sql_statement.append_sql(sql)\n else\n current_segment._notice_sql(sql, self.opts, explainer_for(sql))\n end\n end",
"def enable_logging(communicator_logger)\n @session.connection.enable_logging(communicator_logger)\n end",
"def activate_sql\n Cloud::Helpers::Sql.activate_sql(node)\n end",
"def log_sql\n if ActiveRecord::Base.logger = Logger.new(STDOUT)\n return 'Console will output SQL logs'\n else\n return 'Something went wrong!!!'\n end\n end",
"def enable_debug_mode!\n self.level = Enceladus::Logger::DEBUG\n end",
"def setup_connection(conn)\n mysql_connection_setting_sqls.each{|sql| statement(conn){|s| log_connection_yield(sql, conn){s.execute(sql)}}}\n super\n end",
"def turn_off_qa_db_logging()\r\n @@logToQADatabase =0\r\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Use a different database connection for the block. You can use this if your application has multiple databases to swap connections for the migration. You can pass in either a database connection or an ActiveRecord::Base class to use the connection used by that class. The label argument will be added to the logged SQL as a comment. | def using_connection(connection_or_class, label: nil, &block)
if connection_or_class.is_a?(Class) && connection_or_class < ActiveRecord::Base
label ||= connection_or_class.name
connection_or_class.connection_pool.with_connection do |connection|
switch_connection_in_block(connection, label: label, &block)
end
else
switch_connection_in_block(connection_or_class, label: label, &block)
end
end | [
"def repository_db &block\n Sequel.connect \"sqlite://#{ path }\", logger: LOGGER, &block\n end",
"def use(name)\n with(database: name)\n end",
"def database(dbname=nil, &block)\n dbname ||= database_name\n if dbname then\n repository dbname, &block\n else\n yield\n end\n end",
"def use_database database_alias \n database_alias = database_alias.to_s\n MongoMapper.db_config.should! :include, database_alias\n\n self.connection MongoMapper.connections[database_alias]\n set_database_name MongoMapper.db_config[database_alias]['name']\n end",
"def set_database_name(database = nil, &block)\n define_attr_method(:database_name, database, &block)\n end",
"def use(db_name)\n Sequel::Deprecation.deprecate(\"Database#use\", \"Create a new Sequel::Database instance instead of using Database#use\")\n disconnect\n @opts[:database] = db_name if self << \"USE #{db_name}\"\n @schemas = {}\n self\n end",
"def with_database_metric_name(model, method = nil, product = nil, &block) #THREAD_LOCAL_ACCESS\n if txn = TingYun::Agent::TransactionState.tl_get.current_transaction\n txn.with_database_metric_name(model, method, product, &block)\n else\n yield\n end\n end",
"def skip_database(&block)\n block.bind(self).call if SkipDatabase::Runner.skipping_database\n end",
"def database_name\n @opts[:database]\n end",
"def use(db_name)\n disconnect\n @opts[:database] = db_name if self << \"USE #{db_name}\"\n @schemas = nil\n self\n end",
"def use_old_database\n ActiveRecord::Base.connection.execute(\"use #{@oldDb}\")\n end",
"def database_name # :nodoc:\n return nil unless Module.constants.include? 'DataMapper' or Module.constants.include? :DataMapper\n raise \"No such database connection #{options[:database]}\" if options[:database] and DataMapper::Repository.adapters[options[:database]].nil?\n # Custom database connection specified\n return options[:database].to_sym if options[:database]\n # Leaf config name\n return leaf_name.to_sym if DataMapper::Repository.adapters[leaf_name.to_sym]\n # Leaf config name, underscored\n return leaf_name.methodize.to_sym if DataMapper::Repository.adapters[leaf_name.methodize.to_sym]\n # Leaf class name\n return self.class.to_s.to_sym if DataMapper::Repository.adapters[self.class.to_s.to_sym]\n # Leaf class name, underscored\n return self.class.to_s.methodize.to_sym if DataMapper::Repository.adapters[self.class.to_s.methodize.to_sym]\n # I give up\n return nil\n end",
"def set_database(name)\n @database = name.to_s\n end",
"def setup_connection(conn)\n conn = super(conn)\n statement(conn) do |stmt|\n connection_configuration_sqls.each{|sql| log_yield(sql){stmt.execute(sql)}}\n end\n conn\n end",
"def setup_connection(conn)\n mysql_connection_setting_sqls.each{|sql| statement(conn){|s| log_connection_yield(sql, conn){s.execute(sql)}}}\n super\n end",
"def connection_pool_name # :nodoc:\n replica = current_replica_name\n if replica\n \"#{name}_#{replica}\"\n elsif self == ActiveRecord::Base\n name\n else\n superclass.connection_pool_name\n end\n end",
"def set_current_database\n connect(request.host.split('.')[-3])\n end",
"def setup_connection(conn)\n conn = super(conn)\n statement(conn) do |stmt|\n connection_pragmas.each{|s| log_connection_yield(s, conn){stmt.execute(s)}}\n end\n conn\n end",
"def use_new_database\n ActiveRecord::Base.connection.execute(\"use #{@newDb}\")\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Extract the connection string for the rabbitmq service from the service information provided by Cloud Foundry in an environment variable. | def amqp_url
if not ENV['VCAP_SERVICES']
return {
:host => "172.16.32.11",
:port => 5672,
:username => "guest",
:password => "guest",
:vhost => "/",
}
end
services = JSON.parse(ENV['VCAP_SERVICES'], :symbolize_names => true)
url = services.values.map do |srvs|
srvs.map do |srv|
if srv[:label] =~ /^rabbitmq-/
srv[:credentials]
else
[]
end
end
end.flatten!.first
end | [
"def amqp_url\n if not ENV['VCAP_SERVICES']\n return {\n :host => \"localhost\",\n :port => 5672,\n :username => \"guest\",\n :password => \"guest\",\n :vhost => \"/\",\n }\n end\n\n services = JSON.parse(ENV['VCAP_SERVICES'], :symbolize_names => true)\n url = services.values.map do |srvs|\n srvs.map do |srv|\n if srv[:label] =~ /^rabbitmq-/\n srv[:credentials]\n else\n []\n end\n end\n end.flatten!.first\nend",
"def amqp_url\n services = JSON.parse(ENV['VCAP_SERVICES'], :symbolize_names => true)\n url = services.values.map do |srvs|\n srvs.map do |srv|\n if srv[:label] =~ /^rabbitmq-/\n srv[:credentials][:url]\n else\n []\n end\n end\n end.flatten!.first\nend",
"def amqp_url\n if not ENV['VCAP_SERVICES']\n return {\n :host => \"localhost\",\n :port => 5672,\n :username => \"guest\",\n :password => \"guest\",\n :vhost => \"/\",\n }\n end\n\n services = JSON.parse(ENV['VCAP_SERVICES'], :symbolize_names => true)\n url = services.values.map do |srvs|\n srvs.map do |srv|\n if srv[:label] =~ /^rabbitmq-/\n srv[:credentials]\n srv[:credentials][:heartbeat] = 20\n return srv[:credentials]\n else\n []\n end\n end\n end.flatten!.first\nend",
"def myservices_environment_details_host\n ENV['ENV_DETAILS'].nil? ? 'esu2v871:9080' : ENV['ENV_DETAILS']\n end",
"def amqp_url\n @amqp_url ||= (ENV[\"AMQP_URL\"] || \"amqp://localhost/\")\n end",
"def connection_string\n \"#{configuration.host}:#{configuration.port}\"\n end",
"def url\n @@url ||= (ENV[\"AMQP_URL\"] || \"amqp://guest:guest@localhost/\")\n end",
"def connection_url\n url = \"amqp://\"\n\n if username\n url += username\n url += ':' + password if password\n url += '@'\n end\n\n url += [ client_id, virtual_host ].join( '/' )\n\n url += '?'\n\n url += \"brokerlist='#{ broker_list }'\"\n\n url\n end",
"def connection_config_for(database_url)\n puts ENV[database_url]\n db = URI.parse(ENV[database_url])\n {\n adapter: db.scheme == 'postgres' ? 'postgresql' : db.scheme,\n host: db.host,\n username: db.user,\n password: db.password,\n database: db.path[1..-1],\n encoding: 'utf8'\n }\nend",
"def activemq_host\n 'activemq'\nend",
"def _host_from_env\n ENV['ALLQ_CLIENT_URL'].respond_to?(:length) && ENV['ALLQ_CLIENT_URL'].length > 0 && ENV['ALLQ_CLIENT_URL'].strip\n end",
"def connection_string\n return @connection_params.values.compact.join(';')\n end",
"def mqtt_app_name\n JSON.parse(ENV['VCAP_APPLICATION'.freeze])['application_name'.freeze]\nend",
"def read_env_address_and_port\n @sensu_address = ENV['SENSU_SOCKET_ADDRESS'] \\\n if ENV.key?('SENSU_SOCKET_ADDRESS')\n @sensu_port = ENV['SENSU_SOCKET_PORT'].to_i \\\n if ENV.key?('SENSU_SOCKET_PORT')\n end",
"def rabbitmqctl\n\t\t@rabbitmqctl ||= ( ENV['RABBITMQCTL'] || which('rabbitmqctl') ) or\n\t\t\traise \"Can't find rabbitmqctl in your PATH. Try running with \" +\n\t\t\t \"RABBITMQCTL=/path/to/rabbitmqctl\"\n\tend",
"def port_from_env\n return unless (port = ENV[\"AMQP_PORT\"])\n\n port.to_i\n end",
"def env_str\n @env_str ||= begin\n env = Rails.env\n env.include?('production') ? '' : env\n end\n end",
"def beaker_config_connection_address\n if @nexus_host[:ip]\n @nexus_host[:ip]\n elsif @nexus_host[:vmhostname]\n @nexus_host[:vmhostname]\n elsif @nexus_host[:hostname]\n @nexus_host[:hostname]\n else\n logger.error(\"stdout:\\n--\\nip, vmhostname or hostname not found, check beaker hosts configuration\\n--\")\n nil\n end\n end",
"def get_connection_string(url)\n conn = Utils.parse_connection_url(url)\n str = \"host=#{conn[:host]} dbname=#{conn[:db]}\"\n str << \" port=#{conn[:port]}\" if conn[:port].present?\n str << \" user=#{conn[:user]}\" if conn[:user].present?\n str << \" password=#{conn[:pass]}\" if conn[:pass].present?\n str\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Register a card that has been duplicated so it can be remapped | def register_duplicated_card(original_card_id:, to_card_id:)
Cache.hash_set("#{@batch_id}_duplicated_cards", original_card_id, to_card_id)
remapper = CardDuplicatorMapper::RemapLinkedCards.new(
batch_id: @batch_id,
)
# remap the card that was just duplicated
remapper.remap_cards(original_card_id, to_card_id)
# if all have completed, then we run through them all, basically a last pass
# to ensure we match any links that were created later than the duplicates, etc.
# TODO: How to ensure all cards were mapped or if duplication failed?
return unless all_cards_mapped?
remapper.call
end | [
"def addCard(givenCard)\n \tcards_users.create(card_id: givenCard.id, is_shared: false)\n end",
"def add_card(card)\n add_card_to_hand(card)\n end",
"def checkDuplicateCard source, customer, user\n \t#Retrieve the card fingerprint using the stripe_card_token \n newcard = Stripe::Token.retrieve(source)\n card_fingerprint = newcard.try(:card).try(:fingerprint) \n card_exp_month = newcard.try(:card).try(:exp_month) \n card_exp_year = newcard.try(:card).try(:exp_year) \n card_stripe_id = newcard.try(:card).try(:id)\n card_last4 = newcard.try(:card).try(:last4)\n card_brand = newcard.try(:card).try(:brand)\n card_funding = newcard.try(:card).try(:funding)\n # check whether a card with that fingerprint already exists\n mainCard = customer.sources.all(:object => \"card\").data.select{|card| ((card.fingerprint==card_fingerprint)and(card.exp_month==card_exp_month)and(card.exp_year==card_exp_year))}.last \n if !mainCard\n # Card is new, now going to add card\n mainCard = customer.sources.create(source: source)\n else\n # Card is already in the customer list\n end\n card = Card.save_card(user,mainCard,false)\n make_card_as_default(user,customer,card)\n\n #set the default card of the customer to be this card, as this is the last card provided by User and probably he want this card to be used for further transactions\n customer.default_card = mainCard.id \n # saving the customer \n customer.save \n \t# stripe card added to customer and now saving in our db.\n \tuser.update(stripe_customer_id: customer.id)\n \treturn card\n end",
"def setCard(c)\n @cards.push(c)\n removeCard(c)\n end",
"def add_card(card)\n @cards.append(card)\n end",
"def add_card_to_hand(card)\n @hand.add_card(card)\n end",
"def give_card(card)\n @hand.push(card)\n end",
"def add_card card_uri\n self.card_uri = card_uri\n save\n end",
"def add_card(card)\n card.save if card.kind_of?(Balanced::Card) && card.hash.nil?\n self.card_uri = Balanced::Utils.extract_uri_from_object(card)\n save\n end",
"def setCard(newCard)\n raise \"setCard was not implemented in #{self}\"\n end",
"def push_a_card(one_card)\n @deck << one_card\n end",
"def setCard(newCard)\n\t\t#add the card to this players hand\n\t\t@hand[newCard.value] = newCard\n\n\t\t#delete the card from the not seen list\n\t\tif newCard.type == :person\n\t\t\t@suspectsNotSeen.delete(newCard)\n\t\telsif newCard.type == :place\n\t\t\t@locationsNotSeen.delete(newCard)\n\t\telse \n\t\t\t@weaponsNotSeen.delete(newCard)\n\t\tend\n\tend",
"def setCard (card)\n # player has been dealt a particular card\n @cards << card\n\n end",
"def setCard(newCard)\n\t\t#place the card in the players hand\n\t\t@hand[newCard.value] = newCard\n\t\tputs \"You recieved the card #{newCard.value}!\"\n\tend",
"def add_card(card)\n @deck.add(card, :back)\n end",
"def addCard(card)\n \tif @can_get_cards\n \t @cards.push(card)\n else\n raise ArgumentError, \"Cards cannot be added to this hand anymore.\"\n \tend\n end",
"def <<(card)\n @cards << card\n end",
"def receive(card)\n @hand.add_to_hand(card)\n end",
"def register_seen(species, form = nil)\n @pokedex.register_seen(species, form)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |